Browse code

* s3cmd.1: Document all the new options and commands. * s3cmd, S3/Config.py: Updated some help texts. Removed option --debug-syncmatch along the way (because --dry-run with --debug is good enough). * TODO: Updated.

git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@376 830e0280-6d2a-0410-9c65-932aecc39d9d

Michal Ludvig authored on 2009/02/16 21:54:49
Showing 5 changed files
... ...
@@ -1,3 +1,11 @@
1
+2009-02-17  Michal Ludvig  <michal@logix.cz>
2
+
3
+	* s3cmd.1: Document all the new options and commands.
4
+	* s3cmd, S3/Config.py: Updated some help texts. Removed
5
+	  option --debug-syncmatch along the way (because --dry-run
6
+	  with --debug is good enough).
7
+	* TODO: Updated.
8
+
1 9
 2009-02-16  Michal Ludvig  <michal@logix.cz>
2 10
 
3 11
 	* s3cmd: Check Python version >= 2.4 as soon as possible.
... ...
@@ -59,7 +59,6 @@ class Config(object):
59 59
 	bucket_location = "US"
60 60
 	default_mime_type = "binary/octet-stream"
61 61
 	guess_mime_type = True
62
-	debug_syncmatch = False
63 62
 	# List of checks to be performed for 'sync'
64 63
 	sync_checks = ['size', 'md5']	# 'weak-timestamp'
65 64
 	# List of compiled REGEXPs
... ...
@@ -6,12 +6,13 @@ TODO list for s3cmd project
6 6
     (at the moment it'll always download).
7 7
   - Enable --exclude for [del], [setacl], [ls].
8 8
   - Enable --dry-run for [del], [setacl], reject for all others.
9
-  - Add testsuite for new put, get and sync semantic.
10 9
   - Recursive cp/mv on remote "folders".
11
-  - Document --recursive and --force for buckets, CloudFront,
12
-    new options --dry-run, --include, etc.
13 10
   - Allow change /tmp to somewhere else
14 11
   - With --guess-mime use 'magic' module if available.
12
+  - Support --preserve for [put] and [get]. Update manpage.
13
+  - Don't let --continue fail if the file is already fully downloaded.
14
+  - Option --mime-type should set mime type with 'cp' and 'mv'. 
15
+    If possible --guess-mime-type should do as well.
15 16
 
16 17
 - For 1.0.0
17 18
   - Add 'geturl' command, both Unicode and urlencoded output.
... ...
@@ -692,12 +692,9 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
692 692
 	info(u"Verifying attributes...")
693 693
 	cfg = Config()
694 694
 	exists_list = SortedDict()
695
-	if cfg.debug_syncmatch:
696
-		logging.root.setLevel(logging.DEBUG)
697 695
 
698 696
 	for file in src_list.keys():
699
-		if not cfg.debug_syncmatch:
700
-			debug(u"CHECK: %s" % file)
697
+		debug(u"CHECK: %s" % file)
701 698
 		if dst_list.has_key(file):
702 699
 			## Was --skip-existing requested?
703 700
 			if cfg.skip_existing:
... ...
@@ -736,10 +733,6 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
736 736
 			## Remove from destination-list, all that is left there will be deleted
737 737
 			del(dst_list[file])
738 738
 
739
-	if cfg.debug_syncmatch:
740
-		warning(u"Exiting because of --debug-syncmatch")
741
-		sys.exit(1)
742
-
743 739
 	return src_list, dst_list, exists_list
744 740
 
745 741
 def cmd_sync_remote2local(args):
... ...
@@ -1350,7 +1343,7 @@ def main():
1350 1350
 	optparser.add_option("-c", "--config", dest="config", metavar="FILE", help="Config file name. Defaults to %default")
1351 1351
 	optparser.add_option(      "--dump-config", dest="dump_config", action="store_true", help="Dump current configuration after parsing config files and command line options and exit.")
1352 1352
 
1353
-	optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though (only for [sync] command)")
1353
+	optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though (only for file transfer commands)")
1354 1354
 
1355 1355
 	optparser.add_option("-e", "--encrypt", dest="encrypt", action="store_true", help="Encrypt files before uploading to S3.")
1356 1356
 	optparser.add_option(      "--no-encrypt", dest="encrypt", action="store_false", help="Don't encrypt files.")
... ...
@@ -1372,19 +1365,18 @@ def main():
1372 1372
 	optparser.add_option(      "--include-from", dest="include_from", action="append", metavar="FILE", help="Read --include GLOBs from FILE")
1373 1373
 	optparser.add_option(      "--rinclude", dest="rinclude", action="append", metavar="REGEXP", help="Same as --include but uses REGEXP (regular expression) instead of GLOB")
1374 1374
 	optparser.add_option(      "--rinclude-from", dest="rinclude_from", action="append", metavar="FILE", help="Read --rinclude REGEXPs from FILE")
1375
-	optparser.add_option(      "--debug-syncmatch", "--debug-exclude", dest="debug_syncmatch", action="store_true", help="Output detailed information about remote vs. local filelist matching and --exclude processing and then exit")
1376 1375
 
1377 1376
 	optparser.add_option(      "--bucket-location", dest="bucket_location", help="Datacentre to create bucket in. Either EU or US (default)")
1378 1377
 
1379 1378
 	optparser.add_option("-m", "--mime-type", dest="default_mime_type", type="mimetype", metavar="MIME/TYPE", help="Default MIME-type to be set for objects stored.")
1380 1379
 	optparser.add_option("-M", "--guess-mime-type", dest="guess_mime_type", action="store_true", help="Guess MIME-type of files by their extension. Falls back to default MIME-Type as specified by --mime-type option")
1381 1380
 
1382
-	optparser.add_option(      "--add-header", dest="add_header", action="append", metavar="NAME:VALUE", help="Add a given HTTP header to the upload request. Can be used multiple times. (only for [put] and [sync] commands).")
1381
+	optparser.add_option(      "--add-header", dest="add_header", action="append", metavar="NAME:VALUE", help="Add a given HTTP header to the upload request. Can be used multiple times. For instance set 'Expires' or 'Cache-Control' headers (or both) using this options if you like.")
1383 1382
 
1384 1383
 	optparser.add_option(      "--encoding", dest="encoding", metavar="ENCODING", help="Override autodetected terminal and filesystem encoding (character set). Autodetected: %s" % preferred_encoding)
1385 1384
 
1386 1385
 	optparser.add_option(      "--list-md5", dest="list_md5", action="store_true", help="Include MD5 sums in bucket listings (only for 'ls' command).")
1387
-	optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form.")
1386
+	optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form (eg 1kB instead of 1234).")
1388 1387
 
1389 1388
 	optparser.add_option(      "--progress", dest="progress_meter", action="store_true", help="Display progress meter (default on TTY).")
1390 1389
 	optparser.add_option(      "--no-progress", dest="progress_meter", action="store_false", help="Don't display progress meter (default on non-TTY).")
... ...
@@ -1,6 +1,6 @@
1 1
 .TH s3cmd 1
2 2
 .SH NAME
3
-s3cmd \- tool for managing Amazon S3 storage space
3
+s3cmd \- tool for managing Amazon S3 storage space and Amazon CloudFront content delivery network
4 4
 .SH SYNOPSIS
5 5
 .B s3cmd
6 6
 [\fIOPTIONS\fR] \fICOMMAND\fR [\fIPARAMETERS\fR]
... ...
@@ -42,13 +42,16 @@ Backup a directory tree to S3
42 42
 \fBsync\fR \fIs3://BUCKET[/PREFIX] LOCAL_DIR\fR
43 43
 Restore a tree from S3 to local directory
44 44
 .TP
45
-\fBcp\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR 
46
-\fBmv\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR 
45
+\fBcp\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR, \fBmv\fR \fIs3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]\fR 
47 46
 Make a copy of a file (\fIcp\fR) or move a file (\fImv\fR). 
48 47
 Destination can be in the same bucket with a different name
49 48
 or in another bucket with the same or different name.
50 49
 Adding \fI\-\-acl\-public\fR will make the destination object 
51 50
 publicly accessible (see below).
51
+.TP
52
+\fBsetacl\fR \fIs3://BUCKET[/OBJECT]\fR
53
+Modify \fIAccess control list\fR for Bucket or Files. Use with 
54
+\fI\-\-acl\-public\fR or \fI\-\-acl\-private\fR
52 55
 .TP 
53 56
 \fBinfo\fR \fIs3://BUCKET[/OBJECT]\fR
54 57
 Get various information about a Bucket or Object
... ...
@@ -56,6 +59,24 @@ Get various information about a Bucket or Object
56 56
 \fBdu\fR \fI[s3://BUCKET[/PREFIX]]\fR
57 57
 Disk usage \- amount of data stored in S3
58 58
 
59
+.PP
60
+Commands for CloudFront management
61
+.TP
62
+\fBcflist\fR
63
+List CloudFront distribution points
64
+.TP
65
+\fBcfinfo\fR [\fIcf://DIST_ID\fR]
66
+Display CloudFront distribution point parameters
67
+.TP
68
+\fBcfcreate\fR \fIs3://BUCKET\fR
69
+Create CloudFront distribution point
70
+.TP
71
+\fBcfdelete\fR \fIcf://DIST_ID\fR
72
+Delete CloudFront distribution point
73
+.TP
74
+\fBcfmodify\fR \fIcf://DIST_ID\fR
75
+Change CloudFront distribution point parameters
76
+
59 77
 .SH OPTIONS
60 78
 .PP
61 79
 Some of the below specified options can have their default 
... ...
@@ -63,9 +84,9 @@ values set in
63 63
 .B s3cmd
64 64
 config file (by default $HOME/.s3cmd). As it's a simple text file 
65 65
 feel free to open it with your favorite text editor and do any
66
-changes you like.
66
+changes you like. 
67 67
 .PP
68
-Config file related options.
68
+\fIConfig file related options.\fR
69 69
 .TP
70 70
 \fB\-\-configure\fR
71 71
 Invoke interactive (re)configuration tool. Don't worry, you won't 
... ...
@@ -78,24 +99,26 @@ Config file name. Defaults to $HOME/.s3cfg
78 78
 Dump current configuration after parsing config files
79 79
 and command line options and exit.
80 80
 .PP
81
-Most options can have a default value set in the above specified config file.
82
-.PP
83
-Options specific to \fBsync\fR command:
81
+\fIOptions specific for file transfer commands\fR (\fBsync\fR, \fBput\fR and \fBget\fR):
82
+.TP
83
+\fB\-n\fR, \fB\-\-dry\-run\fR
84
+Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though.
85
84 86
 .TP
85 87
 \fB\-\-delete\-removed\fR
86 88
 Delete remote objects with no corresponding local file when \fIsync\fRing \fBto\fR S3 or delete local files with no corresponding object in S3 when \fIsync\fRing \fBfrom\fR S3.
87 89
 .TP
88 90
 \fB\-\-no\-delete\-removed\fR
89
-Don't delete remote objects. Default for 'sync' command.
91
+Don't delete remote objects. Default for \fIsync\fR command.
90 92
 .TP
91 93
 \fB\-p\fR, \fB\-\-preserve\fR
92
-Preserve filesystem attributes (mode, ownership, timestamps). Default for 'sync' command.
94
+Preserve filesystem attributes (mode, ownership, timestamps). Default for \fIsync\fR command.
93 95
 .TP
94 96
 \fB\-\-no\-preserve\fR
95 97
 Don't store filesystem attributes with uploaded files.
96 98
 .TP
97 99
 \fB\-\-exclude GLOB\fR
98
-Exclude files matching GLOB (a.k.a. shell-style wildcard) from \fIsync\fI. See SYNC COMMAND section for more information.
100
+Exclude files matching GLOB (a.k.a. shell-style wildcard) from \fIsync\fR. See FILE TRANSFERS section and \fIhttp://s3tools.org/s3cmd-sync\fR for more information.
99 101
 .TP
100 102
 \fB\-\-exclude\-from FILE\fR
101 103
 Same as \-\-exclude but reads GLOBs from the given FILE instead of expecting them on the command line.
... ...
@@ -106,31 +129,14 @@ Same as \-\-exclude but works with REGEXPs (Regular expressions).
106 106
 \fB\-\-rexclude\-from FILE\fR
107 107
 Same as \-\-exclude\-from but works with REGEXPs.
108 108
 .TP
109
-\fB\-\-debug\-syncmatch\fR or \fB\-\-debug\-exclude\fR (alias)
110
-Display detailed information about matching file names against exclude\-rules as well as information about remote vs local filelists matching. S3cmd exits after performing the match and no actual transfer takes place.
111
-.\".TP
112
-.\"\fB\-n\fR, \fB\-\-dry\-run\fR
113
-.\"Only show what would be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though.
114
-.PP
115
-Options common for all commands (where it makes sense indeed):
116
-.TP
117
-\fB\-f\fR, \fB\-\-force\fR
118
-Force overwrite and other dangerous operations.
109
+\fB\-\-include=GLOB\fR, \fB\-\-include\-from=FILE\fR, \fB\-\-rinclude=REGEXP\fR, \fB\-\-rinclude\-from=FILE\fR
110
+Filenames and paths matching GLOB or REGEXP will be included even if previously excluded by one of \-\-(r)exclude(\-from) patterns
119 111
 .TP
120 112
 \fB\-\-continue\fR
121
-Continue getting a partially downloaded file (only for \fIget\fR command). This comes handy once download of a large file, say an ISO image, from a S3 bucket fails and a partially downloaded file is left on the disk. Unfortunately \fIput\fR command doesn't support restarting of failed upload due to Amazon S3 limitation.
122
-.TP
123
-\fB\-P\fR, \fB\-\-acl\-public\fR
124
-Store objects with permissions allowing read for anyone.
125
-.TP
126
-\fB\-\-acl\-private\fR
127
-Store objects with default ACL allowing access for you only.
128
-.TP
129
-\fB\-\-bucket\-location\fR=BUCKET_LOCATION
130
-Specify datacentre where to create the bucket. Possible values are \fIUS\fR (default) or \fIEU\fR.
113
+Continue getting a partially downloaded file (only for \fIget\fR command). This comes handy once download of a large file, say an ISO image, from a S3 bucket fails and a partially downloaded file is left on the disk. Unfortunately \fIput\fR command doesn't support restarting of failed upload due to Amazon S3 limitations.
131 114
 .TP
132
-\fB\-e\fR, \fB\-\-encrypt\fR
133
-Use GPG encryption to protect stored objects from unauthorized access.
115
+\fB\-\-skip\-existing\fR
116
+Skip over files that exist at the destination (only for \fIget\fR and \fIsync\fR commands).
134 117
 .TP
135 118
 \fB\-m\fR MIME/TYPE, \fB\-\-mime\-type\fR=MIME/TYPE
136 119
 Default MIME\-type to be set for objects stored.
... ...
@@ -140,15 +146,65 @@ Guess MIME\(hytype of files by their extension. Falls
140 140
 back to default MIME\(hyType as specified by \fB\-\-mime\-type\fR
141 141
 option
142 142
 .TP
143
+\fB\-\-add\-header=NAME:VALUE\fR
144
+Add a given HTTP header to the upload request. Can be used multiple times with different header names. For instance set 'Expires' or 'Cache-Control' headers (or both) using this option if you like.
145
+.TP
146
+\fB\-P\fR, \fB\-\-acl\-public\fR
147
+Store objects with permissions allowing read for anyone. See \fIhttp://s3tools.org/s3cmd-public\fR for details and hints for storing publicly accessible files.
148
+.TP
149
+\fB\-\-acl\-private\fR
150
+Store objects with default ACL allowing access for you only.
151
+.TP
152
+\fB\-e\fR, \fB\-\-encrypt\fR
153
+Use GPG encryption to protect stored objects from unauthorized access. See \fIhttp://s3tools.org/s3cmd-public\fR for details about encryption.
154
+.TP
155
+\fB\-\-no\-encrypt\fR
156
+Don't encrypt files.
157
+.PP
158
+\fIOptions for CloudFront commands\fR:
159
+.PP
160
+See \fIhttp://s3tools.org/s3cmd-cloudfront\fR for more details.
161
+.TP
162
+\fB\-\-enable\fR
163
+Enable given CloudFront distribution (only for \fIcfmodify\fR command)
164
+.TP
165
+\fB\-\-disable\fR
166
+Disable given CloudFront distribution (only for \fIcfmodify\fR command)
167
+.TP
168
+\fB\-\-cf\-add\-cname=CNAME\fR
169
+Add given CNAME to a CloudFront distribution (only for \fIcfcreate\fR and \fIcfmodify\fR commands)
170
+.TP
171
+\fB\-\-cf\-remove\-cname=CNAME\fR
172
+Remove given CNAME from a CloudFront distribution (only for \fIcfmodify\fR command)
173
+.TP
174
+\fB\-\-cf\-comment=COMMENT\fR
175
+Set COMMENT for a given CloudFront distribution (only for \fIcfcreate\fR and \fIcfmodify\fR commands)
176
+.PP
177
+\fIOptions common for all commands\fR (where it makes sense indeed):
178
+.TP
179
+\fB\-r\fR, \fB\-\-recursive\fR
180
+Recursive upload, download or removal. When used with \fIdel\fR it can
181
+remove all the files in a bucket.
182
+.TP
183
+\fB\-f\fR, \fB\-\-force\fR
184
+Force overwrite and other dangerous operations. Can be used to remove 
185
+a non\-empty bucket with \fIs3cmd rb \-\-force s3://bkt\fR
186
+.TP
187
+\fB\-\-bucket\-location\fR=BUCKET_LOCATION
188
+Specify datacentre where to create the bucket. Possible values are \fIUS\fR (default) or \fIEU\fR.
189
+.TP
143 190
 \fB\-H\fR, \fB\-\-human\-readable\-sizes\fR
144 191
 Print sizes in human readable form.
145
-.\".TP
146
-.\"\fB\-u\fR, \fB\-\-show\-uri\fR
147
-.\"Show complete S3 URI in listings.
192
+.TP
193
+\fB\-\-list\-md5\fR
194
+Include MD5 sums in bucket listings (only for \fIls\fR command).
148 195
 .TP
149 196
 \fB\-\-progress\fR, \fB\-\-no\-progress\fR
150 197
 Display or don't display progress meter. When running on TTY (e.g. console or xterm) the default is to display progress meter. If not on TTY (e.g. output is redirected somewhere or running from cron) the default is to not display progress meter.
151 198
 .TP
199
+\fB\-\-encoding=ENCODING\fR
200
+Override autodetected terminal and filesystem encoding (character set).
201
+.TP
152 202
 \fB\-v\fR, \fB\-\-verbose\fR
153 203
 Enable verbose output.
154 204
 .TP
... ...
@@ -163,77 +219,101 @@ Show
163 163
 .B s3cmd
164 164
 version and exit.
165 165
 
166
-.SH SYNC COMMAND
166
+.SH FILE TRANSFERS
167 167
 One of the most powerful commands of \fIs3cmd\fR is \fBs3cmd sync\fR used for 
168
-synchronising complete directory trees to or from remote S3 storage. 
168
+synchronising complete directory trees to or from remote S3 storage. To some extent 
169
+\fBs3cmd put\fR and \fBs3cmd get\fR share a similar behaviour with \fBsync\fR.
169 170
 .PP
170 171
 Basic usage common in backup scenarios is as simple as:
171 172
 .nf
172
-	s3cmd sync /local/path s3://test-bucket/backup
173
+	s3cmd sync /local/path/ s3://test-bucket/backup/
173 174
 .fi
174 175
 .PP
175 176
 This command will find all files under /local/path directory and copy them 
176 177
 to corresponding paths under s3://test-bucket/backup on the remote side.
177 178
 For example:
178 179
 .nf
179
-/local/path\fB/file1.ext\fR         \->  s3://test-bucket/backup\fB/file1.ext\fR
180
-/local/path\fB/dir123/file2.bin\fR  \->  s3://test-bucket/backup\fB/dir123/file2.bin\fR
180
+	/local/path/\fBfile1.ext\fR         \->  s3://bucket/backup/\fBfile1.ext\fR
181
+	/local/path/\fBdir123/file2.bin\fR  \->  s3://bucket/backup/\fBdir123/file2.bin\fR
181 182
 .fi
182
-
183
+.PP
184
+However if the local path doesn't end with a slash the last directory's name
185
+is used on the remote side as well. Compare these with the previous example:
186
+.nf
187
+	s3cmd sync /local/path s3://test-bucket/backup/
188
+.fi
189
+will sync:
190
+.nf
191
+	/local/\fBpath/file1.ext\fR         \->  s3://bucket/backup/\fBpath/file1.ext\fR
192
+	/local/\fBpath/dir123/file2.bin\fR  \->  s3://bucket/backup/\fBpath/dir123/file2.bin\fR
193
+.fi
194
+.PP
183 195
 To retrieve the files back from S3 use inverted syntax:
184 196
 .nf
185
-	s3cmd sync s3://test-bucket/backup/ /tmp/restore
197
+	s3cmd sync s3://test-bucket/backup/ /tmp/restore/
186 198
 .fi
187 199
 that will download files:
188 200
 .nf
189
-s3://test-bucket/backup\fB/file1.ext\fR         \->  /tmp/restore\fB/file1.ext\fR       
190
-s3://test-bucket/backup\fB/dir123/file2.bin\fR  \->  /tmp/restore\fB/dir123/file2.bin\fR
201
+	s3://bucket/backup/\fBfile1.ext\fR         \->  /tmp/restore/\fBfile1.ext\fR       
202
+	s3://bucket/backup/\fBdir123/file2.bin\fR  \->  /tmp/restore/\fBdir123/file2.bin\fR
191 203
 .fi
192
-
193
-For the purpose of \fB\-\-exclude\fR and \fB\-\-exclude\-from\fR matching the file name 
194
-\fIalways\fR begins with \fB/\fR (slash) and has the local or remote common part removed.
195
-For instance in the previous example the file names tested against \-\-exclude list
196
-will be \fB/\fRfile1.ext and \fB/\fRdir123/file2.bin, that is both with the leading 
197
-slash regardless whether you specified s3://test-bucket/backup or 
198
-s3://test-bucket/backup/ (note the trailing slash) on the command line.
199
-
200
-Both \fB\-\-exclude\fR and \fB\-\-exclude\-from\fR work with shell-style wildcards (a.k.a. GLOB).
201
-For a greater flexibility s3cmd provides Regular-expression versions of the two exclude options 
202
-named \fB\-\-rexclude\fR and \fB\-\-rexclude\-from\fR. 
203
-
204
-Run s3cmd with \fB\-\-debug\-syncmatch\fR to get detailed information
205
-about matching file names against exclude rules.
206
-
207
-For example to exclude all files with ".bin" extension with a REGEXP use:
208 204
 .PP
209
-	\-\-rexclude '\.bin$'
205
+Without the trailing slash on source the behaviour is similar to 
206
+what has been demonstrated with upload:
207
+.nf
208
+	s3cmd sync s3://test-bucket/backup /tmp/restore/
209
+.fi
210
+will download the files as:
211
+.nf
212
+	s3://bucket/\fBbackup/file1.ext\fR         \->  /tmp/restore/\fBbackup/file1.ext\fR       
213
+	s3://bucket/\fBbackup/dir123/file2.bin\fR  \->  /tmp/restore/\fBbackup/dir123/file2.bin\fR
214
+.fi
215
+.PP
216
+All source file names, the bold ones above, are matched against \fBexclude\fR 
217
+rules and those that match are then re\-checked against \fBinclude\fR rules to see
218
+whether they should be excluded or kept in the source list.
219
+.PP
220
+For the purpose of \fB\-\-exclude\fR and \fB\-\-include\fR matching only the 
221
+bold file names above are used. For instance only \fBpath/file1.ext\fR is tested
222
+against the patterns, not \fI/local/\fBpath/file1.ext\fR
210 223
 .PP
211
-to exclude all hidden files and subdirectories (i.e. those whose name begins with dot ".") use GLOB:
224
+Both \fB\-\-exclude\fR and \fB\-\-include\fR work with shell-style wildcards (a.k.a. GLOB).
225
+For a greater flexibility s3cmd provides Regular-expression versions of the two exclude options 
226
+named \fB\-\-rexclude\fR and \fB\-\-rinclude\fR. 
227
+The options with ...\fB\-from\fR suffix (eg \-\-rinclude\-from) expect a filename as
228
+an argument. Each line of such a file is treated as one pattern.
212 229
 .PP
213
-	\-\-exclude '/.*'
230
+There is only one set of patterns built from all \fB\-\-(r)exclude(\-from)\fR options
231
+and similarly for include variant. Any file excluded with eg \-\-exclude can 
232
+be put back with a pattern found in \-\-rinclude\-from list.
214 233
 .PP
215
-on the other hand to exclude only hidden files but not hidden subdirectories use REGEXP:
234
+Run s3cmd with \fB\-\-dry\-run\fR to verify that your rules work as expected. 
235
+Use together with \fB\-\-debug\fR to get detailed information
236
+about matching file names against exclude and include rules.
216 237
 .PP
217
-	\-\-rexclude '/\.[^/]*$'
238
+For example to exclude all files with ".jpg" extension except those beginning with a number use:
218 239
 .PP
219
-etc...
240
+	\-\-exclude '*.jpg' \-\-rinclude '[0-9].*\.jpg'
241
+
242
+.SH SEE ALSO
243
+For the most up to date list of options run 
244
+.B s3cmd \-\-help
245
+.br
246
+For more info about usage, examples and other related info visit project homepage at
247
+.br
248
+.B http://s3tools.org
220 249
 
221 250
 .SH AUTHOR
222 251
 Written by Michal Ludvig <michal@logix.cz>
252
+.SH CONTACT, SUPPORT
253
+Preferred way to get support is our mailing list:
254
+.I s3tools\-general@lists.sourceforge.net
223 255
 .SH REPORTING BUGS
224 256
 Report bugs to 
225
-.I s3tools\-general@lists.sourceforge.net
257
+.I s3tools\-bugs@lists.sourceforge.net
226 258
 .SH COPYRIGHT
227
-Copyright \(co 2007,2008 Michal Ludvig <http://www.logix.cz/michal>
259
+Copyright \(co 2007,2008,2009 Michal Ludvig <http://www.logix.cz/michal>
228 260
 .br
229 261
 This is free software.  You may redistribute copies of it under the terms of
230 262
 the GNU General Public License version 2 <http://www.gnu.org/licenses/gpl.html>.
231 263
 There is NO WARRANTY, to the extent permitted by law.
232
-.SH SEE ALSO
233
-For the most up to date list of options run 
234
-.B s3cmd \-\-help
235
-.br
236
-For more info about usage, examples and other related info visit project homepage at
237
-.br
238
-.B http://s3tools.org
239
-