git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@362 830e0280-6d2a-0410-9c65-932aecc39d9d
Michal Ludvig authored on 2009/01/29 20:04:55
@@ -132,7 +132,7 @@ class ConfigParser(object): |
132 | 132 |
self.parse_file(file, sections) |
133 | 133 |
|
134 | 134 |
def parse_file(self, file, sections = []): |
135 |
- info("ConfigParser: Reading file '%s'" % file) |
|
135 |
+ debug("ConfigParser: Reading file '%s'" % file) |
|
136 | 136 |
if type(sections) != type([]): |
137 | 137 |
sections = [sections] |
138 | 138 |
in_our_section = True |
... | ... |
@@ -1,7 +1,9 @@ |
1 | 1 |
TODO list for s3cmd project |
2 | 2 |
=========================== |
3 | 3 |
|
4 |
-- For 0.9.9 |
|
4 |
+- For 0.9.9.x |
|
5 |
+ - Make 'sync s3://bkt/some-filename local/other-filename' work |
|
6 |
+ (at the moment it'll always download). |
|
5 | 7 |
- Enable --exclude for [del] |
6 | 8 |
- Enable --dry-run for [del], reject for all others. |
7 | 9 |
- Sync should work for one file, for example |
... | ... |
@@ -173,6 +173,7 @@ def cmd_bucket_delete(args): |
173 | 173 |
def fetch_local_list(args, recursive = None): |
174 | 174 |
local_uris = [] |
175 | 175 |
local_list = SortedDict() |
176 |
+ single_file = False |
|
176 | 177 |
|
177 | 178 |
if type(args) not in (list, tuple): |
178 | 179 |
args = [args] |
... | ... |
@@ -189,9 +190,18 @@ def fetch_local_list(args, recursive = None): |
189 | 189 |
local_uris.append(uri) |
190 | 190 |
|
191 | 191 |
for uri in local_uris: |
192 |
- local_list.update(_get_filelist_local(uri)) |
|
192 |
+ list_for_uri, single_file = _get_filelist_local(uri) |
|
193 |
+ local_list.update(list_for_uri) |
|
193 | 194 |
|
194 |
- return local_list |
|
195 |
+ ## Single file is True if and only if the user |
|
196 |
+ ## specified one local URI and that URI represents |
|
197 |
+ ## a FILE. Ie it is False if the URI was of a DIR |
|
198 |
+ ## and that dir contained only one FILE. That's not |
|
199 |
+ ## a case of single_file==True. |
|
200 |
+ if len(local_list) > 1: |
|
201 |
+ single_file = False |
|
202 |
+ |
|
203 |
+ return local_list, single_file |
|
195 | 204 |
|
196 | 205 |
def fetch_remote_list(args, require_attribs = False, recursive = None): |
197 | 206 |
remote_uris = [] |
... | ... |
@@ -213,10 +223,6 @@ def fetch_remote_list(args, require_attribs = False, recursive = None): |
213 | 213 |
for uri in remote_uris: |
214 | 214 |
objectlist = _get_filelist_remote(uri) |
215 | 215 |
for key in objectlist: |
216 |
- #object = S3Uri(objectlist[key]['object_uri_str']) |
|
217 |
- ## Remove leading '/' from remote filenames |
|
218 |
- #if key.find("/") == 0: |
|
219 |
- # key = key[1:] |
|
220 | 216 |
remote_list[key] = objectlist[key] |
221 | 217 |
else: |
222 | 218 |
for uri in remote_uris: |
... | ... |
@@ -272,18 +278,22 @@ def cmd_object_put(args): |
272 | 272 |
if len(args) == 0: |
273 | 273 |
raise ParameterError("Nothing to upload. Expecting a local file or directory.") |
274 | 274 |
|
275 |
- local_list = fetch_local_list(args) |
|
276 |
- local_count = len(local_list) |
|
275 |
+ local_list, single_file_local = fetch_local_list(args) |
|
277 | 276 |
|
278 | 277 |
local_list, exclude_list = _filelist_filter_exclude_include(local_list) |
279 | 278 |
|
280 |
- if not destination_base.endswith("/"): |
|
281 |
- if local_count > 1: |
|
282 |
- raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).") |
|
283 |
- local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base) |
|
284 |
- else: |
|
285 |
- for key in local_list: |
|
286 |
- local_list[key]['remote_uri'] = unicodise(destination_base + key) |
|
279 |
+ local_count = len(local_list) |
|
280 |
+ |
|
281 |
+ info(u"Summary: %d local files to upload" % local_count) |
|
282 |
+ |
|
283 |
+ if local_count > 0: |
|
284 |
+ if not destination_base.endswith("/"): |
|
285 |
+ if not single_file_local: |
|
286 |
+ raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).") |
|
287 |
+ local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base) |
|
288 |
+ else: |
|
289 |
+ for key in local_list: |
|
290 |
+ local_list[key]['remote_uri'] = unicodise(destination_base + key) |
|
287 | 291 |
|
288 | 292 |
if cfg.dry_run: |
289 | 293 |
for key in exclude_list: |
... | ... |
@@ -373,22 +383,25 @@ def cmd_object_get(args): |
373 | 373 |
raise ParameterError("Nothing to download. Expecting S3 URI.") |
374 | 374 |
|
375 | 375 |
remote_list = fetch_remote_list(args, require_attribs = False) |
376 |
- remote_count = len(remote_list) |
|
377 |
- |
|
378 | 376 |
remote_list, exclude_list = _filelist_filter_exclude_include(remote_list) |
379 | 377 |
|
380 |
- if not os.path.isdir(destination_base) or destination_base == '-': |
|
381 |
- ## We were either given a file name (existing or not) or want STDOUT |
|
382 |
- if remote_count > 1: |
|
383 |
- raise ParameterError("Destination must be a directory when downloading multiple sources.") |
|
384 |
- remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base) |
|
385 |
- elif os.path.isdir(destination_base): |
|
386 |
- if destination_base[-1] != os.path.sep: |
|
387 |
- destination_base += os.path.sep |
|
388 |
- for key in remote_list: |
|
389 |
- remote_list[key]['local_filename'] = destination_base + key |
|
390 |
- else: |
|
391 |
- raise InternalError("WTF? Is it a dir or not? -- %s" % destination_base) |
|
378 |
+ remote_count = len(remote_list) |
|
379 |
+ |
|
380 |
+ info(u"Summary: %d remote files to download" % remote_count) |
|
381 |
+ |
|
382 |
+ if remote_count > 0: |
|
383 |
+ if not os.path.isdir(destination_base) or destination_base == '-': |
|
384 |
+ ## We were either given a file name (existing or not) or want STDOUT |
|
385 |
+ if remote_count > 1: |
|
386 |
+ raise ParameterError("Destination must be a directory when downloading multiple sources.") |
|
387 |
+ remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base) |
|
388 |
+ elif os.path.isdir(destination_base): |
|
389 |
+ if destination_base[-1] != os.path.sep: |
|
390 |
+ destination_base += os.path.sep |
|
391 |
+ for key in remote_list: |
|
392 |
+ remote_list[key]['local_filename'] = destination_base + key |
|
393 |
+ else: |
|
394 |
+ raise InternalError("WTF? Is it a dir or not? -- %s" % destination_base) |
|
392 | 395 |
|
393 | 396 |
if cfg.dry_run: |
394 | 397 |
for key in exclude_list: |
... | ... |
@@ -549,10 +562,12 @@ def _get_filelist_local(local_uri): |
549 | 549 |
local_base = deunicodise(local_uri.basename()) |
550 | 550 |
local_path = deunicodise(local_uri.path()) |
551 | 551 |
filelist = os.walk(local_path) |
552 |
+ single_file = False |
|
552 | 553 |
else: |
553 | 554 |
local_base = "" |
554 | 555 |
local_path = deunicodise(local_uri.dirname()) |
555 | 556 |
filelist = [( local_path, [], [deunicodise(local_uri.basename())] )] |
557 |
+ single_file = True |
|
556 | 558 |
loc_list = SortedDict() |
557 | 559 |
for root, dirs, files in filelist: |
558 | 560 |
rel_root = root.replace(local_path, local_base, 1) |
... | ... |
@@ -575,7 +590,7 @@ def _get_filelist_local(local_uri): |
575 | 575 |
'mtime' : sr.st_mtime, |
576 | 576 |
## TODO: Possibly more to save here... |
577 | 577 |
} |
578 |
- return loc_list |
|
578 |
+ return loc_list, single_file |
|
579 | 579 |
|
580 | 580 |
def _get_filelist_remote(remote_uri, recursive = True): |
581 | 581 |
## If remote_uri ends with '/' then all remote files will have |
... | ... |
@@ -722,7 +737,7 @@ def cmd_sync_remote2local(args): |
722 | 722 |
s3 = S3(Config()) |
723 | 723 |
|
724 | 724 |
destination_base = args[-1] |
725 |
- local_list = fetch_local_list(destination_base, recursive = True) |
|
725 |
+ local_list, single_file_local = fetch_local_list(destination_base, recursive = True) |
|
726 | 726 |
remote_list = fetch_remote_list(args[:-1], recursive = True, require_attribs = True) |
727 | 727 |
|
728 | 728 |
local_count = len(local_list) |
... | ... |
@@ -737,6 +752,8 @@ def cmd_sync_remote2local(args): |
737 | 737 |
local_count = len(local_list) |
738 | 738 |
remote_count = len(remote_list) |
739 | 739 |
|
740 |
+ info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count, local_count)) |
|
741 |
+ |
|
740 | 742 |
if not os.path.isdir(destination_base): |
741 | 743 |
## We were either given a file name (existing or not) or want STDOUT |
742 | 744 |
if remote_count > 1: |
... | ... |
@@ -748,8 +765,6 @@ def cmd_sync_remote2local(args): |
748 | 748 |
for key in remote_list: |
749 | 749 |
remote_list[key]['local_filename'] = deunicodise(destination_base + key) |
750 | 750 |
|
751 |
- info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count, local_count)) |
|
752 |
- |
|
753 | 751 |
if cfg.dry_run: |
754 | 752 |
for key in exclude_list: |
755 | 753 |
output(u"exclude: %s" % unicodise(key)) |
... | ... |
@@ -891,7 +906,7 @@ def cmd_sync_local2remote(args): |
891 | 891 |
sys.exit(1) |
892 | 892 |
|
893 | 893 |
destination_base = args[-1] |
894 |
- local_list = fetch_local_list(args[:-1], recursive = True) |
|
894 |
+ local_list, single_file_local = fetch_local_list(args[:-1], recursive = True) |
|
895 | 895 |
remote_list = fetch_remote_list(destination_base, recursive = True, require_attribs = True) |
896 | 896 |
|
897 | 897 |
local_count = len(local_list) |
... | ... |
@@ -901,21 +916,29 @@ def cmd_sync_local2remote(args): |
901 | 901 |
|
902 | 902 |
local_list, exclude_list = _filelist_filter_exclude_include(local_list) |
903 | 903 |
|
904 |
+ if single_file_local and len(local_list) == 1 and len(remote_list) == 1: |
|
905 |
+ ## Make remote_key same as local_key for comparison if we're dealing with only one file |
|
906 |
+ remote_list_entry = remote_list[remote_list.keys()[0]] |
|
907 |
+ # Flush remote_list, by the way |
|
908 |
+ remote_list = { local_list.keys()[0] : remote_list_entry } |
|
909 |
+ |
|
904 | 910 |
local_list, remote_list, existing_list = _compare_filelists(local_list, remote_list, True) |
905 | 911 |
|
906 | 912 |
local_count = len(local_list) |
907 | 913 |
remote_count = len(remote_list) |
908 | 914 |
|
909 |
- if not destination_base.endswith("/"): |
|
910 |
- if local_count > 1: |
|
911 |
- raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).") |
|
912 |
- local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base) |
|
913 |
- else: |
|
914 |
- for key in local_list: |
|
915 |
- local_list[key]['remote_uri'] = unicodise(destination_base + key) |
|
916 |
- |
|
917 | 915 |
info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count, remote_count)) |
918 | 916 |
|
917 |
+ if local_count > 0: |
|
918 |
+ ## Populate 'remote_uri' only if we've got something to upload |
|
919 |
+ if not destination_base.endswith("/"): |
|
920 |
+ if not single_file_local: |
|
921 |
+ raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).") |
|
922 |
+ local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base) |
|
923 |
+ else: |
|
924 |
+ for key in local_list: |
|
925 |
+ local_list[key]['remote_uri'] = unicodise(destination_base + key) |
|
926 |
+ |
|
919 | 927 |
if cfg.dry_run: |
920 | 928 |
for key in exclude_list: |
921 | 929 |
output(u"exclude: %s" % unicodise(key)) |