Browse code

add --delay-updates option

Matt Domsch authored on 2012/03/02 05:45:06
Showing 4 changed files
... ...
@@ -55,6 +55,7 @@ class Config(object):
55 55
     delete_removed = False
56 56
     delete_after = False
57 57
     _doc['delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
58
+    delay_updates = False
58 59
     gpg_passphrase = ""
59 60
     gpg_command = ""
60 61
     gpg_encrypt = "%(gpg_command)s -c --verbose --no-use-agent --batch --yes --passphrase-fd %(passphrase_fd)s -o %(output_file)s %(input_file)s"
... ...
@@ -323,7 +323,7 @@ def fetch_remote_list(args, require_attribs = False, recursive = None):
323 323
                 remote_list[key] = remote_item
324 324
     return remote_list
325 325
 
326
-def compare_filelists(src_list, dst_list, src_remote, dst_remote):
326
+def compare_filelists(src_list, dst_list, src_remote, dst_remote, delay_updates = False):
327 327
     def __direction_str(is_remote):
328 328
         return is_remote and "remote" or "local"
329 329
 
... ...
@@ -333,6 +333,7 @@ def compare_filelists(src_list, dst_list, src_remote, dst_remote):
333 333
     info(u"Verifying attributes...")
334 334
     cfg = Config()
335 335
     exists_list = SortedDict(ignore_case = False)
336
+    update_list = SortedDict(ignore_case = False)
336 337
 
337 338
     debug("Comparing filelists (direction: %s -> %s)" % (__direction_str(src_remote), __direction_str(dst_remote)))
338 339
     debug("src_list.keys: %s" % src_list.keys())
... ...
@@ -392,10 +393,17 @@ def compare_filelists(src_list, dst_list, src_remote, dst_remote):
392 392
                 debug(u"IGNR: %s (transfer not needed)" % file)
393 393
                 exists_list[file] = src_list[file]
394 394
                 del(src_list[file])
395
+            else:
396
+                if delay_updates:
397
+                    ## Remove from source-list, all that is left there will be transferred
398
+                    ## Add to update-list to transfer last
399
+                    debug(u"XFER UPDATE: %s" % file)
400
+                    update_list[file] = src_list[file]
401
+                    del(src_list[file])
395 402
 
396 403
             ## Remove from destination-list, all that is left there will be deleted
397 404
             del(dst_list[file])
398 405
 
399
-    return src_list, dst_list, exists_list
406
+    return src_list, dst_list, exists_list, update_list
400 407
 
401 408
 # vim:et:ts=4:sts=4:ai
... ...
@@ -608,9 +608,10 @@ def cmd_sync_remote2remote(args):
608 608
 
609 609
     src_list, exclude_list = filter_exclude_include(src_list)
610 610
 
611
-    src_list, dst_list, existing_list = compare_filelists(src_list, dst_list, src_remote = True, dst_remote = True)
611
+    src_list, dst_list, existing_list, update_list = compare_filelists(src_list, dst_list, src_remote = True, dst_remote = True, delay_updates = cfg.delay_updates)
612 612
 
613 613
     src_count = len(src_list)
614
+    update_count = len(update_list)
614 615
     dst_count = len(dst_list)
615 616
 
616 617
     print(u"Summary: %d source files to copy, %d files at destination to delete" % (src_count, dst_count))
... ...
@@ -635,23 +636,29 @@ def cmd_sync_remote2remote(args):
635 635
     if cfg.delete_removed and not cfg.delete_after:
636 636
         _do_deletes(s3, dst_list)
637 637
 
638
+    def _upload(src_list, seq, src_count):
639
+        file_list = src_list.keys()
640
+        file_list.sort()
641
+        for file in file_list:
642
+            seq += 1
643
+            item = src_list[file]
644
+            src_uri = S3Uri(item['object_uri_str'])
645
+            dst_uri = S3Uri(item['target_uri'])
646
+            seq_label = "[%d of %d]" % (seq, src_count)
647
+            extra_headers = copy(cfg.extra_headers)
648
+            try:
649
+                response = s3.object_copy(src_uri, dst_uri, extra_headers)
650
+                output("File %(src)s copied to %(dst)s" % { "src" : src_uri, "dst" : dst_uri })
651
+            except S3Error, e:
652
+                error("File %(src)s could not be copied: %(e)s" % { "src" : src_uri, "e" : e })
653
+        return seq
654
+
638 655
     # Perform the synchronization of files
639 656
     timestamp_start = time.time()
640 657
     seq = 0
641
-    file_list = src_list.keys()
642
-    file_list.sort()
643
-    for file in file_list:
644
-        seq += 1
645
-        item = src_list[file]
646
-        src_uri = S3Uri(item['object_uri_str'])
647
-        dst_uri = S3Uri(item['target_uri'])
648
-        seq_label = "[%d of %d]" % (seq, src_count)
649
-        extra_headers = copy(cfg.extra_headers)
650
-        try:
651
-            response = s3.object_copy(src_uri, dst_uri, extra_headers)
652
-            output("File %(src)s copied to %(dst)s" % { "src" : src_uri, "dst" : dst_uri })
653
-        except S3Error, e:
654
-            error("File %(src)s could not be copied: %(e)s" % { "src" : src_uri, "e" : e })
658
+    seq = _upload(src_list, seq, src_count + update_count)
659
+    seq = _upload(update_list, seq, src_count + update_count)
660
+
655 661
     total_elapsed = time.time() - timestamp_start
656 662
     outstr = "Done. Copied %d files in %0.1f seconds, %0.2f files/s" % (seq, total_elapsed, seq/total_elapsed)
657 663
     if seq > 0:
... ...
@@ -689,27 +696,32 @@ def cmd_sync_remote2local(args):
689 689
 
690 690
     remote_list, exclude_list = filter_exclude_include(remote_list)
691 691
 
692
-    remote_list, local_list, existing_list = compare_filelists(remote_list, local_list, src_remote = True, dst_remote = False)
692
+    remote_list, local_list, existing_list, update_list = compare_filelists(remote_list, local_list, src_remote = True, dst_remote = False, delay_updates = cfg.delay_updates)
693 693
 
694 694
     local_count = len(local_list)
695 695
     remote_count = len(remote_list)
696
+    update_count = len(update_list)
696 697
 
697
-    info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count, local_count))
698
-
699
-    if not os.path.isdir(destination_base):
700
-        ## We were either given a file name (existing or not) or want STDOUT
701
-        if remote_count > 1:
702
-            raise ParameterError("Destination must be a directory when downloading multiple sources.")
703
-        remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
704
-    else:
705
-        if destination_base[-1] != os.path.sep:
706
-            destination_base += os.path.sep
707
-        for key in remote_list:
708
-            local_filename = destination_base + key
709
-            if os.path.sep != "/":
710
-                local_filename = os.path.sep.join(local_filename.split("/"))
711
-            remote_list[key]['local_filename'] = deunicodise(local_filename)
698
+    info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count + update_count, local_count))
712 699
 
700
+    def _set_local_filename(remote_list, destination_base):
701
+        if not os.path.isdir(destination_base):
702
+            ## We were either given a file name (existing or not) or want STDOUT
703
+            if len(remote_list) > 1:
704
+                raise ParameterError("Destination must be a directory when downloading multiple sources.")
705
+            remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
706
+        else:
707
+            if destination_base[-1] != os.path.sep:
708
+                destination_base += os.path.sep
709
+            for key in remote_list:
710
+                local_filename = destination_base + key
711
+                if os.path.sep != "/":
712
+                    local_filename = os.path.sep.join(local_filename.split("/"))
713
+                remote_list[key]['local_filename'] = deunicodise(local_filename)
714
+
715
+    _set_local_filename(remote_list, destination_base)
716
+    _set_local_filename(update_list, destination_base)
717
+
713 718
     if cfg.dry_run:
714 719
         for key in exclude_list:
715 720
             output(u"exclude: %s" % unicodise(key))
... ...
@@ -718,6 +730,8 @@ def cmd_sync_remote2local(args):
718 718
                 output(u"delete: %s" % local_list[key]['full_name_unicode'])
719 719
         for key in remote_list:
720 720
             output(u"download: %s -> %s" % (remote_list[key]['object_uri_str'], remote_list[key]['local_filename']))
721
+        for key in update_list:
722
+            output(u"download: %s -> %s" % (update_list[key]['object_uri_str'], update_list[key]['local_filename']))
721 723
 
722 724
         warning(u"Exitting now because of --dry-run")
723 725
         return
... ...
@@ -725,85 +739,90 @@ def cmd_sync_remote2local(args):
725 725
     if cfg.delete_removed and not cfg.delete_after:
726 726
         _do_deletes(local_list)
727 727
 
728
-    total_size = 0
729
-    total_elapsed = 0.0
730
-    timestamp_start = time.time()
731
-    seq = 0
732
-    dir_cache = {}
733
-    file_list = remote_list.keys()
734
-    file_list.sort()
735
-    for file in file_list:
736
-        seq += 1
737
-        item = remote_list[file]
738
-        uri = S3Uri(item['object_uri_str'])
739
-        dst_file = item['local_filename']
740
-        seq_label = "[%d of %d]" % (seq, remote_count)
741
-        try:
742
-            dst_dir = os.path.dirname(dst_file)
743
-            if not dir_cache.has_key(dst_dir):
744
-                dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
745
-            if dir_cache[dst_dir] == False:
746
-                warning(u"%s: destination directory not writable: %s" % (file, dst_dir))
747
-                continue
728
+    def _download(remote_list, seq, total, total_size, dir_cache):
729
+        file_list = remote_list.keys()
730
+        file_list.sort()
731
+        for file in file_list:
732
+            seq += 1
733
+            item = remote_list[file]
734
+            uri = S3Uri(item['object_uri_str'])
735
+            dst_file = item['local_filename']
736
+            seq_label = "[%d of %d]" % (seq, total)
748 737
             try:
749
-                open_flags = os.O_CREAT
750
-                open_flags |= os.O_TRUNC
751
-                # open_flags |= os.O_EXCL
752
-
753
-                debug(u"dst_file=%s" % unicodise(dst_file))
754
-                # This will have failed should the file exist
755
-                os.close(os.open(dst_file, open_flags))
756
-                # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
757
-                dst_stream = open(dst_file, "wb")
758
-                response = s3.object_get(uri, dst_stream, extra_label = seq_label)
759
-                dst_stream.close()
760
-                if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
761
-                    attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
762
-                    if attrs.has_key('mode'):
763
-                        os.chmod(dst_file, int(attrs['mode']))
764
-                    if attrs.has_key('mtime') or attrs.has_key('atime'):
765
-                        mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
766
-                        atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
767
-                        os.utime(dst_file, (atime, mtime))
768
-                    ## FIXME: uid/gid / uname/gname handling comes here! TODO
769
-            except OSError, e:
770
-                try: dst_stream.close()
771
-                except: pass
772
-                if e.errno == errno.EEXIST:
773
-                    warning(u"%s exists - not overwriting" % (dst_file))
774
-                    continue
775
-                if e.errno in (errno.EPERM, errno.EACCES):
776
-                    warning(u"%s not writable: %s" % (dst_file, e.strerror))
738
+                dst_dir = os.path.dirname(dst_file)
739
+                if not dir_cache.has_key(dst_dir):
740
+                    dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
741
+                if dir_cache[dst_dir] == False:
742
+                    warning(u"%s: destination directory not writable: %s" % (file, dst_dir))
777 743
                     continue
778
-                if e.errno == errno.EISDIR:
779
-                    warning(u"%s is a directory - skipping over" % dst_file)
744
+                try:
745
+                    open_flags = os.O_CREAT
746
+                    open_flags |= os.O_TRUNC
747
+                    # open_flags |= os.O_EXCL
748
+
749
+                    debug(u"dst_file=%s" % unicodise(dst_file))
750
+                    # This will have failed should the file exist
751
+                    os.close(os.open(dst_file, open_flags))
752
+                    # Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
753
+                    dst_stream = open(dst_file, "wb")
754
+                    response = s3.object_get(uri, dst_stream, extra_label = seq_label)
755
+                    dst_stream.close()
756
+                    if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
757
+                        attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
758
+                        if attrs.has_key('mode'):
759
+                            os.chmod(dst_file, int(attrs['mode']))
760
+                        if attrs.has_key('mtime') or attrs.has_key('atime'):
761
+                            mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
762
+                            atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
763
+                            os.utime(dst_file, (atime, mtime))
764
+                        ## FIXME: uid/gid / uname/gname handling comes here! TODO
765
+                except OSError, e:
766
+                    try: dst_stream.close()
767
+                    except: pass
768
+                    if e.errno == errno.EEXIST:
769
+                        warning(u"%s exists - not overwriting" % (dst_file))
770
+                        continue
771
+                    if e.errno in (errno.EPERM, errno.EACCES):
772
+                        warning(u"%s not writable: %s" % (dst_file, e.strerror))
773
+                        continue
774
+                    if e.errno == errno.EISDIR:
775
+                        warning(u"%s is a directory - skipping over" % dst_file)
776
+                        continue
777
+                    raise e
778
+                except KeyboardInterrupt:
779
+                    try: dst_stream.close()
780
+                    except: pass
781
+                    warning(u"Exiting after keyboard interrupt")
782
+                    return
783
+                except Exception, e:
784
+                    try: dst_stream.close()
785
+                    except: pass
786
+                    error(u"%s: %s" % (file, e))
780 787
                     continue
781
-                raise e
782
-            except KeyboardInterrupt:
788
+                # We have to keep repeating this call because
789
+                # Python 2.4 doesn't support try/except/finally
790
+                # construction :-(
783 791
                 try: dst_stream.close()
784 792
                 except: pass
785
-                warning(u"Exiting after keyboard interrupt")
786
-                return
787
-            except Exception, e:
788
-                try: dst_stream.close()
789
-                except: pass
790
-                error(u"%s: %s" % (file, e))
793
+            except S3DownloadError, e:
794
+                error(u"%s: download failed too many times. Skipping that file." % file)
791 795
                 continue
792
-            # We have to keep repeating this call because
793
-            # Python 2.4 doesn't support try/except/finally
794
-            # construction :-(
795
-            try: dst_stream.close()
796
-            except: pass
797
-        except S3DownloadError, e:
798
-            error(u"%s: download failed too many times. Skipping that file." % file)
799
-            continue
800
-        speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
801
-        if not Config().progress_meter:
802
-            output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
803
-                (uri, unicodise(dst_file), response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
804
-                seq_label))
805
-        total_size += response["size"]
796
+            speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
797
+            if not Config().progress_meter:
798
+                output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
799
+                    (uri, unicodise(dst_file), response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
800
+                    seq_label))
801
+            total_size += response["size"]
802
+        return seq, total_size
806 803
 
804
+    total_size = 0
805
+    total_elapsed = 0.0
806
+    timestamp_start = time.time()
807
+    dir_cache = {}
808
+    seq = 0
809
+    seq, total_size = _download(remote_list, seq, remote_count + update_count, total_size, dir_cache)
810
+    seq, total_size = _download(update_list, seq, remote_count + update_count, total_size, dir_cache)
811
+
807 812
     total_elapsed = time.time() - timestamp_start
808 813
     speed_fmt = formatSize(total_size/total_elapsed, human_readable = True, floating_point = True)
809 814
 
... ...
@@ -878,30 +897,37 @@ def cmd_sync_local2remote(args):
878 878
     info(u"Found %d local files, %d remote files" % (local_count, remote_count))
879 879
 
880 880
     local_list, exclude_list = filter_exclude_include(local_list)
881
-
881
+
882 882
     if single_file_local and len(local_list) == 1 and len(remote_list) == 1:
883 883
         ## Make remote_key same as local_key for comparison if we're dealing with only one file
884 884
         remote_list_entry = remote_list[remote_list.keys()[0]]
885 885
         # Flush remote_list, by the way
886 886
         remote_list = { local_list.keys()[0] : remote_list_entry }
887 887
 
888
-    local_list, remote_list, existing_list = compare_filelists(local_list, remote_list, src_remote = False, dst_remote = True)
888
+    local_list, remote_list, existing_list, update_list = compare_filelists(local_list, remote_list, src_remote = False, dst_remote = True, delay_updates = cfg.delay_updates)
889
+
889 890
 
890 891
     local_count = len(local_list)
892
+    update_count = len(update_list)
891 893
     remote_count = len(remote_list)
892 894
 
893
-    info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count, remote_count))
895
+    info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count + update_count, remote_count))
894 896
 
895
-    if local_count > 0:
896
-        ## Populate 'remote_uri' only if we've got something to upload
897
-        if not destination_base.endswith("/"):
898
-            if not single_file_local:
899
-                raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
900
-            local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
901
-        else:
902
-            for key in local_list:
903
-                local_list[key]['remote_uri'] = unicodise(destination_base + key)
904 897
 
898
+    def _set_remote_uri(local_list, destination_base, single_file_local):
899
+        if len(local_list) > 0:
900
+            ## Populate 'remote_uri' only if we've got something to upload
901
+            if not destination_base.endswith("/"):
902
+                if not single_file_local:
903
+                    raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
904
+                local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
905
+            else:
906
+                for key in local_list:
907
+                    local_list[key]['remote_uri'] = unicodise(destination_base + key)
908
+
909
+    _set_remote_uri(local_list, destination_base, single_file_local)
910
+    _set_remote_uri(update_list, destination_base, single_file_local)
911
+
905 912
     if cfg.dry_run:
906 913
         for key in exclude_list:
907 914
             output(u"exclude: %s" % unicodise(key))
... ...
@@ -910,6 +936,8 @@ def cmd_sync_local2remote(args):
910 910
                 output(u"delete: %s" % remote_list[key]['object_uri_str'])
911 911
         for key in local_list:
912 912
             output(u"upload: %s -> %s" % (local_list[key]['full_name_unicode'], local_list[key]['remote_uri']))
913
+        for key in update_list:
914
+            output(u"upload: %s -> %s" % (update_list[key]['full_name_unicode'], update_list[key]['remote_uri']))
913 915
 
914 916
         warning(u"Exitting now because of --dry-run")
915 917
         return
... ...
@@ -921,36 +949,40 @@ def cmd_sync_local2remote(args):
921 921
     total_size = 0
922 922
     total_elapsed = 0.0
923 923
     timestamp_start = time.time()
924
-    seq = 0
925
-    file_list = local_list.keys()
926
-    file_list.sort()
927
-    for file in file_list:
928
-        seq += 1
929
-        item = local_list[file]
930
-        src = item['full_name']
931
-        uri = S3Uri(item['remote_uri'])
932
-        seq_label = "[%d of %d]" % (seq, local_count)
933
-        extra_headers = copy(cfg.extra_headers)
934
-        try:
935
-            if cfg.preserve_attrs:
936
-                attr_header = _build_attr_header(src)
937
-                debug(u"attr_header: %s" % attr_header)
938
-                extra_headers.update(attr_header)
939
-            response = s3.object_put(src, uri, extra_headers, extra_label = seq_label)
940
-        except InvalidFileError, e:
941
-            warning(u"File can not be uploaded: %s" % e)
942
-            continue
943
-        except S3UploadError, e:
944
-            error(u"%s: upload failed too many times. Skipping that file." % item['full_name_unicode'])
945
-            continue
946
-        speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
947
-        if not cfg.progress_meter:
948
-            output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
949
-                (item['full_name_unicode'], uri, response["size"], response["elapsed"],
950
-                speed_fmt[0], speed_fmt[1], seq_label))
951
-        total_size += response["size"]
952
-        uploaded_objects_list.append(uri.object())
953 924
 
925
+    def _upload(local_list, seq, total, total_size):
926
+        file_list = local_list.keys()
927
+        file_list.sort()
928
+        for file in file_list:
929
+            seq += 1
930
+            item = local_list[file]
931
+            src = item['full_name']
932
+            uri = S3Uri(item['remote_uri'])
933
+            seq_label = "[%d of %d]" % (seq, total)
934
+            extra_headers = copy(cfg.extra_headers)
935
+            try:
936
+                if cfg.preserve_attrs:
937
+                    attr_header = _build_attr_header(src)
938
+                    debug(u"attr_header: %s" % attr_header)
939
+                    extra_headers.update(attr_header)
940
+                response = s3.object_put(src, uri, extra_headers, extra_label = seq_label)
941
+            except InvalidFileError, e:
942
+                warning(u"File can not be uploaded: %s" % e)
943
+                continue
944
+            except S3UploadError, e:
945
+                error(u"%s: upload failed too many times. Skipping that file." % item['full_name_unicode'])
946
+                continue
947
+            speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
948
+            if not cfg.progress_meter:
949
+                output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
950
+                    (item['full_name_unicode'], uri, response["size"], response["elapsed"],
951
+                    speed_fmt[0], speed_fmt[1], seq_label))
952
+            total_size += response["size"]
953
+            uploaded_objects_list.append(uri.object())
954
+        return seq, total_size
955
+
956
+    n, total_size = _upload(local_list, 0, local_count + update_count, total_size)
957
+    n, total_size = _upload(update_list, n, local_count + update_count, total_size)
954 958
     total_elapsed = time.time() - timestamp_start
955 959
     total_speed = total_elapsed and total_size/total_elapsed or 0.0
956 960
     speed_fmt = formatSize(total_speed, human_readable = True, floating_point = True)
... ...
@@ -1520,6 +1552,7 @@ def main():
1520 1520
     optparser.add_option(      "--delete-removed", dest="delete_removed", action="store_true", help="Delete remote objects with no corresponding local file [sync]")
1521 1521
     optparser.add_option(      "--no-delete-removed", dest="delete_removed", action="store_false", help="Don't delete remote objects.")
1522 1522
     optparser.add_option(      "--delete-after", dest="delete_after", action="store_true", help="Perform deletes after new uploads [sync]")
1523
+    optparser.add_option(      "--delay-updates", dest="delay_updates", action="store_true", help="Put all updated files into place at end [sync]")
1523 1524
     optparser.add_option("-p", "--preserve", dest="preserve_attrs", action="store_true", help="Preserve filesystem attributes (mode, ownership, timestamps). Default for [sync] command.")
1524 1525
     optparser.add_option(      "--no-preserve", dest="preserve_attrs", action="store_false", help="Don't store FS attributes")
1525 1526
     optparser.add_option(      "--exclude", dest="exclude", action="append", metavar="GLOB", help="Filenames and paths matching GLOB will be excluded from sync")
... ...
@@ -189,6 +189,9 @@ Don't delete remote objects.
189 189
 \fB\-\-delete\-after\fR
190 190
 Perform deletes after new uploads [sync].
191 191
 .TP
192
+\fB\-\-delay\-updates\fR
193
+Put all updated files into place at end [sync].
194
+.TP
192 195
 \fB\-p\fR, \fB\-\-preserve\fR
193 196
 Preserve filesystem attributes (mode, ownership,
194 197
 timestamps). Default for [sync] command.