* s3cmd: Support for storing file attributes (like ownership,
  mode, etc.) in sync operation.
git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@153 830e0280-6d2a-0410-9c65-932aecc39d9d
... | ... |
@@ -23,6 +23,18 @@ class Config(object):
 	proxy_host = ""
 	proxy_port = 3128
 	encrypt = False
+	dry_run = False
+	preserve_attrs = True
+	preserve_attrs_list = [
+		'uname',	# Verbose owner Name (e.g. 'root')
+		#'uid',		# Numeric user ID (e.g. 0)
+		'gname',	# Group name (e.g. 'users')
+		#'gid',		# Numeric group ID (e.g. 100)
+		'mtime',	# Modification timestamp
+		'ctime',	# Creation timestamp
+		'mode',		# File mode (e.g. rwxr-xr-x = 755)
+		#'acl',		# Full ACL (not yet supported)
+	]
 	delete_removed = False
 	_doc['delete_removed'] = "[sync] Remove remote S3 objects when local file has been deleted"
 	gpg_passphrase = ""
... | ... |
@@ -11,6 +11,7 @@ import time
 import os
 import re
 import errno
+import pwd, grp
 
 from copy import copy
 from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter
... | ... |
@@ -262,6 +263,20 @@ def cmd_object_del(args):
 	output("Object %s deleted" % uri)
 
 def cmd_sync(args):
+	def _build_attr_header(src):
+		attrs = {}
+		st = os.stat_result(os.stat(src))
+		for attr in cfg.preserve_attrs_list:
+			if attr == 'uname':
+				val = pwd.getpwuid(st.st_uid).pw_name
+			elif attr == 'gname':
+				val = grp.getgrgid(st.st_gid).gr_name
+			else:
+				val = getattr(st, 'st_' + attr)
+			attrs[attr] = val
+		result = ""
+		for k in attrs: result += "%s:%s/" % (k, attrs[k])
+		return { 'x-amz-meta-s3cmd-attrs' : result[:-1] }
 	src = args.pop(0)
 	if S3Uri(src).type != "file":
 		raise ParameterError("Source must be a local path instead of: %s" % src)
... | ... |
@@ -277,10 +292,15 @@ def cmd_sync(args):
 	s3 = S3(Config())
 
 	output("Compiling list of local files...")
-	loc_base = os.path.join(src, "")
+	if os.path.isdir(src):
+		loc_base = os.path.join(src, "")
+		filelist = os.walk(src)
+	else:
+		loc_base = "./"
+		filelist = [( '.', [], [src] )]
 	loc_base_len = len(loc_base)
 	loc_list = {}
-	for root, dirs, files in os.walk(src):
+	for root, dirs, files in filelist:
 		## TODO: implement explicit exclude
 		for f in files:
 			full_name = os.path.join(root, f)
... | ... |
@@ -362,7 +382,10 @@ def cmd_sync(args):
 		seq += 1
 		src = loc_list[file]['full_name']
 		uri = S3Uri(dst_base + file)
-		response = s3.object_put_uri(src, uri)
+		if cfg.preserve_attrs:
+			attr_header = _build_attr_header(src)
+			debug(attr_header)
+		response = s3.object_put_uri(src, uri, attr_header)
 		output("stored '%s' as '%s' (%d bytes) [%d of %d]" % (src, uri, response["size"], seq, total_count))
 		total_size += response["size"]
 	output("Done. Uploaded %d bytes." % total_size)