git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3cmd/trunk@303 830e0280-6d2a-0410-9c65-932aecc39d9d
Michal Ludvig authored on 2008/12/29 21:28:19... | ... |
@@ -14,21 +14,17 @@ import errno |
14 | 14 |
import glob |
15 | 15 |
import traceback |
16 | 16 |
import codecs |
17 |
+import locale |
|
17 | 18 |
|
18 | 19 |
from copy import copy |
19 | 20 |
from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter |
20 | 21 |
from logging import debug, info, warning, error |
21 | 22 |
from distutils.spawn import find_executable |
22 | 23 |
|
23 |
-## Output native on TTY, UTF-8 otherwise (redirects) |
|
24 |
-#_stdout = sys.stdout.isatty() and sys.stdout or codecs.getwriter("utf-8")(sys.stdout) |
|
25 |
-#_stderr = sys.stderr.isatty() and sys.stderr or codecs.getwriter("utf-8")(sys.stderr) |
|
26 | 24 |
## Output UTF-8 in all cases |
27 |
-_stdout = codecs.getwriter("utf-8")(sys.stdout) |
|
28 |
-_stderr = codecs.getwriter("utf-8")(sys.stderr) |
|
29 |
-## Leave it to the terminal |
|
30 |
-#_stdout = sys.stdout |
|
31 |
-#_stderr = sys.stderr |
|
25 |
+encoding = locale.getpreferredencoding() or "utf-8" |
|
26 |
+_stdout = codecs.getwriter(encoding)(sys.stdout, "replace") |
|
27 |
+_stderr = codecs.getwriter(encoding)(sys.stderr, "replace") |
|
32 | 28 |
|
33 | 29 |
def output(message): |
34 | 30 |
_stdout.write(message + "\n") |
... | ... |
@@ -57,8 +53,8 @@ def subcmd_bucket_usage_all(s3): |
57 | 57 |
buckets_size += size |
58 | 58 |
total_size, size_coeff = formatSize(buckets_size, Config().human_readable_sizes) |
59 | 59 |
total_size_str = str(total_size) + size_coeff |
60 |
- output("".rjust(8, "-")) |
|
61 |
- output("%s Total" % (total_size_str.ljust(8))) |
|
60 |
+ output(u"".rjust(8, "-")) |
|
61 |
+ output(u"%s Total" % (total_size_str.ljust(8))) |
|
62 | 62 |
|
63 | 63 |
def subcmd_bucket_usage(s3, uri): |
64 | 64 |
bucket = uri.bucket() |
... | ... |
@@ -80,7 +76,7 @@ def subcmd_bucket_usage(s3, uri): |
80 | 80 |
bucket_size += size |
81 | 81 |
total_size, size_coeff = formatSize(bucket_size, Config().human_readable_sizes) |
82 | 82 |
total_size_str = str(total_size) + size_coeff |
83 |
- output("%s %s" % (total_size_str.ljust(8), uri)) |
|
83 |
+ output(u"%s %s" % (total_size_str.ljust(8), uri)) |
|
84 | 84 |
return bucket_size |
85 | 85 |
|
86 | 86 |
def cmd_ls(args): |
... | ... |
@@ -99,13 +95,13 @@ def cmd_buckets_list_all_all(args): |
99 | 99 |
|
100 | 100 |
for bucket in response["list"]: |
101 | 101 |
subcmd_bucket_list(s3, S3Uri("s3://" + bucket["Name"])) |
102 |
- output("") |
|
102 |
+ output(u"") |
|
103 | 103 |
|
104 | 104 |
|
105 | 105 |
def subcmd_buckets_list_all(s3): |
106 | 106 |
response = s3.list_all_buckets() |
107 | 107 |
for bucket in response["list"]: |
108 |
- output("%s s3://%s" % ( |
|
108 |
+ output(u"%s s3://%s" % ( |
|
109 | 109 |
formatDateTime(bucket["CreationDate"]), |
110 | 110 |
bucket["Name"], |
111 | 111 |
)) |
... | ... |
@@ -114,7 +110,7 @@ def subcmd_bucket_list(s3, uri): |
114 | 114 |
bucket = uri.bucket() |
115 | 115 |
prefix = uri.object() |
116 | 116 |
|
117 |
- debug("Bucket 's3://%s':" % bucket) |
|
117 |
+ debug(u"Bucket 's3://%s':" % bucket) |
|
118 | 118 |
if prefix.endswith('*'): |
119 | 119 |
prefix = prefix[:-1] |
120 | 120 |
try: |
... | ... |
@@ -127,13 +123,13 @@ def subcmd_bucket_list(s3, uri): |
127 | 127 |
raise |
128 | 128 |
|
129 | 129 |
for prefix in response['common_prefixes']: |
130 |
- output("%s %s" % ( |
|
130 |
+ output(u"%s %s" % ( |
|
131 | 131 |
"D".rjust(28), |
132 | 132 |
uri.compose_uri(bucket, prefix["Prefix"]))) |
133 | 133 |
|
134 | 134 |
for object in response["list"]: |
135 | 135 |
size, size_coeff = formatSize(object["Size"], Config().human_readable_sizes) |
136 |
- output("%s %s%s %s" % ( |
|
136 |
+ output(u"%s %s%s %s" % ( |
|
137 | 137 |
formatDateTime(object["LastModified"]), |
138 | 138 |
str(size).rjust(8), size_coeff.ljust(1), |
139 | 139 |
uri.compose_uri(bucket, object["Key"]), |
... | ... |
@@ -147,7 +143,7 @@ def cmd_bucket_create(args): |
147 | 147 |
raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % arg) |
148 | 148 |
try: |
149 | 149 |
response = s3.bucket_create(uri.bucket(), cfg.bucket_location) |
150 |
- output("Bucket '%s' created" % uri.uri()) |
|
150 |
+ output(u"Bucket '%s' created" % uri.uri()) |
|
151 | 151 |
except S3Error, e: |
152 | 152 |
if S3.codes.has_key(e.info["Code"]): |
153 | 153 |
error(S3.codes[e.info["Code"]] % uri.bucket()) |
... | ... |
@@ -161,7 +157,7 @@ def cmd_bucket_delete(args): |
161 | 161 |
response = s3.bucket_delete(uri.bucket()) |
162 | 162 |
except S3Error, e: |
163 | 163 |
if e.info['Code'] == 'BucketNotEmpty' and (cfg.force or cfg.recursive): |
164 |
- warning("Bucket is not empty. Removing all the objects from it first. This may take some time...") |
|
164 |
+ warning(u"Bucket is not empty. Removing all the objects from it first. This may take some time...") |
|
165 | 165 |
subcmd_object_del_uri(uri, recursive = True) |
166 | 166 |
return _bucket_delete_one(uri) |
167 | 167 |
elif S3.codes.has_key(e.info["Code"]): |
... | ... |
@@ -176,7 +172,7 @@ def cmd_bucket_delete(args): |
176 | 176 |
if not uri.type == "s3" or not uri.has_bucket() or uri.has_object(): |
177 | 177 |
raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % arg) |
178 | 178 |
_bucket_delete_one(uri) |
179 |
- output("Bucket '%s' removed" % uri.uri()) |
|
179 |
+ output(u"Bucket '%s' removed" % uri.uri()) |
|
180 | 180 |
|
181 | 181 |
def cmd_object_put(args): |
182 | 182 |
s3 = S3(Config()) |
... | ... |
@@ -189,11 +185,11 @@ def cmd_object_put(args): |
189 | 189 |
raise ParameterError("Expecting S3 URI instead of '%s'" % uri_arg) |
190 | 190 |
|
191 | 191 |
if len(args) > 1 and uri.object() != "" and not Config().force: |
192 |
- error("When uploading multiple files the last argument must") |
|
193 |
- error("be a S3 URI specifying just the bucket name") |
|
194 |
- error("WITHOUT object name!") |
|
195 |
- error("Alternatively use --force argument and the specified") |
|
196 |
- error("object name will be prefixed to all stored filenames.") |
|
192 |
+ error(u"When uploading multiple files the last argument must") |
|
193 |
+ error(u"be a S3 URI specifying just the bucket name") |
|
194 |
+ error(u"WITHOUT object name!") |
|
195 |
+ error(u"Alternatively use --force argument and the specified") |
|
196 |
+ error(u"object name will be prefixed to all stored filenames.") |
|
197 | 197 |
sys.exit(1) |
198 | 198 |
|
199 | 199 |
seq = 0 |
... | ... |
@@ -213,21 +209,21 @@ def cmd_object_put(args): |
213 | 213 |
try: |
214 | 214 |
response = s3.object_put(real_filename, uri_final, extra_headers, extra_label = seq_label) |
215 | 215 |
except S3UploadError, e: |
216 |
- error("Upload of '%s' failed too many times. Skipping that file." % real_filename) |
|
216 |
+ error(u"Upload of '%s' failed too many times. Skipping that file." % real_filename) |
|
217 | 217 |
continue |
218 | 218 |
except InvalidFileError, e: |
219 |
- warning("File can not be uploaded: %s" % e) |
|
219 |
+ warning(u"File can not be uploaded: %s" % e) |
|
220 | 220 |
continue |
221 | 221 |
speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
222 | 222 |
if not Config().progress_meter: |
223 |
- output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
223 |
+ output(u"File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
224 | 224 |
(file, uri_final, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1], |
225 | 225 |
seq_label)) |
226 | 226 |
if Config().acl_public: |
227 |
- output("Public URL of the object is: %s" % |
|
227 |
+ output(u"Public URL of the object is: %s" % |
|
228 | 228 |
(uri_final.public_url())) |
229 | 229 |
if Config().encrypt and real_filename != file: |
230 |
- debug("Removing temporary encrypted file: %s" % real_filename) |
|
230 |
+ debug(u"Removing temporary encrypted file: %s" % real_filename) |
|
231 | 231 |
os.remove(real_filename) |
232 | 232 |
|
233 | 233 |
def cmd_object_get(args): |
... | ... |
@@ -386,7 +382,7 @@ def cmd_object_get(args): |
386 | 386 |
dst_stream.close() |
387 | 387 |
raise ParameterError("File %s already exists. Use either of --force / --continue / --skip-existing or give it a new name." % destination) |
388 | 388 |
except IOError, e: |
389 |
- error("Skipping %s: %s" % (destination, e.strerror)) |
|
389 |
+ error(u"Skipping %s: %s" % (destination, e.strerror)) |
|
390 | 390 |
continue |
391 | 391 |
response = s3.object_get(uri, dst_stream, start_position = start_position, extra_label = seq_label) |
392 | 392 |
if response["headers"].has_key("x-amz-meta-s3tools-gpgenc"): |
... | ... |
@@ -394,7 +390,7 @@ def cmd_object_get(args): |
394 | 394 |
response["size"] = os.stat(destination)[6] |
395 | 395 |
if not Config().progress_meter and destination != "-": |
396 | 396 |
speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
397 |
- output("File %s saved as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s)" % |
|
397 |
+ output(u"File %s saved as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s)" % |
|
398 | 398 |
(uri, destination, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1])) |
399 | 399 |
|
400 | 400 |
def cmd_object_del(args): |
... | ... |
@@ -420,13 +416,13 @@ def subcmd_object_del_uri(uri, recursive = None): |
420 | 420 |
uri_base = 's3://' + uri.bucket() + "/" |
421 | 421 |
for idx in filelist: |
422 | 422 |
object = filelist[idx] |
423 |
- debug("Adding URI " + uri_base + object['object_key']) |
|
423 |
+ debug(u"Adding URI " + uri_base + object['object_key']) |
|
424 | 424 |
uri_list.append(S3Uri(uri_base + object['object_key'])) |
425 | 425 |
else: |
426 | 426 |
uri_list.append(uri) |
427 | 427 |
for _uri in uri_list: |
428 | 428 |
response = s3.object_delete(_uri) |
429 |
- output("Object %s deleted" % _uri) |
|
429 |
+ output(u"Object %s deleted" % _uri) |
|
430 | 430 |
|
431 | 431 |
def subcmd_cp_mv(args, process_fce, message): |
432 | 432 |
src_uri = S3Uri(args.pop(0)) |
... | ... |
@@ -444,7 +440,7 @@ def subcmd_cp_mv(args, process_fce, message): |
444 | 444 |
response = process_fce(src_uri, dst_uri) |
445 | 445 |
output(message % { "src" : src_uri, "dst" : dst_uri}) |
446 | 446 |
if Config().acl_public: |
447 |
- output("Public URL is: %s" % dst_uri.public_url()) |
|
447 |
+ output(u"Public URL is: %s" % dst_uri.public_url()) |
|
448 | 448 |
|
449 | 449 |
def cmd_cp(args): |
450 | 450 |
s3 = S3(Config()) |
... | ... |
@@ -466,18 +462,18 @@ def cmd_info(args): |
466 | 466 |
try: |
467 | 467 |
if uri.has_object(): |
468 | 468 |
info = s3.object_info(uri) |
469 |
- output("%s (object):" % uri.uri()) |
|
470 |
- output(" File size: %s" % info['headers']['content-length']) |
|
471 |
- output(" Last mod: %s" % info['headers']['last-modified']) |
|
472 |
- output(" MIME type: %s" % info['headers']['content-type']) |
|
473 |
- output(" MD5 sum: %s" % info['headers']['etag'].strip('"')) |
|
469 |
+ output(u"%s (object):" % uri.uri()) |
|
470 |
+ output(u" File size: %s" % info['headers']['content-length']) |
|
471 |
+ output(u" Last mod: %s" % info['headers']['last-modified']) |
|
472 |
+ output(u" MIME type: %s" % info['headers']['content-type']) |
|
473 |
+ output(u" MD5 sum: %s" % info['headers']['etag'].strip('"')) |
|
474 | 474 |
else: |
475 | 475 |
info = s3.bucket_info(uri) |
476 |
- output("%s (bucket):" % uri.uri()) |
|
477 |
- output(" Location: %s" % info['bucket-location']) |
|
476 |
+ output(u"%s (bucket):" % uri.uri()) |
|
477 |
+ output(u" Location: %s" % info['bucket-location']) |
|
478 | 478 |
acl = s3.get_acl(uri) |
479 | 479 |
for user in acl.keys(): |
480 |
- output(" ACL: %s: %s" % (user, acl[user])) |
|
480 |
+ output(u" ACL: %s: %s" % (user, acl[user])) |
|
481 | 481 |
except S3Error, e: |
482 | 482 |
if S3.codes.has_key(e.info["Code"]): |
483 | 483 |
error(S3.codes[e.info["Code"]] % uri.bucket()) |
... | ... |
@@ -576,7 +572,7 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote): |
576 | 576 |
logging.root.setLevel(logging.DEBUG) |
577 | 577 |
for file in src_list.keys(): |
578 | 578 |
if not cfg.debug_syncmatch: |
579 |
- debug("CHECK: %s" % (os.sep + file)) |
|
579 |
+ debug(u"CHECK: %s" % (os.sep + file)) |
|
580 | 580 |
excluded = False |
581 | 581 |
for r in cfg.exclude: |
582 | 582 |
## all paths start with '/' from the base dir |
... | ... |
@@ -585,8 +581,8 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote): |
585 | 585 |
## therefore this awkward excluded switch :-( |
586 | 586 |
excluded = True |
587 | 587 |
if cfg.debug_syncmatch: |
588 |
- debug("EXCL: %s" % (os.sep + file)) |
|
589 |
- debug("RULE: '%s'" % (cfg.debug_exclude[r])) |
|
588 |
+ debug(u"EXCL: %s" % (os.sep + file)) |
|
589 |
+ debug(u"RULE: '%s'" % (cfg.debug_exclude[r])) |
|
590 | 590 |
else: |
591 | 591 |
info("%s: excluded" % file) |
592 | 592 |
break |
... | ... |
@@ -595,11 +591,11 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote): |
595 | 595 |
del(src_list[file]) |
596 | 596 |
continue |
597 | 597 |
else: |
598 |
- debug("PASS: %s" % (os.sep + file)) |
|
598 |
+ debug(u"PASS: %s" % (os.sep + file)) |
|
599 | 599 |
if dst_list.has_key(file): |
600 | 600 |
## Was --skip-existing requested? |
601 | 601 |
if cfg.skip_existing: |
602 |
- debug("IGNR: %s (used --skip-existing)" % (file)) |
|
602 |
+ debug(u"IGNR: %s (used --skip-existing)" % (file)) |
|
603 | 603 |
exists_list[file] = src_list[file] |
604 | 604 |
del(src_list[file]) |
605 | 605 |
## Remove from destination-list, all that is left there will be deleted |
... | ... |
@@ -608,7 +604,7 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote): |
608 | 608 |
|
609 | 609 |
## Check size first |
610 | 610 |
if dst_list[file]['size'] == src_list[file]['size']: |
611 |
- #debug("%s same size: %s" % (file, dst_list[file]['size'])) |
|
611 |
+ #debug(u"%s same size: %s" % (file, dst_list[file]['size'])) |
|
612 | 612 |
## ... same size, check MD5 |
613 | 613 |
if src_is_local_and_dst_is_remote: |
614 | 614 |
src_md5 = Utils.hash_file_md5(src_list[file]['full_name']) |
... | ... |
@@ -617,22 +613,22 @@ def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote): |
617 | 617 |
src_md5 = src_list[file]['md5'] |
618 | 618 |
dst_md5 = Utils.hash_file_md5(dst_list[file]['full_name']) |
619 | 619 |
if src_md5 == dst_md5: |
620 |
- #debug("%s md5 matches: %s" % (file, dst_md5)) |
|
620 |
+ #debug(u"%s md5 matches: %s" % (file, dst_md5)) |
|
621 | 621 |
## Checksums are the same. |
622 | 622 |
## Remove from source-list, all that is left there will be transferred |
623 |
- debug("IGNR: %s (transfer not needed: MD5 OK, Size OK)" % file) |
|
623 |
+ debug(u"IGNR: %s (transfer not needed: MD5 OK, Size OK)" % file) |
|
624 | 624 |
exists_list[file] = src_list[file] |
625 | 625 |
del(src_list[file]) |
626 | 626 |
else: |
627 |
- debug("XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5)) |
|
627 |
+ debug(u"XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5)) |
|
628 | 628 |
else: |
629 |
- debug("XFER: %s (size mismatch: src=%s dst=%s)" % (file, src_list[file]['size'], dst_list[file]['size'])) |
|
629 |
+ debug(u"XFER: %s (size mismatch: src=%s dst=%s)" % (file, src_list[file]['size'], dst_list[file]['size'])) |
|
630 | 630 |
|
631 | 631 |
## Remove from destination-list, all that is left there will be deleted |
632 |
- #debug("%s removed from destination list" % file) |
|
632 |
+ #debug(u"%s removed from destination list" % file) |
|
633 | 633 |
del(dst_list[file]) |
634 | 634 |
if cfg.debug_syncmatch: |
635 |
- warning("Exiting because of --debug-syncmatch") |
|
635 |
+ warning(u"Exiting because of --debug-syncmatch") |
|
636 | 636 |
sys.exit(0) |
637 | 637 |
|
638 | 638 |
return src_list, dst_list, exists_list, exclude_list |
... | ... |
@@ -669,9 +665,9 @@ def cmd_sync_remote2local(src, dst): |
669 | 669 |
for file in loc_list: |
670 | 670 |
if cfg.delete_removed: |
671 | 671 |
os.unlink(dst_base + file) |
672 |
- output("deleted '%s'" % (dst_base + file)) |
|
672 |
+ output(u"deleted '%s'" % (dst_base + file)) |
|
673 | 673 |
else: |
674 |
- output("not-deleted '%s'" % file) |
|
674 |
+ output(u"not-deleted '%s'" % file) |
|
675 | 675 |
|
676 | 676 |
total_size = 0 |
677 | 677 |
total_count = len(rem_list) |
... | ... |
@@ -691,7 +687,7 @@ def cmd_sync_remote2local(src, dst): |
691 | 691 |
if not dir_cache.has_key(dst_dir): |
692 | 692 |
dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir) |
693 | 693 |
if dir_cache[dst_dir] == False: |
694 |
- warning("%s: destination directory not writable: %s" % (file, dst_dir)) |
|
694 |
+ warning(u"%s: destination directory not writable: %s" % (file, dst_dir)) |
|
695 | 695 |
continue |
696 | 696 |
try: |
697 | 697 |
open_flags = os.O_CREAT |
... | ... |
@@ -700,7 +696,7 @@ def cmd_sync_remote2local(src, dst): |
700 | 700 |
else: |
701 | 701 |
open_flags |= os.O_EXCL |
702 | 702 |
|
703 |
- debug("dst_file=%s" % dst_file) |
|
703 |
+ debug(u"dst_file=%s" % dst_file) |
|
704 | 704 |
# This will have failed should the file exist |
705 | 705 |
os.close(os.open(dst_file, open_flags)) |
706 | 706 |
# Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode. |
... | ... |
@@ -720,21 +716,21 @@ def cmd_sync_remote2local(src, dst): |
720 | 720 |
try: dst_stream.close() |
721 | 721 |
except: pass |
722 | 722 |
if e.errno == errno.EEXIST: |
723 |
- warning("%s exists - not overwriting" % (dst_file)) |
|
723 |
+ warning(u"%s exists - not overwriting" % (dst_file)) |
|
724 | 724 |
continue |
725 | 725 |
if e.errno in (errno.EPERM, errno.EACCES): |
726 |
- warning("%s not writable: %s" % (dst_file, e.strerror)) |
|
726 |
+ warning(u"%s not writable: %s" % (dst_file, e.strerror)) |
|
727 | 727 |
continue |
728 | 728 |
raise e |
729 | 729 |
except KeyboardInterrupt: |
730 | 730 |
try: dst_stream.close() |
731 | 731 |
except: pass |
732 |
- warning("Exiting after keyboard interrupt") |
|
732 |
+ warning(u"Exiting after keyboard interrupt") |
|
733 | 733 |
return |
734 | 734 |
except Exception, e: |
735 | 735 |
try: dst_stream.close() |
736 | 736 |
except: pass |
737 |
- error("%s: %s" % (file, e)) |
|
737 |
+ error(u"%s: %s" % (file, e)) |
|
738 | 738 |
continue |
739 | 739 |
# We have to keep repeating this call because |
740 | 740 |
# Python 2.4 doesn't support try/except/finally |
... | ... |
@@ -742,11 +738,11 @@ def cmd_sync_remote2local(src, dst): |
742 | 742 |
try: dst_stream.close() |
743 | 743 |
except: pass |
744 | 744 |
except S3DownloadError, e: |
745 |
- error("%s: download failed too many times. Skipping that file." % file) |
|
745 |
+ error(u"%s: download failed too many times. Skipping that file." % file) |
|
746 | 746 |
continue |
747 | 747 |
speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
748 | 748 |
if not Config().progress_meter: |
749 |
- output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
749 |
+ output(u"File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
750 | 750 |
(uri, dst_file, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1], |
751 | 751 |
seq_label)) |
752 | 752 |
total_size += response["size"] |
... | ... |
@@ -774,14 +770,14 @@ def cmd_sync_local2remote(src, dst): |
774 | 774 |
except KeyError: |
775 | 775 |
attr = "uid" |
776 | 776 |
val = st.st_uid |
777 |
- warning("%s: Owner username not known. Storing UID=%d instead." % (src, val)) |
|
777 |
+ warning(u"%s: Owner username not known. Storing UID=%d instead." % (src, val)) |
|
778 | 778 |
elif attr == 'gname': |
779 | 779 |
try: |
780 | 780 |
val = grp.getgrgid(st.st_gid).gr_name |
781 | 781 |
except KeyError: |
782 | 782 |
attr = "gid" |
783 | 783 |
val = st.st_gid |
784 |
- warning("%s: Owner groupname not known. Storing GID=%d instead." % (src, val)) |
|
784 |
+ warning(u"%s: Owner groupname not known. Storing GID=%d instead." % (src, val)) |
|
785 | 785 |
else: |
786 | 786 |
val = getattr(st, 'st_' + attr) |
787 | 787 |
attrs[attr] = val |
... | ... |
@@ -792,9 +788,9 @@ def cmd_sync_local2remote(src, dst): |
792 | 792 |
s3 = S3(cfg) |
793 | 793 |
|
794 | 794 |
if cfg.encrypt: |
795 |
- error("S3cmd 'sync' doesn't support GPG encryption, sorry.") |
|
796 |
- error("Either use unconditional 's3cmd put --recursive'") |
|
797 |
- error("or disable encryption with --no-encrypt parameter.") |
|
795 |
+ error(u"S3cmd 'sync' doesn't support GPG encryption, sorry.") |
|
796 |
+ error(u"Either use unconditional 's3cmd put --recursive'") |
|
797 |
+ error(u"or disable encryption with --no-encrypt parameter.") |
|
798 | 798 |
sys.exit(1) |
799 | 799 |
|
800 | 800 |
|
... | ... |
@@ -817,9 +813,9 @@ def cmd_sync_local2remote(src, dst): |
817 | 817 |
uri = S3Uri("s3://" + dst_uri.bucket()+"/"+rem_list[file]['object_key']) |
818 | 818 |
if cfg.delete_removed: |
819 | 819 |
response = s3.object_delete(uri) |
820 |
- output("deleted '%s'" % uri) |
|
820 |
+ output(u"deleted '%s'" % uri) |
|
821 | 821 |
else: |
822 |
- output("not-deleted '%s'" % uri) |
|
822 |
+ output(u"not-deleted '%s'" % uri) |
|
823 | 823 |
|
824 | 824 |
total_size = 0 |
825 | 825 |
total_count = len(loc_list) |
... | ... |
@@ -842,14 +838,14 @@ def cmd_sync_local2remote(src, dst): |
842 | 842 |
try: |
843 | 843 |
response = s3.object_put(src, uri, attr_header, extra_label = seq_label) |
844 | 844 |
except S3UploadError, e: |
845 |
- error("%s: upload failed too many times. Skipping that file." % src) |
|
845 |
+ error(u"%s: upload failed too many times. Skipping that file." % src) |
|
846 | 846 |
continue |
847 | 847 |
except InvalidFileError, e: |
848 |
- warning("File can not be uploaded: %s" % e) |
|
848 |
+ warning(u"File can not be uploaded: %s" % e) |
|
849 | 849 |
continue |
850 | 850 |
speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True) |
851 | 851 |
if not cfg.progress_meter: |
852 |
- output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
852 |
+ output(u"File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" % |
|
853 | 853 |
(src, uri, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1], |
854 | 854 |
seq_label)) |
855 | 855 |
total_size += response["size"] |
... | ... |
@@ -923,7 +919,7 @@ def gpg_decrypt(filename, gpgenc_header = "", in_place = True): |
923 | 923 |
command = resolve_list(cfg.gpg_decrypt.split(" "), args) |
924 | 924 |
code = gpg_command(command, cfg.gpg_passphrase) |
925 | 925 |
if code == 0 and in_place: |
926 |
- debug("Renaming %s to %s" % (tmp_filename, filename)) |
|
926 |
+ debug(u"Renaming %s to %s" % (tmp_filename, filename)) |
|
927 | 927 |
os.unlink(filename) |
928 | 928 |
os.rename(tmp_filename, filename) |
929 | 929 |
tmp_filename = filename |
... | ... |
@@ -952,8 +948,8 @@ def run_configure(config_file): |
952 | 952 |
|
953 | 953 |
try: |
954 | 954 |
while 1: |
955 |
- output("\nEnter new values or accept defaults in brackets with Enter.") |
|
956 |
- output("Refer to user manual for detailed description of all options.") |
|
955 |
+ output(u"\nEnter new values or accept defaults in brackets with Enter.") |
|
956 |
+ output(u"Refer to user manual for detailed description of all options.") |
|
957 | 957 |
for option in options: |
958 | 958 |
prompt = option[1] |
959 | 959 |
## Option-specific handling |
... | ... |
@@ -974,7 +970,7 @@ def run_configure(config_file): |
974 | 974 |
pass |
975 | 975 |
|
976 | 976 |
if len(option) >= 3: |
977 |
- output("\n%s" % option[2]) |
|
977 |
+ output(u"\n%s" % option[2]) |
|
978 | 978 |
|
979 | 979 |
val = raw_input(prompt + ": ") |
980 | 980 |
if val != "": |
... | ... |
@@ -982,19 +978,19 @@ def run_configure(config_file): |
982 | 982 |
# Turn 'Yes' into True, everything else into False |
983 | 983 |
val = val.lower().startswith('y') |
984 | 984 |
setattr(cfg, option[0], val) |
985 |
- output("\nNew settings:") |
|
985 |
+ output(u"\nNew settings:") |
|
986 | 986 |
for option in options: |
987 |
- output(" %s: %s" % (option[1], getattr(cfg, option[0]))) |
|
987 |
+ output(u" %s: %s" % (option[1], getattr(cfg, option[0]))) |
|
988 | 988 |
val = raw_input("\nTest access with supplied credentials? [Y/n] ") |
989 | 989 |
if val.lower().startswith("y") or val == "": |
990 | 990 |
try: |
991 |
- output("Please wait...") |
|
991 |
+ output(u"Please wait...") |
|
992 | 992 |
S3(Config()).bucket_list("", "") |
993 |
- output("Success. Your access key and secret key worked fine :-)") |
|
993 |
+ output(u"Success. Your access key and secret key worked fine :-)") |
|
994 | 994 |
|
995 |
- output("\nNow verifying that encryption works...") |
|
995 |
+ output(u"\nNow verifying that encryption works...") |
|
996 | 996 |
if not getattr(cfg, "gpg_command") or not getattr(cfg, "gpg_passphrase"): |
997 |
- output("Not configured. Never mind.") |
|
997 |
+ output(u"Not configured. Never mind.") |
|
998 | 998 |
else: |
999 | 999 |
if not getattr(cfg, "gpg_command"): |
1000 | 1000 |
raise Exception("Path to GPG program not set") |
... | ... |
@@ -1020,7 +1016,7 @@ def run_configure(config_file): |
1020 | 1020 |
raise Exception("Encryption verification error.") |
1021 | 1021 |
|
1022 | 1022 |
except Exception, e: |
1023 |
- error("Test failed: %s" % (e)) |
|
1023 |
+ error(u"Test failed: %s" % (e)) |
|
1024 | 1024 |
val = raw_input("\nRetry configuration? [Y/n] ") |
1025 | 1025 |
if val.lower().startswith("y") or val == "": |
1026 | 1026 |
continue |
... | ... |
@@ -1044,14 +1040,14 @@ def run_configure(config_file): |
1044 | 1044 |
os.umask(old_mask) |
1045 | 1045 |
cfg.dump_config(f) |
1046 | 1046 |
f.close() |
1047 |
- output("Configuration saved to '%s'" % config_file) |
|
1047 |
+ output(u"Configuration saved to '%s'" % config_file) |
|
1048 | 1048 |
|
1049 | 1049 |
except (EOFError, KeyboardInterrupt): |
1050 |
- output("\nConfiguration aborted. Changes were NOT saved.") |
|
1050 |
+ output(u"\nConfiguration aborted. Changes were NOT saved.") |
|
1051 | 1051 |
return |
1052 | 1052 |
|
1053 | 1053 |
except IOError, e: |
1054 |
- error("Writing config file failed: %s: %s" % (config_file, e.strerror)) |
|
1054 |
+ error(u"Writing config file failed: %s: %s" % (config_file, e.strerror)) |
|
1055 | 1055 |
sys.exit(1) |
1056 | 1056 |
|
1057 | 1057 |
def process_exclude_from_file(exf, exclude_array): |
... | ... |
@@ -1060,7 +1056,7 @@ def process_exclude_from_file(exf, exclude_array): |
1060 | 1060 |
ex = ex.strip() |
1061 | 1061 |
if re.match("^#", ex) or re.match("^\s*$", ex): |
1062 | 1062 |
continue |
1063 |
- debug("adding rule: %s" % ex) |
|
1063 |
+ debug(u"adding rule: %s" % ex) |
|
1064 | 1064 |
exclude_array.append(ex) |
1065 | 1065 |
|
1066 | 1066 |
commands = {} |
... | ... |
@@ -1181,12 +1177,12 @@ def main(): |
1181 | 1181 |
stream = _stderr) |
1182 | 1182 |
|
1183 | 1183 |
if options.show_version: |
1184 |
- output("s3cmd version %s" % PkgInfo.version) |
|
1184 |
+ output(u"s3cmd version %s" % PkgInfo.version) |
|
1185 | 1185 |
sys.exit(0) |
1186 | 1186 |
|
1187 | 1187 |
## Now finally parse the config file |
1188 | 1188 |
if not options.config: |
1189 |
- error("Can't find a config file. Please use --config option.") |
|
1189 |
+ error(u"Can't find a config file. Please use --config option.") |
|
1190 | 1190 |
sys.exit(1) |
1191 | 1191 |
|
1192 | 1192 |
try: |
... | ... |
@@ -1195,9 +1191,9 @@ def main(): |
1195 | 1195 |
if options.run_configure: |
1196 | 1196 |
cfg = Config() |
1197 | 1197 |
else: |
1198 |
- error("%s: %s" % (options.config, e.strerror)) |
|
1199 |
- error("Configuration file not available.") |
|
1200 |
- error("Consider using --configure parameter to create one.") |
|
1198 |
+ error(u"%s: %s" % (options.config, e.strerror)) |
|
1199 |
+ error(u"Configuration file not available.") |
|
1200 |
+ error(u"Consider using --configure parameter to create one.") |
|
1201 | 1201 |
sys.exit(1) |
1202 | 1202 |
|
1203 | 1203 |
## And again some logging level adjustments |
... | ... |
@@ -1216,17 +1212,17 @@ def main(): |
1216 | 1216 |
## Unsupported features on Win32 platform |
1217 | 1217 |
if os.name == "nt": |
1218 | 1218 |
if cfg.preserve_attrs: |
1219 |
- error("Option --preserve is not yet supported on MS Windows platform. Assuming --no-preserve.") |
|
1219 |
+ error(u"Option --preserve is not yet supported on MS Windows platform. Assuming --no-preserve.") |
|
1220 | 1220 |
cfg.preserve_attrs = False |
1221 | 1221 |
if cfg.progress_meter: |
1222 |
- error("Option --progress is not yet supported on MS Windows platform. Assuming --no-progress.") |
|
1222 |
+ error(u"Option --progress is not yet supported on MS Windows platform. Assuming --no-progress.") |
|
1223 | 1223 |
cfg.progress_meter = False |
1224 | 1224 |
|
1225 | 1225 |
## Update Config with other parameters |
1226 | 1226 |
for option in cfg.option_list(): |
1227 | 1227 |
try: |
1228 | 1228 |
if getattr(options, option) != None: |
1229 |
- debug("Updating %s -> %s" % (option, getattr(options, option))) |
|
1229 |
+ debug(u"Updating %s -> %s" % (option, getattr(options, option))) |
|
1230 | 1230 |
cfg.update_option(option, getattr(options, option)) |
1231 | 1231 |
except AttributeError: |
1232 | 1232 |
## Some Config() options are not settable from command line |
... | ... |
@@ -1238,12 +1234,12 @@ def main(): |
1238 | 1238 |
|
1239 | 1239 |
if options.exclude_from: |
1240 | 1240 |
for exf in options.exclude_from: |
1241 |
- debug("processing --exclude-from %s" % exf) |
|
1241 |
+ debug(u"processing --exclude-from %s" % exf) |
|
1242 | 1242 |
process_exclude_from_file(exf, options.exclude) |
1243 | 1243 |
|
1244 | 1244 |
if options.exclude: |
1245 | 1245 |
for ex in options.exclude: |
1246 |
- debug("processing rule: %s" % ex) |
|
1246 |
+ debug(u"processing rule: %s" % ex) |
|
1247 | 1247 |
exc = re.compile(glob.fnmatch.translate(ex)) |
1248 | 1248 |
cfg.exclude.append(exc) |
1249 | 1249 |
if options.debug_syncmatch: |
... | ... |
@@ -1255,20 +1251,20 @@ def main(): |
1255 | 1255 |
|
1256 | 1256 |
if options.rexclude_from: |
1257 | 1257 |
for exf in options.rexclude_from: |
1258 |
- debug("processing --rexclude-from %s" % exf) |
|
1258 |
+ debug(u"processing --rexclude-from %s" % exf) |
|
1259 | 1259 |
process_exclude_from_file(exf, options.rexclude) |
1260 | 1260 |
|
1261 | 1261 |
if options.rexclude: |
1262 | 1262 |
for ex in options.rexclude: |
1263 |
- debug("processing rule: %s" % ex) |
|
1263 |
+ debug(u"processing rule: %s" % ex) |
|
1264 | 1264 |
exc = re.compile(ex) |
1265 | 1265 |
cfg.exclude.append(exc) |
1266 | 1266 |
if options.debug_syncmatch: |
1267 | 1267 |
cfg.debug_exclude[exc] = ex |
1268 | 1268 |
|
1269 | 1269 |
if cfg.encrypt and cfg.gpg_passphrase == "": |
1270 |
- error("Encryption requested but no passphrase set in config file.") |
|
1271 |
- error("Please re-run 's3cmd --configure' and supply it.") |
|
1270 |
+ error(u"Encryption requested but no passphrase set in config file.") |
|
1271 |
+ error(u"Please re-run 's3cmd --configure' and supply it.") |
|
1272 | 1272 |
sys.exit(1) |
1273 | 1273 |
|
1274 | 1274 |
if options.dump_config: |
... | ... |
@@ -1280,7 +1276,7 @@ def main(): |
1280 | 1280 |
sys.exit(0) |
1281 | 1281 |
|
1282 | 1282 |
if len(args) < 1: |
1283 |
- error("Missing command. Please run with --help for more information.") |
|
1283 |
+ error(u"Missing command. Please run with --help for more information.") |
|
1284 | 1284 |
sys.exit(1) |
1285 | 1285 |
|
1286 | 1286 |
## Unicodise all remaining arguments: |
... | ... |
@@ -1288,28 +1284,28 @@ def main(): |
1288 | 1288 |
|
1289 | 1289 |
command = args.pop(0) |
1290 | 1290 |
try: |
1291 |
- debug("Command: %s" % commands[command]["cmd"]) |
|
1291 |
+ debug(u"Command: %s" % commands[command]["cmd"]) |
|
1292 | 1292 |
## We must do this lookup in extra step to |
1293 | 1293 |
## avoid catching all KeyError exceptions |
1294 | 1294 |
## from inner functions. |
1295 | 1295 |
cmd_func = commands[command]["func"] |
1296 | 1296 |
except KeyError, e: |
1297 |
- error("Invalid command: %s" % e) |
|
1297 |
+ error(u"Invalid command: %s" % e) |
|
1298 | 1298 |
sys.exit(1) |
1299 | 1299 |
|
1300 | 1300 |
if len(args) < commands[command]["argc"]: |
1301 |
- error("Not enough parameters for command '%s'" % command) |
|
1301 |
+ error(u"Not enough parameters for command '%s'" % command) |
|
1302 | 1302 |
sys.exit(1) |
1303 | 1303 |
|
1304 | 1304 |
try: |
1305 | 1305 |
cmd_func(args) |
1306 | 1306 |
except S3Error, e: |
1307 |
- error("S3 error: %s" % e) |
|
1307 |
+ error(u"S3 error: %s" % e) |
|
1308 | 1308 |
if e.info.has_key("Message"): |
1309 | 1309 |
error(e.info['Message']) |
1310 | 1310 |
sys.exit(1) |
1311 | 1311 |
except ParameterError, e: |
1312 |
- error("Parameter problem: %s" % e) |
|
1312 |
+ error(u"Parameter problem: %s" % e) |
|
1313 | 1313 |
sys.exit(1) |
1314 | 1314 |
|
1315 | 1315 |
if __name__ == '__main__': |