s3cmd
3cc025ae
 #!/usr/bin/env python
 
 ## Amazon S3 manager
 ## Author: Michal Ludvig <michal@logix.cz>
 ##         http://www.logix.cz/michal
 ## License: GPL Version 2
 
 import sys
5c805fd7
 
 if float("%d.%d" %(sys.version_info[0], sys.version_info[1])) < 2.4:
 	sys.stderr.write("ERROR: Python 2.4 or higher required, sorry.\n")
 	sys.exit(1)
 
3cc025ae
 import logging
 import time
8a4a98b1
 import os
 import re
ac9940ec
 import errno
2d7d5543
 import glob
4a52baa8
 import traceback
4da602a5
 import codecs
315e527b
 import locale
e3afa96a
 import subprocess
3c07424d
 import htmlentitydefs
3cc025ae
 
9b7618ae
 from copy import copy
f4555c39
 from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter
3cc025ae
 from logging import debug, info, warning, error
49731b40
 from distutils.spawn import find_executable
3cc025ae
 
 def output(message):
82d9eafa
 	sys.stdout.write(message + "\n")
3cc025ae
 
7c07fd66
 def check_args_type(args, type, verbose_type):
 	for arg in args:
 		if S3Uri(arg).type != type:
 			raise ParameterError("Expecting %s instead of '%s'" % (verbose_type, arg))
 
b96ddebe
 def cmd_du(args):
 	s3 = S3(Config())
 	if len(args) > 0:
 		uri = S3Uri(args[0])
 		if uri.type == "s3" and uri.has_bucket():
 			subcmd_bucket_usage(s3, uri)
 			return
 	subcmd_bucket_usage_all(s3)
 
 def subcmd_bucket_usage_all(s3):
 	response = s3.list_all_buckets()
 
 	buckets_size = 0
 	for bucket in response["list"]:
 		size = subcmd_bucket_usage(s3, S3Uri("s3://" + bucket["Name"]))
 		if size != None:
 			buckets_size += size
 	total_size, size_coeff = formatSize(buckets_size, Config().human_readable_sizes)
475b5bc2
 	total_size_str = str(total_size) + size_coeff 
315e527b
 	output(u"".rjust(8, "-"))
 	output(u"%s Total" % (total_size_str.ljust(8)))
b96ddebe
 
 def subcmd_bucket_usage(s3, uri):
 	bucket = uri.bucket()
 	object = uri.object()
 
 	if object.endswith('*'):
 		object = object[:-1]
 	try:
416741b2
 		response = s3.bucket_list(bucket, prefix = object, recursive = True)
b96ddebe
 	except S3Error, e:
 		if S3.codes.has_key(e.Code):
 			error(S3.codes[e.Code] % bucket)
 			return
 		else:
 			raise
 	bucket_size = 0
 	for object in response["list"]:
 		size, size_coeff = formatSize(object["Size"], False)
 		bucket_size += size
 	total_size, size_coeff = formatSize(bucket_size, Config().human_readable_sizes)
475b5bc2
 	total_size_str = str(total_size) + size_coeff 
315e527b
 	output(u"%s %s" % (total_size_str.ljust(8), uri))
475b5bc2
 	return bucket_size
b96ddebe
 
9081133d
 def cmd_ls(args):
9b7618ae
 	s3 = S3(Config())
9081133d
 	if len(args) > 0:
b819c70c
 		uri = S3Uri(args[0])
 		if uri.type == "s3" and uri.has_bucket():
 			subcmd_bucket_list(s3, uri)
 			return
 	subcmd_buckets_list_all(s3)
3cc025ae
 
 def cmd_buckets_list_all_all(args):
9b7618ae
 	s3 = S3(Config())
b819c70c
 
3cc025ae
 	response = s3.list_all_buckets()
 
 	for bucket in response["list"]:
b819c70c
 		subcmd_bucket_list(s3, S3Uri("s3://" + bucket["Name"]))
315e527b
 		output(u"")
3cc025ae
 
 
b819c70c
 def subcmd_buckets_list_all(s3):
 	response = s3.list_all_buckets()
 	for bucket in response["list"]:
315e527b
 		output(u"%s  s3://%s" % (
b819c70c
 			formatDateTime(bucket["CreationDate"]),
 			bucket["Name"],
 			))
 
def subcmd_bucket_list(s3, uri):
	"""List one bucket's contents (optionally restricted to a prefix).

	Prints one line per common prefix (shown as "DIR") and one per
	object; the output format depends on the global cfg.list_md5 flag.
	"""
	bucket = uri.bucket()
	prefix = uri.object()

	debug(u"Bucket 's3://%s':" % bucket)
	## A trailing '*' is a user-friendly wildcard; listing is
	## prefix-based anyway, so just strip it.
	if prefix.endswith('*'):
		prefix = prefix[:-1]
	try:
		response = s3.bucket_list(bucket, prefix = prefix)
	except S3Error, e:
		if S3.codes.has_key(e.info["Code"]):
			error(S3.codes[e.info["Code"]] % bucket)
			return
		else:
			raise

	if cfg.list_md5:
		format_string = u"%(timestamp)16s %(size)9s%(coeff)1s  %(md5)32s  %(uri)s"
	else:
		format_string = u"%(timestamp)16s %(size)9s%(coeff)1s  %(uri)s"

	## Common prefixes first -- these are the "subdirectories".
	for prefix in response['common_prefixes']:
		output(format_string % {
			"timestamp": "",
			"size": "DIR",
			"coeff": "",
			"md5": "",
			"uri": uri.compose_uri(bucket, prefix["Prefix"])})

	for object in response["list"]:
		size, size_coeff = formatSize(object["Size"], Config().human_readable_sizes)
		output(format_string % {
			"timestamp": formatDateTime(object["LastModified"]),
			"size" : str(size), 
			"coeff": size_coeff,
			"md5" : object['ETag'].strip('"'),
			"uri": uri.compose_uri(bucket, object["Key"]),
			})
3cc025ae
 
 def cmd_bucket_create(args):
7406fc6c
 	s3 = S3(Config())
 	for arg in args:
 		uri = S3Uri(arg)
 		if not uri.type == "s3" or not uri.has_bucket() or uri.has_object():
 			raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % arg)
 		try:
 			response = s3.bucket_create(uri.bucket(), cfg.bucket_location)
315e527b
 			output(u"Bucket '%s' created" % uri.uri())
7406fc6c
 		except S3Error, e:
 			if S3.codes.has_key(e.info["Code"]):
 				error(S3.codes[e.info["Code"]] % uri.bucket())
 				return
 			else:
 				raise
3cc025ae
 
def cmd_bucket_delete(args):
	"""Delete one or more buckets given as bare s3://bucket URIs.

	With --force or --recursive a non-empty bucket is first emptied via
	subcmd_object_del_uri() and then the deletion is retried.
	"""
	def _bucket_delete_one(uri):
		# Plain delete first; on BucketNotEmpty (and with --force or
		# --recursive) purge the objects and recurse to retry.
		try:
			response = s3.bucket_delete(uri.bucket())
		except S3Error, e:
			if e.info['Code'] == 'BucketNotEmpty' and (cfg.force or cfg.recursive):
				warning(u"Bucket is not empty. Removing all the objects from it first. This may take some time...")
				subcmd_object_del_uri(uri.uri(), recursive = True)
				return _bucket_delete_one(uri)
			elif S3.codes.has_key(e.info["Code"]):
				error(S3.codes[e.info["Code"]] % uri.bucket())
				return
			else:
				raise
		
	s3 = S3(Config())
	for arg in args:
		uri = S3Uri(arg)
		if not uri.type == "s3" or not uri.has_bucket() or uri.has_object():
			raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % arg)
		_bucket_delete_one(uri)
		output(u"Bucket '%s' removed" % uri.uri())
f4555c39
 
227fabf8
 def fetch_local_list(args, recursive = None):
a7ef3595
 	local_uris = []
dc1c96cf
 	local_list = SortedDict(ignore_case = False)
59864e57
 	single_file = False
a7ef3595
 
227fabf8
 	if type(args) not in (list, tuple):
 		args = [args]
 
 	if recursive == None:
 		recursive = cfg.recursive
 
a7ef3595
 	for arg in args:
 		uri = S3Uri(arg)
 		if not uri.type == 'file':
 			raise ParameterError("Expecting filename or directory instead of: %s" % arg)
227fabf8
 		if uri.isdir() and not recursive:
a7ef3595
 			raise ParameterError("Use --recursive to upload a directory: %s" % arg)
 		local_uris.append(uri)
 
 	for uri in local_uris:
59864e57
 		list_for_uri, single_file = _get_filelist_local(uri)
 		local_list.update(list_for_uri)
a7ef3595
 
59864e57
 	## Single file is True if and only if the user 
 	## specified one local URI and that URI represents
 	## a FILE. Ie it is False if the URI was of a DIR
 	## and that dir contained only one FILE. That's not
 	## a case of single_file==True.
 	if len(local_list) > 1:
 		single_file = False
 
 	return local_list, single_file
a7ef3595
 
227fabf8
 def fetch_remote_list(args, require_attribs = False, recursive = None):
90137a39
 	remote_uris = []
dc1c96cf
 	remote_list = SortedDict(ignore_case = False)
227fabf8
 
 	if type(args) not in (list, tuple):
 		args = [args]
 
 	if recursive == None:
 		recursive = cfg.recursive
90137a39
 
 	for arg in args:
 		uri = S3Uri(arg)
 		if not uri.type == 's3':
 			raise ParameterError("Expecting S3 URI instead of '%s'" % arg)
 		remote_uris.append(uri)
 
227fabf8
 	if recursive:
90137a39
 		for uri in remote_uris:
 			objectlist = _get_filelist_remote(uri)
227fabf8
 			for key in objectlist:
 				remote_list[key] = objectlist[key]
90137a39
 	else:
 		for uri in remote_uris:
 			uri_str = str(uri)
 			## Wildcards used in remote URI?
 			## If yes we'll need a bucket listing...
 			if uri_str.find('*') > -1 or uri_str.find('?') > -1:
 				first_wildcard = uri_str.find('*')
 				first_questionmark = uri_str.find('?')
 				if first_questionmark > -1 and first_questionmark < first_wildcard:
 					first_wildcard = first_questionmark
 				prefix = uri_str[:first_wildcard]
 				rest = uri_str[first_wildcard+1:]
 				## Only request recursive listing if the 'rest' of the URI,
 				## i.e. the part after first wildcard, contains '/'
 				need_recursion = rest.find('/') > -1
 				objectlist = _get_filelist_remote(S3Uri(prefix), recursive = need_recursion)
 				for key in objectlist:
 					## Check whether the 'key' matches the requested wildcards
 					if glob.fnmatch.fnmatch(objectlist[key]['object_uri_str'], uri_str):
227fabf8
 						remote_list[key] = objectlist[key]
90137a39
 			else:
 				## No wildcards - simply append the given URI to the list
 				key = os.path.basename(uri.object())
 				if not key:
 					raise ParameterError(u"Expecting S3 URI with a filename or --recursive: %s" % uri.uri())
227fabf8
 				remote_item = {
 					'base_uri': uri,
 					'object_uri_str': unicode(uri),
 					'object_key': uri.object()
90137a39
 				}
227fabf8
 				if require_attribs:
 					response = S3(cfg).object_info(uri)
 					remote_item.update({
 					'size': int(response['headers']['content-length']),
 					'md5': response['headers']['etag'].strip('"\''),
 					'timestamp' : Utils.dateRFC822toUnix(response['headers']['date'])
 					})
 				remote_list[key] = remote_item
42320d03
 	return remote_list
90137a39
 
3cc025ae
def cmd_object_put(args):
	"""Upload local files to S3 (the 'put' command).

	The last argument is the S3 destination; everything before it is a
	local file or directory. Honours --exclude/--include, --dry-run,
	--encrypt, --acl-public and the progress-meter setting.
	"""
	cfg = Config()
	s3 = S3(cfg)

	if len(args) == 0:
		raise ParameterError("Nothing to upload. Expecting a local file or directory and a S3 URI destination.")

	## Normalize URI to convert s3://bkt to s3://bkt/ (trailing slash)
	destination_base_uri = S3Uri(args.pop())
	if destination_base_uri.type != 's3':
		raise ParameterError("Destination must be S3Uri. Got: %s" % destination_base_uri)
	destination_base = str(destination_base_uri)

	if len(args) == 0:
		raise ParameterError("Nothing to upload. Expecting a local file or directory.")

	local_list, single_file_local = fetch_local_list(args)

	local_list, exclude_list = _filelist_filter_exclude_include(local_list)

	local_count = len(local_list)

	info(u"Summary: %d local files to upload" % local_count)

	if local_count > 0:
		## A destination without a trailing '/' is a concrete object
		## name and therefore only valid for a single source file.
		if not destination_base.endswith("/"):
			if not single_file_local:
				raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
			local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
		else:
			for key in local_list:
				local_list[key]['remote_uri'] = unicodise(destination_base + key)

	if cfg.dry_run:
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		for key in local_list:
			output(u"upload: %s -> %s" % (local_list[key]['full_name_unicode'], local_list[key]['remote_uri']))

		warning(u"Exitting now because of --dry-run")
		return

	seq = 0
	for key in local_list:
		seq += 1

		uri_final = S3Uri(local_list[key]['remote_uri'])

		## Copy so per-file additions (e.g. the gpgenc marker) don't
		## leak into the shared config.
		extra_headers = copy(cfg.extra_headers)
		full_name_orig = local_list[key]['full_name']
		full_name = full_name_orig
		seq_label = "[%d of %d]" % (seq, local_count)
		if Config().encrypt:
			## gpg_encrypt writes a temporary file; upload that instead
			## of the original and record the marker header.
			exitcode, full_name, extra_headers["x-amz-meta-s3tools-gpgenc"] = gpg_encrypt(full_name_orig)
		try:
			response = s3.object_put(full_name, uri_final, extra_headers, extra_label = seq_label)
		except S3UploadError, e:
			error(u"Upload of '%s' failed too many times. Skipping that file." % full_name_orig)
			continue
		except InvalidFileError, e:
			warning(u"File can not be uploaded: %s" % e)
			continue
		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
		if not Config().progress_meter:
			output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
				(unicodise(full_name_orig), uri_final, response["size"], response["elapsed"], 
				speed_fmt[0], speed_fmt[1], seq_label))
		if Config().acl_public:
			output(u"Public URL of the object is: %s" %
				(uri_final.public_url()))
		if Config().encrypt and full_name != full_name_orig:
			## Clean up the temporary encrypted copy.
			debug(u"Removing temporary encrypted file: %s" % unicodise(full_name))
			os.remove(full_name)
3cc025ae
 
def cmd_object_get(args):
	"""Download objects from S3 (the 'get' command).

	The last argument may be a local file/directory destination or '-'
	for stdout; otherwise the current directory is used. Supports
	--continue, --force, --skip-existing, --exclude/--include,
	--dry-run and transparent GPG decryption.
	"""
	cfg = Config()
	s3 = S3(cfg)

	## Check arguments:
	## if not --recursive:
	##   - first N arguments must be S3Uri
	##   - if the last one is S3 make current dir the destination_base
	##   - if the last one is a directory:
	##       - take all 'basenames' of the remote objects and
	##         make the destination name be 'destination_base'+'basename'
	##   - if the last one is a file or not existing:
	##       - if the number of sources (N, above) == 1 treat it
	##         as a filename and save the object there.
	##       - if there's more sources -> Error
	## if --recursive:
	##   - first N arguments must be S3Uri
	##       - for each Uri get a list of remote objects with that Uri as a prefix
	##       - apply exclude/include rules
	##       - each list item will have MD5sum, Timestamp and pointer to S3Uri
	##         used as a prefix.
	##   - the last arg may be a local directory - destination_base
	##   - if the last one is S3 make current dir the destination_base
	##   - if the last one doesn't exist check remote list:
	##       - if there is only one item and its_prefix==its_name 
	##         download that item to the name given in last arg.
	##       - if there are more remote items use the last arg as a destination_base
	##         and try to create the directory (incl. all parents).
	##
	## In both cases we end up with a list mapping remote object names (keys) to local file names.

	## Each item will be a dict with the following attributes
	# {'remote_uri', 'local_filename'}
	download_list = []

	if len(args) == 0:
		raise ParameterError("Nothing to download. Expecting S3 URI.")

	if S3Uri(args[-1]).type == 'file':
		destination_base = args.pop()
	else:
		destination_base = "."

	if len(args) == 0:
		raise ParameterError("Nothing to download. Expecting S3 URI.")

	remote_list = fetch_remote_list(args, require_attribs = False)
	remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)

	remote_count = len(remote_list)

	info(u"Summary: %d remote files to download" % remote_count)

	if remote_count > 0:
		if not os.path.isdir(destination_base) or destination_base == '-':
			## We were either given a file name (existing or not) or want STDOUT
			if remote_count > 1:
				raise ParameterError("Destination must be a directory when downloading multiple sources.")
			remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
		elif os.path.isdir(destination_base):
			if destination_base[-1] != os.path.sep:
				destination_base += os.path.sep
			for key in remote_list:
				remote_list[key]['local_filename'] = destination_base + key
		else:
			raise InternalError("WTF? Is it a dir or not? -- %s" % destination_base)

	if cfg.dry_run:
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		for key in remote_list:
			output(u"download: %s -> %s" % (remote_list[key]['object_uri_str'], remote_list[key]['local_filename']))

		warning(u"Exitting now because of --dry-run")
		return

	seq = 0
	for key in remote_list:
		seq += 1
		item = remote_list[key]
		uri = S3Uri(item['object_uri_str'])
		## Encode / Decode destination with "replace" to make sure it's compatible with current encoding
		destination = unicodise_safe(item['local_filename'])
		seq_label = "[%d of %d]" % (seq, remote_count)

		## Byte offset to resume from; stays 0 unless --continue is set.
		start_position = 0

		if destination == "-":
			## stdout
			dst_stream = sys.__stdout__
		else:
			## File
			try:
				file_exists = os.path.exists(destination)
				try:
					## Append mode so an existing file can be resumed
					## (--continue) without truncation.
					dst_stream = open(destination, "ab")
				except IOError, e:
					if e.errno == errno.ENOENT:
						## Parent directory missing - create it and retry.
						basename = destination[:destination.rindex(os.path.sep)]
						info(u"Creating directory: %s" % basename)
						os.makedirs(basename)
						dst_stream = open(destination, "ab")
					else:
						raise
				if file_exists:
					if Config().get_continue:
						start_position = dst_stream.tell()
					elif Config().force:
						start_position = 0L
						dst_stream.seek(0L)
						dst_stream.truncate()
					elif Config().skip_existing:
						info(u"Skipping over existing file: %s" % (destination))
						continue
					else:
						dst_stream.close()
						raise ParameterError(u"File %s already exists. Use either of --force / --continue / --skip-existing or give it a new name." % destination)
			except IOError, e:
				error(u"Skipping %s: %s" % (destination, e.strerror))
				continue
		response = s3.object_get(uri, dst_stream, start_position = start_position, extra_label = seq_label)
		if response["headers"].has_key("x-amz-meta-s3tools-gpgenc"):
			## Object was uploaded encrypted - decrypt in place and
			## report the decrypted size.
			gpg_decrypt(destination, response["headers"]["x-amz-meta-s3tools-gpgenc"])
			response["size"] = os.stat(destination)[6]
		if not Config().progress_meter and destination != "-":
			speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
			output(u"File %s saved as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s)" %
				(uri, destination, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1]))
3cc025ae
 
 def cmd_object_del(args):
1ae39a8d
 	for uri_str in args:
 		uri = S3Uri(uri_str)
a120a4eb
 		if uri.type != "s3":
1ae39a8d
 			raise ParameterError("Expecting S3 URI instead of '%s'" % uri_str)
a120a4eb
 		if not uri.has_object():
 			if Config().recursive and not Config().force:
1ae39a8d
 				raise ParameterError("Please use --force to delete ALL contents of %s" % uri_str)
a120a4eb
 			elif not Config().recursive:
1ae39a8d
 				raise ParameterError("File name required, not only the bucket name. Alternatively use --recursive")
 		subcmd_object_del_uri(uri_str)
 
 def subcmd_object_del_uri(uri_str, recursive = None):
 	s3 = S3(cfg)
7406fc6c
 
 	if recursive is None:
 		recursive = cfg.recursive
1ae39a8d
 
 	remote_list = fetch_remote_list(uri_str, require_attribs = False, recursive = recursive)
 	remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)
 
 	remote_count = len(remote_list)
 
 	info(u"Summary: %d remote files to delete" % remote_count)
 
 	if cfg.dry_run:
 		for key in exclude_list:
 			output(u"exclude: %s" % unicodise(key))
 		for key in remote_list:
 			output(u"delete: %s" % remote_list[key]['object_uri_str'])
 
 		warning(u"Exitting now because of --dry-run")
 		return
 
 	for key in remote_list:
 		item = remote_list[key]
 		response = s3.object_delete(S3Uri(item['object_uri_str']))
 		output(u"File %s deleted" % item['object_uri_str'])
3cc025ae
 
e0b946c0
def subcmd_cp_mv(args, process_fce, action_str, message):
	"""Shared engine for the 'cp' and 'mv' commands.

	process_fce -- bound S3 method performing the per-object action
	               (object_copy or object_move)
	action_str  -- verb for log output ("copy" / "move")
	message     -- per-object template with %(src)s / %(dst)s keys
	"""
	if len(args) < 2:
		raise ParameterError("Expecting two or more S3 URIs for " + action_str)
	dst_base_uri = S3Uri(args.pop())
	if dst_base_uri.type != "s3":
		raise ParameterError("Destination must be S3 URI. To download a file use 'get' or 'sync'.")
	destination_base = dst_base_uri.uri()

	remote_list = fetch_remote_list(args, require_attribs = False)
	remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)

	remote_count = len(remote_list)

	info(u"Summary: %d remote files to %s" % (remote_count, action_str))

	if cfg.recursive:
		## Recursive mode: destination is treated as a directory prefix.
		if not destination_base.endswith("/"):
			destination_base += "/"
		for key in remote_list:
			remote_list[key]['dest_name'] = destination_base + key
	else:
		## Non-recursive: exactly one source; a trailing '/' on the
		## destination means "keep the source's basename".
		key = remote_list.keys()[0]
		if destination_base.endswith("/"):
			remote_list[key]['dest_name'] = destination_base + key
		else:
			remote_list[key]['dest_name'] = destination_base

	if cfg.dry_run:
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		for key in remote_list:
			output(u"%s: %s -> %s" % (action_str, remote_list[key]['object_uri_str'], remote_list[key]['dest_name']))

		warning(u"Exitting now because of --dry-run")
		return

	seq = 0
	for key in remote_list:
		seq += 1
		seq_label = "[%d of %d]" % (seq, remote_count)

		item = remote_list[key]
		src_uri = S3Uri(item['object_uri_str'])
		dst_uri = S3Uri(item['dest_name'])

		## Copy so per-object header changes don't leak into the config.
		extra_headers = copy(cfg.extra_headers)
		response = process_fce(src_uri, dst_uri, extra_headers) 
		output(message % { "src" : src_uri, "dst" : dst_uri })
		if Config().acl_public:
			info(u"Public URL is: %s" % dst_uri.public_url())
7d0ac8ee
 
7d61be89
 def cmd_cp(args):
 	s3 = S3(Config())
e0b946c0
 	subcmd_cp_mv(args, s3.object_copy, "copy", "File %(src)s copied to %(dst)s")
7d61be89
 
 def cmd_mv(args):
 	s3 = S3(Config())
e0b946c0
 	subcmd_cp_mv(args, s3.object_move, "move", "File %(src)s moved to %(dst)s")
7d61be89
 
e5c6f6c5
 def cmd_info(args):
 	s3 = S3(Config())
 
 	while (len(args)):
 		uri_arg = args.pop(0)
 		uri = S3Uri(uri_arg)
 		if uri.type != "s3" or not uri.has_bucket():
 			raise ParameterError("Expecting S3 URI instead of '%s'" % uri_arg)
 
 		try:
 			if uri.has_object():
 				info = s3.object_info(uri)
315e527b
 				output(u"%s (object):" % uri.uri())
 				output(u"   File size: %s" % info['headers']['content-length'])
 				output(u"   Last mod:  %s" % info['headers']['last-modified'])
 				output(u"   MIME type: %s" % info['headers']['content-type'])
 				output(u"   MD5 sum:   %s" % info['headers']['etag'].strip('"'))
e5c6f6c5
 			else:
 				info = s3.bucket_info(uri)
315e527b
 				output(u"%s (bucket):" % uri.uri())
 				output(u"   Location:  %s" % info['bucket-location'])
e5c6f6c5
 			acl = s3.get_acl(uri)
68afbd78
 			acl_grant_list = acl.getGrantList()
 			for grant in acl_grant_list:
 				output(u"   ACL:       %s: %s" % (grant['grantee'], grant['permission']))
90137a39
 			if acl.isAnonRead():
 				output(u"   URL:       %s" % uri.public_url())
e5c6f6c5
 		except S3Error, e:
 			if S3.codes.has_key(e.info["Code"]):
 				error(S3.codes[e.info["Code"]] % uri.bucket())
 				return
 			else:
 				raise
 
01fe3a25
 def _get_filelist_local(local_uri):
a1e3fd9c
 	info(u"Compiling list of local files...")
a7ef3595
 	if local_uri.isdir():
d7251ccb
 		local_base = deunicodise(local_uri.basename())
a7ef3595
 		local_path = deunicodise(local_uri.path())
01fe3a25
 		filelist = os.walk(local_path)
59864e57
 		single_file = False
a368faf1
 	else:
a7ef3595
 		local_base = ""
 		local_path = deunicodise(local_uri.dirname())
 		filelist = [( local_path, [], [deunicodise(local_uri.basename())] )]
59864e57
 		single_file = True
dc1c96cf
 	loc_list = SortedDict(ignore_case = False)
a368faf1
 	for root, dirs, files in filelist:
a7ef3595
 		rel_root = root.replace(local_path, local_base, 1)
0d91ff3f
 		for f in files:
 			full_name = os.path.join(root, f)
 			if not os.path.isfile(full_name):
 				continue
d9777ac6
 			if os.path.islink(full_name):
7b5df262
                                 if not cfg.follow_symlinks:
                                         continue
d7251ccb
 			relative_file = unicodise(os.path.join(rel_root, f))
d852cbb2
 			if os.path.sep != "/":
 				# Convert non-unix dir separators to '/'
 				relative_file = "/".join(relative_file.split(os.path.sep))
3c07424d
 			if cfg.urlencoding_mode == "normal":
b40dd815
 				relative_file = replace_nonprintables(relative_file)
d7251ccb
 			if relative_file.startswith('./'):
 				relative_file = relative_file[2:]
0d91ff3f
 			sr = os.stat_result(os.lstat(full_name))
a7ef3595
 			loc_list[relative_file] = {
 				'full_name_unicode' : unicodise(full_name),
0d91ff3f
 				'full_name' : full_name,
 				'size' : sr.st_size, 
 				'mtime' : sr.st_mtime,
 				## TODO: Possibly more to save here...
 			}
59864e57
 	return loc_list, single_file
01fe3a25
 
6f933653
def _get_filelist_remote(remote_uri, recursive = True):
	"""List remote objects under *remote_uri* into a SortedDict keyed by
	a relative path (see the prefix rules spelled out below).
	"""
	## If remote_uri ends with '/' then all remote files will have 
	## the remote_uri prefix removed in the relative path.
	## If, on the other hand, the remote_uri ends with something else
	## (probably alphanumeric symbol) we'll use the last path part 
	## in the relative path.
	##
	## Complicated, eh? See an example:
	## _get_filelist_remote("s3://bckt/abc/def") may yield:
	## { 'def/file1.jpg' : {}, 'def/xyz/blah.txt' : {} }
	## _get_filelist_remote("s3://bckt/abc/def/") will yield:
	## { 'file1.jpg' : {}, 'xyz/blah.txt' : {} }
	## Furthermore a prefix-magic can restrict the return list:
	## _get_filelist_remote("s3://bckt/abc/def/x") yields:
	## { 'xyz/blah.txt' : {} }

	info(u"Retrieving list of remote files for %s ..." % remote_uri)

	s3 = S3(Config())
	response = s3.bucket_list(remote_uri.bucket(), prefix = remote_uri.object(), recursive = recursive)

	## Trim the prefix back to the last '/' so relative keys are built
	## against the containing "directory" (see examples above).
	rem_base_original = rem_base = remote_uri.object()
	remote_uri_original = remote_uri
	if rem_base != '' and rem_base[-1] != '/':
		rem_base = rem_base[:rem_base.rfind('/')+1]
		remote_uri = S3Uri("s3://%s/%s" % (remote_uri.bucket(), rem_base))
	rem_base_len = len(rem_base)
	rem_list = SortedDict(ignore_case = False)
	break_now = False
	for object in response['list']:
		if object['Key'] == rem_base_original and object['Key'][-1] != os.path.sep:
			## We asked for one file and we got that file :-)
			key = os.path.basename(object['Key'])
			object_uri_str = remote_uri_original.uri()
			break_now = True
			rem_list = {}	## Remove whatever has already been put to rem_list
		else:
			key = object['Key'][rem_base_len:]		## Beware - this may be '' if object['Key']==rem_base !!
			object_uri_str = remote_uri.uri() + key
		rem_list[key] = { 
			'size' : int(object['Size']),
			'timestamp' : dateS3toUnix(object['LastModified']), ## Sadly it's upload time, not our lastmod time :-(
			'md5' : object['ETag'][1:-1],
			'object_key' : object['Key'],
			'object_uri_str' : object_uri_str,
			'base_uri' : remote_uri,
		}
		if break_now:
			break
	return rem_list
7c0863d5
 
227fabf8
def _filelist_filter_exclude_include(src_list):
	"""Partition *src_list* by the --exclude/--include rules.

	Entries matching an exclude pattern (and no later include pattern)
	are moved out of src_list. Returns (src_list, exclude_list).
	"""
	info(u"Applying --exclude/--include")
	cfg = Config()
	exclude_list = SortedDict(ignore_case = False)
	for file in src_list.keys():
		debug(u"CHECK: %s" % file)
		excluded = False
		for r in cfg.exclude:
			if r.search(file):
				excluded = True
				debug(u"EXCL-MATCH: '%s'" % (cfg.debug_exclude[r]))
				break
		if excluded:
			## No need to check for --include if not excluded
			for r in cfg.include:
				if r.search(file):
					excluded = False
					debug(u"INCL-MATCH: '%s'" % (cfg.debug_include[r]))
					break
		if excluded:
			## Still excluded - ok, action it
			debug(u"EXCLUDE: %s" % file)
			exclude_list[file] = src_list[file]
			del(src_list[file])
			continue
		else:
			debug(u"PASS: %s" % (file))
	return src_list, exclude_list
 
def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
	"""Compare source and destination file lists for 'sync'.

	Mutates both lists: entries present on both sides with matching
	attributes (size and/or md5, per cfg.sync_checks) are moved from
	src_list into the returned exists_list, and removed from dst_list.
	What remains in src_list needs transfer; what remains in dst_list
	is a candidate for deletion. Returns (src_list, dst_list,
	exists_list).
	"""
	info(u"Verifying attributes...")
	cfg = Config()
	exists_list = SortedDict(ignore_case = False)

	debug("Comparing filelists (src_is_local_and_dst_is_remote=%s)" % src_is_local_and_dst_is_remote)
	debug("src_list.keys: %s" % src_list.keys())
	debug("dst_list.keys: %s" % dst_list.keys())

	for file in src_list.keys():
		debug(u"CHECK: %s" % file)
		if dst_list.has_key(file):
			## Was --skip-existing requested?
			if cfg.skip_existing:
				debug(u"IGNR: %s (used --skip-existing)" % (file))
				exists_list[file] = src_list[file]
				del(src_list[file])
				## Remove from destination-list, all that is left there will be deleted
				del(dst_list[file])
				continue

			attribs_match = True
			## Check size first
			if 'size' in cfg.sync_checks and dst_list[file]['size'] != src_list[file]['size']:
				debug(u"XFER: %s (size mismatch: src=%s dst=%s)" % (file, src_list[file]['size'], dst_list[file]['size']))
				attribs_match = False
			
			if attribs_match and 'md5' in cfg.sync_checks:
				## ... same size, check MD5
				## The local side is hashed on demand; the remote side's
				## md5 comes from the bucket listing (ETag).
				if src_is_local_and_dst_is_remote:
					src_md5 = Utils.hash_file_md5(src_list[file]['full_name'])
					dst_md5 = dst_list[file]['md5']
				else:
					src_md5 = src_list[file]['md5']
					dst_md5 = Utils.hash_file_md5(dst_list[file]['full_name'])
				if src_md5 != dst_md5:
					## Checksums are different.
					attribs_match = False
					debug(u"XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5))

			if attribs_match:
				## Remove from source-list, all that is left there will be transferred
				debug(u"IGNR: %s (transfer not needed)" % file)
				exists_list[file] = src_list[file]
				del(src_list[file])

			## Remove from destination-list, all that is left there will be deleted
			del(dst_list[file])

	return src_list, dst_list, exists_list
01fe3a25
 
227fabf8
def cmd_sync_remote2local(args):
	## Handle 'sync' in the S3 -> local direction: download objects from
	## the S3 URIs in args[:-1] into the local destination args[-1],
	## optionally deleting local files that no longer exist remotely.

	def _parse_attrs_header(attrs_header):
		## Parse the "key1:val1/key2:val2/..." string stored in the
		## 'x-amz-meta-s3cmd-attrs' header (written by the upload side)
		## back into a dictionary of attribute name -> string value.
		attrs = {}
		for attr in attrs_header.split("/"):
			key, val = attr.split(":")
			attrs[key] = val
		return attrs

	s3 = S3(Config())

	destination_base = args[-1]
	## Build the two file lists to compare; both appear to be dicts keyed
	## by the relative file name (see the .keys() usage below).
	local_list, single_file_local = fetch_local_list(destination_base, recursive = True)
	remote_list = fetch_remote_list(args[:-1], recursive = True, require_attribs = True)

	local_count = len(local_list)
	remote_count = len(remote_list)

	info(u"Found %d remote files, %d local files" % (remote_count, local_count))

	## Apply --exclude/--include rules to the remote side only.
	remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)

	## After this call: remote_list = files to download,
	## local_list = local-only files (delete candidates),
	## existing_list = files present on both sides (unused here).
	remote_list, local_list, existing_list = _compare_filelists(remote_list, local_list, False)

	local_count = len(local_list)
	remote_count = len(remote_list)

	info(u"Summary: %d remote files to download, %d local files to delete" % (remote_count, local_count))

	if not os.path.isdir(destination_base):
		## We were either given a file name (existing or not) or want STDOUT
		if remote_count > 1:
			raise ParameterError("Destination must be a directory when downloading multiple sources.")
		remote_list[remote_list.keys()[0]]['local_filename'] = deunicodise(destination_base)
	else:
		if destination_base[-1] != os.path.sep:
			destination_base += os.path.sep
		for key in remote_list:
			local_filename = destination_base + key
			if os.path.sep != "/":
				## S3 keys always use "/" - convert to the native separator (e.g. on Windows)
				local_filename = os.path.sep.join(local_filename.split("/"))
			remote_list[key]['local_filename'] = deunicodise(local_filename)

	if cfg.dry_run:
		## Report what would be done and bail out before any transfer.
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		if cfg.delete_removed:
			for key in local_list:
				output(u"delete: %s" % local_list[key]['full_name_unicode'])
		for key in remote_list:
			output(u"download: %s -> %s" % (remote_list[key]['object_uri_str'], remote_list[key]['local_filename']))

		warning(u"Exitting now because of --dry-run")
		return

	if cfg.delete_removed:
		## --delete-removed: remove local files with no remote counterpart
		for key in local_list:
			os.unlink(local_list[key]['full_name'])
			output(u"deleted: %s" % local_list[key]['full_name_unicode'])

	total_size = 0
	total_elapsed = 0.0
	timestamp_start = time.time()
	seq = 0
	## Cache of mkdir_with_parents() results per destination directory,
	## so each directory is only created (or found unwritable) once.
	dir_cache = {}
	file_list = remote_list.keys()
	file_list.sort()
	for file in file_list:
		seq += 1
		item = remote_list[file]
		uri = S3Uri(item['object_uri_str'])
		dst_file = item['local_filename']
		seq_label = "[%d of %d]" % (seq, remote_count)
		try:
			dst_dir = os.path.dirname(dst_file)
			if not dir_cache.has_key(dst_dir):
				dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
			if dir_cache[dst_dir] == False:
				warning(u"%s: destination directory not writable: %s" % (file, dst_dir))
				continue
			try:
				open_flags = os.O_CREAT
				open_flags |= os.O_TRUNC
				# open_flags |= os.O_EXCL

				debug(u"dst_file=%s" % unicodise(dst_file))
				# This will have failed should the file exist
				os.close(os.open(dst_file, open_flags))
				# Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
				dst_stream = open(dst_file, "wb")
				response = s3.object_get(uri, dst_stream, extra_label = seq_label)
				dst_stream.close()
				## Restore attributes stored by 'sync' upload, if requested.
				if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
					attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
					if attrs.has_key('mode'):
						os.chmod(dst_file, int(attrs['mode']))
					if attrs.has_key('mtime') or attrs.has_key('atime'):
						## fall back to "now" for whichever timestamp is missing
						mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
						atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
						os.utime(dst_file, (atime, mtime))
					## FIXME: uid/gid / uname/gname handling comes here! TODO
			except OSError, e:
				try: dst_stream.close()
				except: pass
				if e.errno == errno.EEXIST:
					warning(u"%s exists - not overwriting" % (dst_file))
					continue
				if e.errno in (errno.EPERM, errno.EACCES):
					warning(u"%s not writable: %s" % (dst_file, e.strerror))
					continue
				raise e
			except KeyboardInterrupt:
				try: dst_stream.close()
				except: pass
				warning(u"Exiting after keyboard interrupt")
				return
			except Exception, e:
				try: dst_stream.close()
				except: pass
				error(u"%s: %s" % (file, e))
				continue
			# We have to keep repeating this call because 
			# Python 2.4 doesn't support try/except/finally
			# construction :-(
			try: dst_stream.close()
			except: pass
		except S3DownloadError, e:
			## object_get() has already retried internally - give up on this file
			error(u"%s: download failed too many times. Skipping that file." % file)
			continue
		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
		if not Config().progress_meter:
			output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
				(uri, unicodise(dst_file), response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
				seq_label))
		total_size += response["size"]

	total_elapsed = time.time() - timestamp_start
	speed_fmt = formatSize(total_size/total_elapsed, human_readable = True, floating_point = True)

	# Only print out the result if any work has been done or 
	# if the user asked for verbose output
	outstr = "Done. Downloaded %d bytes in %0.1f seconds, %0.2f %sB/s" % (total_size, total_elapsed, speed_fmt[0], speed_fmt[1])
	if total_size > 0:
		output(outstr)
	else:
		info(outstr)
01fe3a25
 
d7251ccb
def cmd_sync_local2remote(args):
	## Handle 'sync' in the local -> S3 direction: upload local files from
	## args[:-1] to the S3 destination args[-1], optionally deleting remote
	## objects that no longer exist locally.

	def _build_attr_header(src):
		## Build the 'x-amz-meta-s3cmd-attrs' header value for file 'src':
		## a "key1:val1/key2:val2/..." string of the attributes listed in
		## cfg.preserve_attrs_list (the download side parses this back).
		import pwd, grp
		attrs = {}
		src = deunicodise(src)
		st = os.stat_result(os.stat(src))
		for attr in cfg.preserve_attrs_list:
			if attr == 'uname':
				try:
					val = pwd.getpwuid(st.st_uid).pw_name
				except KeyError:
					## no passwd entry for this uid - store the numeric uid instead
					attr = "uid"
					val = st.st_uid
					warning(u"%s: Owner username not known. Storing UID=%d instead." % (unicodise(src), val))
			elif attr == 'gname':
				try:
					val = grp.getgrgid(st.st_gid).gr_name
				except KeyError:
					## no group entry for this gid - store the numeric gid instead
					attr = "gid"
					val = st.st_gid
					warning(u"%s: Owner groupname not known. Storing GID=%d instead." % (unicodise(src), val))
			else:
				## e.g. 'mode', 'mtime', 'atime' -> read from os.stat() result
				val = getattr(st, 'st_' + attr)
			attrs[attr] = val
		result = ""
		for k in attrs: result += "%s:%s/" % (k, attrs[k])
		## strip the trailing "/"
		return { 'x-amz-meta-s3cmd-attrs' : result[:-1] }

	s3 = S3(cfg)

	if cfg.encrypt:
		error(u"S3cmd 'sync' doesn't yet support GPG encryption, sorry.")
		error(u"Either use unconditional 's3cmd put --recursive'")
		error(u"or disable encryption with --no-encrypt parameter.")
		sys.exit(1)

	## Normalize URI to convert s3://bkt to s3://bkt/ (trailing slash)
	destination_base_uri = S3Uri(args[-1])
	if destination_base_uri.type != 's3':
		raise ParameterError("Destination must be S3Uri. Got: %s" % destination_base_uri)
	destination_base = str(destination_base_uri)

	local_list, single_file_local = fetch_local_list(args[:-1], recursive = True)
	remote_list = fetch_remote_list(destination_base, recursive = True, require_attribs = True)

	local_count = len(local_list)
	remote_count = len(remote_list)

	info(u"Found %d local files, %d remote files" % (local_count, remote_count))

	## Apply --exclude/--include rules to the local side only.
	local_list, exclude_list = _filelist_filter_exclude_include(local_list)

	if single_file_local and len(local_list) == 1 and len(remote_list) == 1:
		## Make remote_key same as local_key for comparison if we're dealing with only one file
		remote_list_entry = remote_list[remote_list.keys()[0]]
		# Flush remote_list, by the way
		remote_list = { local_list.keys()[0] : remote_list_entry }

	## After this call: local_list = files to upload,
	## remote_list = remote-only objects (delete candidates),
	## existing_list = objects present on both sides (unused here).
	local_list, remote_list, existing_list = _compare_filelists(local_list, remote_list, True)

	local_count = len(local_list)
	remote_count = len(remote_list)

	info(u"Summary: %d local files to upload, %d remote files to delete" % (local_count, remote_count))

	if local_count > 0:
		## Populate 'remote_uri' only if we've got something to upload
		if not destination_base.endswith("/"):
			if not single_file_local:
				raise ParameterError("Destination S3 URI must end with '/' (ie must refer to a directory on the remote side).")
			local_list[local_list.keys()[0]]['remote_uri'] = unicodise(destination_base)
		else:
			for key in local_list:
				local_list[key]['remote_uri'] = unicodise(destination_base + key)

	if cfg.dry_run:
		## Report what would be done and bail out before any transfer.
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		if cfg.delete_removed:
			for key in remote_list:
				output(u"delete: %s" % remote_list[key]['object_uri_str'])
		for key in local_list:
			output(u"upload: %s -> %s" % (local_list[key]['full_name_unicode'], local_list[key]['remote_uri']))

		warning(u"Exitting now because of --dry-run")
		return

	if cfg.delete_removed:
		## --delete-removed: remove remote objects with no local counterpart
		for key in remote_list:
			uri = S3Uri(remote_list[key]['object_uri_str'])
			s3.object_delete(uri)
			output(u"deleted: '%s'" % uri)

	total_size = 0
	total_elapsed = 0.0
	timestamp_start = time.time()
	seq = 0
	file_list = local_list.keys()
	file_list.sort()
	for file in file_list:
		seq += 1
		item = local_list[file]
		src = item['full_name']
		uri = S3Uri(item['remote_uri'])
		seq_label = "[%d of %d]" % (seq, local_count)
		## start from the user-supplied headers, then add attribute metadata
		extra_headers = copy(cfg.extra_headers)
		if cfg.preserve_attrs:
			attr_header = _build_attr_header(src)
			debug(u"attr_header: %s" % attr_header)
			extra_headers.update(attr_header)
		try:
			response = s3.object_put(src, uri, extra_headers, extra_label = seq_label)
		except S3UploadError, e:
			## object_put() has already retried internally - give up on this file
			error(u"%s: upload failed too many times. Skipping that file." % item['full_name_unicode'])
			continue
		except InvalidFileError, e:
			warning(u"File can not be uploaded: %s" % e)
			continue
		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
		if not cfg.progress_meter:
			output(u"File '%s' stored as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s) %s" %
				(item['full_name_unicode'], uri, response["size"], response["elapsed"], 
				speed_fmt[0], speed_fmt[1], seq_label))
		total_size += response["size"]

	total_elapsed = time.time() - timestamp_start
	## guard against division by zero when nothing was transferred
	total_speed = total_elapsed and total_size/total_elapsed or 0.0
	speed_fmt = formatSize(total_speed, human_readable = True, floating_point = True)

	# Only print out the result if any work has been done or 
	# if the user asked for verbose output
	outstr = "Done. Uploaded %d bytes in %0.1f seconds, %0.2f %sB/s" % (total_size, total_elapsed, speed_fmt[0], speed_fmt[1])
	if total_size > 0:
		output(outstr)
	else:
		info(outstr)
0d91ff3f
 
01fe3a25
 def cmd_sync(args):
227fabf8
 	if (len(args) < 2):
 		raise ParameterError("Too few parameters! Expected: %s" % commands['sync']['param'])
01fe3a25
 
227fabf8
 	if S3Uri(args[0]).type == "file" and S3Uri(args[-1]).type == "s3":
 		return cmd_sync_local2remote(args)
 	if S3Uri(args[0]).type == "s3" and S3Uri(args[-1]).type == "file":
 		return cmd_sync_remote2local(args)
 	raise ParameterError("Invalid source/destination: '%s'" % "' '".join(args))
585c735a
 
def cmd_setacl(args):
	## Handle the 'setacl' command: change ACLs on buckets and/or objects,
	## driven by cfg.acl_public (tri-state: True/False/None) and the
	## explicit --acl-grant / --acl-revoke lists.

	def _update_acl(uri, seq_label = ""):
		## Fetch the current ACL of 'uri', apply the requested changes and
		## write it back - but only if anything actually changed.
		## Uses 's3' and 'set_to_acl' from the enclosing scope.
		something_changed = False
		acl = s3.get_acl(uri)
		debug(u"acl: %s - %r" % (uri, acl.grantees))
		if cfg.acl_public == True:
			if acl.isAnonRead():
				info(u"%s: already Public, skipping %s" % (uri, seq_label))
			else:
				acl.grantAnonRead()
				something_changed = True
		elif cfg.acl_public == False: # we explicitely check for False, because it could be None
			if not acl.isAnonRead():
				info(u"%s: already Private, skipping %s" % (uri, seq_label))
			else:
				acl.revokeAnonRead()
				something_changed = True

		# update acl with arguments
		# grant first and revoke later, because revoke has priority
		if cfg.acl_grants:
			something_changed = True
			for grant in cfg.acl_grants:
				acl.grant(**grant);

		if cfg.acl_revokes:
			something_changed = True
			for revoke in cfg.acl_revokes:
				acl.revoke(**revoke);

		if not something_changed:
			return

		## NOTE(review): "retsponse" is a long-standing typo for "response";
		## kept as-is since it is a purely local name.
		retsponse = s3.set_acl(uri, acl)
		if retsponse['status'] == 200:
			if cfg.acl_public in (True, False):
				output(u"%s: ACL set to %s  %s" % (uri, set_to_acl, seq_label))
			else:
				output(u"%s: ACL updated" % uri)

	s3 = S3(cfg)

	## Human-readable label used in messages below
	set_to_acl = cfg.acl_public and "Public" or "Private"

	if not cfg.recursive:
		## Non-recursive mode: bucket-only URIs are handled immediately at
		## bucket level; object URIs are collected for per-object handling.
		old_args = args
		args = []
		for arg in old_args:
			uri = S3Uri(arg)
			if not uri.has_object():
				if cfg.acl_public != None:
					info("Setting bucket-level ACL for %s to %s" % (uri.uri(), set_to_acl))
				else:
					info("Setting bucket-level ACL for %s" % (uri.uri()))
				if not cfg.dry_run:
					_update_acl(uri)
			else:
				args.append(arg)

	remote_list = fetch_remote_list(args)
	remote_list, exclude_list = _filelist_filter_exclude_include(remote_list)

	remote_count = len(remote_list)

	info(u"Summary: %d remote files to update" % remote_count)

	if cfg.dry_run:
		## Report what would be done and bail out before any change.
		for key in exclude_list:
			output(u"exclude: %s" % unicodise(key))
		for key in remote_list:
			output(u"setacl: %s" % remote_list[key]['object_uri_str'])

		warning(u"Exitting now because of --dry-run")
		return

	seq = 0
	for key in remote_list:
		seq += 1
		seq_label = "[%d of %d]" % (seq, remote_count)
		uri = S3Uri(remote_list[key]['object_uri_str'])
		_update_acl(uri, seq_label)
585c735a
 
cb0bbaef
 def cmd_accesslog(args):
 	s3 = S3(cfg)
 	bucket_uri = S3Uri(args.pop())
 	if bucket_uri.object():
 		raise ParameterError("Only bucket name is required for [accesslog] command")
d7693f3c
 	if cfg.log_target_prefix == False:
 		accesslog, response = s3.set_accesslog(bucket_uri, enable = False)
 	elif cfg.log_target_prefix:
cb0bbaef
 		log_target_prefix_uri = S3Uri(cfg.log_target_prefix)
 		if log_target_prefix_uri.type != "s3":
 			raise ParameterError("--log-target-prefix must be a S3 URI")
 		accesslog, response = s3.set_accesslog(bucket_uri, enable = True, log_target_prefix_uri = log_target_prefix_uri, acl_public = cfg.acl_public)
d7693f3c
 	else:	# cfg.log_target_prefix == None
cb0bbaef
 		accesslog = s3.get_accesslog(bucket_uri)
 
 	output(u"Access logging for: %s" % bucket_uri.uri())
 	output(u"   Logging Enabled: %s" % accesslog.isLoggingEnabled())
 	if accesslog.isLoggingEnabled():
 		output(u"     Target prefix: %s" % accesslog.targetPrefix().uri())
 		#output(u"   Public Access:   %s" % accesslog.isAclPublic())
 		
0b8ea559
 def cmd_sign(args):
 	string_to_sign = args.pop()
 	debug("string-to-sign: %r" % string_to_sign)
 	signature = Utils.sign_string(string_to_sign)
 	output("Signature: %s" % signature)
 
3c07424d
def cmd_fixbucket(args):
	## Handle the 'fixbucket' command: find object keys containing XML
	## character references (i.e. names with characters S3 escaped) and
	## rename the objects to a printable-safe equivalent.

	def _unescape(text):
		##
		# Removes HTML or XML character references and entities from a text string.
		#
		# @param text The HTML (or XML) source text.
		# @return The plain text, as a Unicode string, if necessary.
		# 
		# From: http://effbot.org/zone/re-sub.htm#unescape-html
		def _unescape_fixup(m):
			text = m.group(0)
			## htmlentitydefs doesn't know XML's &apos; - teach it once
			if not htmlentitydefs.name2codepoint.has_key('apos'):
				htmlentitydefs.name2codepoint['apos'] = ord("'")
			if text[:2] == "&#":
				# character reference
				try:
					if text[:3] == "&#x":
						return unichr(int(text[3:-1], 16))
					else:
						return unichr(int(text[2:-1]))
				except ValueError:
					pass
			else:
				# named entity
				try:
					text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
				except KeyError:
					pass
			return text # leave as is
		return re.sub("&#?\w+;", _unescape_fixup, text)

	## Use raw (unparsed) listings so broken key names survive the round trip
	cfg.urlencoding_mode = "fixbucket"
	s3 = S3(cfg)

	count = 0
	for arg in args:
		culprit = S3Uri(arg)
		if culprit.type != "s3":
			raise ParameterError("Expecting S3Uri instead of: %s" % arg)
		response = s3.bucket_list_noparse(culprit.bucket(), culprit.object(), recursive = True)
		## keys containing hex character references need fixing
		r_xent = re.compile("&#x[\da-fA-F]+;")
		keys = re.findall("<Key>(.*?)</Key>", response['data'], re.MULTILINE)
		debug("Keys: %r" % keys)
		for key in keys:
			if r_xent.search(key):
				info("Fixing: %s" % key)
				debug("Step 1: Transforming %s" % key)
				key_bin = _unescape(key)
				debug("Step 2:       ... to %s" % key_bin)
				key_new = replace_nonprintables(key_bin)
				debug("Step 3:  ... then to %s" % key_new)
				src = S3Uri("s3://%s/%s" % (culprit.bucket(), key_bin))
				dst = S3Uri("s3://%s/%s" % (culprit.bucket(), key_new))
				resp_move = s3.object_move(src, dst)
				if resp_move['status'] == 200:
					output("File %r renamed to %s" % (key_bin, key_new))
					count += 1
				else:
					error("Something went wrong for: %r" % key)
					error("Please report the problem to s3tools-bugs@lists.sourceforge.net")
	if count > 0:
		warning("Fixed %d files' names. Their ACL were reset to Private." % count)
		warning("Use 's3cmd setacl --acl-public s3://...' to make")
		warning("them publicly readable if required.")
 
8ec1807f
 def resolve_list(lst, args):
 	retval = []
 	for item in lst:
 		retval.append(item % args)
 	return retval
 
 def gpg_command(command, passphrase = ""):
e3afa96a
 	debug("GPG command: " + " ".join(command))
 	p = subprocess.Popen(command, stdin = subprocess.PIPE, stdout = subprocess.PIPE, stderr = subprocess.STDOUT)
 	p_stdout, p_stderr = p.communicate(passphrase + "\n")
 	debug("GPG output:")
 	for line in p_stdout.split("\n"):
 		debug("GPG: " + line)
 	p_exitcode = p.wait()
8ec1807f
 	return p_exitcode
 
 def gpg_encrypt(filename):
 	tmp_filename = Utils.mktmpfile()
 	args = {
 		"gpg_command" : cfg.gpg_command,
 		"passphrase_fd" : "0",
 		"input_file" : filename, 
 		"output_file" : tmp_filename,
 	}
a1e3fd9c
 	info(u"Encrypting file %(input_file)s to %(output_file)s..." % args)
8ec1807f
 	command = resolve_list(cfg.gpg_encrypt.split(" "), args)
 	code = gpg_command(command, cfg.gpg_passphrase)
 	return (code, tmp_filename, "gpg")
 
49731b40
 def gpg_decrypt(filename, gpgenc_header = "", in_place = True):
8ec1807f
 	tmp_filename = Utils.mktmpfile(filename)
 	args = {
 		"gpg_command" : cfg.gpg_command,
 		"passphrase_fd" : "0",
 		"input_file" : filename, 
 		"output_file" : tmp_filename,
 	}
a1e3fd9c
 	info(u"Decrypting file %(input_file)s to %(output_file)s..." % args)
8ec1807f
 	command = resolve_list(cfg.gpg_decrypt.split(" "), args)
 	code = gpg_command(command, cfg.gpg_passphrase)
49731b40
 	if code == 0 and in_place:
315e527b
 		debug(u"Renaming %s to %s" % (tmp_filename, filename))
8ec1807f
 		os.unlink(filename)
 		os.rename(tmp_filename, filename)
49731b40
 		tmp_filename = filename
 	return (code, tmp_filename)
8ec1807f
 
5a736f08
def run_configure(config_file):
	## Interactive --configure tool: prompt for the main options, optionally
	## test the credentials and GPG encryption, then save 'config_file'
	## (user-readable only). Aborts without saving on EOF/Ctrl-C.
	cfg = Config()
	## (attribute-name, prompt-label[, explanation]) tuples, asked in order
	options = [
		("access_key", "Access Key", "Access key and Secret key are your identifiers for Amazon S3"),
		("secret_key", "Secret Key"),
		("gpg_passphrase", "Encryption password", "Encryption password is used to protect your files from reading\nby unauthorized persons while in transfer to S3"),
		("gpg_command", "Path to GPG program"),
		("use_https", "Use HTTPS protocol", "When using secure HTTPS protocol all communication with Amazon S3\nservers is protected from 3rd party eavesdropping. This method is\nslower than plain HTTP and can't be used if you're behind a proxy"),
		("proxy_host", "HTTP Proxy server name", "On some networks all internet access must go through a HTTP proxy.\nTry setting it here if you can't conect to S3 directly"),
		("proxy_port", "HTTP Proxy server port"),
		]
	## Option-specfic defaults
	if getattr(cfg, "gpg_command") == "":
		setattr(cfg, "gpg_command", find_executable("gpg"))

	## Pre-seed the proxy settings from the http_proxy environment variable
	if getattr(cfg, "proxy_host") == "" and os.getenv("http_proxy"):
		re_match=re.match("(http://)?([^:]+):(\d+)", os.getenv("http_proxy"))
		if re_match:
			setattr(cfg, "proxy_host", re_match.groups()[1])
			setattr(cfg, "proxy_port", re_match.groups()[2])

	try:
		while 1:
			output(u"\nEnter new values or accept defaults in brackets with Enter.")
			output(u"Refer to user manual for detailed description of all options.")
			for option in options:
				prompt = option[1]
				## Option-specific handling
				if option[0] == 'proxy_host' and getattr(cfg, 'use_https') == True:
					## proxy cannot be used together with HTTPS - clear and skip
					setattr(cfg, option[0], "")
					continue
				if option[0] == 'proxy_port' and getattr(cfg, 'proxy_host') == "":
					## no proxy host -> port is meaningless, skip the prompt
					setattr(cfg, option[0], 0)
					continue

				try:
					## show the current value as the default in [brackets]
					val = getattr(cfg, option[0])
					if type(val) is bool:
						val = val and "Yes" or "No"
					if val not in (None, ""):
						prompt += " [%s]" % val
				except AttributeError:
					pass

				if len(option) >= 3:
					output(u"\n%s" % option[2])

				val = raw_input(prompt + ": ")
				if val != "":
					if type(getattr(cfg, option[0])) is bool:
						# Turn 'Yes' into True, everything else into False
						val = val.lower().startswith('y')
					setattr(cfg, option[0], val)
			output(u"\nNew settings:")
			for option in options:
				output(u"  %s: %s" % (option[1], getattr(cfg, option[0])))
			val = raw_input("\nTest access with supplied credentials? [Y/n] ")
			if val.lower().startswith("y") or val == "":
				try:
					## Try a simple authenticated request to verify the keys
					output(u"Please wait...")
					S3(Config()).bucket_list("", "")
					output(u"Success. Your access key and secret key worked fine :-)")

					## Round-trip encrypt/decrypt a small file and compare
					## checksums to verify the GPG setup actually works.
					output(u"\nNow verifying that encryption works...")
					if not getattr(cfg, "gpg_command") or not getattr(cfg, "gpg_passphrase"):
						output(u"Not configured. Never mind.")
					else:
						if not getattr(cfg, "gpg_command"):
							raise Exception("Path to GPG program not set")
						if not os.path.isfile(getattr(cfg, "gpg_command")):
							raise Exception("GPG program not found")
						filename = Utils.mktmpfile()
						f = open(filename, "w")
						f.write(os.sys.copyright)
						f.close()
						ret_enc = gpg_encrypt(filename)
						ret_dec = gpg_decrypt(ret_enc[1], ret_enc[2], False)
						hash = [
							Utils.hash_file_md5(filename),
							Utils.hash_file_md5(ret_enc[1]),
							Utils.hash_file_md5(ret_dec[1]),
						]
						os.unlink(filename)
						os.unlink(ret_enc[1])
						os.unlink(ret_dec[1])
						## decrypted == original, and encrypted differs
						if hash[0] == hash[2] and hash[0] != hash[1]:
							output ("Success. Encryption and decryption worked fine :-)") 
						else:
							raise Exception("Encryption verification error.")

				except Exception, e:
					error(u"Test failed: %s" % (e))
					val = raw_input("\nRetry configuration? [Y/n] ")
					if val.lower().startswith("y") or val == "":
						continue
					

			val = raw_input("\nSave settings? [y/N] ")
			if val.lower().startswith("y"):
				break
			val = raw_input("Retry configuration? [Y/n] ")
			if val.lower().startswith("n"):
				raise EOFError()

		## Overwrite existing config file, make it user-readable only
		old_mask = os.umask(0077)
		try:
			os.remove(config_file)
		except OSError, e:
			if e.errno != errno.ENOENT:
				raise
		f = open(config_file, "w")
		os.umask(old_mask)
		cfg.dump_config(f)
		f.close()
		output(u"Configuration saved to '%s'" % config_file)

	except (EOFError, KeyboardInterrupt):
		output(u"\nConfiguration aborted. Changes were NOT saved.")
		return
	
	except IOError, e:
		error(u"Writing config file failed: %s: %s" % (config_file, e.strerror))
		sys.exit(1)
5a736f08
 
7484d6c8
 def process_patterns_from_file(fname, patterns_list):
cb303737
 	try:
7484d6c8
 		fn = open(fname, "rt")
cb303737
 	except IOError, e:
 		error(e)
 		sys.exit(1)
7484d6c8
 	for pattern in fn:
 		pattern = pattern.strip()
 		if re.match("^#", pattern) or re.match("^\s*$", pattern):
2d7d5543
 			continue
7484d6c8
 		debug(u"%s: adding rule: %s" % (fname, pattern))
 		patterns_list.append(pattern)
 
 	return patterns_list
 
 def process_patterns(patterns_list, patterns_from, is_glob, option_txt = ""):
 	"""
 	process_patterns(patterns, patterns_from, is_glob, option_txt = "")
 	Process --exclude / --include GLOB and REGEXP patterns.
 	'option_txt' is 'exclude' / 'include' / 'rexclude' / 'rinclude'
 	Returns: patterns_compiled, patterns_text
 	"""
 
 	patterns_compiled = []
 	patterns_textual = {}
 
 	if patterns_list is None:
 		patterns_list = []
 
 	if patterns_from:
 		## Append patterns from glob_from 
 		for fname in patterns_from:
 			debug(u"processing --%s-from %s" % (option_txt, fname))
 			patterns_list = process_patterns_from_file(fname, patterns_list)
 
 	for pattern in patterns_list:
 		debug(u"processing %s rule: %s" % (option_txt, patterns_list))
 		if is_glob:
 			pattern = glob.fnmatch.translate(pattern)
 		r = re.compile(pattern)
 		patterns_compiled.append(r)
 		patterns_textual[r] = pattern
 
 	return patterns_compiled, patterns_textual
2d7d5543
 
b3488bab
def get_commands_list():
	## Table of all supported commands. Each entry maps:
	##   "cmd"   - the command word given on the command line
	##   "label" - human readable description (used by --help)
	##   "param" - expected parameters string (used by --help and errors)
	##   "func"  - handler function to invoke
	##   "argc"  - minimum number of arguments the command requires
	return [
	{"cmd":"mb", "label":"Make bucket", "param":"s3://BUCKET", "func":cmd_bucket_create, "argc":1},
	{"cmd":"rb", "label":"Remove bucket", "param":"s3://BUCKET", "func":cmd_bucket_delete, "argc":1},
	{"cmd":"ls", "label":"List objects or buckets", "param":"[s3://BUCKET[/PREFIX]]", "func":cmd_ls, "argc":0},
	{"cmd":"la", "label":"List all object in all buckets", "param":"", "func":cmd_buckets_list_all_all, "argc":0},
	{"cmd":"put", "label":"Put file into bucket", "param":"FILE [FILE...] s3://BUCKET[/PREFIX]", "func":cmd_object_put, "argc":2},
	{"cmd":"get", "label":"Get file from bucket", "param":"s3://BUCKET/OBJECT LOCAL_FILE", "func":cmd_object_get, "argc":1},
	{"cmd":"del", "label":"Delete file from bucket", "param":"s3://BUCKET/OBJECT", "func":cmd_object_del, "argc":1},
	#{"cmd":"mkdir", "label":"Make a virtual S3 directory", "param":"s3://BUCKET/path/to/dir", "func":cmd_mkdir, "argc":1},
	{"cmd":"sync", "label":"Synchronize a directory tree to S3", "param":"LOCAL_DIR s3://BUCKET[/PREFIX] or s3://BUCKET[/PREFIX] LOCAL_DIR", "func":cmd_sync, "argc":2},
	{"cmd":"du", "label":"Disk usage by buckets", "param":"[s3://BUCKET[/PREFIX]]", "func":cmd_du, "argc":0},
	{"cmd":"info", "label":"Get various information about Buckets or Files", "param":"s3://BUCKET[/OBJECT]", "func":cmd_info, "argc":1},
	{"cmd":"cp", "label":"Copy object", "param":"s3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]", "func":cmd_cp, "argc":2},
	{"cmd":"mv", "label":"Move object", "param":"s3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]", "func":cmd_mv, "argc":2},
	{"cmd":"setacl", "label":"Modify Access control list for Bucket or Files", "param":"s3://BUCKET[/OBJECT]", "func":cmd_setacl, "argc":1},
	{"cmd":"accesslog", "label":"Enable/disable bucket access logging", "param":"s3://BUCKET", "func":cmd_accesslog, "argc":1},
	{"cmd":"sign", "label":"Sign arbitrary string using the secret key", "param":"STRING-TO-SIGN", "func":cmd_sign, "argc":1},
	{"cmd":"fixbucket", "label":"Fix invalid file names in a bucket", "param":"s3://BUCKET[/PREFIX]", "func":cmd_fixbucket, "argc":1},

	## CloudFront commands
	{"cmd":"cflist", "label":"List CloudFront distribution points", "param":"", "func":CfCmd.info, "argc":0},
	{"cmd":"cfinfo", "label":"Display CloudFront distribution point parameters", "param":"[cf://DIST_ID]", "func":CfCmd.info, "argc":0},
	{"cmd":"cfcreate", "label":"Create CloudFront distribution point", "param":"s3://BUCKET", "func":CfCmd.create, "argc":1},
	{"cmd":"cfdelete", "label":"Delete CloudFront distribution point", "param":"cf://DIST_ID", "func":CfCmd.delete, "argc":1},
	{"cmd":"cfmodify", "label":"Change CloudFront distribution point parameters", "param":"cf://DIST_ID", "func":CfCmd.modify, "argc":1},
	]
3cc025ae
 
ccb78539
 def format_commands(progname, commands_list):
f4555c39
 	help = "Commands:\n"
5a736f08
 	for cmd in commands_list:
 		help += "  %s\n      %s %s %s\n" % (cmd["label"], progname, cmd["cmd"], cmd["param"])
f4555c39
 	return help
 
9b7618ae
 class OptionMimeType(Option):
 	def check_mimetype(option, opt, value):
 		if re.compile("^[a-z0-9]+/[a-z0-9+\.-]+$", re.IGNORECASE).match(value):
 			return value
 		raise OptionValueError("option %s: invalid MIME-Type format: %r" % (opt, value))
 
4f11bf57
 class OptionS3ACL(Option):
 	def check_s3acl(option, opt, value):
 		permissions = ('read', 'write', 'read_acp', 'write_acp', 'full_control', 'all')
 		try:
 			permission, grantee = re.compile("^(\w+):(.+)$", re.IGNORECASE).match(value).groups()
 			if not permission or not grantee:
 				raise
 			if permission in permissions:
 				return { 'name' : grantee, 'permission' : permission.upper() }
 			else:
 				raise OptionValueError("option %s: invalid S3 ACL permission: %s (valid values: %s)" % 
 					(opt, permission, ", ".join(permissions)))
 		except:
 			raise OptionValueError("option %s: invalid S3 ACL format: %r" % (opt, value))
 
class OptionAll(OptionMimeType, OptionS3ACL):
	## Combined Option class handed to OptionParser: registers both the
	## "mimetype" and "s3acl" custom value types with their checkers.
	TYPE_CHECKER = copy(Option.TYPE_CHECKER)
	TYPE_CHECKER["mimetype"] = OptionMimeType.check_mimetype
	TYPE_CHECKER["s3acl"] = OptionS3ACL.check_s3acl
	TYPES = Option.TYPES + ("mimetype", "s3acl")
9b7618ae
 
f4555c39
 class MyHelpFormatter(IndentedHelpFormatter):
 	def format_epilog(self, epilog):
 		if epilog:
 			return "\n" + epilog + "\n"
 		else:
 			return ""
 
4a52baa8
 def main():
87c0b03a
 	global cfg
3cc025ae
 
b3488bab
 	commands_list = get_commands_list()
 	commands = {}
ccb78539
 
5a736f08
 	## Populate "commands" from "commands_list"
 	for cmd in commands_list:
 		if cmd.has_key("cmd"):
 			commands[cmd["cmd"]] = cmd
 
9b7618ae
 	default_verbosity = Config().verbosity
4f11bf57
 	optparser = OptionParser(option_class=OptionAll, formatter=MyHelpFormatter())
9b7618ae
 	#optparser.disable_interspersed_args()
db340c09
 
7c1c5a19
 	config_file = None
db340c09
 	if os.getenv("HOME"):
 		config_file = os.path.join(os.getenv("HOME"), ".s3cfg")
 	elif os.name == "nt" and os.getenv("USERPROFILE"):
 		config_file = os.path.join(os.getenv("USERPROFILE"), "Application Data", "s3cmd.ini")
 
82d9eafa
 	preferred_encoding = locale.getpreferredencoding() or "UTF-8"
 
 	optparser.set_defaults(encoding = preferred_encoding)
 	optparser.set_defaults(config = config_file)
3cc025ae
 	optparser.set_defaults(verbosity = default_verbosity)
747ddb2a
 
09b29caf
 	optparser.add_option(      "--configure", dest="run_configure", action="store_true", help="Invoke interactive (re)configuration tool.")
747ddb2a
 	optparser.add_option("-c", "--config", dest="config", metavar="FILE", help="Config file name. Defaults to %default")
09b29caf
 	optparser.add_option(      "--dump-config", dest="dump_config", action="store_true", help="Dump current configuration after parsing config files and command line options and exit.")
 
01a80524
 	optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though (only for file transfer commands)")
03575797
 
8ec1807f
 	optparser.add_option("-e", "--encrypt", dest="encrypt", action="store_true", help="Encrypt files before uploading to S3.")
03575797
 	optparser.add_option(      "--no-encrypt", dest="encrypt", action="store_false", help="Don't encrypt files.")
9b7618ae
 	optparser.add_option("-f", "--force", dest="force", action="store_true", help="Force overwrite and other dangerous operations.")
9197e62e
 	optparser.add_option(      "--continue", dest="get_continue", action="store_true", help="Continue getting a partially downloaded file (only for [get] command).")
559c963f
 	optparser.add_option(      "--skip-existing", dest="skip_existing", action="store_true", help="Skip over files that exist at the destination (only for [get] and [sync] commands).")
7406fc6c
 	optparser.add_option("-r", "--recursive", dest="recursive", action="store_true", help="Recursive upload, download or removal.")
7393bdba
 	optparser.add_option("-P", "--acl-public", dest="acl_public", action="store_true", help="Store objects with ACL allowing read for anyone.")
 	optparser.add_option(      "--acl-private", dest="acl_public", action="store_false", help="Store objects with default ACL allowing access for you only.")
4f11bf57
 	optparser.add_option(      "--acl-grant", dest="acl_grants", type="s3acl", action="append", metavar="PERMISSION:EMAIL or USER_CANONICAL_ID", help="Grant stated permission to a given amazon user. Permission is one of: read, write, read_acp, write_acp, full_control, all")
	optparser.add_option(      "--acl-revoke", dest="acl_revokes", type="s3acl", action="append", metavar="PERMISSION:USER_CANONICAL_ID", help="Revoke stated permission for a given amazon user. Permission is one of: read, write, read_acp, write_acp, full_control, all")
41e77b56
 
0d91ff3f
 	optparser.add_option(      "--delete-removed", dest="delete_removed", action="store_true", help="Delete remote objects with no corresponding local file [sync]")
03575797
 	optparser.add_option(      "--no-delete-removed", dest="delete_removed", action="store_false", help="Don't delete remote objects.")
 	optparser.add_option("-p", "--preserve", dest="preserve_attrs", action="store_true", help="Preserve filesystem attributes (mode, ownership, timestamps). Default for [sync] command.")
 	optparser.add_option(      "--no-preserve", dest="preserve_attrs", action="store_false", help="Don't store FS attributes")
2d7d5543
 	optparser.add_option(      "--exclude", dest="exclude", action="append", metavar="GLOB", help="Filenames and paths matching GLOB will be excluded from sync")
 	optparser.add_option(      "--exclude-from", dest="exclude_from", action="append", metavar="FILE", help="Read --exclude GLOBs from FILE")
 	optparser.add_option(      "--rexclude", dest="rexclude", action="append", metavar="REGEXP", help="Filenames and paths matching REGEXP (regular expression) will be excluded from sync")
 	optparser.add_option(      "--rexclude-from", dest="rexclude_from", action="append", metavar="FILE", help="Read --rexclude REGEXPs from FILE")
7484d6c8
 	optparser.add_option(      "--include", dest="include", action="append", metavar="GLOB", help="Filenames and paths matching GLOB will be included even if previously excluded by one of --(r)exclude(-from) patterns")
 	optparser.add_option(      "--include-from", dest="include_from", action="append", metavar="FILE", help="Read --include GLOBs from FILE")
 	optparser.add_option(      "--rinclude", dest="rinclude", action="append", metavar="REGEXP", help="Same as --include but uses REGEXP (regular expression) instead of GLOB")
 	optparser.add_option(      "--rinclude-from", dest="rinclude_from", action="append", metavar="FILE", help="Read --rinclude REGEXPs from FILE")
8829e891
 
9b07e5f2
 	optparser.add_option(      "--bucket-location", dest="bucket_location", help="Datacentre to create bucket in. As of now the datacenters are: US (default), EU, us-west-1, and ap-southeast-1")
a7ea0bee
 	optparser.add_option(      "--reduced-redundancy", "--rr", dest="reduced_redundancy", action="store_true", help="Store object with 'Reduced redundancy'. Lower per-GB price. [put, cp, mv]")
09b29caf
 
d7693f3c
 	optparser.add_option(      "--access-logging-target-prefix", dest="log_target_prefix", help="Target prefix for access logs (S3 URI) (for [cfmodify] and [accesslog] commands)")
 	optparser.add_option(      "--no-access-logging", dest="log_target_prefix", action="store_false", help="Disable access logging (for [cfmodify] and [accesslog] commands)")
cb0bbaef
 
9b7618ae
 	optparser.add_option("-m", "--mime-type", dest="default_mime_type", type="mimetype", metavar="MIME/TYPE", help="Default MIME-type to be set for objects stored.")
 	optparser.add_option("-M", "--guess-mime-type", dest="guess_mime_type", action="store_true", help="Guess MIME-type of files by their extension. Falls back to default MIME-Type as specified by --mime-type option")
09b29caf
 
01a80524
 	optparser.add_option(      "--add-header", dest="add_header", action="append", metavar="NAME:VALUE", help="Add a given HTTP header to the upload request. Can be used multiple times. For instance set 'Expires' or 'Cache-Control' headers (or both) using this options if you like.")
fa664913
 
82d9eafa
 	optparser.add_option(      "--encoding", dest="encoding", metavar="ENCODING", help="Override autodetected terminal and filesystem encoding (character set). Autodetected: %s" % preferred_encoding)
3c07424d
 	optparser.add_option(      "--verbatim", dest="urlencoding_mode", action="store_const", const="verbatim", help="Use the S3 name as given on the command line. No pre-processing, encoding, etc. Use with caution!")
82d9eafa
 
8567b8ed
 	optparser.add_option(      "--list-md5", dest="list_md5", action="store_true", help="Include MD5 sums in bucket listings (only for 'ls' command).")
01a80524
 	optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form (eg 1kB instead of 1234).")
03575797
 
25f6f8c9
 	optparser.add_option(      "--progress", dest="progress_meter", action="store_true", help="Display progress meter (default on TTY).")
 	optparser.add_option(      "--no-progress", dest="progress_meter", action="store_false", help="Don't display progress meter (default on non-TTY).")
d7693f3c
 	optparser.add_option(      "--enable", dest="enable", action="store_true", help="Enable given CloudFront distribution (only for [cfmodify] command)")
 	optparser.add_option(      "--disable", dest="enable", action="store_false", help="Enable given CloudFront distribution (only for [cfmodify] command)")
87dc3cc3
 	optparser.add_option(      "--cf-add-cname", dest="cf_cnames_add", action="append", metavar="CNAME", help="Add given CNAME to a CloudFront distribution (only for [cfcreate] and [cfmodify] commands)")
 	optparser.add_option(      "--cf-remove-cname", dest="cf_cnames_remove", action="append", metavar="CNAME", help="Remove given CNAME from a CloudFront distribution (only for [cfmodify] command)")
 	optparser.add_option(      "--cf-comment", dest="cf_comment", action="store", metavar="COMMENT", help="Set COMMENT for a given CloudFront distribution (only for [cfcreate] and [cfmodify] commands)")
747ddb2a
 	optparser.add_option("-v", "--verbose", dest="verbosity", action="store_const", const=logging.INFO, help="Enable verbose output.")
09b29caf
 	optparser.add_option("-d", "--debug", dest="verbosity", action="store_const", const=logging.DEBUG, help="Enable debug output.")
ed61a5fa
 	optparser.add_option(      "--version", dest="show_version", action="store_true", help="Show s3cmd version (%s) and exit." % (PkgInfo.version))
7b5df262
 	optparser.add_option("-F", "--follow-symlinks", dest="follow_symlinks", action="store_true", default=False, help="Follow symbolic links as if they are regular files")
3cc025ae
 
f4555c39
 	optparser.set_usage(optparser.usage + " COMMAND [parameters]")
09b29caf
 	optparser.set_description('S3cmd is a tool for managing objects in '+
f45580a2
 		'Amazon S3 storage. It allows for making and removing '+
 		'"buckets" and uploading, downloading and removing '+
 		'"objects" from these buckets.')
ccb78539
 	optparser.epilog = format_commands(optparser.get_prog_name(), commands_list)
ed61a5fa
 	optparser.epilog += ("\nSee program homepage for more information at\n%s\n" % PkgInfo.url)
f45580a2
 
3cc025ae
 	(options, args) = optparser.parse_args()
 
 	## Some mucking with logging levels to enable 
 	## debugging/verbose output for config file parser on request
53abee97
 	logging.basicConfig(level=options.verbosity,
 	                    format='%(levelname)s: %(message)s',
82d9eafa
 	                    stream = sys.stderr)
3cc025ae
 	
747ddb2a
 	if options.show_version:
315e527b
 		output(u"s3cmd version %s" % PkgInfo.version)
747ddb2a
 		sys.exit(0)
 
3cc025ae
 	## Now finally parse the config file
ca168590
 	if not options.config:
315e527b
 		error(u"Can't find a config file. Please use --config option.")
ca168590
 		sys.exit(1)
 
5a736f08
 	try:
 		cfg = Config(options.config)
 	except IOError, e:
 		if options.run_configure:
 			cfg = Config()
 		else:
315e527b
 			error(u"%s: %s"  % (options.config, e.strerror))
 			error(u"Configuration file not available.")
 			error(u"Consider using --configure parameter to create one.")
1f7d2de3
 			sys.exit(1)
3cc025ae
 
 	## And again some logging level adjustments
 	## according to configfile and command line parameters
 	if options.verbosity != default_verbosity:
5a736f08
 		cfg.verbosity = options.verbosity
 	logging.root.setLevel(cfg.verbosity)
9b7618ae
 
25f6f8c9
 	## Default to --progress on TTY devices, --no-progress elsewhere
	## Can be overridden by actual --(no-)progress parameter
 	cfg.update_option('progress_meter', sys.stdout.isatty())
 
db340c09
 	## Unsupported features on Win32 platform
 	if os.name == "nt":
 		if cfg.preserve_attrs:
 			error(u"Option --preserve is not yet supported on MS Windows platform. Assuming --no-preserve.")
 			cfg.preserve_attrs = False
 		if cfg.progress_meter:
 			error(u"Option --progress is not yet supported on MS Windows platform. Assuming --no-progress.")
 			cfg.progress_meter = False
a120a4eb
 
fa664913
 	## Pre-process --add-header's and put them to Config.extra_headers SortedDict()
 	if options.add_header:
 		for hdr in options.add_header:
 			try:
 				key, val = hdr.split(":", 1)
 			except ValueError:
 				raise ParameterError("Invalid header format: %s" % hdr)
 			key_inval = re.sub("[a-zA-Z0-9-.]", "", key)
 			if key_inval:
 				key_inval = key_inval.replace(" ", "<space>")
 				key_inval = key_inval.replace("\t", "<tab>")
 				raise ParameterError("Invalid character(s) in header name '%s': \"%s\"" % (key, key_inval))
 			debug(u"Updating Config.Config extra_headers[%s] -> %s" % (key.strip(), val.strip()))
 			cfg.extra_headers[key.strip()] = val.strip()
 
4f11bf57
 	## --acl-grant/--acl-revoke arguments are pre-parsed by OptionS3ACL()
41e77b56
 	if options.acl_grants:
 		for grant in options.acl_grants:
4f11bf57
 			cfg.acl_grants.append(grant)
41e77b56
 
 	if options.acl_revokes:
4f11bf57
 		for grant in options.acl_revokes:
 			cfg.acl_revokes.append(grant)
41e77b56
 
9b7618ae
 	## Update Config with other parameters
5a736f08
 	for option in cfg.option_list():
 		try:
 			if getattr(options, option) != None:
87dc3cc3
 				debug(u"Updating Config.Config %s -> %s" % (option, getattr(options, option)))
5a736f08
 				cfg.update_option(option, getattr(options, option))
 		except AttributeError:
 			## Some Config() options are not settable from command line
 			pass
cb0bbaef
 
 	## Special handling for tri-state options (True, False, None)
 	cfg.update_option("enable", options.enable)
41e77b56
 	cfg.update_option("acl_public", options.acl_public)
cb0bbaef
 
 	## CloudFront's cf_enable and Config's enable share the same --enable switch
 	options.cf_enable = options.enable
 
b020ea02
 	## CloudFront's cf_logging and Config's log_target_prefix share the same --log-target-prefix switch
 	options.cf_logging = options.log_target_prefix
 
87dc3cc3
 	## Update CloudFront options if some were set
 	for option in CfCmd.options.option_list():
 		try:
 			if getattr(options, option) != None:
 				debug(u"Updating CloudFront.Cmd %s -> %s" % (option, getattr(options, option)))
 				CfCmd.options.update_option(option, getattr(options, option))
 		except AttributeError:
 			## Some CloudFront.Cmd.Options() options are not settable from command line
 			pass
5a736f08
 
82d9eafa
 	## Set output and filesystem encoding for printing out filenames.
 	sys.stdout = codecs.getwriter(cfg.encoding)(sys.stdout, "replace")
 	sys.stderr = codecs.getwriter(cfg.encoding)(sys.stderr, "replace")
 
7484d6c8
 	## Process --exclude and --exclude-from
 	patterns_list, patterns_textual = process_patterns(options.exclude, options.exclude_from, is_glob = True, option_txt = "exclude")
 	cfg.exclude.extend(patterns_list)
 	cfg.debug_exclude.update(patterns_textual)
 
 	## Process --rexclude and --rexclude-from
 	patterns_list, patterns_textual = process_patterns(options.rexclude, options.rexclude_from, is_glob = False, option_txt = "rexclude")
 	cfg.exclude.extend(patterns_list)
 	cfg.debug_exclude.update(patterns_textual)
 
 	## Process --include and --include-from
 	patterns_list, patterns_textual = process_patterns(options.include, options.include_from, is_glob = True, option_txt = "include")
 	cfg.include.extend(patterns_list)
 	cfg.debug_include.update(patterns_textual)
 
 	## Process --rinclude and --rinclude-from
 	patterns_list, patterns_textual = process_patterns(options.rinclude, options.rinclude_from, is_glob = False, option_txt = "rinclude")
 	cfg.include.extend(patterns_list)
 	cfg.debug_include.update(patterns_textual)
8829e891
 
7b5df262
         ## Process --follow-symlinks
 	cfg.update_option("follow_symlinks", options.follow_symlinks)
 
8ec1807f
 	if cfg.encrypt and cfg.gpg_passphrase == "":
315e527b
 		error(u"Encryption requested but no passphrase set in config file.")
 		error(u"Please re-run 's3cmd --configure' and supply it.")
8ec1807f
 		sys.exit(1)
 
5a736f08
 	if options.dump_config:
 		cfg.dump_config(sys.stdout)
1f7d2de3
 		sys.exit(0)
5a736f08
 
 	if options.run_configure:
 		run_configure(options.config)
1f7d2de3
 		sys.exit(0)
3cc025ae
 
 	if len(args) < 1:
315e527b
 		error(u"Missing command. Please run with --help for more information.")
1f7d2de3
 		sys.exit(1)
3cc025ae
 
d90a7929
 	## Unicodise all remaining arguments:
 	args = [unicodise(arg) for arg in args]
 
3cc025ae
 	command = args.pop(0)
 	try:
315e527b
 		debug(u"Command: %s" % commands[command]["cmd"])
3cc025ae
 		## We must do this lookup in extra step to 
 		## avoid catching all KeyError exceptions
 		## from inner functions.
5a736f08
 		cmd_func = commands[command]["func"]
3cc025ae
 	except KeyError, e:
315e527b
 		error(u"Invalid command: %s" % e)
1f7d2de3
 		sys.exit(1)
3cc025ae
 
5a736f08
 	if len(args) < commands[command]["argc"]:
315e527b
 		error(u"Not enough paramters for command '%s'" % command)
1f7d2de3
 		sys.exit(1)
3cc025ae
 
 	try:
 		cmd_func(args)
 	except S3Error, e:
315e527b
 		error(u"S3 error: %s" % e)
85baf810
 		sys.exit(1)
3cc025ae
 
1c88eb58
 def report_exception(e):
4a52baa8
 		sys.stderr.write("""
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     An unexpected error has occurred.
   Please report the following lines to:
1e2d476a
    s3tools-bugs@lists.sourceforge.net
4a52baa8
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 
 """)
2031f301
 		tb = traceback.format_exc(sys.exc_info())
 		e_class = str(e.__class__)
 		e_class = e_class[e_class.rfind(".")+1 : -2]
a1e3fd9c
 		sys.stderr.write(u"Problem: %s: %s\n" % (e_class, e))
2031f301
 		try:
 			sys.stderr.write("S3cmd:   %s\n" % PkgInfo.version)
 		except NameError:
 			sys.stderr.write("S3cmd:   unknown version. Module import problem?\n")
26b4a8e5
 		sys.stderr.write("\n")
a1e3fd9c
 		sys.stderr.write(unicode(tb, errors="replace"))
1c88eb58
 
2031f301
 		if type(e) == ImportError:
 			sys.stderr.write("\n")
 			sys.stderr.write("Your sys.path contains these entries:\n")
 			for path in sys.path:
a1e3fd9c
 				sys.stderr.write(u"\t%s\n" % path)
1c88eb58
 			sys.stderr.write("Now the question is where have the s3cmd modules been installed?\n")
 
4a52baa8
 		sys.stderr.write("""
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
     An unexpected error has occurred.
     Please report the above lines to:
1e2d476a
    s3tools-bugs@lists.sourceforge.net
4a52baa8
 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
 """)
1c88eb58
 
if __name__ == '__main__':
	## Entry point: import s3cmd's own modules, run main() and map
	## every failure mode to a non-zero exit status.
	try:
		## Our modules
		## Keep them in try/except block to 
		## detect any syntax errors in there
		from S3.Exceptions import *
		from S3 import PkgInfo
		from S3.S3 import S3
		from S3.Config import Config
		from S3.SortedDict import SortedDict
		from S3.S3Uri import S3Uri
		from S3 import Utils
		from S3.Utils import *
		from S3.Progress import Progress
		from S3.CloudFront import Cmd as CfCmd

		main()
		sys.exit(0)

	## Broken installation - print a full crash report incl. sys.path
	except ImportError, e:
		report_exception(e)
		sys.exit(1)
		
	## User error on the command line - message only, no traceback
	except ParameterError, e:
		error(u"Parameter problem: %s" % e)
		sys.exit(1)

	## Propagate explicit exits with their original exit code
	except SystemExit, e:
		sys.exit(e.code)

	## Ctrl-C - exit quietly but with a failure status
	except KeyboardInterrupt:
		sys.stderr.write("See ya!\n")
		sys.exit(1)

	## Anything else is an s3cmd bug - ask the user to report it
	except Exception, e:
		report_exception(e)
		sys.exit(1)