s3cmd
3cc025ae
 #!/usr/bin/env python
 
 ## Amazon S3 manager
 ## Author: Michal Ludvig <michal@logix.cz>
 ##         http://www.logix.cz/michal
 ## License: GPL Version 2
 
 import sys
 import logging
 import time
8a4a98b1
 import os
 import re
ac9940ec
 import errno
a368faf1
 import pwd, grp
2d7d5543
 import glob
4a52baa8
 import traceback
819ece94
 import codecs
3cc025ae
 
9b7618ae
 from copy import copy
f4555c39
 from optparse import OptionParser, Option, OptionValueError, IndentedHelpFormatter
3cc025ae
 from logging import debug, info, warning, error
49731b40
 from distutils.spawn import find_executable
3cc025ae
 
fa0dfb92
## Output native on TTY, UTF-8 otherwise (redirects)
#_stdout = sys.stdout.isatty() and sys.stdout or codecs.getwriter("utf-8")(sys.stdout)
#_stderr = sys.stderr.isatty() and sys.stderr or codecs.getwriter("utf-8")(sys.stderr)
## Output UTF-8 in all cases
## (codecs writer encodes unicode strings on their way to the stream)
_stdout = codecs.getwriter("utf-8")(sys.stdout)
_stderr = codecs.getwriter("utf-8")(sys.stderr)
## Leave it to the terminal
#_stdout = sys.stdout
#_stderr = sys.stderr
6513d3d9
 
3cc025ae
 def output(message):
fa0dfb92
 	_stdout.write(message + "\n")
3cc025ae
 
7c07fd66
 def check_args_type(args, type, verbose_type):
 	for arg in args:
 		if S3Uri(arg).type != type:
 			raise ParameterError("Expecting %s instead of '%s'" % (verbose_type, arg))
 
b96ddebe
 def cmd_du(args):
 	s3 = S3(Config())
 	if len(args) > 0:
 		uri = S3Uri(args[0])
 		if uri.type == "s3" and uri.has_bucket():
 			subcmd_bucket_usage(s3, uri)
 			return
 	subcmd_bucket_usage_all(s3)
 
 def subcmd_bucket_usage_all(s3):
 	response = s3.list_all_buckets()
 
 	buckets_size = 0
 	for bucket in response["list"]:
 		size = subcmd_bucket_usage(s3, S3Uri("s3://" + bucket["Name"]))
 		if size != None:
 			buckets_size += size
 	total_size, size_coeff = formatSize(buckets_size, Config().human_readable_sizes)
475b5bc2
 	total_size_str = str(total_size) + size_coeff 
 	output("".rjust(8, "-"))
 	output("%s Total" % (total_size_str.ljust(8)))
b96ddebe
 
 def subcmd_bucket_usage(s3, uri):
 	bucket = uri.bucket()
 	object = uri.object()
 
 	if object.endswith('*'):
 		object = object[:-1]
 	try:
 		response = s3.bucket_list(bucket, prefix = object)
 	except S3Error, e:
 		if S3.codes.has_key(e.Code):
 			error(S3.codes[e.Code] % bucket)
 			return
 		else:
 			raise
 	bucket_size = 0
 	for object in response["list"]:
 		size, size_coeff = formatSize(object["Size"], False)
 		bucket_size += size
 	total_size, size_coeff = formatSize(bucket_size, Config().human_readable_sizes)
475b5bc2
 	total_size_str = str(total_size) + size_coeff 
 	output("%s %s" % (total_size_str.ljust(8), uri))
 	return bucket_size
b96ddebe
 
9081133d
 def cmd_ls(args):
9b7618ae
 	s3 = S3(Config())
9081133d
 	if len(args) > 0:
b819c70c
 		uri = S3Uri(args[0])
 		if uri.type == "s3" and uri.has_bucket():
 			subcmd_bucket_list(s3, uri)
 			return
 	subcmd_buckets_list_all(s3)
3cc025ae
 
 def cmd_buckets_list_all_all(args):
9b7618ae
 	s3 = S3(Config())
b819c70c
 
3cc025ae
 	response = s3.list_all_buckets()
 
 	for bucket in response["list"]:
b819c70c
 		subcmd_bucket_list(s3, S3Uri("s3://" + bucket["Name"]))
3cc025ae
 		output("")
 
 
b819c70c
 def subcmd_buckets_list_all(s3):
 	response = s3.list_all_buckets()
 	for bucket in response["list"]:
 		output("%s  s3://%s" % (
 			formatDateTime(bucket["CreationDate"]),
 			bucket["Name"],
 			))
 
 def subcmd_bucket_list(s3, uri):
 	bucket = uri.bucket()
 	object = uri.object()
 
6513d3d9
 	output("Bucket 's3://%s':" % bucket)
f4555c39
 	if object.endswith('*'):
 		object = object[:-1]
3cc025ae
 	try:
f4555c39
 		response = s3.bucket_list(bucket, prefix = object)
3cc025ae
 	except S3Error, e:
75405909
 		if S3.codes.has_key(e.info["Code"]):
 			error(S3.codes[e.info["Code"]] % bucket)
3cc025ae
 			return
 		else:
 			raise
 	for object in response["list"]:
9b7618ae
 		size, size_coeff = formatSize(object["Size"], Config().human_readable_sizes)
3cc025ae
 		output("%s  %s%s  %s" % (
 			formatDateTime(object["LastModified"]),
 			str(size).rjust(8), size_coeff.ljust(1),
ec50b5a7
 			uri.compose_uri(bucket, object["Key"]),
3cc025ae
 			))
 
 def cmd_bucket_create(args):
b819c70c
 	uri = S3Uri(args[0])
 	if not uri.type == "s3" or not uri.has_bucket() or uri.has_object():
 		raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % args[0])
3cc025ae
 	try:
b819c70c
 		s3 = S3(Config())
dc758146
 		response = s3.bucket_create(uri.bucket(), cfg.bucket_location)
3cc025ae
 	except S3Error, e:
75405909
 		if S3.codes.has_key(e.info["Code"]):
 			error(S3.codes[e.info["Code"]] % uri.bucket())
3cc025ae
 			return
 		else:
 			raise
b819c70c
 	output("Bucket '%s' created" % uri.bucket())
3cc025ae
 
 def cmd_bucket_delete(args):
b819c70c
 	uri = S3Uri(args[0])
 	if not uri.type == "s3" or not uri.has_bucket() or uri.has_object():
 		raise ParameterError("Expecting S3 URI with just the bucket name set instead of '%s'" % args[0])
3cc025ae
 	try:
b819c70c
 		s3 = S3(Config())
 		response = s3.bucket_delete(uri.bucket())
3cc025ae
 	except S3Error, e:
75405909
 		if S3.codes.has_key(e.info["Code"]):
 			error(S3.codes[e.info["Code"]] % uri.bucket())
3cc025ae
 			return
 		else:
 			raise
b819c70c
 	output("Bucket '%s' removed" % uri.bucket())
f4555c39
 
3cc025ae
def cmd_object_put(args):
	"""Upload one or more local files to S3.

	The last argument is the destination S3 URI; all preceding
	arguments are local filenames. Files may be GPG-encrypted into a
	temp file before upload when Config().encrypt is set.
	"""
	s3 = S3(Config())

	## Last argument is the target URI, the rest must be local files
	uri_arg = args.pop()
	check_args_type(args, 'file', 'filename')

	uri = S3Uri(uri_arg)
	if uri.type != "s3":
		raise ParameterError("Expecting S3 URI instead of '%s'" % uri_arg)

	## Refuse multiple files + explicit object name unless --force turns
	## the object name into a common prefix.
	if len(args) > 1 and uri.object() != "" and not Config().force:
		error("When uploading multiple files the last argument must")
		error("be a S3 URI specifying just the bucket name")
		error("WITHOUT object name!")
		error("Alternatively use --force argument and the specified")
		error("object name will be prefixed to all stored filenames.")
		sys.exit(1)

	seq = 0
	total = len(args)
	for file in args:
		seq += 1
		uri_arg_final = str(uri)
		if len(args) > 1 or uri.object() == "":
			## No single explicit object name applies -> append the
			## local basename to the bucket/prefix URI.
			uri_arg_final += os.path.basename(file)

		uri_final = S3Uri(uri_arg_final)
		extra_headers = {}
		real_filename = file
		if Config().encrypt:
			## Encrypt to a temp file; the gpgenc marker header lets
			## 'get' know to decrypt on download.
			exitcode, real_filename, extra_headers["x-amz-meta-s3tools-gpgenc"] = gpg_encrypt(file)
		try:
			response = s3.object_put_uri(real_filename, uri_final, extra_headers)
		except S3UploadError, e:
			error("Upload of '%s' failed too many times. Skipping that file." % real_filename)
			continue
		except InvalidFileError, e:
			warning("File can not be uploaded: %s" % e)
			continue
		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
		output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
			(file, uri_final, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
			seq, total))
		if Config().acl_public:
			output("Public URL of the object is: %s" %
				(uri_final.public_url()))
		if Config().encrypt and real_filename != file:
			## Remove the temporary encrypted copy made above
			debug("Removing temporary encrypted file: %s" % real_filename)
			os.remove(real_filename)
3cc025ae
 
def cmd_object_get(args):
	"""Download one or more S3 objects.

	Accepted argument shapes:
	- one or more S3 URIs (saved under their object names),
	- S3 URIs followed by a destination directory,
	- one S3 URI followed by a destination filename (or "-" for stdout).
	"""
	s3 = S3(Config())

	if not S3Uri(args[0]).type == 's3':
		raise ParameterError("Expecting S3 URI instead of '%s'" % args[0])

	destination_dir = None
	destination_file = None
	if len(args) > 1:
		if S3Uri(args[-1]).type == 's3':
			# all S3, use object names to local dir
			check_args_type(args, type="s3", verbose_type="S3 URI")	# May raise ParameterError
		else:
			if (len(args) > 2):
				# last must be dir, all preceding S3
				if not os.path.isdir(args[-1]):
					raise ParameterError("Last parameter must be a directory")
				destination_dir = args.pop()
				check_args_type(args, type="s3", verbose_type="S3 URI")	# May raise ParameterError
			else:
				# last must be a dir or a filename
				if os.path.isdir(args[-1]):
					destination_dir = args.pop()
				else:
					destination_file = args.pop()

	while (len(args)):
		uri_arg = args.pop(0)
		uri = S3Uri(uri_arg)

		if destination_file:
			destination = destination_file
		elif destination_dir:
			destination = destination_dir + "/" + uri.object()
		else:
			# By default the destination filename is the object name
			destination = uri.object()
		if destination == "-":
			## stdout
			dst_stream = sys.stdout
		else:
			## File
			if not Config().force and os.path.exists(destination):
				raise ParameterError("File %s already exists. Use --force to overwrite it" % destination)
			try:
				dst_stream = open(destination, "wb")
			except IOError, e:
				error("Skipping %s: %s" % (destination, e.strerror))
				continue
		response = s3.object_get_uri(uri, dst_stream)
		if response["headers"].has_key("x-amz-meta-s3tools-gpgenc"):
			## NOTE(review): decrypt/stat on 'destination' assumes a real
			## file -- an encrypted object fetched to "-" would break here.
			gpg_decrypt(destination, response["headers"]["x-amz-meta-s3tools-gpgenc"])
			response["size"] = os.stat(destination)[6]
		if destination != "-":
			speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
			output("Object %s saved as '%s' (%d bytes in %0.1f seconds, %0.2f %sB/s)" %
				(uri, destination, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1]))
3cc025ae
 
 def cmd_object_del(args):
9b7618ae
 	s3 = S3(Config())
b819c70c
 
7c07fd66
 	while (len(args)):
 		uri_arg = args.pop(0)
 		uri = S3Uri(uri_arg)
 		if uri.type != "s3" or not uri.has_object():
 			raise ParameterError("Expecting S3 URI instead of '%s'" % uri_arg)
b819c70c
 
7c07fd66
 		response = s3.object_delete_uri(uri)
 		output("Object %s deleted" % uri)
3cc025ae
 
e5c6f6c5
 def cmd_info(args):
 	s3 = S3(Config())
 
 	while (len(args)):
 		uri_arg = args.pop(0)
 		uri = S3Uri(uri_arg)
 		if uri.type != "s3" or not uri.has_bucket():
 			raise ParameterError("Expecting S3 URI instead of '%s'" % uri_arg)
 
 		try:
 			if uri.has_object():
 				info = s3.object_info(uri)
 				output("%s (object):" % uri.uri())
 				output("   File size: %s" % info['headers']['content-length'])
 				output("   Last mod:  %s" % info['headers']['last-modified'])
 				output("   MIME type: %s" % info['headers']['content-type'])
 				output("   MD5 sum:   %s" % info['headers']['etag'].strip('"'))
 			else:
 				info = s3.bucket_info(uri)
 				output("%s (bucket):" % uri.uri())
 				output("   Location:  %s" % info['bucket-location'])
 			acl = s3.get_acl(uri)
 			for user in acl.keys():
 				output("   ACL:       %s: %s" % (user, acl[user]))
 		except S3Error, e:
 			if S3.codes.has_key(e.info["Code"]):
 				error(S3.codes[e.info["Code"]] % uri.bucket())
 				return
 			else:
 				raise
 
01fe3a25
 def _get_filelist_local(local_uri):
0d91ff3f
 	output("Compiling list of local files...")
01fe3a25
 	local_path = local_uri.path()
 	if os.path.isdir(local_path):
 		loc_base = os.path.join(local_path, "")
 		filelist = os.walk(local_path)
a368faf1
 	else:
 		loc_base = "./"
01fe3a25
 		filelist = [( '.', [], [local_path] )]
0d91ff3f
 	loc_base_len = len(loc_base)
 	loc_list = {}
a368faf1
 	for root, dirs, files in filelist:
0d91ff3f
 		## TODO: implement explicit exclude
 		for f in files:
 			full_name = os.path.join(root, f)
 			if not os.path.isfile(full_name):
 				continue
d9777ac6
 			if os.path.islink(full_name):
 				## Synchronize symlinks... one day
 				## for now skip over
 				continue
0d91ff3f
 			file = full_name[loc_base_len:]
 			sr = os.stat_result(os.lstat(full_name))
 			loc_list[file] = {
 				'full_name' : full_name,
 				'size' : sr.st_size, 
 				'mtime' : sr.st_mtime,
 				## TODO: Possibly more to save here...
 			}
01fe3a25
 	return loc_list
 
 def _get_filelist_remote(remote_uri):
0d91ff3f
 	output("Retrieving list of remote files...")
 
01fe3a25
 	s3 = S3(Config())
 	response = s3.bucket_list(remote_uri.bucket(), prefix = remote_uri.object())
 
 	rem_base = remote_uri.object()
0d91ff3f
 	rem_base_len = len(rem_base)
 	rem_list = {}
 	for object in response['list']:
6513d3d9
 		key = object['Key'][rem_base_len:]
0d91ff3f
 		rem_list[key] = { 
 			'size' : int(object['Size']),
 			# 'mtime' : dateS3toUnix(object['LastModified']), ## That's upload time, not our lastmod time :-(
 			'md5' : object['ETag'][1:-1],
6513d3d9
 			'object_key' : object['Key']
0d91ff3f
 		}
01fe3a25
 	return rem_list
 	
 def _compare_filelists(src_list, dst_list, src_is_local_and_dst_is_remote):
0d91ff3f
 	output("Verifying checksums...")
d5e87cdf
 	cfg = Config()
01fe3a25
 	exists_list = {}
8829e891
 	exclude_list = {}
d5e87cdf
 	if cfg.debug_syncmatch:
 		logging.root.setLevel(logging.DEBUG)
01fe3a25
 	for file in src_list.keys():
d5e87cdf
 		if not cfg.debug_syncmatch:
 			debug("CHECK: %s" % (os.sep + file))
8829e891
 		excluded = False
d5e87cdf
 		for r in cfg.exclude:
8829e891
 			## all paths start with '/' from the base dir
 			if r.search(os.sep + file):
 				## Can't directly 'continue' to the outer loop
 				## therefore this awkward excluded switch :-(
 				excluded = True
d5e87cdf
 				if cfg.debug_syncmatch:
 					debug("EXCL: %s" % (os.sep + file))
 					debug("RULE: '%s'" % (cfg.debug_exclude[r]))
 				else:
 					info("%s: excluded" % file)
8829e891
 				break
 		if excluded:
 			exclude_list = src_list[file]
 			del(src_list[file])
 			continue
d5e87cdf
 		else:
 			debug("PASS: %s" % (os.sep + file))
01fe3a25
 		if dst_list.has_key(file):
0d91ff3f
 			## Check size first
01fe3a25
 			if dst_list[file]['size'] == src_list[file]['size']:
d5e87cdf
 				#debug("%s same size: %s" % (file, dst_list[file]['size']))
0d91ff3f
 				## ... same size, check MD5
01fe3a25
 				if src_is_local_and_dst_is_remote:
 					src_md5 = Utils.hash_file_md5(src_list[file]['full_name'])
 					dst_md5 = dst_list[file]['md5']
 				else:
 					src_md5 = src_list[file]['md5']
 					dst_md5 = Utils.hash_file_md5(dst_list[file]['full_name'])
 				if src_md5 == dst_md5:
d5e87cdf
 					#debug("%s md5 matches: %s" % (file, dst_md5))
0d91ff3f
 					## Checksums are the same.
01fe3a25
 					## Remove from source-list, all that is left there will be transferred
d5e87cdf
 					debug("IGNR: %s (transfer not needed: MD5 OK, Size OK)" % file)
01fe3a25
 					exists_list[file] = src_list[file]
 					del(src_list[file])
0d91ff3f
 				else:
d5e87cdf
 					debug("XFER: %s (md5 mismatch: src=%s dst=%s)" % (file, src_md5, dst_md5))
0d91ff3f
 			else:
d5e87cdf
 				debug("XFER: %s (size mismatch: src=%s dst=%s)" % (file, src_list[file]['size'], dst_list[file]['size']))
0d91ff3f
                         
01fe3a25
 			## Remove from destination-list, all that is left there will be deleted
d5e87cdf
 			#debug("%s removed from destination list" % file)
01fe3a25
 			del(dst_list[file])
d5e87cdf
 	if cfg.debug_syncmatch:
 		warning("Exiting because of --debug-syncmatch")
 		sys.exit(0)
 
8829e891
 	return src_list, dst_list, exists_list, exclude_list
01fe3a25
 
def cmd_sync_remote2local(src, dst):
	"""Download changed files from S3 prefix 'src' into local tree 'dst'.

	Relies on the module-wide 'cfg' Config instance set up in main().
	"""
	def _parse_attrs_header(attrs_header):
		## Parse "key1:val1/key2:val2/..." (the x-amz-meta-s3cmd-attrs
		## header written by the upload side) into a dict of strings.
		attrs = {}
		for attr in attrs_header.split("/"):
			key, val = attr.split(":")
			attrs[key] = val
		return attrs
		
	s3 = S3(Config())

	src_uri = S3Uri(src)
	dst_uri = S3Uri(dst)

	rem_list = _get_filelist_remote(src_uri)
	rem_count = len(rem_list)

	loc_list = _get_filelist_local(dst_uri)
	loc_count = len(loc_list)
	
	output("Found %d remote files, %d local files" % (rem_count, loc_count))

	## Mutates both lists: rem_list keeps files to download,
	## loc_list keeps local files missing remotely.
	_compare_filelists(rem_list, loc_list, False)

	output("Summary: %d remote files to download, %d local files to delete" % (len(rem_list), len(loc_list)))

	for file in loc_list:
		if cfg.delete_removed:
			## NOTE(review): actual deletion is commented out -- only the
			## message is printed; confirm whether this is intentional.
			# os.unlink(file)
			output("deleted '%s'" % file)
		else:
			output("not-deleted '%s'" % file)

	total_size = 0
	total_count = len(rem_list)
	total_elapsed = 0.0
	timestamp_start = time.time()
	seq = 0
	dir_cache = {}	# maps directory path -> mkdir_with_parents() result
	src_base = src_uri.uri()
	dst_base = dst_uri.path()
	if not src_base[-1] == "/": src_base += "/"
	file_list = rem_list.keys()
	file_list.sort()
	for file in file_list:
		seq += 1
		uri = S3Uri(src_base + file)
		dst_file = dst_base + file
		try:
			dst_dir = os.path.dirname(dst_file)
			if not dir_cache.has_key(dst_dir):
				dir_cache[dst_dir] = Utils.mkdir_with_parents(dst_dir)
			if dir_cache[dst_dir] == False:
				warning("%s: destination directory not writable: %s" % (file, dst_dir))
				continue
			try:
				## O_EXCL makes creation fail when the file already
				## exists, unless --force requested truncation instead.
				open_flags = os.O_CREAT
				if cfg.force:
					open_flags |= os.O_TRUNC
				else:
					open_flags |= os.O_EXCL

				debug("dst_file=%s" % dst_file)
				# This will have failed should the file exist
				os.close(os.open(dst_file, open_flags))
				# Yeah I know there is a race condition here. Sadly I don't know how to open() in exclusive mode.
				dst_stream = open(dst_file, "wb")
				response = s3.object_get_uri(uri, dst_stream)
				dst_stream.close()
				if response['headers'].has_key('x-amz-meta-s3cmd-attrs') and cfg.preserve_attrs:
					## Restore mode/mtime/atime stored at upload time
					attrs = _parse_attrs_header(response['headers']['x-amz-meta-s3cmd-attrs'])
					if attrs.has_key('mode'):
						os.chmod(dst_file, int(attrs['mode']))
					if attrs.has_key('mtime') or attrs.has_key('atime'):
						mtime = attrs.has_key('mtime') and int(attrs['mtime']) or int(time.time())
						atime = attrs.has_key('atime') and int(attrs['atime']) or int(time.time())
						os.utime(dst_file, (atime, mtime))
					## FIXME: uid/gid / uname/gname handling comes here! TODO
			except OSError, e:
				try: dst_stream.close()
				except: pass
				if e.errno == errno.EEXIST:
					warning("%s exists - not overwriting" % (dst_file))
					continue
				if e.errno in (errno.EPERM, errno.EACCES):
					warning("%s not writable: %s" % (dst_file, e.strerror))
					continue
				raise e
			except KeyboardInterrupt:
				try: dst_stream.close()
				except: pass
				warning("Exiting after keyboard interrupt")
				return
			except Exception, e:
				try: dst_stream.close()
				except: pass
				error("%s: %s" % (file, e))
				continue
			# We have to keep repeating this call because 
			# Python 2.4 doesn't support try/except/finally
			# construction :-(
			try: dst_stream.close()
			except: pass
		except S3DownloadError, e:
			error("%s: download failed too many times. Skipping that file." % file)
			continue
		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
		output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
			(uri, dst_file, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
			seq, total_count))
		total_size += response["size"]

	total_elapsed = time.time() - timestamp_start
	speed_fmt = formatSize(total_size/total_elapsed, human_readable = True, floating_point = True)
	output("Done. Downloaded %d bytes in %0.1f seconds, %0.2f %sB/s" % 
	       (total_size, total_elapsed, speed_fmt[0], speed_fmt[1]))
01fe3a25
 
 def cmd_sync_local2remote(src, dst):
 	def _build_attr_header(src):
 		attrs = {}
 		st = os.stat_result(os.stat(src))
 		for attr in cfg.preserve_attrs_list:
 			if attr == 'uname':
 				try:
 					val = pwd.getpwuid(st.st_uid).pw_name
 				except KeyError:
 					attr = "uid"
 					val = st.st_uid
 					warning("%s: Owner username not known. Storing UID=%d instead." % (src, val))
 			elif attr == 'gname':
 				try:
 					val = grp.getgrgid(st.st_gid).gr_name
 				except KeyError:
 					attr = "gid"
 					val = st.st_gid
 					warning("%s: Owner groupname not known. Storing GID=%d instead." % (src, val))
 			else:
 				val = getattr(st, 'st_' + attr)
 			attrs[attr] = val
 		result = ""
 		for k in attrs: result += "%s:%s/" % (k, attrs[k])
 		return { 'x-amz-meta-s3cmd-attrs' : result[:-1] }
 
a91f6982
 	s3 = S3(cfg)
 
 	if cfg.encrypt:
 		error("S3cmd 'sync' doesn't support GPG encryption, sorry.")
 		error("Either use unconditional 's3cmd put --recursive'")
 		error("or disable encryption with --no-encryption parameter.")
 		sys.exit(1)
 
01fe3a25
 
 	src_uri = S3Uri(src)
 	dst_uri = S3Uri(dst)
 
 	loc_list = _get_filelist_local(src_uri)
 	loc_count = len(loc_list)
 	
 	rem_list = _get_filelist_remote(dst_uri)
 	rem_count = len(rem_list)
 
 	output("Found %d local files, %d remote files" % (loc_count, rem_count))
 
 	_compare_filelists(loc_list, rem_list, True)
0d91ff3f
 
 	output("Summary: %d local files to upload, %d remote files to delete" % (len(loc_list), len(rem_list)))
ed27a45e
 
d9777ac6
 	for file in rem_list:
 		uri = S3Uri("s3://" + dst_uri.bucket()+"/"+rem_list[file]['object_key'])
 		if cfg.delete_removed:
0d91ff3f
 			response = s3.object_delete_uri(uri)
d9777ac6
 			output("deleted '%s'" % uri)
 		else:
 			output("not-deleted '%s'" % uri)
0d91ff3f
 
 	total_size = 0
d9777ac6
 	total_count = len(loc_list)
63ba9974
 	total_elapsed = 0.0
 	timestamp_start = time.time()
d9777ac6
 	seq = 0
0d91ff3f
 	dst_base = dst_uri.uri()
 	if not dst_base[-1] == "/": dst_base += "/"
 	file_list = loc_list.keys()
 	file_list.sort()
 	for file in file_list:
d9777ac6
 		seq += 1
0d91ff3f
 		src = loc_list[file]['full_name']
 		uri = S3Uri(dst_base + file)
a368faf1
 		if cfg.preserve_attrs:
 			attr_header = _build_attr_header(src)
 			debug(attr_header)
63ba9974
 		try:
 			response = s3.object_put_uri(src, uri, attr_header)
 		except S3UploadError, e:
ed27a45e
 			error("%s: upload failed too many times. Skipping that file." % src)
63ba9974
 			continue
b8720142
 		except InvalidFileError, e:
 			warning("File can not be uploaded: %s" % e)
 			continue
63ba9974
 		speed_fmt = formatSize(response["speed"], human_readable = True, floating_point = True)
 		output("File '%s' stored as %s (%d bytes in %0.1f seconds, %0.2f %sB/s) [%d of %d]" %
 			(src, uri, response["size"], response["elapsed"], speed_fmt[0], speed_fmt[1],
 			seq, total_count))
0d91ff3f
 		total_size += response["size"]
63ba9974
 
 	total_elapsed = time.time() - timestamp_start
 	speed_fmt = formatSize(total_size/total_elapsed, human_readable = True, floating_point = True)
 	output("Done. Uploaded %d bytes in %0.1f seconds, %0.2f %sB/s" % 
 	       (total_size, total_elapsed, speed_fmt[0], speed_fmt[1]))
0d91ff3f
 
01fe3a25
 def cmd_sync(args):
 	src = args.pop(0)
 	dst = args.pop(0)
 	if (len(args)):
 		raise ParameterError("Too many parameters! Expected: %s" % commands['sync']['param'])
 
ed27a45e
 	if S3Uri(src).type == "s3" and not src.endswith('/'):
 		src += "/"
 
01fe3a25
 	if not dst.endswith('/'):
 		dst += "/"
 
 	if S3Uri(src).type == "file" and S3Uri(dst).type == "s3":
 		return cmd_sync_local2remote(src, dst)
 	if S3Uri(src).type == "s3" and S3Uri(dst).type == "file":
 		return cmd_sync_remote2local(src, dst)
 	
8ec1807f
 def resolve_list(lst, args):
 	retval = []
 	for item in lst:
 		retval.append(item % args)
 	return retval
 
def gpg_command(command, passphrase = ""):
	## Run a GPG command (list or string, as accepted by os.popen4),
	## feeding 'passphrase' on stdin when the command contains
	## "--passphrase-fd" (GPG then reads the passphrase from fd 0).
	## NOTE(review): returns the raw os.wait() status, which is an
	## encoded value -- it equals 0 only on a clean exit; callers here
	## only compare it against 0.
	p_in, p_out = os.popen4(command)
	if command.count("--passphrase-fd"):
		p_in.write(passphrase+"\n")
		p_in.flush()
	## Echo GPG's combined stdout/stderr through our logger
	for line in p_out:
		info(line.strip())
	p_pid, p_exitcode = os.wait()
	return p_exitcode
 
 def gpg_encrypt(filename):
 	tmp_filename = Utils.mktmpfile()
 	args = {
 		"gpg_command" : cfg.gpg_command,
 		"passphrase_fd" : "0",
 		"input_file" : filename, 
 		"output_file" : tmp_filename,
 	}
 	info("Encrypting file %(input_file)s to %(output_file)s..." % args)
 	command = resolve_list(cfg.gpg_encrypt.split(" "), args)
 	code = gpg_command(command, cfg.gpg_passphrase)
 	return (code, tmp_filename, "gpg")
 
49731b40
 def gpg_decrypt(filename, gpgenc_header = "", in_place = True):
8ec1807f
 	tmp_filename = Utils.mktmpfile(filename)
 	args = {
 		"gpg_command" : cfg.gpg_command,
 		"passphrase_fd" : "0",
 		"input_file" : filename, 
 		"output_file" : tmp_filename,
 	}
 	info("Decrypting file %(input_file)s to %(output_file)s..." % args)
 	command = resolve_list(cfg.gpg_decrypt.split(" "), args)
 	code = gpg_command(command, cfg.gpg_passphrase)
49731b40
 	if code == 0 and in_place:
8ec1807f
 		debug("Renaming %s to %s" % (tmp_filename, filename))
 		os.unlink(filename)
 		os.rename(tmp_filename, filename)
49731b40
 		tmp_filename = filename
 	return (code, tmp_filename)
8ec1807f
 
5a736f08
 def run_configure(config_file):
 	cfg = Config()
 	options = [
796e95db
 		("access_key", "Access Key", "Access key and Secret key are your identifiers for Amazon S3"),
5a736f08
 		("secret_key", "Secret Key"),
49731b40
 		("gpg_passphrase", "Encryption password", "Encryption password is used to protect your files from reading\nby unauthorized persons while in transfer to S3"),
 		("gpg_command", "Path to GPG program"),
d35b41f4
 		("use_https", "Use HTTPS protocol", "When using secure HTTPS protocol all communication with Amazon S3\nservers is protected from 3rd party eavesdropping. This method is\nslower than plain HTTP and can't be used if you're behind a proxy"),
8a4a98b1
 		("proxy_host", "HTTP Proxy server name", "On some networks all internet access must go through a HTTP proxy.\nTry setting it here if you can't conect to S3 directly"),
 		("proxy_port", "HTTP Proxy server port"),
5a736f08
 		]
8a4a98b1
 	## Option-specfic defaults
49731b40
 	if getattr(cfg, "gpg_command") == "":
 		setattr(cfg, "gpg_command", find_executable("gpg"))
 
8a4a98b1
 	if getattr(cfg, "proxy_host") == "" and os.getenv("http_proxy"):
 		re_match=re.match("(http://)?([^:]+):(\d+)", os.getenv("http_proxy"))
 		if re_match:
 			setattr(cfg, "proxy_host", re_match.groups()[1])
 			setattr(cfg, "proxy_port", re_match.groups()[2])
 
5a736f08
 	try:
 		while 1:
 			output("\nEnter new values or accept defaults in brackets with Enter.")
49731b40
 			output("Refer to user manual for detailed description of all options.")
5a736f08
 			for option in options:
 				prompt = option[1]
d35b41f4
 				## Option-specific handling
 				if option[0] == 'proxy_host' and getattr(cfg, 'use_https') == True:
 					setattr(cfg, option[0], "")
 					continue
 				if option[0] == 'proxy_port' and getattr(cfg, 'proxy_host') == "":
 					setattr(cfg, option[0], 0)
 					continue
 
5a736f08
 				try:
 					val = getattr(cfg, option[0])
d35b41f4
 					if type(val) is bool:
 						val = val and "Yes" or "No"
5a736f08
 					if val not in (None, ""):
 						prompt += " [%s]" % val
 				except AttributeError:
 					pass
 
 				if len(option) >= 3:
49731b40
 					output("\n%s" % option[2])
5a736f08
 
 				val = raw_input(prompt + ": ")
 				if val != "":
d35b41f4
 					if type(getattr(cfg, option[0])) is bool:
 						# Turn 'Yes' into True, everything else into False
 						val = val.lower().startswith('y')
5a736f08
 					setattr(cfg, option[0], val)
 			output("\nNew settings:")
 			for option in options:
 				output("  %s: %s" % (option[1], getattr(cfg, option[0])))
18485e25
 			val = raw_input("\nTest access with supplied credentials? [Y/n] ")
 			if val.lower().startswith("y") or val == "":
 				try:
 					output("Please wait...")
 					S3(Config()).bucket_list("", "")
49731b40
 					output("Success. Your access key and secret key worked fine :-)")
 
 					output("\nNow verifying that encryption works...")
aa1c976f
 					if not getattr(cfg, "gpg_command") or not getattr(cfg, "gpg_passphrase"):
 						output("Not configured. Never mind.")
49731b40
 					else:
aa1c976f
 						if not getattr(cfg, "gpg_command"):
 							raise Exception("Path to GPG program not set")
 						if not os.path.isfile(getattr(cfg, "gpg_command")):
 							raise Exception("GPG program not found")
 						filename = Utils.mktmpfile()
 						f = open(filename, "w")
 						f.write(os.sys.copyright)
 						f.close()
 						ret_enc = gpg_encrypt(filename)
 						ret_dec = gpg_decrypt(ret_enc[1], ret_enc[2], False)
 						hash = [
 							Utils.hash_file_md5(filename),
 							Utils.hash_file_md5(ret_enc[1]),
 							Utils.hash_file_md5(ret_dec[1]),
 						]
 						os.unlink(filename)
 						os.unlink(ret_enc[1])
 						os.unlink(ret_dec[1])
 						if hash[0] == hash[2] and hash[0] != hash[1]:
 							output ("Success. Encryption and decryption worked fine :-)") 
 						else:
 							raise Exception("Encryption verification error.")
49731b40
 
 				except Exception, e:
18485e25
 					error("Test failed: %s" % (e))
 					val = raw_input("\nRetry configuration? [Y/n] ")
 					if val.lower().startswith("y") or val == "":
 						continue
49731b40
 					
18485e25
 
 			val = raw_input("\nSave settings? [y/N] ")
 			if val.lower().startswith("y"):
5a736f08
 				break
18485e25
 			val = raw_input("Retry configuration? [Y/n] ")
 			if val.lower().startswith("n"):
 				raise EOFError()
ac9940ec
 
 		## Overwrite existing config file, make it user-readable only
 		old_mask = os.umask(0077)
 		try:
 			os.remove(config_file)
 		except OSError, e:
 			if e.errno != errno.ENOENT:
 				raise
5a736f08
 		f = open(config_file, "w")
ac9940ec
 		os.umask(old_mask)
5a736f08
 		cfg.dump_config(f)
 		f.close()
 		output("Configuration saved to '%s'" % config_file)
 
 	except (EOFError, KeyboardInterrupt):
 		output("\nConfiguration aborted. Changes were NOT saved.")
 		return
 	
 	except IOError, e:
 		error("Writing config file failed: %s: %s" % (config_file, e.strerror))
1f7d2de3
 		sys.exit(1)
5a736f08
 
2d7d5543
 def process_exclude_from_file(exf, exclude_array):
 	exfi = open(exf, "rt")
 	for ex in exfi:
 		ex = ex.strip()
 		if re.match("^#", ex) or re.match("^\s*$", ex):
 			continue
 		debug("adding rule: %s" % ex)
 		exclude_array.append(ex)
 
5a736f08
commands = {}
## Master table of s3cmd subcommands ('commands' dict is filled from it
## in main()). Fields per entry:
##   cmd   - subcommand name as typed on the command line
##   label - human-readable description shown in --help
##   param - parameter synopsis shown in --help
##   func  - handler function invoked with the remaining arguments
##   argc  - minimum number of arguments the subcommand requires
commands_list = [
	{"cmd":"mb", "label":"Make bucket", "param":"s3://BUCKET", "func":cmd_bucket_create, "argc":1},
	{"cmd":"rb", "label":"Remove bucket", "param":"s3://BUCKET", "func":cmd_bucket_delete, "argc":1},
	{"cmd":"ls", "label":"List objects or buckets", "param":"[s3://BUCKET[/PREFIX]]", "func":cmd_ls, "argc":0},
	{"cmd":"la", "label":"List all object in all buckets", "param":"", "func":cmd_buckets_list_all_all, "argc":0},
	{"cmd":"put", "label":"Put file into bucket", "param":"FILE [FILE...] s3://BUCKET[/PREFIX]", "func":cmd_object_put, "argc":2},
	{"cmd":"get", "label":"Get file from bucket", "param":"s3://BUCKET/OBJECT LOCAL_FILE", "func":cmd_object_get, "argc":1},
	{"cmd":"del", "label":"Delete file from bucket", "param":"s3://BUCKET/OBJECT", "func":cmd_object_del, "argc":1},
	#{"cmd":"mkdir", "label":"Make a virtual S3 directory", "param":"s3://BUCKET/path/to/dir", "func":cmd_mkdir, "argc":1},
	{"cmd":"sync", "label":"Synchronize a directory tree to S3", "param":"LOCAL_DIR s3://BUCKET[/PREFIX] or s3://BUCKET[/PREFIX] LOCAL_DIR", "func":cmd_sync, "argc":2},
	{"cmd":"du", "label":"Disk usage by buckets", "param":"[s3://BUCKET[/PREFIX]]", "func":cmd_du, "argc":0},
	{"cmd":"info", "label":"Get various information about Buckets or Objects", "param":"s3://BUCKET[/OBJECT]", "func":cmd_info, "argc":1},
	#{"cmd":"setacl", "label":"Modify Access control list for Bucket or Object", "param":"s3://BUCKET[/OBJECT]", "func":cmd_setacl, "argc":1},
	]
3cc025ae
 
f4555c39
 def format_commands(progname):
 	help = "Commands:\n"
5a736f08
 	for cmd in commands_list:
 		help += "  %s\n      %s %s %s\n" % (cmd["label"], progname, cmd["cmd"], cmd["param"])
f4555c39
 	return help
 
9b7618ae
 class OptionMimeType(Option):
 	"""optparse Option subclass adding a "mimetype" value type.

 	Values must look like "major/minor" (e.g. "text/plain"),
 	matched case-insensitively; anything else raises
 	OptionValueError during option parsing.
 	"""
 	def check_mimetype(option, opt, value):
 		# Reject anything that is not a "token/token" MIME type.
 		if not re.match("^[a-z0-9]+/[a-z0-9+\.-]+$", value, re.IGNORECASE):
 			raise OptionValueError("option %s: invalid MIME-Type format: %r" % (opt, value))
 		return value

 	TYPES = Option.TYPES + ("mimetype",)
 	TYPE_CHECKER = copy(Option.TYPE_CHECKER)
 	TYPE_CHECKER["mimetype"] = check_mimetype
 
f4555c39
 class MyHelpFormatter(IndentedHelpFormatter):
 	"""Help formatter that prints the epilog verbatim.

 	The stock optparse formatter re-wraps the epilog text; here it is
 	emitted as-is, surrounded by blank lines, so the pre-formatted
 	command list from format_commands() survives intact.
 	"""
 	def format_epilog(self, epilog):
 		if not epilog:
 			return ""
 		return "\n" + epilog + "\n"
 
4a52baa8
 def main():
 	"""Command-line entry point.

 	Parses options and arguments, loads the s3cmd configuration file,
 	applies command-line overrides to it, then dispatches to the handler
 	registered for the requested command in the "commands" dict.
 	Every error path terminates the process via sys.exit(); this
 	function never returns a meaningful value.
 	"""
87c0b03a
 	## cfg is read by the cmd_* handlers defined elsewhere in this file
 	global cfg
1f7d2de3
 	## Refuse to run on interpreters older than 2.4
 	if float("%d.%d" %(sys.version_info[0], sys.version_info[1])) < 2.4:
 		sys.stderr.write("ERROR: Python 2.4 or higher required, sorry.\n")
 		sys.exit(1)
3cc025ae
 
5a736f08
 	## Populate "commands" from "commands_list"
 	for cmd in commands_list:
 		if cmd.has_key("cmd"):
 			commands[cmd["cmd"]] = cmd
 
9b7618ae
 	## Remember the built-in default so we can tell later whether
 	## -v/-d was actually given on the command line
 	default_verbosity = Config().verbosity
f4555c39
 	optparser = OptionParser(option_class=OptionMimeType, formatter=MyHelpFormatter())
9b7618ae
 	#optparser.disable_interspersed_args()
747ddb2a
 
ca168590
 	## Config file defaults to ~/.s3cfg when $HOME is known
 	if os.getenv("HOME"):
 		optparser.set_defaults(config=os.getenv("HOME")+"/.s3cfg")
 
3cc025ae
 	optparser.set_defaults(verbosity = default_verbosity)
747ddb2a
 
09b29caf
 	optparser.add_option(      "--configure", dest="run_configure", action="store_true", help="Invoke interactive (re)configuration tool.")
747ddb2a
 	optparser.add_option("-c", "--config", dest="config", metavar="FILE", help="Config file name. Defaults to %default")
09b29caf
 	optparser.add_option(      "--dump-config", dest="dump_config", action="store_true", help="Dump current configuration after parsing config files and command line options and exit.")
 
29893afc
 	#optparser.add_option("-n", "--dry-run", dest="dry_run", action="store_true", help="Only show what should be uploaded or downloaded but don't actually do it. May still perform S3 requests to get bucket listings and other information though.")
03575797
 
8ec1807f
 	optparser.add_option("-e", "--encrypt", dest="encrypt", action="store_true", help="Encrypt files before uploading to S3.")
03575797
 	optparser.add_option(      "--no-encrypt", dest="encrypt", action="store_false", help="Don't encrypt files.")
9b7618ae
 	optparser.add_option("-f", "--force", dest="force", action="store_true", help="Force overwrite and other dangerous operations.")
7393bdba
 	optparser.add_option("-P", "--acl-public", dest="acl_public", action="store_true", help="Store objects with ACL allowing read for anyone.")
 	optparser.add_option(      "--acl-private", dest="acl_public", action="store_false", help="Store objects with default ACL allowing access for you only.")
0d91ff3f
 	optparser.add_option(      "--delete-removed", dest="delete_removed", action="store_true", help="Delete remote objects with no corresponding local file [sync]")
03575797
 	optparser.add_option(      "--no-delete-removed", dest="delete_removed", action="store_false", help="Don't delete remote objects.")
 	optparser.add_option("-p", "--preserve", dest="preserve_attrs", action="store_true", help="Preserve filesystem attributes (mode, ownership, timestamps). Default for [sync] command.")
 	optparser.add_option(      "--no-preserve", dest="preserve_attrs", action="store_false", help="Don't store FS attributes")
2d7d5543
 	optparser.add_option(      "--exclude", dest="exclude", action="append", metavar="GLOB", help="Filenames and paths matching GLOB will be excluded from sync")
 	optparser.add_option(      "--exclude-from", dest="exclude_from", action="append", metavar="FILE", help="Read --exclude GLOBs from FILE")
 	optparser.add_option(      "--rexclude", dest="rexclude", action="append", metavar="REGEXP", help="Filenames and paths matching REGEXP (regular expression) will be excluded from sync")
 	optparser.add_option(      "--rexclude-from", dest="rexclude_from", action="append", metavar="FILE", help="Read --rexclude REGEXPs from FILE")
3490bb40
 	optparser.add_option(      "--debug-syncmatch", "--debug-exclude", dest="debug_syncmatch", action="store_true", help="Output detailed information about remote vs. local filelist matching and --exclude processing and then exit")
8829e891
 
dc758146
 	optparser.add_option(      "--bucket-location", dest="bucket_location", help="Datacentre to create bucket in. Either EU or US (default)")
09b29caf
 
9b7618ae
 	## type="mimetype" is handled by the OptionMimeType class above
 	optparser.add_option("-m", "--mime-type", dest="default_mime_type", type="mimetype", metavar="MIME/TYPE", help="Default MIME-type to be set for objects stored.")
 	optparser.add_option("-M", "--guess-mime-type", dest="guess_mime_type", action="store_true", help="Guess MIME-type of files by their extension. Falls back to default MIME-Type as specified by --mime-type option")
09b29caf
 
 	optparser.add_option("-H", "--human-readable-sizes", dest="human_readable_sizes", action="store_true", help="Print sizes in human readable form.")
03575797
 
747ddb2a
 	optparser.add_option("-v", "--verbose", dest="verbosity", action="store_const", const=logging.INFO, help="Enable verbose output.")
09b29caf
 	optparser.add_option("-d", "--debug", dest="verbosity", action="store_const", const=logging.DEBUG, help="Enable debug output.")
ed61a5fa
 	optparser.add_option(      "--version", dest="show_version", action="store_true", help="Show s3cmd version (%s) and exit." % (PkgInfo.version))
3cc025ae
 
f4555c39
 	optparser.set_usage(optparser.usage + " COMMAND [parameters]")
09b29caf
 	optparser.set_description('S3cmd is a tool for managing objects in '+
f45580a2
 		'Amazon S3 storage. It allows for making and removing '+
 		'"buckets" and uploading, downloading and removing '+
 		'"objects" from these buckets.')
f4555c39
 	optparser.epilog = format_commands(optparser.get_prog_name())
ed61a5fa
 	optparser.epilog += ("\nSee program homepage for more information at\n%s\n" % PkgInfo.url)
f45580a2
 
3cc025ae
 	(options, args) = optparser.parse_args()
 
 	## Some mucking with logging levels to enable 
 	## debugging/verbose output for config file parser on request
6513d3d9
 	logging.basicConfig(level=options.verbosity,
 	                    format='%(levelname)s: %(message)s',
fa0dfb92
 	                    stream = _stderr)
3cc025ae
 	
747ddb2a
 	if options.show_version:
ed61a5fa
 		output("s3cmd version %s" % PkgInfo.version)
747ddb2a
 		sys.exit(0)
 
3cc025ae
 	## Now finally parse the config file
ca168590
 	if not options.config:
 		error("Can't find a config file. Please use --config option.")
 		sys.exit(1)
 
5a736f08
 	try:
 		cfg = Config(options.config)
 	except IOError, e:
 		## A missing/unreadable config file is fatal unless we're about
 		## to (re)create one with --configure
 		if options.run_configure:
 			cfg = Config()
 		else:
 			error("%s: %s"  % (options.config, e.strerror))
 			error("Configuration file not available.")
 			error("Consider using --configure parameter to create one.")
1f7d2de3
 			sys.exit(1)
3cc025ae
 
 	## And again some logging level adjustments
 	## according to configfile and command line parameters
 	if options.verbosity != default_verbosity:
5a736f08
 		cfg.verbosity = options.verbosity
 	logging.root.setLevel(cfg.verbosity)
9b7618ae
 
 	## Update Config with other parameters
5a736f08
 	for option in cfg.option_list():
 		try:
 			if getattr(options, option) != None:
 				debug("Updating %s -> %s" % (option, getattr(options, option)))
 				cfg.update_option(option, getattr(options, option))
 		except AttributeError:
 			## Some Config() options are not settable from command line
 			pass
 
2d7d5543
 	## Process GLOB (shell wildcard style) excludes
d5e87cdf
 	if options.exclude is None:
 		options.exclude = []
 
 	if options.exclude_from:
 		for exf in options.exclude_from:
 			debug("processing --exclude-from %s" % exf)
2d7d5543
 			process_exclude_from_file(exf, options.exclude)
d5e87cdf
 
 	if options.exclude:
 		for ex in options.exclude:
 			debug("processing rule: %s" % ex)
2d7d5543
 			## fnmatch.translate() turns the shell glob into a regexp
 			exc = re.compile(glob.fnmatch.translate(ex))
 			cfg.exclude.append(exc)
 			if options.debug_syncmatch:
 				## Keep the original pattern text so --debug-syncmatch
 				## can report which rule matched
 				cfg.debug_exclude[exc] = ex
 
 	## Process REGEXP style excludes
 	if options.rexclude is None:
 		options.rexclude = []
 
 	if options.rexclude_from:
 		for exf in options.rexclude_from:
 			debug("processing --rexclude-from %s" % exf)
 			process_exclude_from_file(exf, options.rexclude)
 
 	if options.rexclude:
 		for ex in options.rexclude:
 			debug("processing rule: %s" % ex)
d5e87cdf
 			exc = re.compile(ex)
 			cfg.exclude.append(exc)
 			if options.debug_syncmatch:
 				cfg.debug_exclude[exc] = ex
8829e891
 
8ec1807f
 	if cfg.encrypt and cfg.gpg_passphrase == "":
 		error("Encryption requested but no passphrase set in config file.")
 		error("Please re-run 's3cmd --configure' and supply it.")
 		sys.exit(1)
 
5a736f08
 	if options.dump_config:
 		cfg.dump_config(sys.stdout)
1f7d2de3
 		sys.exit(0)
5a736f08
 
 	if options.run_configure:
 		run_configure(options.config)
1f7d2de3
 		sys.exit(0)
3cc025ae
 
 	if len(args) < 1:
 		error("Missing command. Please run with --help for more information.")
1f7d2de3
 		sys.exit(1)
3cc025ae
 
fa0dfb92
 	## Unicodise all remaining arguments:
 	args = [unicodise(arg) for arg in args]
 
3cc025ae
 	command = args.pop(0)
 	try:
fa0dfb92
 		debug("Command: %s" % commands[command]["cmd"])
3cc025ae
 		## We must do this lookup in extra step to 
 		## avoid catching all KeyError exceptions
 		## from inner functions.
5a736f08
 		cmd_func = commands[command]["func"]
3cc025ae
 	except KeyError, e:
 		error("Invalid command: %s" % e)
1f7d2de3
 		sys.exit(1)
3cc025ae
 
5a736f08
 	if len(args) < commands[command]["argc"]:
3cc025ae
 		## NOTE(review): "paramters" is a typo in this user-visible
 		## message; left untouched here because it is runtime output
 		error("Not enough paramters for command '%s'" % command)
1f7d2de3
 		sys.exit(1)
3cc025ae
 
 	## Expected failure modes get a clean one-line error
 	## instead of falling through to the traceback reporter
 	try:
 		cmd_func(args)
 	except S3Error, e:
fa0dfb92
 		error("S3 error: %s" % e)
85baf810
 		sys.exit(1)
3cc025ae
 	except ParameterError, e:
fa0dfb92
 		error("Parameter problem: %s" % e)
85baf810
 		sys.exit(1)
3cc025ae
 
4a52baa8
 if __name__ == '__main__':
 	try:
 		## Our modules
 		## Keep them in try/except block to 
 		## detect any syntax errors in there
 		from S3 import PkgInfo
 		from S3.S3 import *
 		from S3.Config import Config
 		from S3.S3Uri import *
 		from S3 import Utils
 		from S3.Exceptions import *
fa0dfb92
 		from S3.Utils import unicodise
4a52baa8
 
 		main()
 		sys.exit(0)
2dad9f86
 	## Let deliberate sys.exit() calls pass through with their code
 	## intact instead of being reported as unexpected errors below
 	except SystemExit, e:
 		sys.exit(e.code)
 
4a52baa8
 	## Anything else is a bug: print a report the user can send in,
 	## including version info and the full traceback
 	except Exception, e:
 		sys.stderr.write("""
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    An unexpected error has occurred.
  Please report the following lines to:
  s3tools-general@lists.sourceforge.net
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

""")
26b4a8e5
 		sys.stderr.write("S3cmd:  %s\n" % PkgInfo.version)
 		sys.stderr.write("Python: %s\n" % sys.version.replace('\n', ' '))
 		sys.stderr.write("\n")
4a52baa8
 		## NOTE(review): traceback.format_exc() expects an optional
 		## line-count limit, not an exc_info() tuple; passing the tuple
 		## appears harmless in Python 2 but the argument should
 		## probably be dropped - confirm before changing
 		sys.stderr.write(traceback.format_exc(sys.exc_info())+"\n")
 		sys.stderr.write("""
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    An unexpected error has occurred.
    Please report the above lines to:
  s3tools-general@lists.sourceforge.net
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
""")
 		sys.exit(1)