
All classes from s3.py go to S3/S3.py. Added setup.py.

git-svn-id: https://s3tools.svn.sourceforge.net/svnroot/s3tools/s3py/trunk@45 830e0280-6d2a-0410-9c65-932aecc39d9d

Michal Ludvig authored on 2007/01/13 22:19:43
Showing 3 changed files
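
The move leaves s3.py as a thin command-line front end; anything that needs the API classes now imports them from the S3 package (see the `from S3.S3 import *` line in the s3.py diff below). A minimal sketch of the new import path, with a made-up config-file name for illustration:

	from S3.S3 import AwsConfig, S3

	config = AwsConfig("/path/to/s3.cfg")	# hypothetical config file; AwsConfig() with no argument keeps the class defaults
	s3 = S3(config)
	buckets = s3.list_all_buckets()["list"]	# list of buckets parsed from the service's XML reply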
S3/S3.py (new file, mode 100644)
@@ -0,0 +1,305 @@
+import os, os.path
+import base64
+import hmac
+import hashlib
+import httplib
+import logging
+from logging import debug, info, warning, error
+from stat import ST_SIZE
+
+from Utils import *
+from SortedDict import SortedDict
+from BidirMap import BidirMap
+from ConfigParser import ConfigParser
+
+class AwsConfig:
+	parsed_files = []
+	access_key = ""
+	secret_key = ""
+	host = "s3.amazonaws.com"
+	verbosity = logging.WARNING
+	send_chunk = 4096
+	recv_chunk = 4096
+	human_readable_sizes = False
+	force = False
+	show_uri = False
+
+	def __init__(self, configfile = None):
+		if configfile:
+			self.read_config_file(configfile)
+
+	def read_config_file(self, configfile):
+		cp = ConfigParser(configfile)
+		AwsConfig.access_key = cp.get("access_key", AwsConfig.access_key)
+		AwsConfig.secret_key = cp.get("secret_key", AwsConfig.secret_key)
+		AwsConfig.host = cp.get("host", AwsConfig.host)
+		verbosity = cp.get("verbosity", "WARNING")
+		try:
+			AwsConfig.verbosity = logging._levelNames[verbosity]
+		except KeyError:
+			error("AwsConfig: verbosity level '%s' is not valid" % verbosity)
+		AwsConfig.parsed_files.append(configfile)
+
+class S3Error (Exception):
+	def __init__(self, response):
+		self.status = response["status"]
+		self.reason = response["reason"]
+		debug("S3Error: %s (%s)" % (self.status, self.reason))
+		if response.has_key("headers"):
+			for header in response["headers"]:
+				debug("HttpHeader: %s: %s" % (header, response["headers"][header]))
+		if response.has_key("data"):
+			tree = ET.fromstring(response["data"])
+			for child in tree.getchildren():
+				if child.text != "":
+					debug("ErrorXML: " + child.tag + ": " + repr(child.text))
+					self.__setattr__(child.tag, child.text)
+
+	def __str__(self):
+		retval = "%d (%s)" % (self.status, self.reason)
+		try:
+			retval += (": %s" % self.Code)
+		except AttributeError:
+			pass
+		return retval
+
+class ParameterError(Exception):
+	pass
+
+class S3:
+	http_methods = BidirMap(
+		GET = 0x01,
+		PUT = 0x02,
+		HEAD = 0x04,
+		DELETE = 0x08,
+		MASK = 0x0F,
+		)
+
+	targets = BidirMap(
+		SERVICE = 0x0100,
+		BUCKET = 0x0200,
+		OBJECT = 0x0400,
+		MASK = 0x0700,
+		)
+
+	operations = BidirMap(
+		UNDFINED = 0x0000,
+		LIST_ALL_BUCKETS = targets["SERVICE"] | http_methods["GET"],
+		BUCKET_CREATE = targets["BUCKET"] | http_methods["PUT"],
+		BUCKET_LIST = targets["BUCKET"] | http_methods["GET"],
+		BUCKET_DELETE = targets["BUCKET"] | http_methods["DELETE"],
+		OBJECT_PUT = targets["OBJECT"] | http_methods["PUT"],
+		OBJECT_GET = targets["OBJECT"] | http_methods["GET"],
+		OBJECT_HEAD = targets["OBJECT"] | http_methods["HEAD"],
+		OBJECT_DELETE = targets["OBJECT"] | http_methods["DELETE"],
+	)
+
+	codes = {
+		"NoSuchBucket" : "Bucket '%s' does not exist",
+		"AccessDenied" : "Access to bucket '%s' was denied",
+		"BucketAlreadyExists" : "Bucket '%s' already exists",
+		}
+
+	def __init__(self, config):
+		self.config = config
+
+	def list_all_buckets(self):
+		request = self.create_request("LIST_ALL_BUCKETS")
+		response = self.send_request(request)
+		response["list"] = getListFromXml(response["data"], "Bucket")
+		return response
+
+	def bucket_list(self, bucket):
+		request = self.create_request("BUCKET_LIST", bucket = bucket)
+		response = self.send_request(request)
+		response["list"] = getListFromXml(response["data"], "Contents")
+		return response
+
+	def bucket_create(self, bucket):
+		self.check_bucket_name(bucket)
+		request = self.create_request("BUCKET_CREATE", bucket = bucket)
+		response = self.send_request(request)
+		return response
+
+	def bucket_delete(self, bucket):
+		request = self.create_request("BUCKET_DELETE", bucket = bucket)
+		response = self.send_request(request)
+		return response
+
+	def object_put(self, filename, bucket, object):
+		if not os.path.isfile(filename):
+			raise ParameterError("%s is not a regular file" % filename)
+		try:
+			file = open(filename, "r")
+			size = os.stat(filename)[ST_SIZE]
+		except IOError, e:
+			raise ParameterError("%s: %s" % (filename, e.strerror))
+		headers = SortedDict()
+		headers["content-length"] = size
+		request = self.create_request("OBJECT_PUT", bucket = bucket, object = object, headers = headers)
+		response = self.send_file(request, file)
+		response["size"] = size
+		return response
+
+	def object_get(self, filename, bucket, object):
+		try:
+			file = open(filename, "w")
+		except IOError, e:
+			raise ParameterError("%s: %s" % (filename, e.strerror))
+		request = self.create_request("OBJECT_GET", bucket = bucket, object = object)
+		response = self.recv_file(request, file)
+		response["size"] = int(response["headers"]["content-length"])
+		return response
+
+	def object_delete(self, bucket, object):
+		request = self.create_request("OBJECT_DELETE", bucket = bucket, object = object)
+		response = self.send_request(request)
+		return response
+
+	def create_request(self, operation, bucket = None, object = None, headers = None):
+		resource = "/"
+		if bucket:
+			resource += str(bucket)
+			if object:
+				resource += "/"+str(object)
+
+		if not headers:
+			headers = SortedDict()
+
+		if headers.has_key("date"):
+			if not headers.has_key("x-amz-date"):
+				headers["x-amz-date"] = headers["date"]
+			del(headers["date"])
+
+		if not headers.has_key("x-amz-date"):
+			headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime(time.time()))
+
+		method_string = S3.http_methods.getkey(S3.operations[operation] & S3.http_methods["MASK"])
+		signature = self.sign_headers(method_string, resource, headers)
+		headers["Authorization"] = "AWS "+self.config.access_key+":"+signature
+		return (method_string, resource, headers)
+
+	def send_request(self, request):
+		method_string, resource, headers = request
+		info("Processing request, please wait...")
+		conn = httplib.HTTPConnection(self.config.host)
+		conn.request(method_string, resource, {}, headers)
+		response = {}
+		http_response = conn.getresponse()
+		response["status"] = http_response.status
+		response["reason"] = http_response.reason
+		response["headers"] = convertTupleListToDict(http_response.getheaders())
+		response["data"] =  http_response.read()
+		conn.close()
+		if response["status"] < 200 or response["status"] > 299:
+			raise S3Error(response)
+		return response
+
+	def send_file(self, request, file):
+		method_string, resource, headers = request
+		info("Sending file '%s', please wait..." % file.name)
+		conn = httplib.HTTPConnection(self.config.host)
+		conn.connect()
+		conn.putrequest(method_string, resource)
+		for header in headers.keys():
+			conn.putheader(header, str(headers[header]))
+		conn.endheaders()
+		size_left = size_total = headers.get("content-length")
+		while (size_left > 0):
+			debug("SendFile: Reading up to %d bytes from '%s'" % (AwsConfig.send_chunk, file.name))
+			data = file.read(AwsConfig.send_chunk)
+			debug("SendFile: Sending %d bytes to the server" % len(data))
+			conn.send(data)
+			size_left -= len(data)
+			info("Sent %d bytes (%d %% of %d)" % (
+				(size_total - size_left),
+				(size_total - size_left) * 100 / size_total,
+				size_total))
+		response = {}
+		http_response = conn.getresponse()
+		response["status"] = http_response.status
+		response["reason"] = http_response.reason
+		response["headers"] = convertTupleListToDict(http_response.getheaders())
+		response["data"] =  http_response.read()
+		conn.close()
+		if response["status"] < 200 or response["status"] > 299:
+			raise S3Error(response)
+		return response
+
+	def recv_file(self, request, file):
+		method_string, resource, headers = request
+		info("Receiving file '%s', please wait..." % file.name)
+		conn = httplib.HTTPConnection(self.config.host)
+		conn.connect()
+		conn.putrequest(method_string, resource)
+		for header in headers.keys():
+			conn.putheader(header, str(headers[header]))
+		conn.endheaders()
+		response = {}
+		http_response = conn.getresponse()
+		response["status"] = http_response.status
+		response["reason"] = http_response.reason
+		response["headers"] = convertTupleListToDict(http_response.getheaders())
+		if response["status"] < 200 or response["status"] > 299:
+			raise S3Error(response)
+
+		md5=hashlib.new("md5")
+		size_left = size_total = int(response["headers"]["content-length"])
+		while (size_left > 0):
+			this_chunk = size_left > AwsConfig.recv_chunk and AwsConfig.recv_chunk or size_left
+			debug("ReceiveFile: Receiving up to %d bytes from the server" % this_chunk)
+			data = http_response.read(this_chunk)
+			debug("ReceiveFile: Writing %d bytes to file '%s'" % (len(data), file.name))
+			file.write(data)
+			md5.update(data)
+			size_left -= len(data)
+			info("Received %d bytes (%d %% of %d)" % (
+				(size_total - size_left),
+				(size_total - size_left) * 100 / size_total,
+				size_total))
+		conn.close()
+		response["md5"] = md5.hexdigest()
+		response["md5match"] = response["headers"]["etag"].find(response["md5"]) >= 0
+		debug("ReceiveFile: Computed MD5 = %s" % response["md5"])
+		if not response["md5match"]:
+			warning("MD5 signatures do not match: computed=%s, received=%s" % (
+				response["md5"], response["headers"]["etag"]))
+
+		return response
+
+	def sign_headers(self, method, resource, headers):
+		h  = method+"\n"
+		h += headers.get("content-md5", "")+"\n"
+		h += headers.get("content-type", "")+"\n"
+		h += headers.get("date", "")+"\n"
+		for header in headers.keys():
+			if header.startswith("x-amz-"):
+				h += header+":"+str(headers[header])+"\n"
+		h += resource
+		return base64.encodestring(hmac.new(self.config.secret_key, h, hashlib.sha1).digest()).strip()
+
+	def check_bucket_name(self, bucket):
+		if re.compile("[^A-Za-z0-9\._-]").search(bucket):
+			raise ParameterError("Bucket name '%s' contains unallowed characters" % bucket)
+		if len(bucket) < 3:
+			raise ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket)
+		if len(bucket) > 255:
+			raise ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket)
+		return True
+
+	def compose_uri(self, bucket, object = None):
+		if AwsConfig.show_uri:
+			uri = "s3://" + bucket
+			if object:
+				uri += "/"+object
+			return uri
+		else:
+			return object and object or bucket
+
+	def parse_s3_uri(self, uri):
+		match = re.compile("^s3://([^/]*)/?(.*)").match(uri)
+		if match:
+			return (True,) + match.groups()
+		else:
+			return (False, "", "")
+
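
For reference, a short usage sketch of the S3 class defined above (bucket, object and file names are invented; the credentials are placeholders set directly on the config object instead of being read from a file):

	from S3.S3 import AwsConfig, S3, S3Error

	cfg = AwsConfig()
	cfg.access_key = "my-access-key"	# placeholder credentials
	cfg.secret_key = "my-secret-key"
	s3 = S3(cfg)
	try:
		s3.bucket_create("my-test-bucket")
		s3.object_put("local.txt", "my-test-bucket", "remote.txt")	# streamed in send_chunk-sized pieces
		s3.object_get("copy.txt", "my-test-bucket", "remote.txt")	# download MD5 is checked against the ETag header
	except S3Error, e:
		print "S3 request failed:", e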
s3.py
@@ -5,317 +5,16 @@
 ##         http://www.logix.cz/michal
 ## License: GPL Version 2
 
-import httplib2
 import sys
-import os, os.path
 import logging
 import time
-import base64
-import hmac
-import hashlib
-import httplib
 
 from optparse import OptionParser
 from logging import debug, info, warning, error
-from stat import ST_SIZE
 import elementtree.ElementTree as ET
 
 ## Our modules
-from S3.Utils import *
-from S3.SortedDict import SortedDict
-from S3.BidirMap import BidirMap
-from S3.ConfigParser import ConfigParser
-
-class AwsConfig:
-	parsed_files = []
-	access_key = ""
-	secret_key = ""
-	host = "s3.amazonaws.com"
-	verbosity = logging.WARNING
-	send_chunk = 4096
-	recv_chunk = 4096
-	human_readable_sizes = False
-	force = False
-	show_uri = False
-
-	def __init__(self, configfile = None):
-		if configfile:
-			self.read_config_file(configfile)
-
-	def read_config_file(self, configfile):
-		cp = ConfigParser(configfile)
-		AwsConfig.access_key = cp.get("access_key", AwsConfig.access_key)
-		AwsConfig.secret_key = cp.get("secret_key", AwsConfig.secret_key)
-		AwsConfig.host = cp.get("host", AwsConfig.host)
-		verbosity = cp.get("verbosity", "WARNING")
-		try:
-			AwsConfig.verbosity = logging._levelNames[verbosity]
-		except KeyError:
-			error("AwsConfig: verbosity level '%s' is not valid" % verbosity)
-		AwsConfig.parsed_files.append(configfile)
-
-class S3Error (Exception):
-	def __init__(self, response):
-		self.status = response["status"]
-		self.reason = response["reason"]
-		debug("S3Error: %s (%s)" % (self.status, self.reason))
-		if response.has_key("headers"):
-			for header in response["headers"]:
-				debug("HttpHeader: %s: %s" % (header, response["headers"][header]))
-		if response.has_key("data"):
-			tree = ET.fromstring(response["data"])
-			for child in tree.getchildren():
-				if child.text != "":
-					debug("ErrorXML: " + child.tag + ": " + repr(child.text))
-					self.__setattr__(child.tag, child.text)
-
-	def __str__(self):
-		retval = "%d (%s)" % (self.status, self.reason)
-		try:
-			retval += (": %s" % self.Code)
-		except AttributeError:
-			pass
-		return retval
-
-class ParameterError(Exception):
-	pass
-
-class S3:
-	http_methods = BidirMap(
-		GET = 0x01,
-		PUT = 0x02,
-		HEAD = 0x04,
-		DELETE = 0x08,
-		MASK = 0x0F,
-		)
-
-	targets = BidirMap(
-		SERVICE = 0x0100,
-		BUCKET = 0x0200,
-		OBJECT = 0x0400,
-		MASK = 0x0700,
-		)
-
-	operations = BidirMap(
-		UNDFINED = 0x0000,
-		LIST_ALL_BUCKETS = targets["SERVICE"] | http_methods["GET"],
-		BUCKET_CREATE = targets["BUCKET"] | http_methods["PUT"],
-		BUCKET_LIST = targets["BUCKET"] | http_methods["GET"],
-		BUCKET_DELETE = targets["BUCKET"] | http_methods["DELETE"],
-		OBJECT_PUT = targets["OBJECT"] | http_methods["PUT"],
-		OBJECT_GET = targets["OBJECT"] | http_methods["GET"],
-		OBJECT_HEAD = targets["OBJECT"] | http_methods["HEAD"],
-		OBJECT_DELETE = targets["OBJECT"] | http_methods["DELETE"],
-	)
-
-	codes = {
-		"NoSuchBucket" : "Bucket '%s' does not exist",
-		"AccessDenied" : "Access to bucket '%s' was denied",
-		"BucketAlreadyExists" : "Bucket '%s' already exists",
-		}
-
-	def __init__(self, config):
-		self.config = config
-
-	def list_all_buckets(self):
-		request = self.create_request("LIST_ALL_BUCKETS")
-		response = self.send_request(request)
-		response["list"] = getListFromXml(response["data"], "Bucket")
-		return response
-
-	def bucket_list(self, bucket):
-		request = self.create_request("BUCKET_LIST", bucket = bucket)
-		response = self.send_request(request)
-		response["list"] = getListFromXml(response["data"], "Contents")
-		return response
-
-	def bucket_create(self, bucket):
-		self.check_bucket_name(bucket)
-		request = self.create_request("BUCKET_CREATE", bucket = bucket)
-		response = self.send_request(request)
-		return response
-
-	def bucket_delete(self, bucket):
-		request = self.create_request("BUCKET_DELETE", bucket = bucket)
-		response = self.send_request(request)
-		return response
-
-	def object_put(self, filename, bucket, object):
-		if not os.path.isfile(filename):
-			raise ParameterError("%s is not a regular file" % filename)
-		try:
-			file = open(filename, "r")
-			size = os.stat(filename)[ST_SIZE]
-		except IOError, e:
-			raise ParameterError("%s: %s" % (filename, e.strerror))
-		headers = SortedDict()
-		headers["content-length"] = size
-		request = self.create_request("OBJECT_PUT", bucket = bucket, object = object, headers = headers)
-		response = self.send_file(request, file)
-		response["size"] = size
-		return response
-
-	def object_get(self, filename, bucket, object):
-		try:
-			file = open(filename, "w")
-		except IOError, e:
-			raise ParameterError("%s: %s" % (filename, e.strerror))
-		request = self.create_request("OBJECT_GET", bucket = bucket, object = object)
-		response = self.recv_file(request, file)
-		response["size"] = int(response["headers"]["content-length"])
-		return response
-
-	def object_delete(self, bucket, object):
-		request = self.create_request("OBJECT_DELETE", bucket = bucket, object = object)
-		response = self.send_request(request)
-		return response
-
-	def create_request(self, operation, bucket = None, object = None, headers = None):
-		resource = "/"
-		if bucket:
-			resource += str(bucket)
-			if object:
-				resource += "/"+str(object)
-
-		if not headers:
-			headers = SortedDict()
-
-		if headers.has_key("date"):
-			if not headers.has_key("x-amz-date"):
-				headers["x-amz-date"] = headers["date"]
-			del(headers["date"])
-
-		if not headers.has_key("x-amz-date"):
-			headers["x-amz-date"] = time.strftime("%a, %d %b %Y %H:%M:%S %z", time.gmtime(time.time()))
-
-		method_string = S3.http_methods.getkey(S3.operations[operation] & S3.http_methods["MASK"])
-		signature = self.sign_headers(method_string, resource, headers)
-		headers["Authorization"] = "AWS "+self.config.access_key+":"+signature
-		return (method_string, resource, headers)
-
-	def send_request(self, request):
-		method_string, resource, headers = request
-		info("Processing request, please wait...")
-		conn = httplib.HTTPConnection(self.config.host)
-		conn.request(method_string, resource, {}, headers)
-		response = {}
-		http_response = conn.getresponse()
-		response["status"] = http_response.status
-		response["reason"] = http_response.reason
-		response["headers"] = convertTupleListToDict(http_response.getheaders())
-		response["data"] =  http_response.read()
-		conn.close()
-		if response["status"] < 200 or response["status"] > 299:
-			raise S3Error(response)
-		return response
-
-	def send_file(self, request, file):
-		method_string, resource, headers = request
-		info("Sending file '%s', please wait..." % file.name)
-		conn = httplib.HTTPConnection(self.config.host)
-		conn.connect()
-		conn.putrequest(method_string, resource)
-		for header in headers.keys():
-			conn.putheader(header, str(headers[header]))
-		conn.endheaders()
-		size_left = size_total = headers.get("content-length")
-		while (size_left > 0):
-			debug("SendFile: Reading up to %d bytes from '%s'" % (AwsConfig.send_chunk, file.name))
-			data = file.read(AwsConfig.send_chunk)
-			debug("SendFile: Sending %d bytes to the server" % len(data))
-			conn.send(data)
-			size_left -= len(data)
-			info("Sent %d bytes (%d %% of %d)" % (
-				(size_total - size_left),
-				(size_total - size_left) * 100 / size_total,
-				size_total))
-		response = {}
-		http_response = conn.getresponse()
-		response["status"] = http_response.status
-		response["reason"] = http_response.reason
-		response["headers"] = convertTupleListToDict(http_response.getheaders())
-		response["data"] =  http_response.read()
-		conn.close()
-		if response["status"] < 200 or response["status"] > 299:
-			raise S3Error(response)
-		return response
-
-	def recv_file(self, request, file):
-		method_string, resource, headers = request
-		info("Receiving file '%s', please wait..." % file.name)
-		conn = httplib.HTTPConnection(self.config.host)
-		conn.connect()
-		conn.putrequest(method_string, resource)
-		for header in headers.keys():
-			conn.putheader(header, str(headers[header]))
-		conn.endheaders()
-		response = {}
-		http_response = conn.getresponse()
-		response["status"] = http_response.status
-		response["reason"] = http_response.reason
-		response["headers"] = convertTupleListToDict(http_response.getheaders())
-		if response["status"] < 200 or response["status"] > 299:
-			raise S3Error(response)
-
-		md5=hashlib.new("md5")
-		size_left = size_total = int(response["headers"]["content-length"])
-		while (size_left > 0):
-			this_chunk = size_left > AwsConfig.recv_chunk and AwsConfig.recv_chunk or size_left
-			debug("ReceiveFile: Receiving up to %d bytes from the server" % this_chunk)
-			data = http_response.read(this_chunk)
-			debug("ReceiveFile: Writing %d bytes to file '%s'" % (len(data), file.name))
-			file.write(data)
-			md5.update(data)
-			size_left -= len(data)
-			info("Received %d bytes (%d %% of %d)" % (
-				(size_total - size_left),
-				(size_total - size_left) * 100 / size_total,
-				size_total))
-		conn.close()
-		response["md5"] = md5.hexdigest()
-		response["md5match"] = response["headers"]["etag"].find(response["md5"]) >= 0
-		debug("ReceiveFile: Computed MD5 = %s" % response["md5"])
-		if not response["md5match"]:
-			warning("MD5 signatures do not match: computed=%s, received=%s" % (
-				response["md5"], response["headers"]["etag"]))
-
-		return response
-
-	def sign_headers(self, method, resource, headers):
-		h  = method+"\n"
-		h += headers.get("content-md5", "")+"\n"
-		h += headers.get("content-type", "")+"\n"
-		h += headers.get("date", "")+"\n"
-		for header in headers.keys():
-			if header.startswith("x-amz-"):
-				h += header+":"+str(headers[header])+"\n"
-		h += resource
-		return base64.encodestring(hmac.new(self.config.secret_key, h, hashlib.sha1).digest()).strip()
-
-	def check_bucket_name(self, bucket):
-		if re.compile("[^A-Za-z0-9\._-]").search(bucket):
-			raise ParameterError("Bucket name '%s' contains unallowed characters" % bucket)
-		if len(bucket) < 3:
-			raise ParameterError("Bucket name '%s' is too short (min 3 characters)" % bucket)
-		if len(bucket) > 255:
-			raise ParameterError("Bucket name '%s' is too long (max 255 characters)" % bucket)
-		return True
-
-	def compose_uri(self, bucket, object = None):
-		if AwsConfig.show_uri:
-			uri = "s3://" + bucket
-			if object:
-				uri += "/"+object
-			return uri
-		else:
-			return object and object or bucket
-
-	def parse_s3_uri(self, uri):
-		match = re.compile("^s3://([^/]*)/?(.*)").match(uri)
-		if match:
-			return (True,) + match.groups()
-		else:
-			return (False, "", "")
+from S3.S3 import *
 
 def output(message):
 	print message
setup.py (new file, mode 100644)
@@ -0,0 +1,8 @@
+from distutils.core import setup
+setup(name = "s3cmd",
+	version = "0.9.0a1",
+	author = "Michal Ludvig",
+	author_email = "michal@logix.cz",
+	packages = [ 'S3' ],
+	scripts = ['s3.py'],
+	)
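
Being a plain distutils script, the new setup.py is driven in the usual way: python setup.py install puts the S3 package on the Python path and installs the s3.py script, while python setup.py sdist builds a source tarball. This is standard distutils behaviour; nothing beyond the declarations above is assumed.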