path: root/src/amazons3/django/__init__.py
Diffstat (limited to 'src/amazons3/django/__init__.py')
-rw-r--r--  src/amazons3/django/__init__.py  181
1 files changed, 181 insertions, 0 deletions
diff --git a/src/amazons3/django/__init__.py b/src/amazons3/django/__init__.py
new file mode 100644
index 0000000..3e57e15
--- /dev/null
+++ b/src/amazons3/django/__init__.py
@@ -0,0 +1,181 @@
import os
from StringIO import StringIO
from django.conf import settings
from amazons3 import S3

from django.core.files.storage import Storage

class S3OpenFile(StringIO):
    """
    Wrapper for StringIO which allows open() to be called on it.

    This is for FileField form fields, which expect to be able to call open()
    and then retrieve data from the file.
    ** NOTE: The behavior of calling open() and then writing to the file is
    currently unknown. **
    """
    def open(self, *args, **kwargs):
        self.seek(0)

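# Illustrative use of S3OpenFile (not part of the original API; names are
# placeholders): open() only rewinds the buffer, so a FileField can re-read it.
#
#   buf = S3OpenFile()
#   buf.write('some bytes')
#   buf.open()                    # seeks back to position 0
#   assert buf.read() == 'some bytes'
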
class S3Error(Exception):
    "Misc. S3 Service Error"
    pass

class S3Storage(Storage):
    options = None

    def __init__(self, options=None):
        if not options:
            options = settings.S3_SETTINGS
        self.options = options
        self.perm_tuple = (
            'private',
            'public-read',
            'public-read-write',
            'authenticated-read'
        )
        if self.options['default_perm'] not in self.perm_tuple:
            self.options['default_perm'] = 'private'

        self.connect()

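    # Illustrative S3_SETTINGS for settings.py (placeholder values; the key
    # names are exactly the ones this class looks up):
    #
    #   S3_SETTINGS = {
    #       'aws_key': 'AKIA...',
    #       'aws_secret_key': '...',
    #       'bucket': 'my-bucket',
    #       'default_perm': 'public-read',  # invalid values fall back to 'private'
    #       'vanity_url': '',               # or e.g. 'media.example.com'
    #   }
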
    def connect(self):
        self.conn = S3.AWSAuthConnection(self.options['aws_key'], self.options['aws_secret_key'])

        res = self.conn.check_bucket_exists(self.options['bucket'])

        if res.status != 200:
            res = self.conn.create_bucket(self.options['bucket'])
            if res.http_response.status != 200:
                raise S3Error, 'Unable to create bucket %s' % (self.options['bucket'])

        return True

    def exists(self, filename):
        contents = self.conn.list_bucket(self.options['bucket'], {'prefix': os.path.dirname(filename)})
        return filename in [f.key for f in contents.entries]

    def size(self, filename):
        contents = self.conn.list_bucket(self.options['bucket'], {'prefix': os.path.dirname(filename)})
        for f in contents.entries:
            if f.key == filename:
                return f.size

        return False

    def url(self, filename):
        server = self.options['bucket']
        if not self.options['vanity_url']:
            server += '.s3.amazonaws.com'
        else:
            server = self.options['vanity_url']
        return 'http://' + server + '/' + filename

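    # Illustrative URLs (hypothetical names): with bucket 'my-bucket' and an
    # empty vanity_url, url('avatars/1.jpg') gives
    # 'http://my-bucket.s3.amazonaws.com/avatars/1.jpg'; with vanity_url set to
    # 'media.example.com' it gives 'http://media.example.com/avatars/1.jpg'.
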
    def _save(self, filename, content):
        # Hack: if the content object will not accept new attributes (raises
        # AttributeError), fall back to its underlying .file object.
        try:
            content.url = self.url
        except AttributeError:
            content = content.file

        try:
            data = content.read()
        except IOError, err:
            raise S3Error, 'Unable to read %s: %s' % (filename, err.strerror)

        guess_type = False
        try:
            content.content_type
        except AttributeError:
            guess_type = True

        # No usable content_type on the upload: guess it from the filename,
        # then fall back to text/plain.
        if guess_type or not content.content_type:
            import mimetypes
            content_type = mimetypes.guess_type(filename)[0]
            if content_type is None:
                content_type = 'text/plain'
        else:
            content_type = content.content_type

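        # Illustrative fallbacks (hypothetical names):
        #   mimetypes.guess_type('photo.jpg')[0] -> 'image/jpeg'
        #   mimetypes.guess_type('README')[0]    -> None, so 'text/plain' is used
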
        perm = self.options['default_perm']

        res = self.conn.put(
            self.options['bucket'],
            filename,
            S3.S3Object(data),
            {
                'x-amz-acl': perm,
                'Content-Type': content_type
            }
        )

        if res.http_response.status != 200:
            raise S3Error, 'Unable to upload file %s to bucket %s: %s' % (filename, self.options['bucket'], res.body)

        content.filename = filename
        content.url = self.url(filename)

        return filename

    def delete(self, filename):
        res = self.conn.delete(self.options['bucket'], filename)
        if res.http_response.status != 204:
            # Deletion failures are currently ignored rather than raised:
            #raise S3Error, 'Unable to delete file %s' % (filename)
            pass

        # S3 answers a successful DELETE with 204 No Content.
        return (res.http_response.status == 204)

    def path(self, filename):
        raise NotImplementedError

    def open(self, filename, mode='rb'):
        from urllib import urlopen
        # Download the data from S3 into a file wrapper, which can then be
        # used as a normal file by FileFields.
        #
        # Note: this holds the entire file contents in memory.
        data = urlopen(self.url(filename))
        openfile = S3OpenFile()
        openfile.write(data.read())
        return openfile

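    # Illustrative read path (hypothetical storage instance and file name):
    # the whole object is buffered in memory, so this suits small files only.
    #
    #   f = storage.open('avatars/1.jpg', 'rb')
    #   data = f.read()
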
    def get_available_name(self, filename):
        basefilename = os.path.splitext(filename)
        i = 1
        while self.exists(filename):
            i += 1
            filename = '%s-%d%s' % (basefilename[0], i, basefilename[1])

        return filename
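
    # Illustrative collision handling in get_available_name (hypothetical
    # names): if 'logo.png' and 'logo-2.png' already exist in the bucket,
    # get_available_name('logo.png') returns 'logo-3.png'.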

class CxStorage(S3Storage):
    """
    This storage engine provides the naming scheme for phonese3.  It hashes
    file names before they are stored.
    To use it, set DEFAULT_STORAGE_ENGINE="CxStorage"

    Author:  Jason Braegger
    License: AGPLv3
    Source:  http://code.twi.gs/phonese3/
    """
    def get_valid_name(self, name):
        """
        This returns a hashed name to use for storage on the filesystem.
        """
        import os.path
        from hashlib import md5
        import time

        extension = os.path.splitext(name)[1].lower()
        # Ensure an ascii string for .hexdigest() later.
        name = name.encode('ascii', 'ignore')

        return str(md5(str(time.time()) + name).hexdigest()) + \
            str(extension)
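
    # Illustrative result (hypothetical input): get_valid_name('My Photo.JPG')
    # returns something like '3a5b0c...e9f1.jpg' -- a time-dependent md5 hex
    # digest plus the lowercased original extension.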