import os
from StringIO import StringIO
from django.conf import settings
from amazons3 import S3
from django.core.files.storage import Storage
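
# The storage backends below read their configuration from
# settings.S3_SETTINGS unless an options dict is passed in explicitly.
# A sketch of the expected keys (the values shown are placeholders):
#
#     S3_SETTINGS = {
#         'aws_key': 'AKIA...',            # AWS access key id
#         'aws_secret_key': '...',         # AWS secret access key
#         'bucket': 'my-bucket',           # created automatically if missing
#         'default_perm': 'public-read',   # invalid values fall back to 'private'
#         'vanity_url': '',                # empty -> <bucket>.s3.amazonaws.com URLs
#     }
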
class S3OpenFile(StringIO):
    """
    Wrapper for StringIO which allows open() to be called on it.

    This is for FileField form fields, which expect to be able to call open()
    and then retrieve data from the file.

    ** NOTE: The behavior of calling open() and then writing to the file is
    currently unknown. **
    """
    def open(self, *args, **kwargs):
        self.seek(0)


class S3Error(Exception):
    "Misc. S3 service error"
    pass


class S3Storage(Storage):
    options = None

    def __init__(self, options=None):
        if not options:
            options = settings.S3_SETTINGS
        self.options = options
        # Valid S3 canned ACLs; anything else falls back to 'private'.
        self.perm_tuple = (
            'private',
            'public-read',
            'public-read-write',
            'authenticated-read',
        )
        if self.options['default_perm'] not in self.perm_tuple:
            self.options['default_perm'] = 'private'
        self.connect()

    def connect(self):
        self.conn = S3.AWSAuthConnection(self.options['aws_key'],
                                         self.options['aws_secret_key'])
        # Create the bucket if it does not already exist.
        res = self.conn.check_bucket_exists(self.options['bucket'])
        if res.status != 200:
            res = self.conn.create_bucket(self.options['bucket'])
            if res.http_response.status != 200:
                raise S3Error, 'Unable to create bucket %s' % (self.options['bucket'])
        return True

    def exists(self, filename):
        contents = self.conn.list_bucket(self.options['bucket'],
                                         {'prefix': os.path.dirname(filename)})
        return filename in [f.key for f in contents.entries]

    def size(self, filename):
        contents = self.conn.list_bucket(self.options['bucket'],
                                         {'prefix': os.path.dirname(filename)})
        for f in contents.entries:
            if f.key == filename:
                return f.size
        return False

    def url(self, filename):
        server = self.options['bucket']
        if not self.options['vanity_url']:
            server += '.s3.amazonaws.com'
        else:
            server = self.options['vanity_url']
        return 'http://' + server + '/' + filename

    def _save(self, filename, content):
        # Hack: probe whether we can set attributes on `content`; if not,
        # fall back to the underlying file object.
        try:
            content.url = self.url
        except AttributeError, e:
            content = content.file
        try:
            data = content.read()
        except IOError, err:
            raise S3Error, 'Unable to read %s: %s' % (filename, err.strerror)
        # Use the supplied content type if there is one, otherwise guess it
        # from the file name.
        guess_type = False
        try:
            content.content_type
        except AttributeError, e:
            guess_type = True
        if guess_type or not content.content_type:
            import mimetypes
            content_type = mimetypes.guess_type(filename)[0]
            if content_type is None:
                content_type = 'text/plain'
        else:
            content_type = content.content_type
        perm = self.options['default_perm']
        res = self.conn.put(
            self.options['bucket'],
            filename,
            S3.S3Object(data),
            {
                'x-amz-acl': perm,
                'Content-Type': content_type,
            }
        )
        if res.http_response.status != 200:
            raise S3Error, 'Unable to upload file %s to bucket %s (HTTP %s): %s' % (
                filename, self.options['bucket'], res.http_response.status, res.body)
        content.filename = filename
        content.url = self.url(filename)
        return filename

    def delete(self, filename):
        res = self.conn.delete(self.options['bucket'], filename)
        # A successful delete returns HTTP 204 (No Content); a failed
        # delete is not treated as fatal.
        return res.http_response.status == 204

    def path(self, filename):
        # Files are stored on S3, not on the local filesystem.
        raise NotImplementedError

    def open(self, filename, mode='rb'):
        from urllib import urlopen
        # Download the data from S3 and save it into a file wrapper, which
        # allows its use as normal in FileFields.
        #
        # Note: this reads the whole file into memory.
        data = urlopen(self.url(filename))
        openfile = S3OpenFile()
        openfile.write(data.read())
        # Rewind so the data can be read immediately.
        openfile.seek(0)
        return openfile

    def get_available_name(self, filename):
        # Append -2, -3, ... before the extension until the name is unused.
        base, ext = os.path.splitext(filename)
        i = 1
        while self.exists(filename):
            i += 1
            filename = '%s-%d%s' % (base, i, ext)
        return filename


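# Wiring sketch (illustrative; the model and field names are not from the
# original snippet): S3Storage above, or CxStorage below, can be passed to a
# FileField via its `storage` argument, e.g.
#
#     from django.db import models
#
#     class Document(models.Model):
#         attachment = models.FileField(upload_to='documents',
#                                       storage=S3Storage())
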
class CxStorage(S3Storage):
    """
    This storage engine provides the naming scheme for phonese3: it hashes
    file names before storage.

    To use, set DEFAULT_STORAGE_ENGINE="CxStorage"

    Author: Jason Braegger
    License: AGPLv3
    Source: http://code.twi.gs/phonese3/
    """
    def get_valid_name(self, name):
        """
        Returns a hashed name to use for the stored file.
        """
        from hashlib import md5
        import time

        extension = os.path.splitext(name)[1].lower()
        # Ensure an ascii string for .hexdigest() later.
        name = name.encode('ascii', 'ignore')
        # time.time() is mixed in so repeated uploads of the same file name
        # produce distinct keys.
        return md5(str(time.time()) + name).hexdigest() + extension
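

# Usage sketch (not from the original snippet): a minimal, illustrative demo
# of the backend. The credentials, bucket and key names are placeholders, and
# running this performs real requests against S3.
if __name__ == '__main__':
    from django.core.files.base import ContentFile

    storage = S3Storage({
        'aws_key': 'AKIA...',             # placeholder
        'aws_secret_key': '...',          # placeholder
        'bucket': 'my-example-bucket',    # placeholder
        'default_perm': 'public-read',
        'vanity_url': '',                 # empty -> <bucket>.s3.amazonaws.com URLs
    })
    # Storage.save() picks an unused name via get_available_name() and then
    # calls _save() to upload the data.
    name = storage.save('examples/hello.txt', ContentFile('hello from S3Storage'))
    print storage.url(name)       # e.g. http://my-example-bucket.s3.amazonaws.com/examples/hello.txt
    print storage.exists(name)    # True
    storage.delete(name)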