- Author: natebeacham
- Posted: April 4, 2010
- Language: Python
- Version: 1.1
- Score: 2 (after 2 ratings)
Allows Amazon S3 storage-aware file fields to be dropped into a model. Requires the boto library.
# ########################################################
# S3FileField.py
# Extended FileField and ImageField for use with Django and Boto.
#
# Required settings:
# USE_AMAZON_S3 - Boolean, self explanatory
# DEFAULT_BUCKET - String, represents the default bucket name to use if one isn't provided
# AWS_ACCESS_KEY_ID - String
# AWS_SECRET_ACCESS_KEY - String
#
# ########################################################
from django.db import models
from django.conf import settings
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from django.core.files.storage import FileSystemStorage
from django.core.files import File
import os


class S3Storage(FileSystemStorage):
    def __init__(self, bucket=None, location=None, base_url=None):
        assert bucket
        if location is None:
            location = settings.MEDIA_ROOT
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.location = os.path.abspath(location)
        self.bucket = bucket
        self.base_url = base_url

    def _open(self, name, mode='rb'):
        class S3File(File):
            def __init__(self, key):
                self.key = key

            def size(self):
                return self.key.size

            def read(self, *args, **kwargs):
                return self.key.read(*args, **kwargs)

            def write(self, content):
                self.key.set_contents_from_string(content)

            def close(self):
                self.key.close()

        return S3File(Key(self.bucket, name))

    def _save(self, name, content):
        key = Key(self.bucket, name)
        if hasattr(content, 'temporary_file_path'):
            key.set_contents_from_filename(content.temporary_file_path())
        elif isinstance(content, File):
            key.set_contents_from_file(content)
        else:
            key.set_contents_from_string(content)
        return name

    def delete(self, name):
        self.bucket.delete_key(name)

    def exists(self, name):
        return Key(self.bucket, name).exists()

    def listdir(self, path):
        return [key.name for key in self.bucket.list()]

    def path(self, name):
        raise NotImplementedError

    def size(self, name):
        return self.bucket.get_key(name).size

    def url(self, name):
        return Key(self.bucket, name).generate_url(100000)

    def get_available_name(self, name):
        return name


class S3EnabledFileField(models.FileField):
    def __init__(self, bucket=settings.DEFAULT_BUCKET, verbose_name=None, name=None, upload_to='', storage=None, **kwargs):
        if settings.USE_AMAZON_S3:
            self.connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
            if not self.connection.lookup(bucket):
                self.connection.create_bucket(bucket)
            self.bucket = self.connection.get_bucket(bucket)
            storage = S3Storage(self.bucket)
        super(S3EnabledFileField, self).__init__(verbose_name, name, upload_to, storage, **kwargs)


class S3EnabledImageField(models.ImageField):
    def __init__(self, bucket=settings.DEFAULT_BUCKET, verbose_name=None, name=None, width_field=None, height_field=None, **kwargs):
        if settings.USE_AMAZON_S3:
            self.connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
            if not self.connection.lookup(bucket):
                self.connection.create_bucket(bucket)
            self.bucket = self.connection.get_bucket(bucket)
            kwargs['storage'] = S3Storage(self.bucket)
        super(S3EnabledImageField, self).__init__(verbose_name, name, width_field, height_field, **kwargs)
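To show how the fields are meant to be dropped into a model, here is a minimal usage sketch. The settings values are placeholders, and the app/module path in the import is hypothetical; only the field classes and their bucket/upload_to arguments come from the snippet above.

# settings.py -- placeholder values for the settings listed in the header comment
USE_AMAZON_S3 = True
DEFAULT_BUCKET = 'my-default-bucket'
AWS_ACCESS_KEY_ID = '...'
AWS_SECRET_ACCESS_KEY = '...'

# models.py
from django.db import models
from myapp.S3FileField import S3EnabledFileField, S3EnabledImageField  # hypothetical path

class Document(models.Model):
    title = models.CharField(max_length=100)
    # Stored in the 'documents' bucket when USE_AMAZON_S3 is True;
    # otherwise the field behaves like a plain FileField.
    attachment = S3EnabledFileField(bucket='documents', upload_to='attachments/')
    # Falls back to settings.DEFAULT_BUCKET when no bucket is given.
    thumbnail = S3EnabledImageField(upload_to='thumbnails/')

Note that the bucket lookup in the field's __init__ runs when the model class is defined, so the S3 connection is made as soon as the models module is imported.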
Comments
Am I mistaken, or does this code mean that Django actually retrieves the file from S3 synchronously and then delivers that data in a response object?
In the __init__ of S3EnabledFileField and S3EnabledImageField, .lookup and .get_bucket both end up calling get_bucket, so I was getting two hits to S3 every time, even when I know the bucket is always going to exist.
I changed it around to first call .get_bucket and, if that returns None, then call .create_bucket. No need for .lookup, really.
I also changed it to pass validate=False into .get_bucket. validate=True causes it to do an unnecessary get_all_keys, causing more unnecessary hits to S3.
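A rough sketch of the change that comment describes, against boto's S3Connection API (the validate flag is real boto; whether you still want a create_bucket fallback for missing buckets is up to you):

if settings.USE_AMAZON_S3:
    self.connection = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
    # validate=False skips the get_all_keys round trip that get_bucket()
    # otherwise performs, so this costs no extra requests to S3 -- but it
    # assumes the bucket already exists.
    self.bucket = self.connection.get_bucket(bucket, validate=False)
    storage = S3Storage(self.bucket)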