Skip to content

Commit

Permalink
Fix for files larger than the buffer size
Browse files Browse the repository at this point in the history
  • Loading branch information
vinayinvicible committed Sep 6, 2016
1 parent d4c7c56 commit 538c4d8
Show file tree
Hide file tree
Showing 2 changed files with 50 additions and 3 deletions.
15 changes: 12 additions & 3 deletions storages/backends/s3boto.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,8 @@ def __init__(self, name, mode, storage, buffer_size=None):
if buffer_size is not None:
self.buffer_size = buffer_size
self._write_counter = 0
# file position of the latest part file uploaded
self._last_part_pos = 0

@property
def size(self):
Expand Down Expand Up @@ -149,10 +151,14 @@ def write(self, content, *args, **kwargs):
reduced_redundancy=self._storage.reduced_redundancy,
encrypt_key=self._storage.encryption,
)
if self.buffer_size <= self._buffer_file_size:
if self.buffer_size <= self._file_part_size:
self._flush_write_buffer()
return super(S3BotoStorageFile, self).write(force_bytes(content), *args, **kwargs)

@property
def _file_part_size(self):
return self._buffer_file_size - self._last_part_pos

@property
def _buffer_file_size(self):
pos = self.file.tell()
Expand All @@ -165,12 +171,15 @@ def _flush_write_buffer(self):
"""
Flushes the write buffer.
"""
if self._buffer_file_size:
if self._file_part_size:
self._write_counter += 1
self.file.seek(0)
pos = self.file.tell()
self.file.seek(self._last_part_pos)
headers = self._storage.headers.copy()
self._multipart.upload_part_from_file(
self.file, self._write_counter, headers=headers)
self.file.seek(pos)
self._last_part_pos = self._buffer_file_size

def close(self):
if self._is_dirty:
Expand Down
38 changes: 38 additions & 0 deletions tests/test_s3boto.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
import unittest
import os

try:
from unittest import mock
except ImportError: # Python 3.2 and below
Expand Down Expand Up @@ -338,3 +340,39 @@ def test_max_length_compat_okay(self):
self.storage.file_overwrite = False
self.storage.exists = lambda name: False
self.storage.get_available_name('gogogo', max_length=255)

def test_file_greater_than_5MB(self):
    """A 10 MB write is uploaded as multiple parts whose sizes sum to the file size."""
    key_name = 'test_storage_save.txt'
    payload = ContentFile('0' * 10 * 1024 * 1024)

    # Set the encryption flag used for multipart uploads
    self.storage.encryption = True
    # Set the ACL header used when creating/writing data.
    self.storage.bucket.connection.provider.acl_header = 'x-amz-acl'
    mocked_key = self.storage.bucket.get_key.return_value
    # Set the mocked key's bucket
    mocked_key.bucket = self.storage.bucket
    # Set the name of the mock object
    mocked_key.name = key_name

    uploaded_sizes = []

    def remaining_bytes(fp):
        # Bytes from the current position to the end of the file,
        # with the position restored afterwards.
        start = fp.tell()
        fp.seek(0, os.SEEK_END)
        size = fp.tell() - start
        fp.seek(start)
        return size

    def fake_upload_part_from_file(fp, part_num, *args, **kwargs):
        # Record each part's size exactly once, keyed by its 1-based number.
        if part_num != len(uploaded_sizes):
            uploaded_sizes.append(remaining_bytes(fp))

    f = self.storage.open(key_name, 'w')

    # initiate the multipart upload
    f.write('')
    f._multipart.upload_part_from_file = fake_upload_part_from_file
    for chunk in payload.chunks():
        f.write(chunk)
    f.close()

    assert payload.size == sum(uploaded_sizes)

0 comments on commit 538c4d8

Please sign in to comment.