Merge pull request googleapis#3051 from tseaver/2991-storage-per_object_storage_class

Add 'Blob.update_storage_class' API method.
tseaver authored Feb 27, 2017
2 parents 00c3011 + 7239c9e commit 9c87e04
Showing 3 changed files with 161 additions and 2 deletions.
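For orientation, here is a minimal usage sketch of the method this PR adds. The bucket and object names are made up for illustration, and the client is assumed to pick up default credentials:

    from google.cloud import storage

    client = storage.Client()
    bucket = client.get_bucket('my-bucket')   # hypothetical existing bucket
    blob = bucket.blob('my-object')           # hypothetical existing object

    # Rewrite the object onto itself with the new storage class.
    blob.update_storage_class('NEARLINE')
    print(blob.storage_class)                 # reflects the rewritten object's class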
52 changes: 52 additions & 0 deletions storage/google/cloud/storage/blob.py
@@ -12,6 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# pylint: disable=too-many-lines

"""Create / interact with Google Cloud Storage blobs."""

import base64
@@ -73,6 +75,29 @@ class Blob(_PropertyMixin):
_CHUNK_SIZE_MULTIPLE = 256 * 1024
"""Number (256 KB, in bytes) that must divide the chunk size."""

_STORAGE_CLASSES = (
'NEARLINE',
'MULTI_REGIONAL',
'REGIONAL',
'COLDLINE',
'STANDARD', # alias for MULTI_REGIONAL/REGIONAL, based on location
)
"""Allowed values for :attr:`storage_class`.
See:
https://cloud.google.com/storage/docs/json_api/v1/objects#storageClass
https://cloud.google.com/storage/docs/per-object-storage-class
.. note::
This list does not include 'DURABLE_REDUCED_AVAILABILITY', which
is only documented for buckets (and deprecated).
.. note::
The documentation does *not* mention 'STANDARD', but it is the value
assigned by the back-end for objects created in buckets with 'STANDARD'
set as their 'storage_class'.
"""

def __init__(self, name, bucket, chunk_size=None, encryption_key=None):
super(Blob, self).__init__(name=name)

@@ -852,6 +877,33 @@ def rewrite(self, source, token=None, client=None):

return api_response['rewriteToken'], rewritten, size

def update_storage_class(self, new_class, client=None):
"""Update blob's storage class via a rewrite-in-place.
See:
https://cloud.google.com/storage/docs/per-object-storage-class
:type new_class: str
:param new_class: new storage class for the object
:type client: :class:`~google.cloud.storage.client.Client`
:param client: Optional. The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
"""
if new_class not in self._STORAGE_CLASSES:
raise ValueError("Invalid storage class: %s" % (new_class,))

client = self._require_client(client)
headers = _get_encryption_headers(self._encryption_key)
headers.update(_get_encryption_headers(
self._encryption_key, source=True))

api_response = client._connection.api_request(
method='POST', path=self.path + '/rewriteTo' + self.path,
data={'storageClass': new_class}, headers=headers,
_target_object=self)
self._set_properties(api_response['resource'])
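For readers unfamiliar with the underlying endpoint: the method above issues a single `objects.rewrite` call in which the source and destination are the same object, carrying the new `storageClass` in the request body and, when a customer-supplied key is configured, both the source and destination encryption headers. A rough hand-rolled equivalent is sketched below; the bucket/object names and bearer token are placeholders, the names are assumed to need no URL-encoding, and the `rewriteToken` loop that large objects may require is ignored:

    import requests  # illustration only; the library goes through its own transport

    BUCKET = 'my-bucket'        # hypothetical
    OBJECT = 'my-object'        # hypothetical
    TOKEN = 'ya29.placeholder'  # OAuth2 access-token placeholder

    url = (
        'https://www.googleapis.com/storage/v1'
        '/b/{b}/o/{o}/rewriteTo/b/{b}/o/{o}'.format(b=BUCKET, o=OBJECT)
    )
    resp = requests.post(
        url,
        headers={'Authorization': 'Bearer ' + TOKEN},
        json={'storageClass': 'NEARLINE'},
    )
    resp.raise_for_status()
    # When the rewrite completes in one call, the updated object resource is
    # returned under 'resource', which is what _set_properties() consumes above.
    print(resp.json()['resource']['storageClass'])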

cache_control = _scalar_property('cacheControl')
"""HTTP 'Cache-Control' header for this object.
16 changes: 14 additions & 2 deletions storage/google/cloud/storage/bucket.py
@@ -90,8 +90,20 @@ class Bucket(_PropertyMixin):
This is used in Bucket.delete() and Bucket.make_public().
"""

_STORAGE_CLASSES = ('STANDARD', 'NEARLINE', 'DURABLE_REDUCED_AVAILABILITY',
'MULTI_REGIONAL', 'REGIONAL', 'COLDLINE')
_STORAGE_CLASSES = (
'MULTI_REGIONAL',
'REGIONAL',
'NEARLINE',
'COLDLINE',
'STANDARD', # alias for MULTI_REGIONAL/REGIONAL, based on location
'DURABLE_REDUCED_AVAILABILITY', # deprecated
)
"""Allowed values for :attr:`storage_class`.
See:
https://cloud.google.com/storage/docs/json_api/v1/buckets#storageClass
https://cloud.google.com/storage/docs/storage-classes
"""

def __init__(self, client, name=None):
super(Bucket, self).__init__(name=name)
95 changes: 95 additions & 0 deletions storage/unit_tests/test_blob.py
@@ -1446,6 +1446,101 @@ def test_rewrite_same_name_no_key_new_key_w_token(self):
self.assertEqual(
headers['X-Goog-Encryption-Key-Sha256'], DEST_KEY_HASH_B64)

def test_update_storage_class_invalid(self):
BLOB_NAME = 'blob-name'
bucket = _Bucket()
blob = self._make_one(BLOB_NAME, bucket=bucket)
with self.assertRaises(ValueError):
blob.update_storage_class(u'BOGUS')

def test_update_storage_class_wo_encryption_key(self):
from six.moves.http_client import OK
BLOB_NAME = 'blob-name'
STORAGE_CLASS = u'NEARLINE'
RESPONSE = {
'resource': {'storageClass': STORAGE_CLASS},
}
response = ({'status': OK}, RESPONSE)
connection = _Connection(response)
client = _Client(connection)
bucket = _Bucket(client=client)
blob = self._make_one(BLOB_NAME, bucket=bucket)

blob.update_storage_class('NEARLINE')

self.assertEqual(blob.storage_class, 'NEARLINE')

kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'POST')
PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME)
self.assertEqual(kw[0]['path'], PATH)
self.assertNotIn('query_params', kw[0])
SENT = {'storageClass': STORAGE_CLASS}
self.assertEqual(kw[0]['data'], SENT)

headers = {
key.title(): str(value) for key, value in kw[0]['headers'].items()}
# Blob has no key, and therefore the relevant headers are not sent.
self.assertNotIn('X-Goog-Copy-Source-Encryption-Algorithm', headers)
self.assertNotIn('X-Goog-Copy-Source-Encryption-Key', headers)
self.assertNotIn('X-Goog-Copy-Source-Encryption-Key-Sha256', headers)
self.assertNotIn('X-Goog-Encryption-Algorithm', headers)
self.assertNotIn('X-Goog-Encryption-Key', headers)
self.assertNotIn('X-Goog-Encryption-Key-Sha256', headers)

def test_update_storage_class_w_encryption_key(self):
import base64
import hashlib
from six.moves.http_client import OK

BLOB_NAME = 'blob-name'
BLOB_KEY = b'01234567890123456789012345678901' # 32 bytes
BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode('ascii')
BLOB_KEY_HASH = hashlib.sha256(BLOB_KEY).digest()
BLOB_KEY_HASH_B64 = base64.b64encode(
BLOB_KEY_HASH).rstrip().decode('ascii')
STORAGE_CLASS = u'NEARLINE'
RESPONSE = {
'resource': {'storageClass': STORAGE_CLASS},
}
response = ({'status': OK}, RESPONSE)
connection = _Connection(response)
client = _Client(connection)
bucket = _Bucket(client=client)
blob = self._make_one(
BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY)

blob.update_storage_class('NEARLINE')

self.assertEqual(blob.storage_class, 'NEARLINE')

kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'POST')
PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME)
self.assertEqual(kw[0]['path'], PATH)
self.assertNotIn('query_params', kw[0])
SENT = {'storageClass': STORAGE_CLASS}
self.assertEqual(kw[0]['data'], SENT)

headers = {
key.title(): str(value) for key, value in kw[0]['headers'].items()}
# Blob has key, and therefore the relevant headers are sent.
self.assertEqual(
headers['X-Goog-Copy-Source-Encryption-Algorithm'], 'AES256')
self.assertEqual(
headers['X-Goog-Copy-Source-Encryption-Key'], BLOB_KEY_B64)
self.assertEqual(
headers['X-Goog-Copy-Source-Encryption-Key-Sha256'],
BLOB_KEY_HASH_B64)
self.assertEqual(
headers['X-Goog-Encryption-Algorithm'], 'AES256')
self.assertEqual(
headers['X-Goog-Encryption-Key'], BLOB_KEY_B64)
self.assertEqual(
headers['X-Goog-Encryption-Key-Sha256'], BLOB_KEY_HASH_B64)

def test_cache_control_getter(self):
BLOB_NAME = 'blob-name'
bucket = _Bucket()
