Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -13,12 +13,19 @@
# limitations under the License.

from google.cloud import _storage_v2
from google.protobuf import timestamp_pb2

# Map Python Blob attributes to GCS V2 Object proto field names.
_BLOB_ATTR_TO_PROTO_FIELD = {
"content_type": "content_type",
"metadata": "metadata",
"kms_key_name": "kms_key",
"cache_control": "cache_control",
"content_disposition": "content_disposition",
"content_encoding": "content_encoding",
"content_language": "content_language",
"temporary_hold": "temporary_hold",
"event_based_hold": "event_based_hold",
}


Expand All @@ -37,4 +44,65 @@ def blob_to_proto(blob):
if value is not None:
resource_params[proto_field] = value

# custom_time (field 26): google.protobuf.Timestamp
custom_time = getattr(blob, "custom_time", None)
if custom_time is not None:
custom_time_proto = timestamp_pb2.Timestamp()
custom_time_proto.FromDatetime(custom_time)
resource_params["custom_time"] = custom_time_proto

# acl (field 10): repeated ObjectAccessControl
acl = getattr(blob, "acl", None)
if acl is not None and getattr(acl, "loaded", False):
acl_entries = []
for entry in acl:
acl_entries.append(
_storage_v2.ObjectAccessControl(
role=entry["role"],
entity=entry["entity"],
)
)
if acl_entries:
resource_params["acl"] = acl_entries
Comment on lines +56 to +66
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The check getattr(acl, "loaded", False) is too restrictive for an upload/write operation. In the google-cloud-storage library, a Blob's ACL object may have locally-added entries even if it hasn't been "loaded" from the server (which is always the case for a new blob). Checking if acl: is sufficient to determine if there are entries to be sent, and using a list comprehension makes the conversion more concise.

Suggested change
if acl is not None and getattr(acl, "loaded", False):
acl_entries = []
for entry in acl:
acl_entries.append(
_storage_v2.ObjectAccessControl(
role=entry["role"],
entity=entry["entity"],
)
)
if acl_entries:
resource_params["acl"] = acl_entries
if acl:
acl_entries = [
_storage_v2.ObjectAccessControl(
role=entry["role"],
entity=entry["entity"],
)
for entry in acl
]
if acl_entries:
resource_params["acl"] = acl_entries


# contexts (field 38): ObjectContexts
contexts = getattr(blob, "contexts", None)
if contexts is not None:
custom_map = {}
# contexts is expected to be a dict of key-value pairs
if isinstance(contexts, dict):
for k, v in contexts.items():
if isinstance(v, str):
payload = _storage_v2.ObjectCustomContextPayload(value=v)
else:
payload = v
custom_map[k] = payload

if custom_map:
resource_params["contexts"] = _storage_v2.ObjectContexts(custom=custom_map)

# retention (field 30): Object.Retention
retention = getattr(blob, "retention", None)
if retention:
mode_str = retention.get("mode")
mode = _storage_v2.Object.Retention.Mode.MODE_UNSPECIFIED
if mode_str:
# GCS retention modes are 'Locked' or 'Unlocked'
mode = getattr(
_storage_v2.Object.Retention.Mode,
mode_str.upper(),
_storage_v2.Object.Retention.Mode.MODE_UNSPECIFIED,
)

retain_until_time_proto = None
retain_until_time = retention.get("retain_until_time")
if retain_until_time is not None:
retain_until_time_proto = timestamp_pb2.Timestamp()
retain_until_time_proto.FromDatetime(retain_until_time)

resource_params["retention"] = _storage_v2.Object.Retention(
mode=mode,
retain_until_time=retain_until_time_proto,
)

return _storage_v2.Object(**resource_params)
16 changes: 16 additions & 0 deletions packages/google-cloud-storage/tests/system/test_zonal.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# py standard imports
import asyncio
import datetime
import gc
import os
import random
Expand Down Expand Up @@ -348,13 +349,23 @@ def test_write_from_blob(
object_name = f"test_from_blob-{str(uuid.uuid4())[:4]}"
content_type = "text/plain"
metadata = {"environment": "system-test"}
cache_control = "public, max-age=3600"
content_disposition = "attachment; filename=test.txt"
content_encoding = "identity"
content_language = "en"
custom_time = datetime.datetime(2025, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc)
test_data = b"system-test-data"

async def _run():
# 1. Create a Blob instance
blob = storage_client.bucket(_ZONAL_BUCKET).blob(object_name)
blob.content_type = content_type
blob.metadata = metadata
blob.cache_control = cache_control
blob.content_disposition = content_disposition
blob.content_encoding = content_encoding
blob.content_language = content_language
blob.custom_time = custom_time

# 2. Use from_blob to create the writer
writer = AsyncAppendableObjectWriter.from_blob(grpc_client, blob)
Expand All @@ -370,6 +381,11 @@ async def _run():

assert obj.content_type == content_type
assert obj.metadata["environment"] == "system-test"
assert obj.cache_control == cache_control
assert obj.content_disposition == content_disposition
assert obj.content_encoding == content_encoding
assert obj.content_language == content_language
assert int(obj.custom_time.timestamp()) == int(custom_time.timestamp())

blobs_to_delete.append(blob)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,16 @@ async def test_open_new_object_with_blob_sync_attrs(
mock_blob.content_type = "text/plain"
mock_blob.metadata = {"test-key": "test-value"}
mock_blob.kms_key_name = None
mock_blob.cache_control = None
mock_blob.content_disposition = None
mock_blob.content_encoding = None
mock_blob.content_language = None
mock_blob.temporary_hold = None
mock_blob.event_based_hold = None
mock_blob.custom_time = None
mock_blob.acl = None
mock_blob.retention = None
mock_blob.contexts = None

stream = _AsyncWriteObjectStream(mock_client, BUCKET, OBJECT, blob=mock_blob)
await stream.open()
Expand Down
139 changes: 139 additions & 0 deletions packages/google-cloud-storage/tests/unit/test__grpc_conversions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
from unittest import mock

from google.cloud.storage import _grpc_conversions
from google.cloud import _storage_v2
from google.protobuf import timestamp_pb2


def test_blob_to_proto_simple_fields():
    """Every simple mapped Blob attribute is copied onto the Object proto."""
    # Attribute -> value pairs driven as data so the spec list and the
    # setattr calls cannot drift apart.
    attr_values = {
        "content_type": "text/plain",
        "metadata": {"key": "value"},
        "kms_key_name": "kms-key",
        "cache_control": "no-cache",
        "content_disposition": "attachment",
        "content_encoding": "gzip",
        "content_language": "en",
        "temporary_hold": True,
        "event_based_hold": False,
        "custom_time": None,
        "acl": None,
        "retention": None,
        "contexts": None,
    }
    blob = mock.Mock(spec=["name", "bucket"] + list(attr_values))
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"
    for attr, value in attr_values.items():
        setattr(blob, attr, value)

    proto = _grpc_conversions.blob_to_proto(blob)

    assert proto.name == "blob-name"
    assert proto.bucket == "projects/_/buckets/bucket-name"
    assert proto.content_type == "text/plain"
    assert proto.metadata == {"key": "value"}
    assert proto.kms_key == "kms-key"
    assert proto.cache_control == "no-cache"
    assert proto.content_disposition == "attachment"
    assert proto.content_encoding == "gzip"
    assert proto.content_language == "en"
    assert proto.temporary_hold is True
    assert proto.event_based_hold is False


def test_blob_to_proto_custom_time():
    """A datetime ``custom_time`` is converted to a protobuf Timestamp."""
    when = datetime.datetime(2025, 1, 1, 12, 0, 0, tzinfo=datetime.timezone.utc)

    blob = mock.Mock(
        spec=["name", "bucket", "custom_time", "acl", "retention", "contexts"]
    )
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"
    # Null out every simple mapped attribute so only custom_time is populated.
    for attr in _grpc_conversions._BLOB_ATTR_TO_PROTO_FIELD:
        setattr(blob, attr, None)
    blob.custom_time = when
    blob.acl = None
    blob.retention = None
    blob.contexts = None

    proto = _grpc_conversions.blob_to_proto(blob)

    assert int(proto.custom_time.timestamp()) == int(when.timestamp())


def test_blob_to_proto_acl():
    """A loaded ACL yields repeated ObjectAccessControl entries in order."""
    entries = [
        {"role": "READER", "entity": "allUsers"},
        {"role": "OWNER", "entity": "user-123"},
    ]
    fake_acl = mock.MagicMock()
    fake_acl.loaded = True
    fake_acl.__iter__.return_value = iter(entries)

    blob = mock.Mock(
        spec=["name", "bucket", "acl", "custom_time", "retention", "contexts"]
    )
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"
    blob.acl = fake_acl
    blob.custom_time = None
    blob.retention = None
    blob.contexts = None
    # All other mapped attributes stay unset (None).
    for attr in _grpc_conversions._BLOB_ATTR_TO_PROTO_FIELD:
        setattr(blob, attr, None)

    proto = _grpc_conversions.blob_to_proto(blob)

    assert len(proto.acl) == 2
    assert (proto.acl[0].role, proto.acl[0].entity) == ("READER", "allUsers")
    assert (proto.acl[1].role, proto.acl[1].entity) == ("OWNER", "user-123")


def test_blob_to_proto_contexts():
    """A plain str->str contexts dict becomes ObjectContexts custom payloads."""
    blob = mock.Mock(
        spec=["name", "bucket", "contexts", "custom_time", "acl", "retention"]
    )
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"
    blob.custom_time = None
    blob.acl = None
    blob.retention = None
    # Null out the simple mapped attributes so only contexts is populated.
    for attr in _grpc_conversions._BLOB_ATTR_TO_PROTO_FIELD:
        setattr(blob, attr, None)
    blob.contexts = {"c1": "v1", "c2": "v2"}

    proto = _grpc_conversions.blob_to_proto(blob)

    assert len(proto.contexts.custom) == 2
    unwrapped = {key: payload.value for key, payload in proto.contexts.custom.items()}
    assert unwrapped == {"c1": "v1", "c2": "v2"}


def test_blob_to_proto_retention():
    """A retention dict maps to Object.Retention with mode and timestamp."""
    until = datetime.datetime(2026, 1, 1, tzinfo=datetime.timezone.utc)

    blob = mock.Mock(
        spec=["name", "bucket", "retention", "custom_time", "acl", "contexts"]
    )
    blob.name = "blob-name"
    blob.bucket.name = "bucket-name"
    blob.retention = {"mode": "Locked", "retain_until_time": until}
    blob.custom_time = None
    blob.acl = None
    blob.contexts = None
    # All other mapped attributes stay unset (None).
    for attr in _grpc_conversions._BLOB_ATTR_TO_PROTO_FIELD:
        setattr(blob, attr, None)

    proto = _grpc_conversions.blob_to_proto(blob)

    assert proto.retention.mode == _storage_v2.Object.Retention.Mode.LOCKED
    assert int(proto.retention.retain_until_time.timestamp()) == int(until.timestamp())
Loading