diff --git a/CHANGELOG.md b/CHANGELOG.md index 727a3a8cc..144092d1c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## [3.4.1](https://github.com/googleapis/python-storage/compare/v3.4.0...v3.4.1) (2025-10-08) + +### Bug Fixes + +* Fixes [#1561](https://github.com/googleapis/python-storage/issues/1561) by adding an option to specify the entire object checksum for resumable uploads via the `upload_from_string`, `upload_from_file`, and `upload_from_filename` methods ([acb918e](https://github.com/googleapis/python-storage/commit/acb918e20f7092e13d72fc63fe4ae2560bfecd40)) + ## [3.4.0](https://github.com/googleapis/python-storage/compare/v3.3.1...v3.4.0) (2025-09-15) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 0d6bef271..381ce5c9d 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -235,6 +235,7 @@ def __init__( if generation is not None: self._properties["generation"] = generation + @property def bucket(self): """Bucket which contains the object. @@ -2178,12 +2179,12 @@ def _initiate_resumable_upload( unless otherwise directed. :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `file`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -2204,7 +2205,7 @@ def _initiate_resumable_upload( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -2396,12 +2397,12 @@ def _do_resumable_upload( unless otherwise directed. 
:type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `stream`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -2422,7 +2423,7 @@ def _do_resumable_upload( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -2578,12 +2579,12 @@ def _do_upload( unless otherwise directed. :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `file_obj`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -2604,7 +2605,7 @@ def _do_upload( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -2795,12 +2796,12 @@ def _prep_and_do_upload( unless otherwise directed. :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `file_obj`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. 
It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -2821,7 +2822,7 @@ def _prep_and_do_upload( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -2978,12 +2979,12 @@ def upload_from_file( to configure them. :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `file_obj`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -3004,7 +3005,7 @@ def upload_from_file( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -3068,7 +3069,6 @@ def upload_from_filename( checksum="auto", retry=DEFAULT_RETRY, crc32c_checksum_value=None, - ): """Upload this blob's contents from the content of a named file. @@ -3163,14 +3163,14 @@ def upload_from_filename( See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for information on retry types and how to configure them. - + :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `filename`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. 
It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -3191,7 +3191,7 @@ def upload_from_filename( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 @@ -3314,12 +3314,12 @@ def upload_from_string( to configure them. :type crc32c_checksum_value: str - :param crc32c_checksum_value: (Optional) This should be the checksum of + :param crc32c_checksum_value: (Optional) This should be the checksum of the entire contents of `file_obj`. Applicable while uploading object greater than `_MAX_MULTIPART_SIZE` bytes. It can be obtained by running - + `gcloud storage hash /path/to/your/file` or @@ -3340,7 +3340,7 @@ def upload_from_string( Above code block prints 8 char string of base64 encoded big-endian bytes of 32 bit CRC32c integer. - + More details on CRC32c can be found in Appendix B: https://datatracker.ietf.org/doc/html/rfc4960#appendix-B and base64: https://datatracker.ietf.org/doc/html/rfc4648#section-4 diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index 6b822f0c1..71133df01 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-__version__ = "3.4.0" +__version__ = "3.4.1" diff --git a/tests/system/test_blob.py b/tests/system/test_blob.py index add994d17..59c665cfa 100644 --- a/tests/system/test_blob.py +++ b/tests/system/test_blob.py @@ -62,10 +62,11 @@ def test_large_file_write_from_stream_w_user_provided_wrong_checksum( info = file_data["big_9MiB"] with pytest.raises(exceptions.BadRequest) as excep_info: with open(info["path"], "rb") as file_obj: - blob.upload_from_file(file_obj,crc32c_checksum_value="A0tD7w==") + blob.upload_from_file(file_obj, crc32c_checksum_value="A0tD7w==") blobs_to_delete.append(blob) assert excep_info.value.code == 400 + def test_touch_and_write_large_file_w_user_provided_checksum( shared_bucket, blobs_to_delete, diff --git a/tests/unit/asyncio/test_async_read_object_stream.py b/tests/unit/asyncio/test_async_read_object_stream.py index ae5a48d76..4e4c93dd3 100644 --- a/tests/unit/asyncio/test_async_read_object_stream.py +++ b/tests/unit/asyncio/test_async_read_object_stream.py @@ -275,10 +275,17 @@ async def test_recv_without_open_should_raise_error( # assert assert str(exc.value) == "Stream is not open" -@mock.patch("google.cloud.storage._experimental.asyncio.async_read_object_stream.AsyncBidiRpc") -@mock.patch("google.cloud.storage._experimental.asyncio.async_grpc_client.AsyncGrpcClient.grpc_client") + +@mock.patch( + "google.cloud.storage._experimental.asyncio.async_read_object_stream.AsyncBidiRpc" +) +@mock.patch( + "google.cloud.storage._experimental.asyncio.async_grpc_client.AsyncGrpcClient.grpc_client" +) @pytest.mark.asyncio -async def test_recv_updates_read_handle_on_refresh(mock_client, mock_cls_async_bidi_rpc): +async def test_recv_updates_read_handle_on_refresh( + mock_client, mock_cls_async_bidi_rpc +): """ Verify that the `recv` method correctly updates the stream's handle when a new one is provided in a server response. 
@@ -289,11 +296,15 @@ async def test_recv_updates_read_handle_on_refresh(mock_client, mock_cls_async_b socket_like_rpc.open = AsyncMock() initial_handle = _storage_v2.BidiReadHandle(handle=b"initial-handle-token") - response_with_initial_handle = _storage_v2.BidiReadObjectResponse(read_handle=initial_handle) + response_with_initial_handle = _storage_v2.BidiReadObjectResponse( + read_handle=initial_handle + ) response_without_handle = _storage_v2.BidiReadObjectResponse(read_handle=None) refreshed_handle = _storage_v2.BidiReadHandle(handle=b"new-refreshed-handle-token") - response_with_refreshed_handle = _storage_v2.BidiReadObjectResponse(read_handle=refreshed_handle) + response_with_refreshed_handle = _storage_v2.BidiReadObjectResponse( + read_handle=refreshed_handle + ) socket_like_rpc.recv.side_effect = [ response_with_initial_handle, diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 4404cdf56..f3b6da5d1 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -3572,7 +3572,7 @@ def _do_upload_mock_call_helper( kwargs, { "timeout": expected_timeout, - 'crc32c_checksum_value': None, + "crc32c_checksum_value": None, "checksum": None, "retry": retry, "command": None,