From 7c8a178978d2022482afd301242ae79b2f9c737a Mon Sep 17 00:00:00 2001
From: cojenco
Date: Thu, 20 Oct 2022 15:50:05 -0700
Subject: [PATCH] docs: streamline docs for c.g.c migration (#876)

* docs: remove literalinclude in client module

* remove literalinclude and update samples in Blob

* replace literalinclude with samples in Bucket

* update docstrings and remove inline samples

* update docstrings
---
 google/cloud/storage/_signing.py     |  29 ++--
 google/cloud/storage/blob.py         | 242 ++++++++-------------------
 google/cloud/storage/bucket.py       | 250 +++++++--------------------
 google/cloud/storage/client.py       | 134 ++--------------
 google/cloud/storage/fileio.py       |   2 +
 google/cloud/storage/hmac_key.py     |   5 +
 google/cloud/storage/notification.py |  16 +-
 google/cloud/storage/retry.py        |   5 +
 8 files changed, 179 insertions(+), 504 deletions(-)

diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py
index 036ea6385..fb50a2acc 100644
--- a/google/cloud/storage/_signing.py
+++ b/google/cloud/storage/_signing.py
@@ -282,15 +282,11 @@ def generate_signed_url_v2(

     .. note::
         If you are on Google Compute Engine, you can't generate a signed URL.
-        Follow `Issue 922`_ for updates on this. If you'd like to be able to
-        generate a signed URL from GCE, you can use a standard service account
-        from a JSON file rather than a GCE service account.
+        If you'd like to be able to generate a signed URL from GCE, you can use a
+        standard service account from a JSON file rather than a GCE service account.

-    See headers `reference`_ for more details on optional arguments.
-
-    .. _Issue 922: https://github.com/GoogleCloudPlatform/\
-        google-cloud-python/issues/922
-    .. _reference: https://cloud.google.com/storage/docs/reference-headers
+    See headers [reference](https://cloud.google.com/storage/docs/reference-headers)
+    for more details on optional arguments.

     :type credentials: :class:`google.auth.credentials.Signing`
     :param credentials: Credentials object with an associated private key to
@@ -382,6 +378,8 @@ def generate_signed_url_v2(
         elements_to_sign.append(canonical.resource)

     string_to_sign = "\n".join(elements_to_sign)

+    # If you are on Google Compute Engine, you can't generate a signed URL.
+    # See https://github.com/googleapis/google-cloud-python/issues/922
     # Set the right query parameters.
     if access_token and service_account_email:
         signature = _sign_message(string_to_sign, access_token, service_account_email)
@@ -446,16 +444,11 @@ def generate_signed_url_v4(

     .. note::
         If you are on Google Compute Engine, you can't generate a signed URL.
-        Follow `Issue 922`_ for updates on this. If you'd like to be able to
-        generate a signed URL from GCE, you can use a standard service account
-        from a JSON file rather than a GCE service account.
-
-    See headers `reference`_ for more details on optional arguments.
-
-    .. _Issue 922: https://github.com/GoogleCloudPlatform/\
-        google-cloud-python/issues/922
-    .. _reference: https://cloud.google.com/storage/docs/reference-headers
+        If you'd like to be able to generate a signed URL from GCE, you can use a
+        standard service account from a JSON file rather than a GCE service account.

+    See headers [reference](https://cloud.google.com/storage/docs/reference-headers)
+    for more details on optional arguments.
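For illustration, a minimal sketch of the V4 signing flow these docstrings describe, using signing-capable service-account credentials. The bucket, object, and key-file names here are placeholders, not values from this patch:

.. code-block:: python

    from datetime import timedelta

    from google.cloud import storage

    # Credentials loaded from a service-account JSON key can sign;
    # the default GCE metadata credentials cannot (see the note above).
    client = storage.Client.from_service_account_json("key.json")
    blob = client.bucket("my-bucket").blob("my-object")

    # version="v4" routes to generate_signed_url_v4() under the hood.
    url = blob.generate_signed_url(
        version="v4", expiration=timedelta(minutes=15), method="GET"
    )
    print(url)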
:type credentials: :class:`google.auth.credentials.Signing` :param credentials: Credentials object with an associated private key to @@ -543,6 +536,8 @@ def generate_signed_url_v4( request_timestamp = _request_timestamp datestamp = _request_timestamp[:8] + # If you are on Google Compute Engine, you can't generate a signed URL. + # See https://github.com/googleapis/google-cloud-python/issues/922 client_email = service_account_email if not access_token or not service_account_email: ensure_signed_credentials(credentials) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index d465039ea..1a151b096 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -379,6 +379,13 @@ def public_url(self): def from_string(cls, uri, client=None): """Get a constructor for blob object by URI. + .. code-block:: python + + from google.cloud import storage + from google.cloud.storage.blob import Blob + client = storage.Client() + blob = Blob.from_string("gs://bucket/object", client=client) + :type uri: str :param uri: The blob uri pass to get blob object. @@ -389,14 +396,6 @@ def from_string(cls, uri, client=None): :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The blob object created. - - Example: - Get a constructor for blob object by URI. - - >>> from google.cloud import storage - >>> from google.cloud.storage.blob import Blob - >>> client = storage.Client() - >>> blob = Blob.from_string("gs://bucket/object", client=client) """ from google.cloud.storage.bucket import Bucket @@ -433,37 +432,24 @@ def generate_signed_url( .. note:: If you are on Google Compute Engine, you can't generate a signed - URL using GCE service account. Follow `Issue 50`_ for updates on - this. If you'd like to be able to generate a signed URL from GCE, + URL using GCE service account. + If you'd like to be able to generate a signed URL from GCE, you can use a standard service account from a JSON file rather than a GCE service account. - .. _Issue 50: https://github.com/GoogleCloudPlatform/\ - google-auth-library-python/issues/50 - If you have a blob that you want to allow access to for a set amount of time, you can use this method to generate a URL that is only valid within a certain time period. - If ``bucket_bound_hostname`` is set as an argument of :attr:`api_access_endpoint`, - ``https`` works only if using a ``CDN``. - - Example: - Generates a signed URL for this blob using bucket_bound_hostname and scheme. - - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.get_bucket('my-bucket-name') - >>> blob = bucket.get_blob('my-blob-name') - >>> url = blob.generate_signed_url(expiration='url-expiration-time', bucket_bound_hostname='mydomain.tld', - >>> version='v4') - >>> url = blob.generate_signed_url(expiration='url-expiration-time', bucket_bound_hostname='mydomain.tld', - >>> version='v4',scheme='https') # If using ``CDN`` + See a [code sample](https://cloud.google.com/storage/docs/samples/storage-generate-signed-url-v4#storage_generate_signed_url_v4-python). This is particularly useful if you don't want publicly accessible blobs, but don't want to require users to explicitly log in. + If ``bucket_bound_hostname`` is set as an argument of :attr:`api_access_endpoint`, + ``https`` works only if using a ``CDN``. + :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. 
If a ``datetime`` @@ -574,6 +560,9 @@ def generate_signed_url( quoted_name = _quote(self.name, safe=b"/~") + # If you are on Google Compute Engine, you can't generate a signed URL + # using GCE service account. + # See https://github.com/googleapis/google-auth-library-python/issues/50 if virtual_hosted_style: api_access_endpoint = f"https://{self.bucket.name}.storage.googleapis.com" elif bucket_bound_hostname: @@ -1036,18 +1025,6 @@ def download_to_file( If the server-set property, :attr:`media_link`, is not yet initialized, makes an additional API request to load it. - Downloading a file that has been encrypted with a - [`customer-supplied`](https://cloud.google.com/storage/docs/encryption#customer-supplied) - encryption key: - - .. literalinclude:: snippets.py - :start-after: START download_to_file - :end-before: END download_to_file - :dedent: 4 - - The ``encryption_key`` should be a str or bytes with a length of at - least 32. - If the :attr:`chunk_size` of a current blob is `None`, will download data in single download request otherwise it will download the :attr:`chunk_size` of data in each request. @@ -1182,6 +1159,9 @@ def download_to_filename( If :attr:`user_project` is set on the bucket, bills the API request to that project. + See a [code sample](https://cloud.google.com/storage/docs/samples/storage-download-encrypted-file#storage_download_encrypted_file-python) + to download a file with a [`customer-supplied encryption key`](https://cloud.google.com/storage/docs/encryption#customer-supplied). + :type filename: str :param filename: A filename to be passed to ``open``. @@ -2431,19 +2411,9 @@ def upload_from_file( bucket. In the absence of those policies, upload will overwrite any existing contents. - See the `object versioning`_ and `lifecycle`_ API documents - for details. - - Uploading a file with a - [`customer-supplied`](https://cloud.google.com/storage/docs/encryption#customer-supplied) encryption key: - - .. literalinclude:: snippets.py - :start-after: START upload_from_file - :end-before: END upload_from_file - :dedent: 4 - - The ``encryption_key`` should be a str or bytes with a length of at - least 32. + See the [`object versioning`](https://cloud.google.com/storage/docs/object-versioning) + and [`lifecycle`](https://cloud.google.com/storage/docs/lifecycle) + API documents for details. If the size of the data to be uploaded exceeds 8 MB a resumable media request will be used, otherwise the content and the metadata will be @@ -2547,10 +2517,6 @@ def upload_from_file( :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. - - .. _object versioning: https://cloud.google.com/storage/\ - docs/object-versioning - .. _lifecycle: https://cloud.google.com/storage/docs/lifecycle """ if num_retries is not None: warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) @@ -2614,14 +2580,17 @@ def upload_from_filename( bucket. In the absence of those policies, upload will overwrite any existing contents. - See the `object versioning - `_ and - `lifecycle `_ + See the [`object versioning`](https://cloud.google.com/storage/docs/object-versioning) + and [`lifecycle`](https://cloud.google.com/storage/docs/lifecycle) API documents for details. If :attr:`user_project` is set on the bucket, bills the API request to that project. 
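As a short sketch of the call this docstring documents (names are placeholders): passing ``if_generation_match=0`` is one way to opt out of the overwrite behavior described above, since the upload then succeeds only if the object does not already exist.

.. code-block:: python

    from google.cloud import storage

    client = storage.Client()
    bucket = client.bucket("my-bucket")
    blob = bucket.blob("remote-object.txt")

    # With if_generation_match=0, the request fails with a precondition
    # error if a live version of the object already exists.
    blob.upload_from_filename("local-file.txt", if_generation_match=0)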
+ See a [code sample](https://cloud.google.com/storage/docs/samples/storage-upload-encrypted-file#storage_upload_encrypted_file-python) + to upload a file with a + [`customer-supplied encryption key`](https://cloud.google.com/storage/docs/encryption#customer-supplied). + :type filename: str :param filename: The path to the file. @@ -2744,9 +2713,8 @@ def upload_from_string( bucket. In the absence of those policies, upload will overwrite any existing contents. - See the `object versioning - `_ and - `lifecycle `_ + See the [`object versioning`](https://cloud.google.com/storage/docs/object-versioning) + and [`lifecycle`](https://cloud.google.com/storage/docs/lifecycle) API documents for details. If :attr:`user_project` is set on the bucket, bills the API request @@ -2876,12 +2844,10 @@ def create_resumable_upload_session( passes the session URL to the client that will upload the binary data. The client performs a PUT request on the session URL to complete the upload. This process allows untrusted clients to upload to an - access-controlled bucket. For more details, see the - `documentation on signed URLs`_. + access-controlled bucket. - .. _documentation on signed URLs: - https://cloud.google.com/storage/\ - docs/access-control/signed-urls#signing-resumable + For more details, see the + documentation on [`signed URLs`](https://cloud.google.com/storage/docs/access-control/signed-urls#signing-resumable). The content type of the upload will be determined in order of precedence: @@ -2896,9 +2862,8 @@ def create_resumable_upload_session( bucket. In the absence of those policies, upload will overwrite any existing contents. - See the `object versioning - `_ and - `lifecycle `_ + See the [`object versioning`](https://cloud.google.com/storage/docs/object-versioning) + and [`lifecycle`](https://cloud.google.com/storage/docs/lifecycle) API documents for details. If :attr:`encryption_key` is set, the blob will be encrypted with @@ -3339,6 +3304,9 @@ def compose( If :attr:`user_project` is set on the bucket, bills the API request to that project. + See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/compose) + and a [code sample](https://cloud.google.com/storage/docs/samples/storage-compose-file#storage_compose_file-python). + :type sources: list of :class:`Blob` :param sources: Blobs whose contents will be composed into this blob. @@ -3358,14 +3326,11 @@ def compose( destination object's current generation matches the given value. Setting to 0 makes the operation succeed only if there are no live versions of the object. - - .. note:: - - In a previous version, this argument worked identically to the - ``if_source_generation_match`` argument. For - backwards-compatibility reasons, if a list is passed in, - this argument will behave like ``if_source_generation_match`` - and also issue a DeprecationWarning. + Note: In a previous version, this argument worked identically to the + ``if_source_generation_match`` argument. For + backwards-compatibility reasons, if a list is passed in, + this argument will behave like ``if_source_generation_match`` + and also issue a DeprecationWarning. :type if_metageneration_match: long :param if_metageneration_match: @@ -3386,20 +3351,6 @@ def compose( :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` - - Example: - Compose blobs using source generation match preconditions. 
- - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.bucket("bucket-name") - - >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")] - >>> if_source_generation_match = [None] * len(blobs) - >>> if_source_generation_match[0] = "123" # precondition for "blob-name-1" - - >>> composed_blob = bucket.blob("composed-name") - >>> composed_blob.compose(blobs, if_source_generation_match=if_source_generation_match) """ sources_len = len(sources) client = self._require_client(client) @@ -3780,6 +3731,36 @@ def open( which do not provide checksums to validate. See https://cloud.google.com/storage/docs/hashes-etags for details. + See a [code sample](https://github.com/googleapis/python-storage/blob/main/samples/snippets/storage_fileio_write_read.py). + + Keyword arguments to pass to the underlying API calls. + For both uploads and downloads, the following arguments are + supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` + - ``retry`` + + For downloads only, the following additional arguments are supported: + + - ``raw_download`` + + For uploads only, the following additional arguments are supported: + + - ``content_type`` + - ``num_retries`` + - ``predefined_acl`` + - ``checksum`` + + .. note:: + + ``num_retries`` is supported for backwards-compatibility + reasons only; please use ``retry`` with a Retry object or + ConditionalRetryPolicy instead. + :type mode: str :param mode: (Optional) A mode string, as per standard Python `open()` semantics.The first @@ -3834,53 +3815,9 @@ def open( newline mode" and writes use the system default. See the Python 'io' module documentation for 'io.TextIOWrapper' for details. - :param kwargs: - Keyword arguments to pass to the underlying API calls. - For both uploads and downloads, the following arguments are - supported: - - - ``if_generation_match`` - - ``if_generation_not_match`` - - ``if_metageneration_match`` - - ``if_metageneration_not_match`` - - ``timeout`` - - ``retry`` - - For downloads only, the following additional arguments are supported: - - - ``raw_download`` - - For uploads only, the following additional arguments are supported: - - - ``content_type`` - - ``num_retries`` - - ``predefined_acl`` - - ``checksum`` - - .. note:: - - ``num_retries`` is supported for backwards-compatibility - reasons only; please use ``retry`` with a Retry object or - ConditionalRetryPolicy instead. - :returns: A 'BlobReader' or 'BlobWriter' from 'google.cloud.storage.fileio', or an 'io.TextIOWrapper' around one of those classes, depending on the 'mode' argument. - - Example: - Read from a text blob by using open() as context manager. - - Using bucket.get_blob() fetches metadata such as the generation, - which prevents race conditions in case the blob is modified. - - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.bucket("bucket-name") - - >>> blob = bucket.blob("blob-name.txt") - >>> with blob.open("rt") as f: - >>> print(f.read()) - """ if mode == "rb": if encoding or errors or newline: @@ -3986,23 +3923,6 @@ def open( If not set before upload, the server will compute the hash. :rtype: str or ``NoneType`` - - - Example: - Retrieve the crc32c hash of blob. 
- - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.get_bucket("my-bucket-name") - >>> blob = bucket.blob('my-blob') - - >>> blob.crc32c # return None - >>> blob.reload() - >>> blob.crc32c # return crc32c hash - - >>> # Another approach - >>> blob = bucket.get_blob('my-blob') - >>> blob.crc32c # return crc32c hash """ @property @@ -4084,22 +4004,6 @@ def id(self): If not set before upload, the server will compute the hash. :rtype: str or ``NoneType`` - - Example: - Retrieve the md5 hash of blob. - - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.get_bucket("my-bucket-name") - >>> blob = bucket.blob('my-blob') - - >>> blob.md5_hash # return None - >>> blob.reload() - >>> blob.md5_hash # return md5 hash - - >>> # Another approach - >>> blob = bucket.get_blob('my-blob') - >>> blob.md5_hash # return md5 hash """ @property diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 98cbf892b..0a7b09bbb 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -730,6 +730,13 @@ def user_project(self): def from_string(cls, uri, client=None): """Get a constructor for bucket object by URI. + .. code-block:: python + + from google.cloud import storage + from google.cloud.storage.bucket import Bucket + client = storage.Client() + bucket = Bucket.from_string("gs://bucket", client=client) + :type uri: str :param uri: The bucket uri pass to get bucket object. @@ -740,14 +747,6 @@ def from_string(cls, uri, client=None): :rtype: :class:`google.cloud.storage.bucket.Bucket` :returns: The bucket object created. - - Example: - Get a constructor for bucket object by URI.. - - >>> from google.cloud import storage - >>> from google.cloud.storage.bucket import Bucket - >>> client = storage.Client() - >>> bucket = Bucket.from_string("gs://bucket", client=client) """ scheme, netloc, path, query, frag = urlsplit(uri) @@ -1196,12 +1195,8 @@ def get_blob( ): """Get a blob object by name. - This will return None if the blob doesn't exist: - - .. literalinclude:: snippets.py - :start-after: START get_blob - :end-before: END get_blob - :dedent: 4 + See a [code sample](https://cloud.google.com/storage/docs/samples/storage-get-metadata#storage_get_metadata-python) + on how to retrieve metadata of an object. If :attr:`user_project` is set, bills the API request to that project. @@ -1385,15 +1380,6 @@ def list_blobs( :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` in this bucket matching the arguments. - - Example: - List blobs in the bucket with user_project. - - >>> from google.cloud import storage - >>> client = storage.Client() - - >>> bucket = storage.Bucket(client, "my-bucket-name", user_project="my-project") - >>> all_blobs = list(client.list_blobs(bucket)) """ client = self._require_client(client) return client.list_blobs( @@ -1458,8 +1444,8 @@ def get_notification( ): """Get Pub / Sub notification for this bucket. - See: - https://cloud.google.com/storage/docs/json_api/v1/notifications/get + See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/notifications/get) + and a [code sample](https://cloud.google.com/storage/docs/samples/storage-print-pubsub-bucket-notification#storage_print_pubsub_bucket_notification-python). If :attr:`user_project` is set, bills the API request to that project. 
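A minimal sketch of the lookup this docstring describes; the bucket name and notification id are placeholders:

.. code-block:: python

    from google.cloud import storage

    client = storage.Client()
    bucket = client.bucket("my-bucket")

    notification = bucket.get_notification(notification_id="1")  # API request
    print(notification.topic_name)
    print(notification.event_types)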
@@ -1481,15 +1467,6 @@

     :rtype: :class:`.BucketNotification`
     :returns: notification instance.
-
-        Example:
-            Get notification using notification id.
-
-            >>> from google.cloud import storage
-            >>> client = storage.Client()
-            >>> bucket = client.get_bucket('my-bucket-name')  # API request.
-            >>> notification = bucket.get_notification(notification_id='id')  # API request.
-
         """
         notification = self.notification(notification_id=notification_id)
         notification.reload(client=client, timeout=timeout, retry=retry)
@@ -1612,16 +1589,6 @@ def delete_blob(
     ):
         """Deletes a blob from the current bucket.

-        If the blob isn't found (backend 404), raises a
-        :class:`google.cloud.exceptions.NotFound`.
-
-        For example:
-
-        .. literalinclude:: snippets.py
-            :start-after: START delete_blob
-            :end-before: END delete_blob
-            :dedent: 4
-
         If :attr:`user_project` is set, bills the API request to that project.

         :type blob_name: str
@@ -1661,15 +1628,10 @@
         :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries`

-        :raises: :class:`google.cloud.exceptions.NotFound` (to suppress
-                 the exception, call ``delete_blobs``, passing a no-op
-                 ``on_error`` callback, e.g.:
-
-        .. literalinclude:: snippets.py
-            :start-after: START delete_blobs
-            :end-before: END delete_blobs
-            :dedent: 4
-
+        :raises: :class:`google.cloud.exceptions.NotFound`
+            if the blob isn't found. To suppress
+            the exception, call :meth:`delete_blobs`, passing a no-op
+            ``on_error`` callback.
         """
         client = self._require_client(client)
         blob = Blob(blob_name, bucket=self, generation=generation)
@@ -1721,8 +1683,8 @@ def delete_blobs(
            blob names to delete.

        :type on_error: callable
-       :param on_error: (Optional) Takes single argument: ``blob``. Called
-                        called once for each blob raising
+       :param on_error: (Optional) Takes single argument: ``blob``.
+                        Called once for each blob raising
                         :class:`~google.cloud.exceptions.NotFound`;
                         otherwise, the exception is propagated.
@@ -1768,20 +1730,6 @@
        :raises: :class:`~google.cloud.exceptions.NotFound` (if
                 `on_error` is not passed).
-
-        Example:
-            Delete blobs using generation match preconditions.
-
-            >>> from google.cloud import storage
-
-            >>> client = storage.Client()
-            >>> bucket = client.bucket("bucket-name")
-
-            >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")]
-            >>> if_generation_match = [None] * len(blobs)
-            >>> if_generation_match[0] = "123"  # precondition for "blob-name-1"
-
-            >>> bucket.delete_blobs(blobs, if_generation_match=if_generation_match)
        """
        _raise_if_len_differs(
            len(blobs),
@@ -1843,6 +1791,9 @@ def copy_blob(
        If :attr:`user_project` is set, bills the API request to that project.

+       See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/objects/copy)
+       and a [code sample](https://cloud.google.com/storage/docs/samples/storage-copy-file#storage_copy_file-python).
+
        :type blob: :class:`google.cloud.storage.blob.Blob`
        :param blob: The blob to be copied.
@@ -1922,20 +1873,6 @@
        :rtype: :class:`google.cloud.storage.blob.Blob`
        :returns: The new Blob.
-
-        Example:
-            Copy a blob including ACL.
- - >>> from google.cloud import storage - - >>> client = storage.Client(project="project") - - >>> bucket = client.bucket("bucket") - >>> dst_bucket = client.bucket("destination-bucket") - - >>> blob = bucket.blob("file.ext") - >>> new_blob = bucket.copy_blob(blob, dst_bucket) - >>> new_blob.acl.save(blob.acl) """ client = self._require_client(client) query_params = {} @@ -2300,8 +2237,8 @@ def lifecycle_rules(self): >>> bucket.lifecycle_rules = rules >>> bucket.update() - :setter: Set lifestyle rules for this bucket. - :getter: Gets the lifestyle rules for this bucket. + :setter: Set lifecycle rules for this bucket. + :getter: Gets the lifecycle rules for this bucket. :rtype: generator(dict) :returns: A sequence of mappings describing each lifecycle rule. @@ -2326,7 +2263,7 @@ def lifecycle_rules(self): @lifecycle_rules.setter def lifecycle_rules(self, rules): - """Set lifestyle rules configured for this bucket. + """Set lifecycle rules configured for this bucket. See https://cloud.google.com/storage/docs/lifecycle and https://cloud.google.com/storage/docs/json_api/v1/buckets @@ -2338,7 +2275,7 @@ def lifecycle_rules(self, rules): self._patch_property("lifecycle", {"rule": rules}) def clear_lifecyle_rules(self): - """Set lifestyle rules configured for this bucket. + """Clear lifecycle rules configured for this bucket. See https://cloud.google.com/storage/docs/lifecycle and https://cloud.google.com/storage/docs/json_api/v1/buckets @@ -2346,15 +2283,12 @@ def clear_lifecyle_rules(self): self.lifecycle_rules = [] def add_lifecycle_delete_rule(self, **kw): - """Add a "delete" rule to lifestyle rules configured for this bucket. + """Add a "delete" rule to lifecycle rules configured for this bucket. - See https://cloud.google.com/storage/docs/lifecycle and - https://cloud.google.com/storage/docs/json_api/v1/buckets - - .. literalinclude:: snippets.py - :start-after: START add_lifecycle_delete_rule - :end-before: END add_lifecycle_delete_rule - :dedent: 4 + This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle), + which is set on the bucket. For the general format of a lifecycle configuration, see the + [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets). + See also a [code sample](https://cloud.google.com/storage/docs/samples/storage-enable-bucket-lifecycle-management#storage_enable_bucket_lifecycle_management-python). :type kw: dict :params kw: arguments passed to :class:`LifecycleRuleConditions`. @@ -2364,15 +2298,11 @@ def add_lifecycle_delete_rule(self, **kw): self.lifecycle_rules = rules def add_lifecycle_set_storage_class_rule(self, storage_class, **kw): - """Add a "set storage class" rule to lifestyle rules. - - See https://cloud.google.com/storage/docs/lifecycle and - https://cloud.google.com/storage/docs/json_api/v1/buckets + """Add a "set storage class" rule to lifecycle rules. - .. literalinclude:: snippets.py - :start-after: START add_lifecycle_set_storage_class_rule - :end-before: END add_lifecycle_set_storage_class_rule - :dedent: 4 + This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle), + which is set on the bucket. For the general format of a lifecycle configuration, see the + [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets). :type storage_class: str, one of :attr:`STORAGE_CLASSES`. :param storage_class: new storage class to assign to matching items. 
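To make the rule helpers above concrete, a minimal sketch; the bucket name is a placeholder, and the keyword arguments are ``LifecycleRuleConditions`` conditions as documented above:

.. code-block:: python

    from google.cloud import storage

    client = storage.Client()
    bucket = client.get_bucket("my-bucket")  # API request

    # Each helper appends one rule to the bucket's lifecycle configuration.
    bucket.add_lifecycle_delete_rule(age=365)
    bucket.add_lifecycle_set_storage_class_rule("COLDLINE", age=30)

    bucket.patch()  # API request; persists the new lifecycle rules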
@@ -2385,13 +2315,15 @@ def add_lifecycle_set_storage_class_rule(self, storage_class, **kw):
         self.lifecycle_rules = rules

     def add_lifecycle_abort_incomplete_multipart_upload_rule(self, **kw):
-        """Add a "abort incomplete multipart upload" rule to lifestyle rules.
+        """Add an "abort incomplete multipart upload" rule to lifecycle rules.

-        Note that the "age" lifecycle condition is the only supported condition
-        for this rule.
+        .. note::
+          The "age" lifecycle condition is the only supported condition
+          for this rule.

-        See https://cloud.google.com/storage/docs/lifecycle and
-        https://cloud.google.com/storage/docs/json_api/v1/buckets
+        This defines a [lifecycle configuration](https://cloud.google.com/storage/docs/lifecycle),
+        which is set on the bucket. For the general format of a lifecycle configuration, see the
+        [bucket resource representation for JSON](https://cloud.google.com/storage/docs/json_api/v1/buckets).

         :type kw: dict
         :params kw: arguments passed to :class:`LifecycleRuleConditions`.
@@ -2731,31 +2663,14 @@ def requester_pays(self, value):
     def configure_website(self, main_page_suffix=None, not_found_page=None):
         """Configure website-related properties.

-        See https://cloud.google.com/storage/docs/hosting-static-website
+        See https://cloud.google.com/storage/docs/static-website

         .. note::
-            This (apparently) only works
-            if your bucket name is a domain name
-            (and to do that, you need to get approved somehow...).
-
-            If you want this bucket to host a website, just provide the name
-            of an index page and a page to use when a blob isn't found:
-
-            .. literalinclude:: snippets.py
-                :start-after: START configure_website
-                :end-before: END configure_website
-                :dedent: 4
-
-            You probably should also make the whole bucket public:
-
-            .. literalinclude:: snippets.py
-                :start-after: START make_public
-                :end-before: END make_public
-                :dedent: 4
-
-            This says: "Make the bucket public, and all the stuff already in
-            the bucket, and anything else I add to the bucket. Just make it
-            all public."
+            This configures the bucket's website-related properties, controlling how
+            the service behaves when accessing bucket contents as a website.
+            See [tutorials](https://cloud.google.com/storage/docs/hosting-static-website) and
+            [code samples](https://cloud.google.com/storage/docs/samples/storage-define-bucket-website-configuration#storage_define_bucket_website_configuration-python)
+            for more information.

         :type main_page_suffix: str
         :param main_page_suffix: The page to use as the main page
@@ -2785,8 +2700,8 @@ def get_iam_policy(
         """Retrieve the IAM policy for the bucket.

-        See
-        https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy
+        See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy)
+        and a [code sample](https://cloud.google.com/storage/docs/samples/storage-view-bucket-iam-members#storage_view_bucket_iam_members-python).

         If :attr:`user_project` is set, bills the API request to that project.
@@ -2819,30 +2734,6 @@
         :rtype: :class:`google.api_core.iam.Policy`
         :returns: the policy instance, based on the resource returned from
                   the ``getIamPolicy`` API request.
-
-        Example:
-
-        ..
code-block:: python - - from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE - - policy = bucket.get_iam_policy(requested_policy_version=3) - - policy.version = 3 - - # Add a binding to the policy via it's bindings property - policy.bindings.append({ - "role": STORAGE_OBJECT_VIEWER_ROLE, - "members": {"serviceAccount:account@project.iam.gserviceaccount.com", ...}, - # Optional: - "condition": { - "title": "prefix" - "description": "Objects matching prefix" - "expression": "resource.name.startsWith(\"projects/project-name/buckets/bucket-name/objects/prefix\")" - } - }) - - bucket.set_iam_policy(policy) """ client = self._require_client(client) query_params = {} @@ -3159,19 +3050,10 @@ def generate_upload_policy(self, conditions, expiration=None, client=None): """Create a signed upload policy for uploading objects. This method generates and signs a policy document. You can use - `policy documents`_ to allow visitors to a website to upload files to + [`policy documents`](https://cloud.google.com/storage/docs/xml-api/post-object-forms) + to allow visitors to a website to upload files to Google Cloud Storage without giving them direct write access. - - For example: - - .. literalinclude:: snippets.py - :start-after: START policy_document - :end-before: END policy_document - :dedent: 4 - - .. _policy documents: - https://cloud.google.com/storage/docs/xml-api\ - /post-object#policydocument + See a [code sample](https://cloud.google.com/storage/docs/xml-api/post-object-forms#python). :type expiration: datetime :param expiration: (Optional) Expiration in UTC. If not specified, the @@ -3179,7 +3061,7 @@ def generate_upload_policy(self, conditions, expiration=None, client=None): :type conditions: list :param conditions: A list of conditions as described in the - `policy documents`_ documentation. + `policy documents` documentation. :type client: :class:`~google.cloud.storage.client.Client` :param client: (Optional) The client to use. If not passed, falls back @@ -3290,13 +3172,9 @@ def generate_signed_url( .. note:: If you are on Google Compute Engine, you can't generate a signed - URL using GCE service account. Follow `Issue 50`_ for updates on - this. If you'd like to be able to generate a signed URL from GCE, - you can use a standard service account from a JSON file rather - than a GCE service account. - - .. _Issue 50: https://github.com/GoogleCloudPlatform/\ - google-auth-library-python/issues/50 + URL using GCE service account. If you'd like to be able to generate + a signed URL from GCE, you can use a standard service account from a + JSON file rather than a GCE service account. If you have a bucket that you want to allow access to for a set amount of time, you can use this method to generate a URL that @@ -3305,21 +3183,6 @@ def generate_signed_url( If ``bucket_bound_hostname`` is set as an argument of :attr:`api_access_endpoint`, ``https`` works only if using a ``CDN``. - Example: - Generates a signed URL for this bucket using bucket_bound_hostname and scheme. 
- - >>> from google.cloud import storage - >>> client = storage.Client() - >>> bucket = client.get_bucket('my-bucket-name') - >>> url = bucket.generate_signed_url(expiration='url-expiration-time', bucket_bound_hostname='mydomain.tld', - >>> version='v4') - >>> url = bucket.generate_signed_url(expiration='url-expiration-time', bucket_bound_hostname='mydomain.tld', - >>> version='v4',scheme='https') # If using ``CDN`` - - This is particularly useful if you don't want publicly - accessible buckets, but don't want to require users to explicitly - log in. - :type expiration: Union[Integer, datetime.datetime, datetime.timedelta] :param expiration: Point in time when the signed URL should expire. If a ``datetime`` instance is passed without an explicit @@ -3393,6 +3256,9 @@ def generate_signed_url( elif version not in ("v2", "v4"): raise ValueError("'version' must be either 'v2' or 'v4'") + # If you are on Google Compute Engine, you can't generate a signed URL + # using GCE service account. + # See https://github.com/googleapis/google-auth-library-python/issues/50 if virtual_hosted_style: api_access_endpoint = f"https://{self.name}.storage.googleapis.com" elif bucket_bound_hostname: diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 9cccf413b..56bfa67cf 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -710,10 +710,9 @@ def get_bucket( if_metageneration_not_match=None, retry=DEFAULT_RETRY, ): - """API call: retrieve a bucket via a GET request. + """Retrieve a bucket via a GET request. - See - https://cloud.google.com/storage/docs/json_api/v1/buckets/get + See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/get) and a [code sample](https://cloud.google.com/storage/docs/samples/storage-get-bucket-metadata#storage_get_bucket_metadata-python). Args: bucket_or_name (Union[ \ @@ -757,27 +756,6 @@ def get_bucket( Raises: google.cloud.exceptions.NotFound If the bucket is not found. - - Examples: - Retrieve a bucket using a string. - - .. literalinclude:: snippets.py - :start-after: START get_bucket - :end-before: END get_bucket - :dedent: 4 - - Get a bucket using a resource. - - >>> from google.cloud import storage - >>> client = storage.Client() - - >>> # Set properties on a plain resource object. - >>> bucket = client.get_bucket("my-bucket-name") - - >>> # Time passes. Another program may have modified the bucket - ... # in the meantime, so you want to get the latest state. - >>> bucket = client.get_bucket(bucket) # API request. - """ bucket = self._bucket_arg_to_bucket(bucket_or_name) bucket.reload( @@ -800,12 +778,7 @@ def lookup_bucket( """Get a bucket by name, returning None if not found. You can use this if you would rather check for a None value - than catching an exception: - - .. literalinclude:: snippets.py - :start-after: START lookup_bucket - :end-before: END lookup_bucket - :dedent: 4 + than catching a NotFound exception. :type bucket_name: str :param bucket_name: The name of the bucket to get. @@ -827,7 +800,7 @@ def lookup_bucket( :param retry: (Optional) How to retry the RPC. See: :ref:`configuring_retries` - :rtype: :class:`google.cloud.storage.bucket.Bucket` + :rtype: :class:`google.cloud.storage.bucket.Bucket` or ``NoneType`` :returns: The bucket matching the name provided or None if not found. """ try: @@ -854,10 +827,9 @@ def create_bucket( timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): - """API call: create a new bucket via a POST request. 
+ """Create a new bucket via a POST request. - See - https://cloud.google.com/storage/docs/json_api/v1/buckets/insert + See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/insert) and a [code sample](https://cloud.google.com/storage/docs/samples/storage-create-bucket#storage_create_bucket-python). Args: bucket_or_name (Union[ \ @@ -878,7 +850,7 @@ def create_bucket( location (str): (Optional) The location of the bucket. If not passed, the default location, US, will be used. If specifying a dual-region, - `data_locations` should be set in conjunction.. See: + `data_locations` should be set in conjunction. See: https://cloud.google.com/storage/docs/locations data_locations (list of str): (Optional) The list of regional locations of a custom dual-region bucket. @@ -917,28 +889,6 @@ def create_bucket( Raises: google.cloud.exceptions.Conflict If the bucket already exists. - - Examples: - Create a bucket using a string. - - .. literalinclude:: snippets.py - :start-after: START create_bucket - :end-before: END create_bucket - :dedent: 4 - - Create a bucket using a resource. - - >>> from google.cloud import storage - >>> client = storage.Client() - - >>> # Set properties on a plain resource object. - >>> bucket = storage.Bucket("my-bucket-name") - >>> bucket.location = "europe-west6" - >>> bucket.storage_class = "COLDLINE" - - >>> # Pass that resource object to the client. - >>> bucket = client.create_bucket(bucket) # API request. - """ bucket = self._bucket_arg_to_bucket(bucket_or_name) query_params = {} @@ -1019,6 +969,8 @@ def download_blob_to_file( ): """Download the contents of a blob object or blob URI into a file-like object. + See https://cloud.google.com/storage/docs/downloading-objects + Args: blob_or_uri (Union[ \ :class:`~google.cloud.storage.blob.Blob`, \ @@ -1090,30 +1042,6 @@ def download_blob_to_file( predicates in a Retry object. The default will always be used. Other configuration changes for Retry objects such as delays and deadlines are respected. - - Examples: - Download a blob using a blob resource. - - >>> from google.cloud import storage - >>> client = storage.Client() - - >>> bucket = client.get_bucket('my-bucket-name') - >>> blob = storage.Blob('path/to/blob', bucket) - - >>> with open('file-to-download-to', 'w') as file_obj: - >>> client.download_blob_to_file(blob, file_obj) # API request. - - - Download a blob using a URI. - - >>> from google.cloud import storage - >>> client = storage.Client() - - >>> with open('file-to-download-to', 'wb') as file_obj: - >>> client.download_blob_to_file( - >>> 'gs://bucket_name/path/to/blob', file_obj) - - """ # Handle ConditionalRetryPolicy. @@ -1184,6 +1112,11 @@ def list_blobs( If :attr:`user_project` is set, bills the API request to that project. + .. note:: + List prefixes (directories) in a bucket using a prefix and delimiter. + See a [code sample](https://cloud.google.com/storage/docs/samples/storage-list-files-with-prefix#storage_list_files_with_prefix-python) + listing objects using a prefix filter. + Args: bucket_or_name (Union[ \ :class:`~google.cloud.storage.bucket.Bucket`, \ @@ -1274,12 +1207,6 @@ def list_blobs( As part of the response, you'll also get back an iterator.prefixes entity that lists object names up to and including the requested delimiter. Duplicate entries are omitted from this list. - - .. note:: - List prefixes (directories) in a bucket using a prefix and delimiter. 
- See a [sample](https://cloud.google.com/storage/docs/samples/storage-list-files-with-prefix#storage_list_files_with_prefix-python) - listing objects using a prefix filter. - """ bucket = self._bucket_arg_to_bucket(bucket_or_name) @@ -1342,12 +1269,7 @@ def list_buckets( This will not populate the list of blobs available in each bucket. - .. literalinclude:: snippets.py - :start-after: START list_buckets - :end-before: END list_buckets - :dedent: 4 - - This implements "storage.buckets.list". + See [API reference docs](https://cloud.google.com/storage/docs/json_api/v1/buckets/list) and a [code sample](https://cloud.google.com/storage/docs/samples/storage-list-buckets#storage_list_buckets-python). :type max_results: int :param max_results: (Optional) The maximum number of buckets to return. @@ -1606,7 +1528,7 @@ def generate_signed_post_policy_v4( service_account_email=None, access_token=None, ): - """Generate a V4 signed policy object. + """Generate a V4 signed policy object. Generated policy object allows user to upload objects with a POST request. .. note:: @@ -1615,7 +1537,7 @@ def generate_signed_post_policy_v4( ``credentials`` has a ``service_account_email`` property which identifies the credentials. - Generated policy object allows user to upload objects with a POST request. + See a [code sample](https://github.com/googleapis/python-storage/blob/main/samples/snippets/storage_generate_signed_post_policy_v4.py). :type bucket_name: str :param bucket_name: Bucket name. @@ -1663,28 +1585,6 @@ def generate_signed_post_policy_v4( :rtype: dict :returns: Signed POST policy. - - Example: - Generate signed POST policy and upload a file. - - >>> import datetime - >>> from google.cloud import storage - >>> client = storage.Client() - >>> tz = datetime.timezone(datetime.timedelta(hours=1), 'CET') - >>> policy = client.generate_signed_post_policy_v4( - "bucket-name", - "blob-name", - expiration=datetime.datetime(2020, 3, 17, tzinfo=tz), - conditions=[ - ["content-length-range", 0, 255] - ], - fields=[ - "x-goog-meta-hello" => "world" - ], - ) - >>> with open("bucket-name", "rb") as f: - files = {"file": ("bucket-name", f)} - requests.post(policy["url"], data=policy["fields"], files=files) """ credentials = self._credentials if credentials is None else credentials ensure_signed_credentials(credentials) diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py index dfdb90c7c..d3ae135bb 100644 --- a/google/cloud/storage/fileio.py +++ b/google/cloud/storage/fileio.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Support for file-like I/O.""" + import io import warnings diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py index 7f6de7eee..41f513ec6 100644 --- a/google/cloud/storage/hmac_key.py +++ b/google/cloud/storage/hmac_key.py @@ -12,6 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Configure HMAC keys that can be used to authenticate requests to Google Cloud Storage. 
+ +See [HMAC keys documentation](https://cloud.google.com/storage/docs/authentication/hmackeys) +""" + from google.cloud.exceptions import NotFound from google.cloud._helpers import _rfc3339_nanos_to_datetime diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index d9fa79ac6..4eb807fa9 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -12,7 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Support for bucket notification resources.""" +"""Configure bucket notification resources to interact with Google Cloud Pub/Sub. + +See [Cloud Pub/Sub Notifications for Google Cloud Storage](https://cloud.google.com/storage/docs/pubsub-notifications) +""" import re @@ -414,22 +417,17 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): def _parse_topic_path(topic_path): """Verify that a topic path is in the correct format. - .. _resource manager docs: https://cloud.google.com/resource-manager/\ - reference/rest/v1beta1/projects#\ - Project.FIELDS.project_id - .. _topic spec: https://cloud.google.com/storage/docs/json_api/v1/\ - notifications/insert#topic - Expected to be of the form: //pubsub.googleapis.com/projects/{project}/topics/{topic} where the ``project`` value must be "6 to 30 lowercase letters, digits, or hyphens. It must start with a letter. Trailing hyphens are prohibited." - (see `resource manager docs`_) and ``topic`` must have length at least two, + (see [`resource manager docs`](https://cloud.google.com/resource-manager/reference/rest/v1beta1/projects#Project.FIELDS.project_id)) + and ``topic`` must have length at least two, must start with a letter and may only contain alphanumeric characters or ``-``, ``_``, ``.``, ``~``, ``+`` or ``%`` (i.e characters used for URL - encoding, see `topic spec`_). + encoding, see [`topic spec`](https://cloud.google.com/storage/docs/json_api/v1/notifications/insert#topic)). Args: topic_path (str): The topic path to be verified. diff --git a/google/cloud/storage/retry.py b/google/cloud/storage/retry.py index a9fb3bb3f..3ea3ae4a0 100644 --- a/google/cloud/storage/retry.py +++ b/google/cloud/storage/retry.py @@ -12,6 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Helpers for configuring retries with exponential back-off. + +See [Retry Strategy for Google Cloud Storage](https://cloud.google.com/storage/docs/retry-strategy#client-libraries) +""" + import requests import requests.exceptions as requests_exceptions
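As a hedged illustration of how these retry helpers are typically consumed (the bucket and object names are placeholders; ``with_deadline`` and ``with_delay`` are methods of ``google.api_core.retry.Retry``, from which ``DEFAULT_RETRY`` is built):

.. code-block:: python

    from google.cloud import storage
    from google.cloud.storage.retry import DEFAULT_RETRY

    client = storage.Client()
    blob = client.bucket("my-bucket").blob("my-object")

    # Derive a policy from DEFAULT_RETRY: give up after 60 seconds overall,
    # backing off exponentially between attempts.
    modified_retry = DEFAULT_RETRY.with_deadline(60.0)
    modified_retry = modified_retry.with_delay(
        initial=1.5, multiplier=1.2, maximum=45.0
    )

    # Pass the customized policy on a per-call basis.
    blob.download_to_filename("local-file", retry=modified_retry)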