class BigQueryContainer(DockerContainer):
    """
    BigQuery emulator container for integration-testing BigQuery clients.

    Example:

        The example will spin up a BigQuery emulator that you can use for integration
        tests. The :code:`bigquery` instance provides the convenience method
        :code:`get_bigquery_client` to interact with the emulated BigQuery server.

        .. doctest::

            >>> from testcontainers.google import BigQueryContainer

            >>> config = BigQueryContainer()
            >>> with config as bigquery:
            ...    bq = bigquery.get_bigquery_client()
            ...    datasets = bq.list_datasets("test-project")
    """

    def __init__(
        self, image: str = "ghcr.io/goccy/bigquery-emulator:latest", project: str = "test-project",
        dataset: str = "test-containers", port: int = 9050, **kwargs
    ) -> None:
        """
        :param image: emulator image to run (goccy/bigquery-emulator).
        :param project: GCP project id the emulator is started with.
        :param dataset: dataset created at startup.
        :param port: REST port the emulator listens on inside the container.
        """
        # The emulator image is only published for x86_64; pin the platform so
        # the container also runs (emulated) on ARM hosts such as Apple Silicon.
        super().__init__(image=image, platform="linux/x86_64", **kwargs)
        self.project = project
        self.dataset = dataset
        self.port = port
        self.with_exposed_ports(self.port)
        self.with_command(f"--project {self.project} --dataset {self.dataset} --port {self.port}")

    def get_bigquery_emulator_host(self) -> str:
        """Return the emulator's REST endpoint as reachable from the test host."""
        return f"http://{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}"

    def _get_client(self, cls: type, **kwargs):
        # Instantiate *cls* with the given kwargs. Returns an instance of *cls*
        # (the previous ``-> dict`` annotation was incorrect).
        return cls(**kwargs)

    def get_bigquery_client(self, **kwargs) -> bigquery.Client:
        """Return a :class:`bigquery.Client` pre-configured to talk to the emulator."""
        from google.auth import credentials

        # Point the client at the emulator and use anonymous credentials so no
        # real GCP auth is attempted.
        kwargs["client_options"] = {"api_endpoint": self.get_bigquery_emulator_host()}
        kwargs["credentials"] = credentials.AnonymousCredentials()
        return self._get_client(bigquery.Client, **kwargs)
class GoogleCloudStorageContainer(DockerContainer):
    """
    Google Cloud Storage container for testing managed object storage buckets.

    Example:

        The example will spin up a fake GCS server that you can use for integration
        tests. The :code:`storage` instance provides the convenience method
        :code:`get_storage_client` to interact with the fake GCS server without
        having to set the environment variable :code:`STORAGE_EMULATOR_HOST`.

        .. doctest::

            >>> from testcontainers.google import GoogleCloudStorageContainer

            >>> config = GoogleCloudStorageContainer()
            >>> with config as gcs:
            ...    client = gcs.get_storage_client()
            ...    bucket = client.create_bucket("test-bucket")
    """

    def __init__(
        self, image: str = "fsouza/fake-gcs-server",
        location: str = "US-CENTRAL1",
        scheme: str = "http",
        port_http: int = 8000,
        data: Optional[str] = None,
        **kwargs
    ) -> None:
        """
        :param image: fake GCS server image to run.
        :param location: default bucket location reported by the server.
        :param scheme: ``http`` or ``https`` endpoint scheme.
        :param port_http: port the server listens on inside the container.
        :param data: optional host directory with seed objects to mount at /data.
        """
        super().__init__(image=image, **kwargs)
        self.location = location
        self.scheme = scheme
        self.port_http = port_http
        self.data = data
        self.with_exposed_ports(self.port_http)
        command = f"-location {location} -scheme={scheme} -port={port_http}"
        if self.data:
            # Mount the host directory with seed objects and tell the server
            # to serve its contents.
            self.with_volume_mapping(self.data, "/data")
            command += " -data /data"
        self.with_command(command)

    def get_gcs_emulator_host(self) -> str:
        """Return the emulator endpoint as reachable from the test host."""
        return f"{self.scheme}://{self.get_container_host_ip()}:{self.get_exposed_port(self.port_http)}"

    def _get_client(self, cls: type, **kwargs):
        # google-cloud-storage reads STORAGE_EMULATOR_HOST at client
        # construction time; patch it only for the duration of the call so the
        # caller's environment is left untouched. Returns an instance of *cls*
        # (the previous ``-> dict`` annotation was incorrect).
        with patch.dict(os.environ, STORAGE_EMULATOR_HOST=self.get_gcs_emulator_host()):
            return cls(**kwargs)

    def get_storage_client(self, **kwargs) -> storage.Client:
        """Return a :class:`storage.Client` pre-configured to talk to the emulator."""
        from google.auth import credentials

        # Anonymous credentials: the fake server performs no authentication.
        kwargs["credentials"] = credentials.AnonymousCredentials()
        return self._get_client(storage.Client, **kwargs)


def test_gcs_container():
    from google.cloud import storage

    gcs: GoogleCloudStorageContainer
    with GoogleCloudStorageContainer() as gcs:
        wait_for_logs(gcs, 'level=INFO msg="server started at', timeout=60)
        # Create a bucket and verify it is listed back by the fake server.
        client: storage.Client = gcs.get_storage_client()
        client.create_bucket("test-bucket")

        buckets = list(client.list_buckets())

        assert any(bucket.name == "test-bucket" for bucket in buckets)


def test_bigquery_container():
    from google.cloud import bigquery

    # NOTE: do not name the container variable ``bigquery`` — that would shadow
    # the ``google.cloud.bigquery`` module imported above, making the
    # ``bigquery.Client`` annotation below refer to the container instance.
    with BigQueryContainer() as container:
        # ``wait_for_logs`` interprets the predicate as a regex; escape the
        # brackets so ``[bigquery-emulator]`` is matched literally instead of
        # being parsed as a character class.
        wait_for_logs(container, r"\[bigquery-emulator\] REST server listening at", timeout=60)
        # The startup dataset should be visible through the client.
        client: bigquery.Client = container.get_bigquery_client()
        datasets = client.list_datasets(project="test-project")

        assert any(dataset.dataset_id == "test-containers" for dataset in datasets)
"google-cloud-bigquery" +version = "3.30.0" +description = "Google BigQuery API client library" +optional = true +python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"google\"" +files = [ + {file = "google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877"}, + {file = "google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6"}, +] + +[package.dependencies] +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.3,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = ["google-cloud-bigquery[bigquery-v2,bqstorage,geopandas,ipython,ipywidgets,opentelemetry,pandas,tqdm]"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<2.0dev)"] +ipython = ["bigquery-magics (>=0.1.0)"] +ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev) ; python_version >= \"3.11\"", "importlib-metadata (>=1.0.0) ; python_version < \"3.8\"", "pandas (>=1.1.0)", "pandas-gbq (>=0.26.1) ; python_version >= \"3.8\"", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + +[package.source] +type = "legacy" +url = "https://pypi.org/simple" +reference = "PyPI-public" + [[package]] name = 
"google-cloud-core" version = "2.4.1" @@ -1808,6 +1846,114 @@ type = "legacy" url = "https://pypi.org/simple" reference = "PyPI-public" +[[package]] +name = "google-cloud-storage" +version = "3.0.0" +description = "Google Cloud Storage API client library" +optional = true +python-versions = ">=3.7" +groups = ["main"] +markers = "extra == \"google\"" +files = [ + {file = "google_cloud_storage-3.0.0-py2.py3-none-any.whl", hash = "sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a"}, + {file = "google_cloud_storage-3.0.0.tar.gz", hash = "sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297"}, +] + +[package.dependencies] +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.7.2" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] + +[package.source] +type = "legacy" +url = "https://pypi.org/simple" +reference = "PyPI-public" + +[[package]] +name = "google-crc32c" +version = "1.7.1" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"google\"" +files = [ + {file = "google_crc32c-1.7.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:b07d48faf8292b4db7c3d64ab86f950c2e94e93a11fd47271c28ba458e4a0d76"}, + {file = "google_crc32c-1.7.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:7cc81b3a2fbd932a4313eb53cc7d9dde424088ca3a0337160f35d91826880c1d"}, + {file = "google_crc32c-1.7.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c67ca0a1f5b56162951a9dae987988679a7db682d6f97ce0f6381ebf0fbea4c"}, + {file = "google_crc32c-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc5319db92daa516b653600794d5b9f9439a9a121f3e162f94b0e1891c7933cb"}, + {file = 
"google_crc32c-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcdf5a64adb747610140572ed18d011896e3b9ae5195f2514b7ff678c80f1603"}, + {file = "google_crc32c-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:754561c6c66e89d55754106739e22fdaa93fafa8da7221b29c8b8e8270c6ec8a"}, + {file = "google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06"}, + {file = "google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9"}, + {file = "google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77"}, + {file = "google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53"}, + {file = "google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d"}, + {file = "google_crc32c-1.7.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2d73a68a653c57281401871dd4aeebbb6af3191dcac751a76ce430df4d403194"}, + {file = "google_crc32c-1.7.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:22beacf83baaf59f9d3ab2bbb4db0fb018da8e5aebdce07ef9f09fce8220285e"}, + {file = "google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19eafa0e4af11b0a4eb3974483d55d2d77ad1911e6cf6f832e1574f6781fd337"}, + {file = "google_crc32c-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6d86616faaea68101195c6bdc40c494e4d76f41e07a37ffdef270879c15fb65"}, + {file = "google_crc32c-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:b7491bdc0c7564fcf48c0179d2048ab2f7c7ba36b84ccd3a3e1c3f7a72d3bba6"}, + {file = "google_crc32c-1.7.1-cp313-cp313-macosx_12_0_arm64.whl", hash = 
"sha256:df8b38bdaf1629d62d51be8bdd04888f37c451564c2042d36e5812da9eff3c35"}, + {file = "google_crc32c-1.7.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:e42e20a83a29aa2709a0cf271c7f8aefaa23b7ab52e53b322585297bb94d4638"}, + {file = "google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:905a385140bf492ac300026717af339790921f411c0dfd9aa5a9e69a08ed32eb"}, + {file = "google_crc32c-1.7.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b211ddaf20f7ebeec5c333448582c224a7c90a9d98826fbab82c0ddc11348e6"}, + {file = "google_crc32c-1.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:0f99eaa09a9a7e642a61e06742856eec8b19fc0037832e03f941fe7cf0c8e4db"}, + {file = "google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32d1da0d74ec5634a05f53ef7df18fc646666a25efaaca9fc7dcfd4caf1d98c3"}, + {file = "google_crc32c-1.7.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e10554d4abc5238823112c2ad7e4560f96c7bf3820b202660373d769d9e6e4c9"}, + {file = "google_crc32c-1.7.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:9fc196f0b8d8bd2789352c6a522db03f89e83a0ed6b64315923c396d7a932315"}, + {file = "google_crc32c-1.7.1-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:bb5e35dcd8552f76eed9461a23de1030920a3c953c1982f324be8f97946e7127"}, + {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f2226b6a8da04f1d9e61d3e357f2460b9551c5e6950071437e122c958a18ae14"}, + {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f2b3522222746fff0e04a9bd0a23ea003ba3cccc8cf21385c564deb1f223242"}, + {file = "google_crc32c-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bda0fcb632d390e3ea8b6b07bf6b4f4a66c9d02dcd6fbf7ba00a197c143f582"}, + {file = "google_crc32c-1.7.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:713121af19f1a617054c41f952294764e0c5443d5a5d9034b2cd60f5dd7e0349"}, + {file = "google_crc32c-1.7.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8e9afc74168b0b2232fb32dd202c93e46b7d5e4bf03e66ba5dc273bb3559589"}, + {file = "google_crc32c-1.7.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa8136cc14dd27f34a3221c0f16fd42d8a40e4778273e61a3c19aedaa44daf6b"}, + {file = "google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48"}, + {file = "google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82"}, + {file = "google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472"}, +] + +[package.extras] +testing = ["pytest"] + +[package.source] +type = "legacy" +url = "https://pypi.org/simple" +reference = "PyPI-public" + +[[package]] +name = "google-resumable-media" +version = "2.8.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = true +python-versions = ">= 3.7" +groups = ["main"] +markers = "extra == \"google\"" +files = [ + {file = "google_resumable_media-2.8.0-py3-none-any.whl", hash = "sha256:dd14a116af303845a8d932ddae161a26e86cc229645bc98b39f026f9b1717582"}, + {file = "google_resumable_media-2.8.0.tar.gz", hash = "sha256:f1157ed8b46994d60a1bc432544db62352043113684d4e030ee02e77ebe9a1ae"}, +] + +[package.dependencies] +google-crc32c = ">=1.0.0,<2.0.0" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "google-auth (>=1.22.0,<2.0.0)"] +requests = ["requests (>=2.18.0,<3.0.0)"] + +[package.source] +type = "legacy" +url = "https://pypi.org/simple" +reference = "PyPI-public" + [[package]] name = "googleapis-common-protos" version = "1.70.0" @@ -4056,7 +4202,7 @@ files = [ {file 
= "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] -markers = {main = "(extra == \"arangodb\" or extra == \"keycloak\" or extra == \"weaviate\" or python_version >= \"3.10\") and (extra == \"arangodb\" or extra == \"keycloak\" or extra == \"weaviate\" or extra == \"openfga\") and (python_version <= \"3.10\" or extra == \"openfga\" or extra == \"arangodb\" or extra == \"weaviate\" or extra == \"keycloak\") and (python_version < \"3.13\" or extra == \"openfga\" or extra == \"arangodb\" or extra == \"weaviate\")"} +markers = {main = "(extra == \"arangodb\" or extra == \"google\" or extra == \"keycloak\" or extra == \"weaviate\" or python_version >= \"3.10\") and (extra == \"arangodb\" or extra == \"google\" or extra == \"keycloak\" or extra == \"weaviate\" or extra == \"openfga\") and (python_version <= \"3.10\" or extra == \"openfga\" or extra == \"arangodb\" or extra == \"google\" or extra == \"weaviate\" or extra == \"keycloak\") and (python_version < \"3.13\" or extra == \"openfga\" or extra == \"arangodb\" or extra == \"google\" or extra == \"weaviate\")"} [package.source] type = "legacy" @@ -7472,7 +7618,7 @@ cosmosdb = ["azure-cosmos"] db2 = ["ibm_db_sa", "sqlalchemy"] elasticsearch = [] generic = ["httpx", "redis"] -google = ["google-cloud-datastore", "google-cloud-pubsub"] +google = ["google-cloud-bigquery", "google-cloud-datastore", "google-cloud-pubsub", "google-cloud-storage"] influxdb = ["influxdb", "influxdb-client"] k3s = ["kubernetes", "pyyaml"] kafka = [] @@ -7510,4 +7656,4 @@ weaviate = ["weaviate-client"] [metadata] lock-version = "2.1" python-versions = ">=3.9.2" -content-hash = "9a3a047c18407dec1b8e4add0c59b44b9613f208803e4ca83abfb3c60c1c757f" +content-hash = "df6cbd2490bba62a68d9e6d4dde9529afd052cf9968f9410ca5aed2970f075ee" diff --git a/pyproject.toml 
b/pyproject.toml index 1a0231c51..ffb3b9d6f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,8 @@ cassandra-driver = { version = "3.29.1", optional = true } #clickhouse-driver = { version = "*", optional = true } google-cloud-pubsub = { version = ">=2", optional = true } google-cloud-datastore = { version = ">=2", optional = true } +google-cloud-storage = { version = ">=2", optional = true } +google-cloud-bigquery = { version = ">=2", optional = true } influxdb = { version = "*", optional = true } influxdb-client = { version = "*", optional = true } kubernetes = { version = "*", optional = true } @@ -160,7 +162,7 @@ generic = [ "redis", ] # The advance doctests for ServerContainer require redis test_module_import = ["httpx"] -google = ["google-cloud-pubsub", "google-cloud-datastore"] +google = ["google-cloud-pubsub", "google-cloud-datastore", "google-cloud-storage", "google-cloud-bigquery"] influxdb = ["influxdb", "influxdb-client"] k3s = ["kubernetes", "pyyaml"] kafka = []