Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions modules/google/testcontainers/google/__init__.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,4 @@
from .datastore import DatastoreContainer # noqa: F401
from .pubsub import PubSubContainer # noqa: F401
from .bigquery import BigQueryContainer # noqa: F401
from .gcs import GoogleCloudStorageContainer # noqa: F401
61 changes: 61 additions & 0 deletions modules/google/testcontainers/google/bigquery.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from google.cloud import bigquery

from testcontainers.core.container import DockerContainer


class BigQueryContainer(DockerContainer):
    """
    BigQuery emulator container for testing BigQuery-backed code.

    Example:

        The example will spin up a BigQuery emulator that you can use for integration
        tests. The :code:`bigquery` instance provides convenience method :code:`get_bigquery_client`
        to interact with the fake BigQuery Server.

        .. doctest::

            >>> from testcontainers.google import BigQueryContainer

            >>> config = BigQueryContainer()
            >>> with config as bigquery:
            ...    bq = bigquery.get_bigquery_client()
            ...    datasets = bq.list_datasets("test-project")
    """

    def __init__(
        self, image: str = "ghcr.io/goccy/bigquery-emulator:latest", project: str = "test-project",
        dataset: str = "test-containers", port: int = 9050, **kwargs
    ) -> None:
        """Start the goccy/bigquery-emulator image with the given project/dataset pre-created.

        Args:
            image: Docker image to run (the emulator is only published for linux/x86_64,
                hence the explicit platform below).
            project: GCP project id the emulator serves.
            dataset: Dataset created on startup.
            port: REST port the emulator listens on inside the container.
        """
        super().__init__(image=image, platform="linux/x86_64", **kwargs)
        self.project = project
        self.dataset = dataset
        self.port = port
        self.with_exposed_ports(self.port)
        # The emulator is configured entirely via CLI flags.
        self.with_command(f"--project {self.project} --dataset {self.dataset} --port {self.port}")

    def get_bigquery_emulator_host(self) -> str:
        """Return the http endpoint of the emulator as reachable from the host."""
        return f"http://{self.get_container_host_ip()}:{self.get_exposed_port(self.port)}"

    def _get_client(self, cls: type, **kwargs):
        """Instantiate ``cls`` with the given kwargs and return the instance.

        Note: previously annotated ``-> dict``, which was incorrect — the return
        value is whatever ``cls(**kwargs)`` constructs (e.g. a ``bigquery.Client``).
        """
        return cls(**kwargs)

    def get_bigquery_client(self, **kwargs) -> bigquery.Client:
        """Return a ``bigquery.Client`` wired to the emulator with anonymous credentials."""
        from google.auth import credentials

        # Point the client at the emulator instead of the real BigQuery API.
        kwargs["client_options"] = {"api_endpoint": self.get_bigquery_emulator_host()}
        kwargs["credentials"] = credentials.AnonymousCredentials()
        return self._get_client(bigquery.Client, **kwargs)
76 changes: 76 additions & 0 deletions modules/google/testcontainers/google/gcs.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from typing import Optional
from unittest.mock import patch

from google.cloud import storage
from testcontainers.core.container import DockerContainer


class GoogleCloudStorageContainer(DockerContainer):
    """
    GoogleCloudStorage container for testing managed object storage buckets.

    Example:

        The example will spin up a Google Cloud Storage Fake Server that you can use for integration
        tests. The :code:`storage` instance provides convenience methods :code:`get_storage_client` to interact with
        the fake GCS server without having to set the environment variable :code:`STORAGE_EMULATOR_HOST`.

        .. doctest::

            >>> from testcontainers.google import GoogleCloudStorageContainer

            >>> config = GoogleCloudStorageContainer()
            >>> with config as gcs:
            ...    client = gcs.get_storage_client()
            ...    bucket = client.create_bucket("test-bucket")
    """

    def __init__(
        self, image: str = "fsouza/fake-gcs-server",
        location: str = "US-CENTRAL1",
        scheme: str = "http",
        port_http: int = 8000,
        data: Optional[str] = None,
        **kwargs
    ) -> None:
        """Start a fsouza/fake-gcs-server container.

        Args:
            image: Docker image to run.
            location: Default bucket location reported by the fake server.
            scheme: ``http`` or ``https`` — scheme the fake server listens on.
            port_http: Port the fake server listens on inside the container.
            data: Optional host directory with seed data, mounted at ``/data``.
        """
        super().__init__(image=image, **kwargs)
        self.location = location
        self.scheme = scheme
        self.port_http = port_http
        self.data = data
        self.with_exposed_ports(self.port_http)
        command = f"-location {location} -scheme={scheme} -port={port_http}"
        if self.data:
            # Seed the fake server with pre-existing buckets/objects from the host.
            self.with_volume_mapping(self.data, "/data")
            command += " -data /data"
        self.with_command(command)

    def get_gcs_emulator_host(self) -> str:
        """Return the emulator endpoint as reachable from the host."""
        return f"{self.scheme}://{self.get_container_host_ip()}:{self.get_exposed_port(self.port_http)}"

    def _get_client(self, cls: type, **kwargs):
        """Instantiate ``cls`` while ``STORAGE_EMULATOR_HOST`` points at the emulator.

        The env var is patched only for the duration of construction; the google-cloud
        client captures the endpoint at init time. Note: previously annotated
        ``-> dict``, which was incorrect — the return value is the constructed client.
        """
        with patch.dict(os.environ, STORAGE_EMULATOR_HOST=self.get_gcs_emulator_host()):
            return cls(**kwargs)

    def get_storage_client(self, **kwargs) -> storage.Client:
        """Return a ``storage.Client`` wired to the emulator with anonymous credentials."""
        from google.auth import credentials

        kwargs["credentials"] = credentials.AnonymousCredentials()
        return self._get_client(storage.Client, **kwargs)
32 changes: 30 additions & 2 deletions modules/google/tests/test_google.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
from queue import Queue
from google.cloud.datastore import Entity

from google.cloud.datastore import Entity
from testcontainers.core.waiting_utils import wait_for_logs
from testcontainers.google import PubSubContainer, DatastoreContainer
from testcontainers.google import PubSubContainer, DatastoreContainer, GoogleCloudStorageContainer, BigQueryContainer


def test_pubsub_container():
Expand Down Expand Up @@ -74,3 +74,31 @@ def test_datastore_container_isolation():
client2 = datastore2.get_datastore_client()
fetched_entity2 = client2.get(key)
assert fetched_entity2 is None, "Entity was found in the datastore."


def test_gcs_container():
    """Smoke test: a bucket created via the client is listed back by the fake GCS server."""
    from google.cloud import storage

    with GoogleCloudStorageContainer() as gcs:
        # The fake server logs this line once it is ready to accept requests.
        wait_for_logs(gcs, 'level=INFO msg="server started at', timeout=60)
        # Create a new bucket and verify it shows up in the listing.
        client: storage.Client = gcs.get_storage_client()
        client.create_bucket("test-bucket")

        buckets = list(client.list_buckets())

        assert any(bucket.name == "test-bucket" for bucket in buckets)


def test_bigquery_container():
    """Smoke test: the dataset created at startup is visible through the client."""
    from google.cloud import bigquery

    # Name the container `bq_container` so it does not shadow the `bigquery` module.
    with BigQueryContainer() as bq_container:
        # wait_for_logs treats the pattern as a regex, so the literal brackets in the
        # emulator's log prefix must be escaped (unescaped they form a character class
        # that never matches the actual line).
        wait_for_logs(bq_container, r"\[bigquery-emulator\] REST server listening at", timeout=60)
        # List datasets and verify the pre-created one is present.
        client: bigquery.Client = bq_container.get_bigquery_client()
        datasets = client.list_datasets(project="test-project")

        assert any(dataset.dataset_id == "test-containers" for dataset in datasets)
152 changes: 149 additions & 3 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,8 @@ cassandra-driver = { version = "3.29.1", optional = true }
#clickhouse-driver = { version = "*", optional = true }
google-cloud-pubsub = { version = ">=2", optional = true }
google-cloud-datastore = { version = ">=2", optional = true }
google-cloud-storage = { version = ">=2", optional = true }
google-cloud-bigquery = { version = ">=2", optional = true }
influxdb = { version = "*", optional = true }
influxdb-client = { version = "*", optional = true }
kubernetes = { version = "*", optional = true }
Expand Down Expand Up @@ -160,7 +162,7 @@ generic = [
"redis",
] # The advance doctests for ServerContainer require redis
test_module_import = ["httpx"]
google = ["google-cloud-pubsub", "google-cloud-datastore"]
google = ["google-cloud-pubsub", "google-cloud-datastore", "google-cloud-storage", "google-cloud-bigquery"]
influxdb = ["influxdb", "influxdb-client"]
k3s = ["kubernetes", "pyyaml"]
kafka = []
Expand Down
Loading