Fixes 16652: add GCS storage service #16917

Merged
2 changes: 2 additions & 0 deletions ingestion/setup.py
@@ -24,6 +24,7 @@
"avro": "avro>=1.11.3,<1.12",
"boto3": "boto3>=1.20,<2.0", # No need to add botocore separately. It's a dep from boto3
"geoalchemy2": "GeoAlchemy2~=0.12",
"google-cloud-monitoring": "google-cloud-monitoring>=2.0.0",
"google-cloud-storage": "google-cloud-storage==1.43.0",
"gcsfs": "gcsfs>=2023.1.0",
"great-expectations": "great-expectations>=0.18.0,<0.18.14",
@@ -198,6 +199,7 @@
*COMMONS["datalake"],
},
"datalake-gcs": {
VERSIONS["google-cloud-monitoring"],
VERSIONS["google-cloud-storage"],
VERSIONS["gcsfs"],
*COMMONS["datalake"],
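With this change, the `datalake-gcs` extra bundles `google-cloud-storage`, `gcsfs`, and the newly pinned `google-cloud-monitoring`. Assuming the published distribution name is `openmetadata-ingestion`, the extra would be pulled in with something like `pip install "openmetadata-ingestion[datalake-gcs]"`.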
63 changes: 63 additions & 0 deletions ingestion/src/metadata/ingestion/source/storage/gcs/client.py
@@ -0,0 +1,63 @@
# Copyright 2024 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A client for Google Cloud Storage that supports multiple projects."""
from functools import partial
from typing import List, Optional, Type, Union

from google import auth
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.storage import Client

NoProject = object()


class MultiProjectClient:
"""Google Cloud Client does not support ad-hoc project switching. This class wraps the client and allows
switching between projects. If no project is specified, the client will not have a project set and will try
to resolve it from ADC.
Example usage:
```
from google.cloud.storage import Client
client = MultiProjectClient(Client, project_ids=["project1", "project2"])
buckets_project1 = client.list_buckets("project1")
buckets_project2 = client.list_buckets("project2")
"""

    def __init__(
        self,
        client_class: Union[Type[Client], Type[MetricServiceClient]],
        project_ids: Optional[List[str]] = None,
        **client_kwargs,
    ):
        if project_ids:
            self.clients = {
                project_id: client_class(project=project_id, **client_kwargs)
                for project_id in project_ids
            }
        else:
            self.clients = {NoProject: client_class(**client_kwargs)}

    def project_ids(self):
        if NoProject in self.clients:
            _, project_id = auth.default()
            return [project_id]
        return list(self.clients.keys())

    def __getattr__(self, client_method):
        """Return the underlying client method as a partial function so we can inject the project_id."""
        return partial(self._call, client_method)

    def _call(self, method, project_id, *args, **kwargs):
        """Call the method on the client for the given project_id. The args and kwargs are passed through."""
        client = self.clients.get(project_id, self.clients.get(NoProject))
        if not client:
            raise ValueError(f"Project {project_id} not found")
        return getattr(client, method)(*args, **kwargs)
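
A minimal usage sketch of the wrapper above (illustrative, not part of the PR): the project ids are hypothetical placeholders, and running it assumes Google Cloud credentials are available in the environment.

```python
from google.cloud.storage import Client

from metadata.ingestion.source.storage.gcs.client import MultiProjectClient

# Explicit project ids ("project-a"/"project-b" are placeholders): one storage
# Client is created per project, and attribute access is proxied through
# __getattr__/_call with the project id passed as the first argument.
multi = MultiProjectClient(Client, project_ids=["project-a", "project-b"])
buckets_a = list(multi.list_buckets("project-a"))

# No project ids given: a single client is created without an explicit project,
# and project_ids() resolves the id from Application Default Credentials.
adc_client = MultiProjectClient(Client)
print(adc_client.project_ids())
```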
159 changes: 159 additions & 0 deletions ingestion/src/metadata/ingestion/source/storage/gcs/connection.py
@@ -0,0 +1,159 @@
# Copyright 2024 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GCS storage connection"""
from dataclasses import dataclass
from typing import Optional

from google.cloud.exceptions import NotFound
from google.cloud.monitoring_v3 import MetricServiceClient
from google.cloud.storage import Client

from metadata.generated.schema.entity.automations.workflow import (
    Workflow as AutomationWorkflow,
)
from metadata.generated.schema.entity.services.connections.storage.gcsConnection import (
    GcsConnection,
)
from metadata.generated.schema.security.credentials.gcpValues import (
    GcpCredentialsValues,
    SingleProjectId,
)
from metadata.ingestion.connections.test_connections import (
    SourceConnectionException,
    test_connection_steps,
)
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.ingestion.source.storage.gcs.client import MultiProjectClient
from metadata.utils.credentials import set_google_credentials
from metadata.utils.logger import ingestion_logger

logger = ingestion_logger()


@dataclass
class GcsObjectStoreClient:
    storage_client: MultiProjectClient
    metrics_client: MetricServiceClient


def get_connection(connection: GcsConnection):
    set_google_credentials(connection.credentials)
    project_ids = None
    if isinstance(connection.credentials.gcpConfig, GcpCredentialsValues):
        project_ids = (
            [connection.credentials.gcpConfig.projectId.root]
            if isinstance(connection.credentials.gcpConfig.projectId, SingleProjectId)
            else connection.credentials.gcpConfig.projectId.root
        )
    return GcsObjectStoreClient(
        storage_client=MultiProjectClient(client_class=Client, project_ids=project_ids),
        metrics_client=MetricServiceClient(),
    )


@dataclass
class BucketTestState:
    project_id: str
    bucket_name: str
    blob_name: Optional[str] = None


class Tester:
    """
    A wrapper class that holds state. We need it because the different testing stages
    are not independent of each other. For example, we need to list buckets before we can list
    blobs within a bucket.
    """

    def __init__(self, client: GcsObjectStoreClient, connection: GcsConnection):
        self.client = client
        self.connection = connection
        self.bucket_tests = []

    def list_buckets(self):
        if self.connection.bucketNames:
            for bucket_name in self.connection.bucketNames:
                for project_id, client in self.client.storage_client.clients.items():
                    try:
                        client.get_bucket(bucket_name)
                    except NotFound:
                        continue
                    else:
                        self.bucket_tests.append(
                            BucketTestState(project_id, bucket_name)
                        )
                        break
                else:
                    # The inner loop did not break: no configured project owns this bucket.
                    raise SourceConnectionException(
                        f"Bucket {bucket_name} not found in provided projects."
                    )
            return
        else:
            # No explicit buckets configured: probe the first bucket of each project.
            for project_id, client in self.client.storage_client.clients.items():
                bucket = next(client.list_buckets())
                self.bucket_tests.append(BucketTestState(project_id, bucket.name))

    def get_bucket(self):
        if not self.bucket_tests:
            raise SourceConnectionException("No buckets found in provided projects")
        for bucket_test in self.bucket_tests:
            client = self.client.storage_client.clients[bucket_test.project_id]
            client.get_bucket(bucket_test.bucket_name)

    def list_blobs(self):
        if not self.bucket_tests:
            raise SourceConnectionException("No buckets found in provided projects")
        for bucket_test in self.bucket_tests:
            client = self.client.storage_client.clients[bucket_test.project_id]
            blob = next(client.list_blobs(bucket_test.bucket_name))
            bucket_test.blob_name = blob.name

    def get_blob(self):
        if not self.bucket_tests:
            raise SourceConnectionException("No buckets found in provided projects")
        for bucket_test in self.bucket_tests:
            client = self.client.storage_client.clients[bucket_test.project_id]
            bucket = client.get_bucket(bucket_test.bucket_name)
            bucket.get_blob(bucket_test.blob_name)

    def get_metrics(self):
        for project_id in self.client.storage_client.clients.keys():
            self.client.metrics_client.list_metric_descriptors(
                name=f"projects/{project_id}"
            )


def test_connection(
    metadata: OpenMetadata,
    client: GcsObjectStoreClient,
    service_connection: GcsConnection,
    automation_workflow: Optional[AutomationWorkflow] = None,
) -> None:
    """
    Test connection. This can be executed either as part
    of a metadata workflow or during an Automation Workflow
    """
    tester = Tester(client, service_connection)

    test_fn = {
        "ListBuckets": tester.list_buckets,
        "GetBucket": tester.get_bucket,
        "ListBlobs": tester.list_blobs,
        "GetBlob": tester.get_blob,
        "GetMetrics": tester.get_metrics,
    }

    test_connection_steps(
        metadata=metadata,
        test_fn=test_fn,
        service_type=service_connection.type.value,
        automation_workflow=automation_workflow,
    )
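
An illustrative sketch of how these two entry points could be wired together; `metadata` (an initialized `OpenMetadata` client) and `gcs_connection` (a parsed `GcsConnection`) are assumed to be provided by the surrounding workflow code.

```python
from metadata.ingestion.source.storage.gcs.connection import (
    get_connection,
    test_connection,
)

# Assumed to exist in the surrounding workflow code:
#   metadata: an initialized OpenMetadata client
#   gcs_connection: a GcsConnection built from the service configuration
client = get_connection(gcs_connection)
test_connection(
    metadata=metadata,
    client=client,
    service_connection=gcs_connection,
)
```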