Showing 50 changed files with 1,068 additions and 629 deletions.
ingestion/src/metadata/ingestion/source/database/datalake/columns.py (29 additions, 0 deletions)
```python
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Handle column logic when reading data from DataLake
"""
from metadata.utils.constants import COMPLEX_COLUMN_SEPARATOR


def _get_root_col(col_name: str) -> str:
    # Flattened complex columns are expected to start with the separator,
    # so after the split the root column name sits at index 1.
    return col_name.split(COMPLEX_COLUMN_SEPARATOR)[1]


def clean_dataframe(df):
    # Gather the root names of every flattened complex column, then drop
    # the original unflattened root columns so only the leaves remain.
    all_complex_root_columns = set(
        _get_root_col(col) for col in df if COMPLEX_COLUMN_SEPARATOR in col
    )
    for complex_col in all_complex_root_columns:
        if complex_col in df.columns:
            df = df.drop(complex_col, axis=1)
    return df
```
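For context on how this behaves, here is a minimal standalone sketch, assuming a stand-in separator value (the real `COMPLEX_COLUMN_SEPARATOR` lives in `metadata.utils.constants` and may differ):

```python
import pandas as pd

# Stand-in for metadata.utils.constants.COMPLEX_COLUMN_SEPARATOR;
# the real value may differ.
SEP = "_##"

def _get_root_col(col_name: str) -> str:
    # Flattened names start with the separator, so index 1 is the root.
    return col_name.split(SEP)[1]

def clean_dataframe(df):
    roots = set(_get_root_col(col) for col in df if SEP in col)
    for root in roots:
        if root in df.columns:
            df = df.drop(root, axis=1)
    return df

# "address" was flattened into "_##address_##city", so the raw
# "address" column is dropped and only the flattened leaf remains.
df = pd.DataFrame(
    {"id": [1], "address": [{"city": "Lisbon"}], f"{SEP}address{SEP}city": ["Lisbon"]}
)
print(clean_dataframe(df).columns.tolist())  # ['id', '_##address_##city']
```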
Empty file.
New file (82 additions, 0 deletions):

```python
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Avro DataFrame reader
"""
import io

from avro.datafile import DataFileReader
from avro.errors import InvalidAvroBinaryEncoding
from avro.io import DatumReader

from metadata.generated.schema.entity.data.table import Column
from metadata.generated.schema.type.schema import DataTypeTopic
from metadata.parsers.avro_parser import parse_avro_schema
from metadata.readers.dataframe.base import DataFrameReader
from metadata.readers.dataframe.common import dataframe_to_chunks
from metadata.readers.dataframe.models import DatalakeColumnWrapper
from metadata.utils.constants import UTF_8

PD_AVRO_FIELD_MAP = {
    DataTypeTopic.BOOLEAN.value: "bool",
    DataTypeTopic.INT.value: "int",
    DataTypeTopic.LONG.value: "float",
    DataTypeTopic.FLOAT.value: "float",
    DataTypeTopic.DOUBLE.value: "float",
    DataTypeTopic.TIMESTAMP.value: "float",
    DataTypeTopic.TIMESTAMPZ.value: "float",
}

AVRO_SCHEMA = "avro.schema"


class AvroDataFrameReader(DataFrameReader):
    """
    Manage the implementation to read Avro dataframes
    from any source based on its init client.
    """

    @staticmethod
    def read_from_avro(avro_text: bytes) -> DatalakeColumnWrapper:
        """
        Method to parse the avro data from storage sources
        """
        # pylint: disable=import-outside-toplevel
        from pandas import DataFrame, Series

        try:
            elements = DataFileReader(io.BytesIO(avro_text), DatumReader())
            if elements.meta.get(AVRO_SCHEMA):
                return DatalakeColumnWrapper(
                    columns=parse_avro_schema(
                        schema=elements.meta.get(AVRO_SCHEMA).decode(UTF_8), cls=Column
                    ),
                    dataframes=dataframe_to_chunks(DataFrame.from_records(elements)),
                )
            return DatalakeColumnWrapper(
                dataframes=dataframe_to_chunks(DataFrame.from_records(elements))
            )
        except (AssertionError, InvalidAvroBinaryEncoding):
            # The payload is not a binary Avro container: treat it as a raw
            # Avro schema and build an empty, typed dataframe from its fields.
            columns = parse_avro_schema(schema=avro_text, cls=Column)
            field_map = {
                col.name.__root__: Series(
                    PD_AVRO_FIELD_MAP.get(col.dataType.value, "str")
                )
                for col in columns
            }
            return DatalakeColumnWrapper(
                columns=columns, dataframes=dataframe_to_chunks(DataFrame(field_map))
            )

    def _read(self, *, key: str, bucket_name: str, **__) -> DatalakeColumnWrapper:
        text = self.reader.read(key, bucket_name=bucket_name)
        return self.read_from_avro(text)
```
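As a rough illustration of the container path above, this standalone sketch writes a one-record Avro file in memory and reads it back the same way; only the avro and pandas libraries are used, and the OpenMetadata wrapper types are left out:

```python
import io

from avro.datafile import DataFileReader, DataFileWriter
from avro.io import DatumReader, DatumWriter
from avro.schema import parse
from pandas import DataFrame

# Write a one-record Avro container into memory.
schema = parse(
    '{"type": "record", "name": "User",'
    ' "fields": [{"name": "name", "type": "string"}]}'
)
buffer = io.BytesIO()
writer = DataFileWriter(buffer, DatumWriter(), schema)
writer.append({"name": "Ada"})
writer.flush()
payload = buffer.getvalue()

# Read it back: the container embeds the writer schema in its metadata
# under "avro.schema", which is what the reader class above uses to
# derive the column metadata.
elements = DataFileReader(io.BytesIO(payload), DatumReader())
print(elements.meta["avro.schema"].decode("utf-8"))
print(DataFrame.from_records(elements))  # one row: name == "Ada"
```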
New file (71 additions, 0 deletions):

```python
# Copyright 2021 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Dataframe base reader
"""

from abc import ABC, abstractmethod
from typing import Any, Optional

from metadata.readers.dataframe.models import DatalakeColumnWrapper
from metadata.readers.file.base import Reader
from metadata.readers.file.config_source_factory import get_reader
from metadata.readers.models import ConfigSource
from metadata.utils.logger import ingestion_logger

logger = ingestion_logger()


class FileFormatException(Exception):
    def __init__(self, config_source: Any, file_name: str) -> None:
        message = f"Missing implementation for {config_source.__class__.__name__} for {file_name}"
        super().__init__(message)


class DataFrameReadException(Exception):
    """
    To be raised by any errors with the read calls
    """


class DataFrameReader(ABC):
    """
    Abstract class for all readers.
    Readers are organized by Format, not by Source Type (S3, GCS or ADLS).
    Some DF readers first need to read the full file and then prepare the
    dataframe. This is why we add the File Reader as well.
    """

    config_source: ConfigSource
    reader: Reader

    def __init__(self, config_source: ConfigSource, client: Optional[Any]):
        self.config_source = config_source
        self.client = client

        self.reader = get_reader(config_source=config_source, client=client)

    @abstractmethod
    def _read(self, *, key: str, bucket_name: str, **kwargs) -> DatalakeColumnWrapper:
        """
        Pass the path, bucket, or any other necessary details
        to read the dataframe from the source.
        """
        raise NotImplementedError("Missing read implementation")

    def read(self, *, key: str, bucket_name: str, **kwargs) -> DatalakeColumnWrapper:
        try:
            return self._read(key=key, bucket_name=bucket_name, **kwargs)
        except Exception as err:
            raise DataFrameReadException(
                f"Error reading dataframe due to [{err}]"
            ) from err
```
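To illustrate how this template is meant to be extended, here is a hypothetical standalone sketch of a concrete format reader; the `Toy*` names and the CSV parsing are illustrative stand-ins, not part of this commit:

```python
from abc import ABC, abstractmethod
from io import BytesIO
from typing import Any

import pandas as pd

class ToyFileReader:
    """Stands in for metadata.readers.file.base.Reader: returns raw bytes."""
    def read(self, key: str, bucket_name: str) -> bytes:
        with open(key, "rb") as handle:  # local file in place of S3/GCS/ADLS
            return handle.read()

class ToyDataFrameReader(ABC):
    """Format-specific readers subclass this and implement _read."""
    def __init__(self) -> None:
        self.reader = ToyFileReader()

    @abstractmethod
    def _read(self, *, key: str, bucket_name: str, **kwargs: Any) -> pd.DataFrame:
        raise NotImplementedError("Missing read implementation")

    def read(self, *, key: str, bucket_name: str, **kwargs: Any) -> pd.DataFrame:
        # Single public entry point that wraps format-specific errors.
        try:
            return self._read(key=key, bucket_name=bucket_name, **kwargs)
        except Exception as err:
            raise RuntimeError(f"Error reading dataframe due to [{err}]") from err

class ToyCSVDataFrameReader(ToyDataFrameReader):
    def _read(self, *, key: str, bucket_name: str, **kwargs: Any) -> pd.DataFrame:
        raw = self.reader.read(key, bucket_name=bucket_name)
        return pd.read_csv(BytesIO(raw))

# Usage: ToyCSVDataFrameReader().read(key="data.csv", bucket_name="unused")
```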