Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

PII operator. #395

Merged
merged 8 commits into from
Nov 15, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions ads/common/decorator/runtime_dependency.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ class OptionalDependency:
SPARK = "oracle-ads[spark]"
HUGGINGFACE = "oracle-ads[huggingface]"
FORECAST = "oracle-ads[forecast]"
PII = "oracle-ads[pii]"


def runtime_dependency(
Expand Down
5 changes: 4 additions & 1 deletion ads/data_labeling/mixin/data_labeling.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# -*- coding: utf-8; -*-

# Copyright (c) 2021, 2022 Oracle and/or its affiliates.
# Copyright (c) 2021, 2023 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

from typing import Dict, List
Expand Down Expand Up @@ -188,6 +188,7 @@ def render_ner(
content_column: str = "Content",
annotations_column: str = "Annotations",
limit: int = ROWS_TO_RENDER_LIMIT,
return_html: bool = False,
) -> None:
"""Renders NER dataset. Displays only first 50 rows.

Expand Down Expand Up @@ -223,6 +224,8 @@ def render_ner(
annotations_column=annotations_column,
)
result_html = text_visualizer.render(items=items, options=options)
if return_html:
return result_html

from IPython.core.display import HTML, Markdown, display

Expand Down
5 changes: 3 additions & 2 deletions ads/opctl/operator/cmd.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,9 +179,10 @@ def init(
) as f:
f.write(yaml.dump(operator_config))
except Exception as ex:
logger.info(
logger.warning(
"The operator's specification was not generated "
f"because it is not supported by the `{operator_info.type}` operator."
f"because it is not supported by the `{operator_info.type}` operator. "
"Use --debug option to see the error details."
)
logger.debug(ex)

Expand Down
8 changes: 5 additions & 3 deletions ads/opctl/operator/common/operator_yaml_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def _check_condition(
Returns
-------
bool
True if the condition fulfils, false otherwise.
True if the condition fulfills, false otherwise.
"""
for key, value in condition.items():
if key not in example or example[key] != value:
Expand All @@ -103,8 +103,9 @@ def _generate_example(
The result config.
"""
example = {}

for key, value in schema.items():
# only generate values fro required fields
# only generate values for required fields
if (
value.get("required", False)
or value.get("dependencies", False)
Expand All @@ -125,7 +126,8 @@ def _generate_example(
example[key] = 1
elif data_type == "boolean":
example[key] = True
elif data_type == "array":
elif data_type == "list":
# TODO: Handle list of dict
example[key] = ["item1", "item2"]
elif data_type == "dict":
example[key] = self._generate_example(
Expand Down
15 changes: 15 additions & 0 deletions ads/opctl/operator/lowcode/pii/MLoperator
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
type: pii
version: v1
name: PII Operator
conda_type: published
conda: pii_v1
gpu: no
keywords:
- PII
- Spacy
backends:
- job
description: |
  PII operator that detects and redacts Personally Identifiable Information
  (PII) data in datasets by combining pattern matching and machine learning
  solutions. Use `ads operator info -t pii` to get more details about the pii operator.
207 changes: 207 additions & 0 deletions ads/opctl/operator/lowcode/pii/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,207 @@
# PII Operator


The PII Operator aims to detect and redact Personally Identifiable Information (PII) in datasets. PII data includes information such as names, addresses, and social security numbers, which can be used to identify individuals. This operator combines pattern matching and machine learning solutions to identify PII, and then redacts or anonymizes it to protect the privacy of individuals.

Below are the steps to configure and run the PII Operator on different resources.

## 1. Prerequisites

Follow the [CLI Configuration](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/cli/opctl/configure.html) steps from the ADS documentation. This step is mandatory as it sets up default values for different options while running the PII Operator on OCI Data Science jobs.

## 2. Generating configs

To generate starter configs, run the command below. This will create a list of YAML configs and place them in the `output` folder.

```bash
ads operator init -t pii --overwrite --output ~/pii/
```

The most important files expected to be generated are:

- `pii.yaml`: Contains pii-related configuration.
- `pii_operator_local_python.yaml`: This includes a local backend configuration for running pii operator in a local environment. The environment should be set up manually before running the operator.
- `pii_operator_local_container.yaml`: This includes a local backend configuration for running pii operator within a local container. The container should be built before running the operator. Please refer to the instructions below for details on how to accomplish this.
- `pii_job_container.yaml`: Contains Data Science job-related config to run pii operator in a Data Science job within a container (BYOC) runtime. The container should be built and published before running the operator. Please refer to the instructions below for details on how to accomplish this.
- `pii_job_python.yaml`: Contains Data Science job-related config to run pii operator in a Data Science job within a conda runtime. The conda should be built and published before running the operator.

All generated configurations should be ready to use without the need for any additional adjustments. However, they are provided as starter kit configurations that can be customized as needed.

## 3. Running Pii on the local conda environment

To run pii operator locally, create and activate a new conda environment (`ads-pii`). Install all the required libraries listed in the `environment.yaml` file.

```yaml
- aiohttp
- datapane
- gender_guesser
- nameparser
- plotly
- spacy_transformers
- scrubadub
- scrubadub_spacy
- oracle_ads[opctl]
```

Please review the previously generated `pii.yaml` file using the `init` command, and make any necessary adjustments to the input and output file locations. By default, it assumes that the files should be located in the same folder from which the `init` command was executed.

Use the command below to verify the pii config.

```bash
ads operator verify -f ~/pii/pii.yaml
```

Use the following command to run the pii operator within the `ads-pii` conda environment.

```bash
ads operator run -f ~/pii/pii.yaml -b local
```

The operator will run in your local environment without requiring any additional modifications.

## 4. Running pii on the local container

To run the pii operator within a local container, follow these steps:

Use the command below to build the pii container.

```bash
ads operator build-image -t pii
```

This will create a new `pii:v1` image, with `/etc/operator` as the designated working directory within the container.


Check the `pii_operator_local_container.yaml` config file. By default, it should have a `volume` section with the `.oci` configs folder mounted.

```yaml
volume:
- "/Users/<user>/.oci:/root/.oci"
```

Mounting the OCI configs folder is only required if an OCI Object Storage bucket will be used to store the input data or output result. The input/output folders can also be mounted to the container.

```yaml
volume:
- /Users/<user>/.oci:/root/.oci
- /Users/<user>/pii/data:/etc/operator/data
- /Users/<user>/pii/result:/etc/operator/result
```

The full config can look like:
```yaml
kind: operator.local
spec:
image: pii:v1
volume:
- /Users/<user>/.oci:/root/.oci
- /Users/<user>/pii/data:/etc/operator/data
- /Users/<user>/pii/result:/etc/operator/result
type: container
version: v1
```

Run the pii operator within a container using the command below:

```bash
ads operator run -f ~/pii/pii.yaml --backend-config ~/pii/pii_operator_local_container.yaml
```

## 5. Running pii in the Data Science job within container runtime

To execute the pii operator within a Data Science job using container runtime, please follow the steps outlined below:

You can use the following command to build the pii container. This step can be skipped if you have already done this for running the operator within a local container.

```bash
ads operator build-image -t pii
```

This will create a new `pii:v1` image, with `/etc/operator` as the designated working directory within the container.

Publish the `pii:v1` container to the [Oracle Container Registry](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/home.htm). To become familiar with OCI, read the documentation links posted below.

- [Access Container Registry](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Concepts/registryoverview.htm#access)
- [Create repositories](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Tasks/registrycreatingarepository.htm#top)
- [Push images](https://docs.public.oneportal.content.oci.oraclecloud.com/en-us/iaas/Content/Registry/Tasks/registrypushingimagesusingthedockercli.htm#Pushing_Images_Using_the_Docker_CLI)

To publish `pii:v1` to OCR, use the command posted below:

```bash
ads operator publish-image pii:v1 --registry <iad.ocir.io/tenancy/>
```

After the container is published to OCR, it can be used within Data Science jobs service. Check the `backend_job_container_config.yaml` config file. It should contain pre-populated infrastructure and runtime sections. The runtime section should contain an image property, something like `image: iad.ocir.io/<tenancy>/pii:v1`. More details about supported options can be found in the ADS Jobs documentation - [Run a Container](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/jobs/run_container.html).

Adjust the `pii.yaml` config with proper input/output folders. When the operator is run in the Data Science job, it will not have access to local folders. Therefore, input data and output folders should be placed in the Object Storage bucket. Open the `pii.yaml` and adjust the following fields:

```yaml
input_data:
url: oci://bucket@namespace/pii/input_data/data.csv
output_directory:
url: oci://bucket@namespace/pii/result/
```

Run the pii operator on the Data Science jobs using the command posted below:

```bash
ads operator run -f ~/pii/pii.yaml --backend-config ~/pii/pii_job_container.yaml
```

The logs can be monitored using the `ads opctl watch` command.

```bash
ads opctl watch <OCID>
```


## 6. Running pii in the Data Science job within conda runtime

To execute the pii operator within a Data Science job using conda runtime, please follow the steps outlined below:

You can use the following command to build the pii conda environment.

```bash
ads operator build-conda -t pii
```

This will create a new `pii_v1` conda environment and place it in the folder specified within `ads opctl configure` command.

Use the command below to publish the `pii_v1` conda environment to the Object Storage bucket.

```bash
ads opctl conda publish pii_v1
```
More details about configuring CLI can be found here - [Configuring CLI](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/cli/opctl/configure.html)


After the conda environment is published to Object Storage, it can be used within Data Science jobs service. Check the `pii_job_python.yaml` config file. It should contain pre-populated infrastructure and runtime sections. The runtime section should contain a `conda` section.

```yaml
conda:
type: published
uri: oci://bucket@namespace/conda_environments/cpu/pii/1/pii_v1
```

More details about supported options can be found in the ADS Jobs documentation - [Run a Python Workload](https://accelerated-data-science.readthedocs.io/en/latest/user_guide/jobs/run_python.html).

Adjust the `pii.yaml` config with proper input/output folders. When the pii is run in the Data Science job, it will not have access to local folders. Therefore, input data and output folders should be placed in the Object Storage bucket. Open the `pii.yaml` and adjust the following fields:

```yaml
input_data:
url: oci://bucket@namespace/pii/input_data/data.csv
output_directory:
url: oci://bucket@namespace/pii/result/
```

Run the pii on the Data Science jobs using the command posted below:

```bash
ads operator run -f ~/pii/pii.yaml --backend-config ~/pii/pii_job_python.yaml
```

The logs can be monitored using the `ads opctl watch` command.

```bash
ads opctl watch <OCID>
```
5 changes: 5 additions & 0 deletions ads/opctl/operator/lowcode/pii/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*--

# Copyright (c) 2023 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
78 changes: 78 additions & 0 deletions ads/opctl/operator/lowcode/pii/__main__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*--

# Copyright (c) 2023 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

import json
import os
import sys
from typing import Dict, List

import yaml

from ads.opctl import logger
from ads.opctl.operator.common.const import ENV_OPERATOR_ARGS
from ads.opctl.operator.common.utils import _parse_input_args

from .model.guardrails import PIIGuardrail
from .operator_config import PiiOperatorConfig


def operate(operator_config: PiiOperatorConfig) -> None:
    """Run the PII operator.

    Parameters
    ----------
    operator_config : PiiOperatorConfig
        The validated configuration describing the PII detection/redaction run.
    """
    # Delegate the entire detection/redaction pipeline to the guardrail.
    PIIGuardrail(config=operator_config).process()


def verify(spec: Dict, **kwargs: Dict) -> None:
    """Verifies the PII operator config.

    Builds a ``PiiOperatorConfig`` from the given spec (raising on an invalid
    spec) and prints the verified configuration as YAML.

    Parameters
    ----------
    spec : Dict
        The operator specification to validate.
    **kwargs : Dict
        Unused; accepted for interface compatibility with other operators.
    """
    # Fixed return annotation: the original declared ``-> bool`` but the
    # function always falls through and returns None.
    operator = PiiOperatorConfig.from_dict(spec)
    msg_header = (
        f"{'*' * 30} The operator config has been successfully verified {'*' * 30}"
    )
    print(msg_header)
    print(operator.to_yaml())
    print("*" * len(msg_header))


def main(raw_args: List[str]):
    """The entry point of the PII operator.

    Parameters
    ----------
    raw_args : List[str]
        Command line arguments, typically ``sys.argv[1:]``.
    """
    args, _ = _parse_input_args(raw_args)
    if not args.file and not args.spec and not os.environ.get(ENV_OPERATOR_ARGS):
        logger.info(
            "Please specify -f[--file] or -s[--spec] or "
            f"pass operator's arguments via {ENV_OPERATOR_ARGS} environment variable."
        )
        return

    logger.info("-" * 100)
    logger.info(f"{'Running' if not args.verify else 'Verifying'} the operator...")

    # if spec provided as input string, then convert the string into YAML
    yaml_string = ""
    if args.spec or os.environ.get(ENV_OPERATOR_ARGS):
        operator_spec_str = args.spec or os.environ.get(ENV_OPERATOR_ARGS)
        try:
            # Prefer JSON input; re-dump as YAML for uniform downstream parsing.
            yaml_string = yaml.safe_dump(json.loads(operator_spec_str))
        except json.JSONDecodeError:
            # Not JSON — try YAML. The original used a sibling bare ``except:``
            # which never caught failures raised here (exceptions inside an
            # except handler are not caught by sibling handlers), so an invalid
            # YAML spec crashed with an unhandled YAMLError. Nest the fallback
            # instead and catch only the parser error.
            try:
                yaml_string = yaml.safe_dump(yaml.safe_load(operator_spec_str))
            except yaml.YAMLError:
                # Neither JSON nor YAML: pass the raw string through and let
                # the config loader report a meaningful error.
                yaml_string = operator_spec_str

    operator_config = PiiOperatorConfig.from_yaml(
        uri=args.file,
        yaml_string=yaml_string,
    )

    logger.info(operator_config.to_yaml())

    # run operator
    if args.verify:
        # NOTE(review): verify() is annotated to take a dict spec but receives
        # the parsed config object here — confirm from_dict() accepts it.
        verify(operator_config)
    else:
        operate(operator_config)


if __name__ == "__main__":
    main(sys.argv[1:])
Loading
Loading