-
Notifications
You must be signed in to change notification settings - Fork 5
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Showing
5 changed files
with
132 additions
and
12 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,6 @@ | ||
"""Public surface of the AWS Comprehend classifier package.

Re-exports the concrete classifiers so callers can import them
directly from the package root.
"""

from .comprehend import ComprehendPIIClassifier, ComprehendToxicityClassifier

# Explicit public API: only the two concrete classifiers are exported.
__all__ = [
    "ComprehendPIIClassifier",
    "ComprehendToxicityClassifier",
]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,64 @@ | ||
from dataclasses import dataclass, field | ||
from typing import Any, Dict, Generic, List, TypeVar | ||
|
||
import boto3 | ||
|
||
from ...core import BaseTextClassifier, Score | ||
|
||
T = TypeVar("T") | ||
|
||
|
||
@dataclass
class BaseComprehendClassifier(BaseTextClassifier[T], Generic[T]):
    """Shared plumbing for classifiers backed by AWS Comprehend.

    Holds the boto3 session/region configuration and builds the
    Comprehend client that subclasses use in their ``score`` methods.
    """

    # boto3 session used to build the client; each instance gets a fresh
    # default session unless the caller supplies one.
    session: boto3.Session = field(default_factory=boto3.Session)
    # AWS region the Comprehend client is bound to.
    region_name: str = "us-east-1"

    def __post_init__(self) -> None:
        """Create the Comprehend client after dataclass field assignment."""
        self._client = self.session.client("comprehend", region_name=self.region_name)
|
||
|
||
@dataclass
class ComprehendPIIClassifier(BaseComprehendClassifier[List[Any]]):
    """Flags input text in which AWS Comprehend detects PII entities."""

    # LanguageCode passed to Comprehend's detect_pii_entities call.
    language: str = "en"
    # Minimum per-entity confidence for an entity to count as a hit.
    threshold: float = 0.7

    def score(self, input: str) -> Score[List[Any]]:
        """Detect PII entities in *input*; flag when any clear the threshold.

        Returns a ``Score`` whose value is the list of confident entities.
        """
        response = self._client.detect_pii_entities(Text=input, LanguageCode=self.language)

        # Keep only entities whose confidence meets the configured threshold.
        confident = []
        for entity in response["Entities"]:
            if entity["Score"] >= self.threshold:
                confident.append(entity)

        found = len(confident) > 0
        return Score[List[Any]](
            flagged=found,
            value=confident,
            description="Returns True if entities are found in the input",
            explanation=(
                f"Found {len(confident)} entities in input" if found else "Did not find entities in input"
            ),
        )
|
||
|
||
@dataclass
class ComprehendToxicityClassifier(BaseComprehendClassifier[Dict[str, Any]]):
    """Flags input text whose overall Comprehend toxicity meets the threshold."""

    # LanguageCode passed to Comprehend's detect_toxic_content call.
    language: str = "en"
    # Overall-toxicity cut-off at or above which input is flagged.
    threshold: float = 0.7

    def score(self, input: str) -> Score[Dict[str, Any]]:
        """Run toxicity detection on *input* and flag on the overall score.

        Returns a ``Score`` carrying the overall toxicity and its per-label
        breakdown from the first (and only) submitted text segment.
        """
        response = self._client.detect_toxic_content(
            TextSegments=[{"Text": input}],
            LanguageCode=self.language,
        )

        # A single segment was submitted, so the first result holds its scores.
        segment_result = response["ResultList"][0]
        toxicity = segment_result["Toxicity"]
        labels = segment_result["Labels"]

        return Score[Dict[str, Any]](
            flagged=toxicity >= self.threshold,
            value={"Toxicity": toxicity, "Labels": labels},
            description="Returns True if the overall toxicity score is greater than or equal to the threshold",
            explanation=f"The overall toxicity score for the input is {toxicity}",
        )
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters