Skip to content

Commit

Permalink
Use string output types instead of file
Browse files Browse the repository at this point in the history
  • Loading branch information
Vinicius Vaz committed Oct 23, 2023
1 parent 1a04177 commit e95390e
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 13 deletions.
2 changes: 1 addition & 1 deletion config.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@ REPOSITORY_LABEL = "OpenAI Domino Pieces"

# The version of this Pieces release
# Attention: changing this will create a new release
VERSION = "0.4.0"
VERSION = "0.4.0"
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from domino.testing import piece_dry_run
import tiktoken
import os
from pathlib import Path



def run_piece(
Expand Down Expand Up @@ -30,12 +30,12 @@ def run_piece(
}
)

def test_piece():
def test_prompt_creator_for_image_generator_piece():
piece_kwargs = {
"context": "Explorers dive into a mesmerizing underwater city, discovering ancient secrets, mysterious symbols, and evidence of an advanced civilization.",
"art_style": "surrealistic oceanic exploration",
"completion_max_tokens": 350,
"output_type": "file",
"output_type": "file_and_string",
"openai_model": "gpt-3.5-turbo",
"temperature": 0.7,
}
Expand All @@ -47,7 +47,7 @@ def test_piece():
if piece_kwargs["output_type"] == "file":
assert output.get("generated_prompt_string") == None
assert output.get("generated_prompt_file_path").endswith(".txt")
generated_prompt_path = Path(output.get("generated_prompt_file_path"))
generated_prompt_path = output.get("generated_prompt_file_path")
with open(generated_prompt_path, "r") as f:
generated_prompt = f.read()

Expand Down
11 changes: 3 additions & 8 deletions pieces/TextGeneratorPiece/test_text_generator_piece.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import tiktoken
import os


def run_piece(
template: str,
prompt_args: List[dict],
Expand All @@ -29,14 +30,14 @@ def run_piece(
}
)

def test_piece():
def test_text_generator_piece():
template = "tell me about the history of {event_history}"
prompt_args = [{"arg_name": "event_history", "arg_value": "artifical intelligence"}]

piece_kwargs = {
"template": template,
"prompt_args": prompt_args,
"output_type": "file",
"output_type": "file_and_string",
"completion_max_tokens": 500,
"openai_model": "gpt-3.5-turbo",
}
Expand All @@ -48,9 +49,6 @@ def test_piece():
if piece_kwargs["output_type"] == "file":
assert output.get("string_generated_text") == None
assert output.get("file_path_generated_text").endswith(".txt")
generated_prompt_path = output.get("file_path_generated_text")
with open(generated_prompt_path, "r") as f:
generated_prompt = f.read()
if piece_kwargs["output_type"] == "string":
assert output.get("string_generated_text") != None and type(output.get("string_generated_text")) == str
assert output.get("file_path_generated_text") == None
Expand All @@ -64,6 +62,3 @@ def test_piece():
text_tokens = encoding.encode(text=generated_prompt)
assert len(text_tokens) <= piece_kwargs["completion_max_tokens"]


if __name__ == "__main__":
test_piece()

0 comments on commit e95390e

Please sign in to comment.