This repository has been archived by the owner on Sep 19, 2024. It is now read-only.

work on examples #11

Closed · wants to merge 15 commits
.github/workflows/test.yml: 7 additions & 0 deletions
@@ -18,6 +18,13 @@ jobs:
         image: falkordb/falkordb:latest
         ports:
           - 6379:6379
+      ollama:
+        image: ollama/ollama:latest
+        ports:
+          - 1434:11434
+        volumes:
+          - ollama:/root/.ollama
+
     steps:
       - uses: actions/checkout@v4
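The ollama service added above publishes the container's API port 11434 on host port 1434, so steps in this job can reach a local Ollama server at http://localhost:1434. A minimal connectivity sketch under that assumption, using the requests package and Ollama's /api/generate endpoint; the model name is hypothetical and would still need to be pulled inside the job (none of this is part of the diff):

import requests

# Hypothetical smoke test: call the Ollama service container through the
# host port mapped in the workflow above (1434 -> 11434).
resp = requests.post(
    "http://localhost:1434/api/generate",
    json={"model": "llama3", "prompt": "ping", "stream": False},
    timeout=120,
)
resp.raise_for_status()
print(resp.json()["response"])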
(second changed file: a Jupyter notebook example; its path is not shown in this view)

@@ -9,12 +9,10 @@
"from dotenv import load_dotenv\n",
"\n",
"load_dotenv()\n",
"from falkordb_gemini_kg.classes.model_config import (\n",
" KnowledgeGraphModelConfig,\n",
" StepModelConfig,\n",
")\n",
"from falkordb_gemini_kg.classes.model_config import KnowledgeGraphModelConfig\n",
"from falkordb_gemini_kg.models.openai import OpenAiGenerativeModel\n",
"from falkordb_gemini_kg import KnowledgeGraph, Ontology\n",
"from falkordb_gemini_kg.classes.source import HTML\n",
"from falkordb_gemini_kg.classes.source import URL\n",
"import vertexai\n",
"import os\n",
"from random import shuffle\n",
@@ -46,7 +44,9 @@
 "\n",
 "shuffle(source_urls)\n",
 "\n",
-"sources = [HTML(url) for url in source_urls]"
+"sources = [URL(url) for url in source_urls]\n",
+"\n",
+"model = OpenAiGenerativeModel(model_name=\"gpt-4o\")"
 ]
 },
 {
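A rough plain-Python reading of the lines this hunk adds, together with the imports introduced in the previous hunk; the real source_urls list is built in an earlier notebook cell that the diff does not show, so the URL below is only a placeholder:

from falkordb_gemini_kg.classes.source import URL
from falkordb_gemini_kg.models.openai import OpenAiGenerativeModel

# Placeholder list; the notebook builds source_urls elsewhere.
source_urls = ["https://example.com/some-movie-page"]

sources = [URL(url) for url in source_urls]
model = OpenAiGenerativeModel(model_name="gpt-4o")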
@@ -58,7 +58,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 7,
+"execution_count": null,
 "metadata": {},
 "outputs": [],
 "source": [
@@ -67,7 +67,7 @@
 "ontology = Ontology.from_sources(\n",
 "    sources=sources[: round(len(sources) * 0.2)],\n",
 "    boundaries=boundaries,\n",
-"    model_config=StepModelConfig(model=\"gemini-1.5-flash-001\"),\n",
+"    model=model,\n",
 ")\n",
 "\n",
 "\n",
@@ -99,7 +99,7 @@
 "\n",
 "kg = KnowledgeGraph(\n",
 "    name=\"movies\",\n",
-"    model_config=KnowledgeGraphModelConfig.from_dict(d={\"model\": \"gemini-1.5-flash-001\"}),\n",
+"    model_config=KnowledgeGraphModelConfig.with_model(model),\n",
 "    ontology=ontology,\n",
 ")"
 ]
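Read as plain Python, the two hunks above boil down to handing the model object around directly: Ontology.from_sources now takes model= instead of a StepModelConfig, and the KnowledgeGraph gets its config via KnowledgeGraphModelConfig.with_model. A rough sketch, with boundaries coming from an earlier cell that this diff does not show:

# Sketch of the updated cells; sources and model are built as in the previous
# sketch, boundaries is defined in a notebook cell not included in this diff.
ontology = Ontology.from_sources(
    sources=sources[: round(len(sources) * 0.2)],  # extract from ~20% of the sources
    boundaries=boundaries,
    model=model,
)

kg = KnowledgeGraph(
    name="movies",
    model_config=KnowledgeGraphModelConfig.with_model(model),
    ontology=ontology,
)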
@@ -113,7 +113,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 4,
+"execution_count": null,
 "metadata": {},
 "outputs": [
 {
@@ -137,7 +137,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 5,
+"execution_count": null,
 "metadata": {},
 "outputs": [
 {
@@ -164,7 +164,7 @@
 },
 {
 "cell_type": "code",
-"execution_count": 6,
+"execution_count": 7,
 "metadata": {},
 "outputs": [
 {
@@ -202,7 +202,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.11.4"
+"version": "3.12.4"
 }
 },
 "nbformat": 4,