-
-
Notifications
You must be signed in to change notification settings - Fork 24
/
Copy pathutil.ts
119 lines (109 loc) · 2.46 KB
/
util.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
import { type ParsedURL, parseURL } from "ufo";
import { match, placeholder as _ } from "@core/match";
import { assert, ensure, is } from "@core/unknownutil";
/**
 * Validates a URL.
 *
 * @param url - Candidate URL string.
 * @returns The input string unchanged when it parses as a URL.
 * @throws AssertError when `url` is not a string; Error when it is not a
 *   parseable URL.
 */
export function validateURL(url: string) {
  assert(url, is.String);
  if (URL.canParse(url)) {
    return url;
  }
  throw new Error("Invalid URL");
}
/**
 * Converts a URL to a local Ollama endpoint.
 *
 * The path, query, and fragment of `url` are preserved; only the protocol
 * and host are replaced by those of `endpoint`.
 *
 * @param url - Original absolute URL.
 * @param endpoint - Parsed endpoint supplying the new protocol and host.
 * @returns The rewritten URL as a string.
 * @throws AssertError when `endpoint.protocol` or `endpoint.host` is missing.
 */
export function convertToCustomEndpoint(
  url: string,
  endpoint: ParsedURL,
): string {
  const rewritten = new URL(url);
  // ParsedURL fields are optional; fail fast if the endpoint lacks either.
  rewritten.protocol = ensure(endpoint.protocol, is.String);
  rewritten.host = ensure(endpoint.host, is.String);
  return rewritten.toString();
}
// Fix: test name said "convertToOllamaEndpoint", which does not match the
// function under test; keep the name in sync with the export.
Deno.test("convertToCustomEndpoint", async () => {
  const { assertEquals } = await import("jsr:@std/assert@1.0.7");
  const url = "https://api.openai.com/v1/chat/completions" as const;
  const result = convertToCustomEndpoint(
    url,
    parseURL("http://localhost:11434"),
  );
  // Path is preserved; protocol and host are swapped for the local endpoint.
  assertEquals(result, "http://localhost:11434/v1/chat/completions");
});
/**
 * Chooses the appropriate endpoint based on the model name.
 *
 * Models whose name matches the `gpt-…` pattern are routed to the OpenAI
 * endpoint; every other model goes to the Ollama endpoint.
 *
 * @param model - Model identifier (e.g. "gpt-4-turbo", "llama3").
 * @param ollamaEndpoint - Endpoint used for non-OpenAI models.
 * @param openAIEndpoint - Endpoint used for OpenAI (`gpt-*`) models.
 * @returns The selected endpoint string.
 */
export function chooseEndpoint(
  {
    model,
    ollamaEndpoint,
    openAIEndpoint,
  }: {
    model: string;
    ollamaEndpoint: string;
    openAIEndpoint: string;
  },
): string {
  const isOpenAIModel = match(_`gpt-${_("v")}`, model) != null;
  return isOpenAIModel ? openAIEndpoint : ollamaEndpoint;
}
// Fix: local variable was misspelled "basseOption" → "baseOption".
Deno.test("chooseEndpoint", async () => {
  const { assertEquals } = await import("jsr:@std/assert@1.0.7");
  const baseOption = {
    ollamaEndpoint: "http://localhost:11434",
    openAIEndpoint: "https://api.openai.com",
  };
  // gpt-* models route to the OpenAI endpoint.
  assertEquals(
    chooseEndpoint({
      model: "gpt-3.5",
      ...baseOption,
    }),
    "https://api.openai.com",
  );
  assertEquals(
    chooseEndpoint({
      model: "gpt-3.5-turbo",
      ...baseOption,
    }),
    "https://api.openai.com",
  );
  assertEquals(
    chooseEndpoint({
      model: "gpt-4-turbo",
      ...baseOption,
    }),
    "https://api.openai.com",
  );
  assertEquals(
    chooseEndpoint({
      model: "gpt-4-1106-preview",
      ...baseOption,
    }),
    "https://api.openai.com",
  );
  // Everything else routes to the Ollama endpoint.
  assertEquals(
    chooseEndpoint({
      model: "llama3",
      ...baseOption,
    }),
    "http://localhost:11434",
  );
  assertEquals(
    chooseEndpoint({
      model: "mistral-7b-b1.58",
      ...baseOption,
    }),
    "http://localhost:11434",
  );
  assertEquals(
    chooseEndpoint({
      model: "command-r:35b",
      ...baseOption,
    }),
    "http://localhost:11434",
  );
});