Skip to content

Commit

Permalink
Merge pull request #191 from solaoi/feature/add-openai-models
Browse files Browse the repository at this point in the history
add openai models
  • Loading branch information
solaoi authored Jan 2, 2025
2 parents 0199d38 + 319e6e1 commit 4ae35ba
Show file tree
Hide file tree
Showing 4 changed files with 71 additions and 48 deletions.
21 changes: 15 additions & 6 deletions src-tauri/src/module/action.rs
Original file line number Diff line number Diff line change
Expand Up @@ -131,8 +131,9 @@ impl Action {
}
prompt.push_str("\n回答の際は、上記の手順に従い、情報を適切に統合し、構造化された形で提供してください。次のユーザーの質問に直接答え、必要に応じて追加の洞察や説明を加えてください。");

// 将来的には、『assistant』roleではなく『developer』roleにする必要がある。現時点ではAPI側が未対応@2025/01/02
messages.push(json!({
"role": "system",
"role": if model == "o1" || model == "o1-mini" || model == "o1-preview" {"assistant"} else {"system"},
"content": prompt
}));
messages.push(json!({
Expand All @@ -143,11 +144,19 @@ impl Action {
// for debugging
// println!("messages: {:?}", messages);

let post_body = json!({
"model": model,
"temperature": temperature,
"messages": messages
});
// 現時点ではAPI側がtemperatureパラメータに未対応@2025/01/02
let post_body = if model == "o1" || model == "o1-mini" || model == "o1-preview" {
json!({
"model": model,
"messages": messages
})
} else {
json!({
"model": model,
"temperature": temperature,
"messages": messages
})
};

let response = client
.post(url)
Expand Down
92 changes: 53 additions & 39 deletions src-tauri/src/module/chat_online.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,58 +177,72 @@ impl ChatOnline {
headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));

let post_body = if !template.is_empty() {
if !functions.is_empty() {
let func: Value = serde_json::from_str(functions).unwrap();
if !function_call.is_empty() {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}],
"functions": func,
"function_call" : json!({"name": function_call})
})
if model == "o1" || model == "o1-mini" || model == "o1-preview" {
json!({
"model": model,
"messages": [{"role": "assistant", "content": template},{"role": "user", "content": question}]
})
} else {
if !functions.is_empty() {
let func: Value = serde_json::from_str(functions).unwrap();
if !function_call.is_empty() {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}],
"functions": func,
"function_call" : json!({"name": function_call})
})
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}],
"functions": func,
"function_call" : "auto"
})
}
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}],
"functions": func,
"function_call" : "auto"
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}]
})
}
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "system", "content": template},{"role": "user", "content": question}]
})
}
} else {
if !functions.is_empty() {
let func: Value = serde_json::from_str(functions).unwrap();
if !function_call.is_empty() {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "user", "content": question}],
"functions": func,
"function_call" : json!({"name": function_call})
})
if model == "o1" || model == "o1-mini" || model == "o1-preview" {
json!({
"model": model,
"messages": [{"role": "user", "content": question}]
})
} else {
if !functions.is_empty() {
let func: Value = serde_json::from_str(functions).unwrap();
if !function_call.is_empty() {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "user", "content": question}],
"functions": func,
"function_call" : json!({"name": function_call})
})
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "user", "content": question}],
"functions": func,
"function_call" : "auto"
})
}
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "user", "content": question}],
"functions": func,
"function_call" : "auto"
"messages": [{"role": "user", "content": question}]
})
}
} else {
json!({
"model": model,
"temperature": temperature,
"messages": [{"role": "user", "content": question}]
})
}
};

Expand All @@ -243,7 +257,7 @@ impl ChatOnline {
let json_response: Value = response.json().await?;

let response_text = if status == 200 {
if !functions.is_empty() {
if !functions.is_empty() && model != "o1" && model != "o1-mini" && model != "o1-preview" {
let name = serde_json::to_string(
&json_response["choices"][0]["message"]["function_call"]["name"],
)
Expand Down
2 changes: 1 addition & 1 deletion src/components/molecules/SettingModel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import { useRecoilState } from 'recoil';
import { settingKeyState } from "../../store/atoms/settingKeyState";

const SettingModel = (): JSX.Element => {
const settingModels = ["gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-4", "gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4o-2024-05-13", "gpt-4o", "gpt-4o-mini"]
const settingModels = ["gpt-3.5-turbo", "gpt-3.5-turbo-1106", "gpt-4", "gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-turbo", "gpt-4o-2024-05-13", "gpt-4o-2024-08-06", "gpt-4o-2024-11-20", "gpt-4o", "chatgpt-4o-latest", "gpt-4o-mini", "o1-preview", "o1", "o1-mini"]
const [settingKey, setSettingKey] = useRecoilState(settingKeyState("settingModel"))

const change = (e: ChangeEvent<HTMLSelectElement>) => {
Expand Down
4 changes: 2 additions & 2 deletions src/components/molecules/TranscriptionAccuracy.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ const TranscriptionAccuracy = (): JSX.Element => {
case "online-amivoice":
return "AmiVoice";
case "online-chat":
return "ChatGPT";
return "ChatGPT(一問一答)";
case "hybrid-transcript":
return "Lycoris";
case "small-translate-to-en":
Expand Down Expand Up @@ -397,7 +397,7 @@ const TranscriptionAccuracy = (): JSX.Element => {
<li key="transcription-accuracy_online-chat">
<label className="label inline-flex active:!bg-inherit">
<input type="radio" name="trace-option" className="radio radio-accent" onChange={change} value="online-chat" checked={"online-chat" === transcriptionAccuracy} />
<a className="grow">ChatGPT</a>
<a className="grow">ChatGPT(一問一答)</a>
</label>
</li>
</>}
Expand Down

0 comments on commit 4ae35ba

Please sign in to comment.