
Commit 92c2945
Merge pull request #17 from ivanyu/ollama
Support Ollama
ivanyu authored Dec 14, 2024
2 parents b8224ce + a49c82b commit 92c2945
Showing 4 changed files with 100 additions and 26 deletions.
36 changes: 34 additions & 2 deletions src/background.js
@@ -49,16 +49,20 @@ async function getSummary(pageContent) {
OPENAI_MODEL_CONF,
ANTHROPIC_API_KEY_CONF,
ANTHROPIC_MODEL_CONF,
OLLAMA_URL_CONF,
OLLAMA_MODEL_CONF,
]);

var url = "";
const headers = {
'content-type': 'application/json'
};
const promptOpening = "I want you to summarize the following HTML body:";
const promptClosing = "Please return only the summary, no other text or comments. Do not call it 'HTML body', but 'page'.";
const requestContent = [
{ "type": "text", "text": "I want you to summarize the following HTML body:" },
{ "type": "text", "text": promptOpening },
{ "type": "text", "text": pageContent },
{ "type": "text", "text": "Please return only the summary, no other text or comments. Do not call it 'HTML body', but 'page'." },
{ "type": "text", "text": promptClosing },
];
const body = {
"messages": [{ "role": "user", "content": requestContent }],
@@ -106,6 +110,28 @@ async function getSummary(pageContent) {
body['max_tokens'] = maxTokens;
break;

case OLLAMA_PROVIDER:
if (!options[OLLAMA_URL_CONF]) {
throw new Error('Ollama URL not found');
}
if (!options[OLLAMA_MODEL_CONF]) {
throw new Error('Ollama model not found');
}
// Remove any trailing slash from base URL before appending path
const baseUrl = options[OLLAMA_URL_CONF].replace(/\/+$/, '');
url = baseUrl + '/api/generate';

model = options[OLLAMA_MODEL_CONF];
body['stream'] = false;
body['system'] = systemPrompt;
delete body['messages'];
body['prompt'] = promptOpening + '\n' + pageContent + '\n' + promptClosing;
body['options'] = {
'num_predict': 1000,
};

break;

default:
throw new Error('Unknown provider: ' + options[PROVIDER_CONF]);
}
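
For reference, with the Ollama branch above the request POSTed to <Ollama URL>/api/generate would look roughly like the following (a sketch only: the model value is an example, the system prompt is elided, and the model field is assumed to be attached to the body elsewhere, as for the other providers):

{
  "model": "llama3.2:1b",
  "system": "<systemPrompt>",
  "prompt": "I want you to summarize the following HTML body:\n<page HTML>\nPlease return only the summary, no other text or comments. Do not call it 'HTML body', but 'page'.",
  "stream": false,
  "options": { "num_predict": 1000 }
}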
@@ -146,6 +172,12 @@ async function getSummary(pageContent) {
result['input_tokens'] = responseJson.usage.input_tokens;
result['output_tokens'] = responseJson.usage.output_tokens;
break;

case OLLAMA_PROVIDER:
result['summary'].push(responseJson.response);
result['input_tokens'] = responseJson.prompt_eval_count;
result['output_tokens'] = responseJson.eval_count;
break;
}
return result;
}
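With stream set to false, Ollama's /api/generate returns a single JSON object, so the fields read above map directly onto the result (a hedged sketch; values are illustrative):

{
  "model": "llama3.2:1b",
  "response": "<generated summary>",
  "done": true,
  "prompt_eval_count": 1520,
  "eval_count": 210
}

Here response becomes the summary, while prompt_eval_count and eval_count are reported as the input and output token counts.
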
23 changes: 23 additions & 0 deletions src/options.html
@@ -72,6 +72,29 @@
</select>
</div>
</div>

<div id="ollama-options" class="options-group">
<div class="options-row">
<input type="radio" id="provider-ollama" name="provider" value="ollama">
<label for="provider-ollama">Ollama</label>
</div>

<div class="options-row">
<label for="ollama-url" style="align-self: flex-start;">URL: </label>
<div class="options-row-input">
<input type="text" id="ollama-url" style="width: 100%;" placeholder="e.g. http://localhost:11434/">
<br>
<span style="font-size: small; font-style: italic;">
Run Ollama with <code>OLLAMA_ORIGINS=chrome-extension://*,moz-extension://*</code> (or one of those) to allow the extension to connect to it.
</span>
</div>
</div>

<div class="options-row">
<label for="ollama-model">Model: </label>
<input type="text" id="ollama-model" class="options-row-input" placeholder="e.g. llama3.2:1b">
</div>
</div>
</form>

<script src="browser-polyfill.js"></script>
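As the hint in the options page suggests, Ollama only accepts requests from the extension if its origin is allow-listed. A local instance could, for example, be started as (command assumed; adjust to your setup):

OLLAMA_ORIGINS=chrome-extension://*,moz-extension://* ollama serve
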
64 changes: 40 additions & 24 deletions src/options.js
@@ -16,13 +16,17 @@ async function loadOptions() {
OPENAI_MODEL_CONF,
ANTHROPIC_API_KEY_CONF,
ANTHROPIC_MODEL_CONF,
OLLAMA_MODEL_CONF,
OLLAMA_URL_CONF,
]);

const provider = options[PROVIDER_CONF] || DEFAULT_PROVIDER;
if (provider === OPENAI_PROVIDER) {
document.querySelector("#provider-openai").checked = true;
} else if (provider === ANTHROPIC_PROVIDER) {
document.querySelector("#provider-anthropic").checked = true;
} else if (provider === OLLAMA_PROVIDER) {
document.querySelector("#provider-ollama").checked = true;
}

const openaiApiKey = options[OPENAI_API_KEY_CONF];
@@ -40,37 +44,49 @@ async function loadOptions() {

const anthropicModel = options[ANTHROPIC_MODEL_CONF] || DEFAULT_ANTHROPIC_MODEL;
document.querySelector("#anthropic-model").value = anthropicModel;

const ollamaUrl = options[OLLAMA_URL_CONF];
if (ollamaUrl) {
document.querySelector("#ollama-url").value = ollamaUrl;
}
const ollamaModel = options[OLLAMA_MODEL_CONF];
if (ollamaModel) {
document.querySelector("#ollama-model").value = ollamaModel;
}
}

document.addEventListener('DOMContentLoaded', loadOptions);

document.addEventListener('DOMContentLoaded', function () {
const openaiProviderRadio = document.getElementById('provider-openai');
openaiProviderRadio.addEventListener('change', async (event) => {
await saveOption(event, PROVIDER_CONF, OPENAI_PROVIDER);
});
const anthropicProviderRadio = document.getElementById('provider-anthropic');
anthropicProviderRadio.addEventListener('change', async (event) => {
await saveOption(event, PROVIDER_CONF, ANTHROPIC_PROVIDER);
});
function createChangeListener(confName) {
return async (event) => {
await saveOption(event, confName, event.target.value);
}
}

// OpenAI
document.getElementById('provider-openai')
.addEventListener('change', createChangeListener(PROVIDER_CONF));
document.getElementById('openai-api-key')
.addEventListener('blur', createChangeListener(OPENAI_API_KEY_CONF));
document.getElementById('openai-model')
.addEventListener('change', createChangeListener(OPENAI_MODEL_CONF));

const openaiApiKeyInput = document.getElementById('openai-api-key');
openaiApiKeyInput.addEventListener('blur', async (event) => {
await saveOption(event, OPENAI_API_KEY_CONF, event.target.value);
});

const openaiModelSelect = document.getElementById('openai-model');
openaiModelSelect.addEventListener('change', async (event) => {
await saveOption(event, OPENAI_MODEL_CONF, event.target.value);
});
// Anthropic
document.getElementById('provider-anthropic')
.addEventListener('change', createChangeListener(PROVIDER_CONF));
document.getElementById('anthropic-api-key')
.addEventListener('blur', createChangeListener(ANTHROPIC_API_KEY_CONF));
document.getElementById('anthropic-model')
.addEventListener('change', createChangeListener(ANTHROPIC_MODEL_CONF));

const anthropicApiKeyInput = document.getElementById('anthropic-api-key');
anthropicApiKeyInput.addEventListener('blur', async (event) => {
await saveOption(event, ANTHROPIC_API_KEY_CONF, event.target.value);
});

const anthropicModelSelect = document.getElementById('anthropic-model');
anthropicModelSelect.addEventListener('change', async (event) => {
await saveOption(event, ANTHROPIC_MODEL_CONF, event.target.value);
});
// Ollama
document.getElementById('provider-ollama')
.addEventListener('change', createChangeListener(PROVIDER_CONF));
document.getElementById('ollama-url')
.addEventListener('blur', createChangeListener(OLLAMA_URL_CONF));
document.getElementById('ollama-model')
.addEventListener('blur', createChangeListener(OLLAMA_MODEL_CONF));
});
3 changes: 3 additions & 0 deletions src/options_const.js
@@ -3,9 +3,12 @@ const OPENAI_API_KEY_CONF = 'options.openai.api_key';
const OPENAI_MODEL_CONF = 'options.openai.model';
const ANTHROPIC_API_KEY_CONF = 'options.anthropic.api_key';
const ANTHROPIC_MODEL_CONF = 'options.anthropic.model';
const OLLAMA_URL_CONF = 'options.ollama.url';
const OLLAMA_MODEL_CONF = 'options.ollama.model';

const OPENAI_PROVIDER = 'openai';
const ANTHROPIC_PROVIDER = 'anthropic';
const OLLAMA_PROVIDER = 'ollama';
const DEFAULT_PROVIDER = OPENAI_PROVIDER;

const DEFAULT_OPENAI_MODEL = 'gpt-4o-mini';
