diff --git a/.devcontainer.json b/.devcontainer.json new file mode 100644 index 0000000..e473238 --- /dev/null +++ b/.devcontainer.json @@ -0,0 +1,16 @@ +{ + "image":"k33g/genai-go-workspace:0.0.7", + "mounts": [ + "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind" + ], + "customizations": { + "vscode": { + "extensions": [ + "Codeium.codeium", + ] + } + }, + "remoteEnv": { + "OLLAMA_URL": "http://host.docker.internal:11434" + } +} diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..37145cc --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,8 @@ +{ + "recommendations": [ + "pkief.material-icon-theme", + "pkief.material-product-icons", + "aaron-bond.better-comments", + "Codeium.codeium", + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..00065dd --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,13 @@ +{ + "workbench.iconTheme": "material-icon-theme", + "workbench.colorTheme": "Idea intellij light theme", + "editor.fontSize": 18, + "terminal.integrated.fontSize": 18, + "editor.insertSpaces": true, + "editor.tabSize": 4, + "editor.detectIndentation": true, + "files.autoSave": "afterDelay", + "files.autoSaveDelay": 1000, + "editor.fontFamily": "Menlo", + +} diff --git a/README.md b/README.md index b1d9e72..d182ef4 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,120 @@ -# parakeet4shell -🦜πŸͺΊ 🐚 Parakeet4Shell is a set of scripts, made to simplify the development of small generative AI applications with Ollama πŸ¦™. +# Parakeet4Shell + +🦜πŸͺΊπŸš **Parakeet4Shell** is a set of scripts, made to simplify the development of small generative **Bash** AI applications with **Ollama** πŸ¦™. 
+ +## Requirements + +- Linux (right now, it's just tested on Ubuntu and does not work on MacOS - 🚧 wip) +- jq (https://stedolan.github.io/jq/) - optional but useful +- curl (https://curl.se/) + +## How to use + +Add this at the beginning of your script: + +```bash +. "./lib/parakeet.sh" +``` +> Let's have a look to the `example` folder. + +### Chat completion without streaming + +```bash +#!/bin/bash +. "./lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' SYSTEM_CONTENT <<- EOM +You are an expert of the StarTrek universe. +Your name is Seven of Nine. +Speak like a Borg. +EOM + +read -r -d '' USER_CONTENT <<- EOM +Who is Jean-Luc Picard? +EOM + +SYSTEM_CONTENT=$(Sanitize "${SYSTEM_CONTENT}") +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "messages": [ + {"role":"system", "content": "${SYSTEM_CONTENT}"}, + {"role":"user", "content": "${USER_CONTENT}"} + ], + "stream": false, + "raw": false +} +EOM + +jsonResult=$(Chat "${OLLAMA_URL}" "${DATA}") + +messageContent=$(echo "${jsonResult}" | jq '.message.content') + +echo "${messageContent}" +``` + +### Chat completion with streaming + +```bash +#!/bin/bash +. "./lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +# System instructions +read -r -d '' SYSTEM_CONTENT <<- EOM +You are an expert of the StarTrek universe. +Your name is Seven of Nine. +Speak like a Borg. +EOM + +# User message +read -r -d '' USER_CONTENT <<- EOM +Who is Jean-Luc Picard? 
+EOM + +SYSTEM_CONTENT=$(Sanitize "${SYSTEM_CONTENT}") +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + +# Payload to send to Ollama +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "messages": [ + {"role":"system", "content": "${SYSTEM_CONTENT}"}, + {"role":"user", "content": "${USER_CONTENT}"} + ], + "stream": true +} +EOM + +# This function will be called for each chunk of the response +function onChunk() { + chunk=$1 + data=$(echo ${chunk} | jq -r '.message.content') + echo -n "${data}" +} + +ChatStream "${OLLAMA_URL}" "${DATA}" onChunk +``` + +## Acknowledgments: + +- Thanks to [Sylvain](https://github.com/swallez) for the discussion on curl callbacks. +- Thanks to [Gemini](https://gemini.google.com/app) for all the discussions on Bash. diff --git a/examples/.gitignore b/examples/.gitignore new file mode 100644 index 0000000..f0a0a18 --- /dev/null +++ b/examples/.gitignore @@ -0,0 +1 @@ +context.save diff --git a/examples/01-generate-completion.sh b/examples/01-generate-completion.sh new file mode 100755 index 0000000..2a3c73b --- /dev/null +++ b/examples/01-generate-completion.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +. "../lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' USER_CONTENT <<- EOM +Who is James T Kirk? 
+EOM + +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "prompt": "${USER_CONTENT}", + "stream": false +} +EOM + +jsonResult=$(Generate "${OLLAMA_URL}" "${DATA}") + +context=$(echo ${jsonResult} | jq -r '.context') +echo "${context}" + +response=$(echo ${jsonResult} | jq -r '.response') +echo "${response}" diff --git a/examples/02-generate-stream-completion.sh b/examples/02-generate-stream-completion.sh new file mode 100755 index 0000000..5d96235 --- /dev/null +++ b/examples/02-generate-stream-completion.sh @@ -0,0 +1,50 @@ +#!/bin/bash + +. "../lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' USER_CONTENT <<- EOM +Who is James T Kirk? +EOM + +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "prompt": "${USER_CONTENT}", + "stream": true +} +EOM + +# A function that handles a chunk of data received from a stream. +# +# Parameters: +# - $1: The chunk of data received from the stream. +# +# Returns: +# - The extracted response from the chunk, without a newline character. +function onChunk() { + chunk=$1 + data=$(echo ${chunk} | jq -r '.response') + echo -n "${data}" +} + +# For tests +function onJsonChunk() { + chunk=$1 + echo ${chunk} | jq -c '{ response, context }' +} + +GenerateStream "${OLLAMA_URL}" "${DATA}" onChunk + + + + diff --git a/examples/03-use-context.sh b/examples/03-use-context.sh new file mode 100755 index 0000000..914cd4d --- /dev/null +++ b/examples/03-use-context.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +. "../lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' USER_CONTENT <<- EOM +[Brief] Who is James T Kirk? 
+EOM + +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "prompt": "${USER_CONTENT}", + "stream": true +} +EOM + +function onChunk() { + chunk=$1 + data=$(echo ${chunk} | jq -r '.response') + echo -n "${data}" + + # Save context at the end of the stream + if [ -z "$data" ]; then + context=$(echo ${chunk} | jq -r '.context') + echo "${context}" > context.save + fi + +} + +GenerateStream "${OLLAMA_URL}" "${DATA}" onChunk + +echo "" +echo "" + +# New question +read -r -d '' USER_CONTENT <<- EOM +Who is his best friend? +EOM + +USER_CONTENT=$(Sanitize "${USER_CONTENT}") +CONTEXT=$(cat context.save) + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "prompt": "${USER_CONTENT}", + "stream": true, + "context": ${CONTEXT} +} +EOM + +GenerateStream "${OLLAMA_URL}" "${DATA}" onChunk diff --git a/examples/04-chat-completion.sh b/examples/04-chat-completion.sh new file mode 100755 index 0000000..2bfd800 --- /dev/null +++ b/examples/04-chat-completion.sh @@ -0,0 +1,46 @@ +#!/bin/bash + +. "../lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' SYSTEM_CONTENT <<- EOM +You are an expert of the StarTrek universe. +Your name is Seven of Nine. +Speak like a Borg. +EOM + +read -r -d '' USER_CONTENT <<- EOM +Who is Jean-Luc Picard? 
+EOM + +SYSTEM_CONTENT=$(Sanitize "${SYSTEM_CONTENT}") +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "messages": [ + {"role":"system", "content": "${SYSTEM_CONTENT}"}, + {"role":"user", "content": "${USER_CONTENT}"} + ], + "stream": false, + "raw": false +} +EOM + +jsonResult=$(Chat "${OLLAMA_URL}" "${DATA}") + +messageContent=$(echo "${jsonResult}" | jq '.message.content') + +echo "${messageContent}" + + + diff --git a/examples/05-chat-stream-completion.sh b/examples/05-chat-stream-completion.sh new file mode 100755 index 0000000..8052c8c --- /dev/null +++ b/examples/05-chat-stream-completion.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +. "../lib/parakeet.sh" + +OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434} + +MODEL="tinyllama" + +read -r -d '' SYSTEM_CONTENT <<- EOM +You are an expert of the StarTrek universe. +Your name is Seven of Nine. +Speak like a Borg. +EOM + +read -r -d '' USER_CONTENT <<- EOM +Who is Jean-Luc Picard? 
+EOM + +SYSTEM_CONTENT=$(Sanitize "${SYSTEM_CONTENT}") +USER_CONTENT=$(Sanitize "${USER_CONTENT}") + + +read -r -d '' DATA <<- EOM +{ + "model":"${MODEL}", + "options": { + "temperature": 0.5, + "repeat_last_n": 2 + }, + "messages": [ + {"role":"system", "content": "${SYSTEM_CONTENT}"}, + {"role":"user", "content": "${USER_CONTENT}"} + ], + "stream": true +} +EOM + +function onChunk() { + chunk=$1 + data=$(echo ${chunk} | jq -r '.message.content') + echo -n "${data}" +} + +ChatStream "${OLLAMA_URL}" "${DATA}" onChunk + +echo "" + + + diff --git a/git.sh b/git.sh new file mode 100755 index 0000000..a60e886 --- /dev/null +++ b/git.sh @@ -0,0 +1,111 @@ +#!/bin/bash +message="" +case $1 in + + # 🎨: art + art) + message="Improve structure / format of the code" + emoji="🎨" + ;; + + # πŸ›: bug + bug|fix) + message="Fix a bug" + emoji="πŸ›" + ;; + + # ✨: sparkles + sparkles|feature) + message="Introduce new feature(s)" + emoji="✨" + ;; + + # πŸ“: memo + memo|doc|documentation) + message="Add or update documentation" + emoji="πŸ“" + ;; + + # 🌸: cherry_blossom + gardening|garden|clean|cleaning) + message="Gardening" + emoji="🌸" + ;; + + # πŸš€: rocket + rocket|deploy) + message="Deploy stuff" + emoji="πŸš€" + ;; + + # πŸŽ‰: tada + tada|first) + message="Begin a project" + emoji="πŸŽ‰" + ;; + + # 🚧: construction + construction|wip) + message="Work in progress" + emoji="🚧" + ;; + + # πŸ“¦οΈ: package + package|build) + message="Add or update compiled files or packages" + emoji="πŸ“¦οΈ" + ;; + + # πŸ“¦οΈ: package + release) + message="Create a release" + emoji="πŸ“¦οΈ" + ;; + + # πŸ‘½οΈ: alien + alien|api) + message="Update code due to external API changes" + emoji="πŸ‘½οΈ" + ;; + + # 🐳: whale + docker|container) + message="Docker" + emoji="🐳" + ;; + + # 🍊: tangerine + gitpod|gitpodify) + message="Gitpodify" + emoji="🍊" + ;; + + # πŸ§ͺ: test tube + alembic|experiments|experiment|xp) + message="Perform experiments" + emoji="πŸ§ͺ" + ;; + + # πŸ’Ύ: floppy-disk + save) + 
message="Saved"
+    emoji="πŸ’Ύ"
+    ;;
+
+  *)
+    message="Updated"
+    emoji="πŸ›Ÿ"
+    ;;
+
+esac
+
+find . -name '.DS_Store' -type f -delete
+
+if [ -z "$2" ]
+then
+  # empty: default commit message; && stops before push if commit fails
+  git add . && git commit -m "$emoji $message." && git push
+else
+  # not empty: append the extra description to the commit message
+  git add . && git commit -m "$emoji $message: $2" && git push
+fi
diff --git a/lib/parakeet.sh b/lib/parakeet.sh
new file mode 100755
index 0000000..c0e7756
--- /dev/null
+++ b/lib/parakeet.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+# Parakeet 🦜πŸͺΊπŸš v0.0.0 loaded!
+
+: <<'END_COMMENT'
+Generate - Generates a response using the OLLAMA API (blocking, non-streaming).
+
+  Args:
+    OLLAMA_URL (str): The base URL of the OLLAMA API.
+    DATA (str): The JSON payload to send to /api/generate.
+
+  Returns:
+    str: The JSON response from the API, containing the generated response and context.
+END_COMMENT
+function Generate() {
+  OLLAMA_URL="${1}"
+  DATA="${2}"
+
+  JSON_RESULT=$(curl --silent "${OLLAMA_URL}/api/generate" \
+    -H "Content-Type: application/json" \
+    -d "${DATA}"
+  )
+  echo "${JSON_RESULT}"
+}
+
+: <<'END_COMMENT'
+Sanitize - Flattens the given content to a single line so it can be embedded in a JSON string.
+
+  Args:
+    CONTENT (str): The content to be sanitized.
+
+  Returns:
+    str: The sanitized, single-line content.
+END_COMMENT
+function Sanitize() {
+  CONTENT="${1}"
+  CONTENT=$(printf '%s' "${CONTENT}" | tr '\n' ' ') # quoted + printf: no glob expansion of the prompt
+  echo "${CONTENT}"
+}
+
+: <<'END_COMMENT'
+GenerateStream - Generates a stream of data by sending a request to the specified URL with the given data.
+
+  Args:
+    OLLAMA_URL (str): The URL to send the request to.
+    DATA (str): The data to send in the request body.
+    CALL_BACK (function): The callback function to process each line of the response.
+
+  Returns:
+    None
+END_COMMENT
+function GenerateStream() {
+  OLLAMA_URL="${1}"
+  DATA="${2}"
+  CALL_BACK=${3}
+
+  curl --no-buffer --silent "${OLLAMA_URL}/api/generate" \
+    -H "Content-Type: application/json" \
+    -d "${DATA}" | while IFS= read -r linestream # -r keeps JSON backslash escapes intact
+  do
+    ${CALL_BACK} "${linestream}"
+  done
+
+}
+
+: <<'END_COMMENT'
+Chat - Generates a response using the OLLAMA API.
+
+  Args:
+    OLLAMA_URL (str): The URL of the OLLAMA API.
+    DATA (str): The JSON data to be sent to the API.
+
+  Returns:
+    str: The JSON response from the API, containing the generated response and context.
+END_COMMENT
+function Chat() {
+  OLLAMA_URL="${1}"
+  DATA="${2}"
+
+  JSON_RESULT=$(curl --silent "${OLLAMA_URL}/api/chat" \
+    -H "Content-Type: application/json" \
+    -d "${DATA}"
+  )
+  echo "${JSON_RESULT}"
+}
+
+: <<'END_COMMENT'
+ChatStream - Generates a response using the OLLAMA API in a streaming manner.
+
+  Args:
+  - OLLAMA_URL (str): The URL of the OLLAMA API.
+  - DATA (str): The JSON data to be sent to the API.
+  - CALL_BACK (function): The callback function to handle each line of the response.
+
+  Returns:
+    None
+END_COMMENT
+function ChatStream() {
+  OLLAMA_URL="${1}"
+  DATA="${2}"
+  CALL_BACK=${3}
+
+  curl --no-buffer --silent "${OLLAMA_URL}/api/chat" \
+    -H "Content-Type: application/json" \
+    -d "${DATA}" | while IFS= read -r linestream # -r keeps JSON backslash escapes intact
+  do
+    ${CALL_BACK} "${linestream}"
+  done
+}
diff --git a/release.env b/release.env
new file mode 100644
index 0000000..963e668
--- /dev/null
+++ b/release.env
@@ -0,0 +1,2 @@
+TAG="v0.0.0"
+ABOUT="🦜πŸͺΊπŸš Parakeet4Shell ${TAG}"
diff --git a/release.sh b/release.sh
new file mode 100755
index 0000000..5caa31a
--- /dev/null
+++ b/release.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+set -o allexport; source release.env; set +o allexport
+
+: <<'COMMENT'
+Todo:
+- update of release.env:
+  - TAG
+  - ABOUT
+
+Remark: delete tag: git tag -d v0.0.1
+COMMENT
+
+echo "Generating release: ${TAG} ${ABOUT}"
+
+find . -name '.DS_Store' -type f -delete
+
+git add .
+git commit -m "πŸ“¦ ${ABOUT}"
+git push
+
+git tag -a "${TAG}" -m "${ABOUT}"
+git push origin "${TAG}"