Skip to content

Commit

Permalink
Feat/ask-stding (#38)
Browse files Browse the repository at this point in the history
* feat(packages/qllm-cli/src/chat, packages/qllm-lib): update provider handling and add openrouter provider

* feat(prompts): add market research prompt

* feat(packages/qllm-cli): add support for reading from stdin and improve documentation
  • Loading branch information
raphaelmansuy authored Aug 30, 2024
1 parent 58e8feb commit f6a2b46
Show file tree
Hide file tree
Showing 7 changed files with 266 additions and 29 deletions.
31 changes: 31 additions & 0 deletions packages/qllm-cli/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,37 @@ The `ask` command supports various options:
- `--screenshot <number>`: Capture and include a screenshot
- `--system-message <message>`: Prepend a system message to the conversation

#### Using with Piped Input

```bash
echo "Explain quantum computing" | qllm ask
```

or

```bash
cat article.txt | qllm ask "Summarize this text"
```

#### Image Analysis

```bash
qllm ask "Describe this image" -i path/to/image.jpg
```

#### Streaming Responses

```bash
qllm ask "Write a short story about AI" -s
```

#### Saving Output to File

```bash
qllm ask "Explain the theory of relativity" -o relativity_explanation.txt
```

### Interactive Chat

Start an interactive chat session:
Expand Down
4 changes: 2 additions & 2 deletions packages/qllm-cli/src/chat/command-processor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,8 @@ export class CommandProcessor {
ioManager.displayError("Please provide a model name.");
return;
}
if (modelName.includes("/")) {
const [providerName, model] = modelName.split("/");
if (modelName.includes("@")) {
const [providerName, model] = modelName.split("@");
await configManager.setProvider(providerName);
configManager.setModel(model);
} else {
Expand Down
66 changes: 40 additions & 26 deletions packages/qllm-cli/src/commands/ask-command.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import {
AskCommandOptionsPartialSchema,
PartialAskCommandOptions,
} from "../types/ask-command-options";
import { Clipboard } from "../utils/clipboard";
import { Clipboard } from "../utils/clipboard";
import { ScreenshotCapture } from "../utils/screenshot";
import {
readImageFileAndConvertToBase64,
Expand All @@ -21,14 +21,28 @@ import { IOManager } from "../utils/io-manager";
import { CliConfigManager } from "../utils/cli-config-manager";
import { DEFAULT_PROVIDER, DEFAULT_MODEL } from "../constants";

// Reads all piped input from stdin and resolves to the trimmed text.
// Resolves immediately with an empty string when stdin is an interactive
// TTY (i.e. nothing was piped in) — without this guard the 'end' event
// never fires and the CLI hangs forever on the unconditional await.
async function readStdin(): Promise<string> {
    return new Promise((resolve) => {
        if (process.stdin.isTTY) {
            // Interactive terminal: no piped data will ever arrive.
            resolve('');
            return;
        }
        let data = '';
        // Decode as UTF-8 so multi-byte characters split across chunk
        // boundaries are reassembled correctly instead of concatenating
        // raw Buffers through implicit toString().
        process.stdin.setEncoding('utf8');
        process.stdin.on('readable', () => {
            let chunk: string | null;
            while ((chunk = process.stdin.read()) !== null) {
                data += chunk;
            }
        });
        process.stdin.on('end', () => {
            resolve(data.trim());
        });
    });
}

const askCommandAction = async (
question: string,
options: AskCommandOptions
) => {
let validOptions: PartialAskCommandOptions = options;

try {
// validate use zod schema
validOptions = await validateOptions(
AskCommandOptionsPartialSchema,
options,
Expand All @@ -39,12 +53,22 @@ const askCommandAction = async (
ioManager.displayError(
`An error occurred while validating the options: ${error.message}`
);
process.exit(1);
}
process.exit(1);
}

const cliConfig = CliConfigManager.getInstance();
// Read from stdin if available
const stdinInput = await readStdin();
if (stdinInput) {
question = stdinInput + (question ? `\n${question}` : '');
}

if (!question) {
ioManager.displayError("No question provided.");
process.exit(1);
}

const cliConfig = CliConfigManager.getInstance();
const providerName =
validOptions.provider ||
cliConfig.get("defaultProvider") ||
Expand All @@ -66,7 +90,6 @@ const askCommandAction = async (
});

spinner.update({ text: "Sending request..." });

const usedOptions: AskCommandOptions = {
...validOptions,
image: imageInputs,
Expand All @@ -90,11 +113,11 @@ const askCommandAction = async (
if (options.output) {
await saveResponseToFile(response, options.output);
ioManager.displaySuccess(`Response saved to ${options.output}`);
}
}

if(!usedOptions.stream) {
ioManager.stdout.log(response);
}

} catch (error) {
spinner.error({
text: ioManager.colorize(
Expand All @@ -106,23 +129,22 @@ const askCommandAction = async (
error instanceof Error ? error.message : String(error)
);
process.exit(1);
} finally {
}
};

export const askCommand = new Command("ask")
.description("Ask a question to an LLM provider")
.argument("<question>", "The question to ask")
.argument("[question]", "The question to ask")
.option("-p, --provider <provider>", "LLM provider to use", "openai")
.option("-m, --model <model>", "Specific model to use")
.option(
"-t, --max-tokens <number>",
"-t, --max-tokens <tokens>",
"Maximum number of tokens to generate",
(value) => parseInt(value, 10),
1024
)
.option(
"--temperature <number>",
"--temperature <temp>",
"Temperature for response generation",
(value) => parseFloat(value),
0.7
Expand Down Expand Up @@ -170,17 +192,16 @@ async function prepareImageInputs({
fullScreen: true,
windowName: undefined,
displayNumber: screenshot
})
});
if (!screenshotBase64) {
ioManager.displayError(
`No screenshot captured from display ${screenshot}`);
}
else {
} else {
images.push(screenshotBase64);
ioManager.displaySuccess(
`Screenshot captured successfully from display ${screenshot}`
);
}
ioManager.displaySuccess(
`Screenshot captured successfully from display ${screenshot}`
);
} catch (error) {
ioManager.displayError(
`Failed to capture screenshot from display ${screenshot}: ${error}`
Expand Down Expand Up @@ -269,14 +290,12 @@ function createMessageContent(
images: string[]
): ChatMessage["content"] {
const content: ChatMessage["content"] = [{ type: "text", text: question }];

for (const image of images) {
content.push({
type: "image_url",
url: image, // Url can be a local file path, URL, or base64 string
});
}

return content;
}

Expand All @@ -286,12 +305,8 @@ async function streamResponse(
params: any
): Promise<string> {
const chunks: string[] = [];

let chunkNumber = 0;

spinner.update({ text: "Waiting response..." });


try {
const stream = await provider.streamChatCompletion(params);
for await (const chunk of stream) {
Expand All @@ -300,7 +315,6 @@ async function streamResponse(
spinner.stop();
spinner.clear(); // Clear the spinner from the console
}

if (chunk.text) {
ioManager.stdout.write(chunk.text);
chunks.push(chunk.text);
Expand All @@ -327,4 +341,4 @@ async function saveResponseToFile(
} catch (error) {
throw new Error(`Failed to save response to file: ${error}`);
}
}
}
2 changes: 1 addition & 1 deletion packages/qllm-lib/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "qllm-lib",
"version": "3.1.11",
"version": "3.1.12",
"description": "Core library providing robust AI engineering functionalities tailored for Large Language Model (LLM) applications, enabling developers to build, deploy, and optimize AI solutions with ease.",
"keywords": [
"ai",
Expand Down
4 changes: 4 additions & 0 deletions packages/qllm-lib/src/providers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import { OpenAIProvider } from './openai';
import { GroqProvider } from './qroq';
import { PerplexityProvider } from './perplexity';
import { MistralProvider } from './mistral';
import { OpenRouterProvider } from './openrouter';

export const getListProviderNames = (): string[] => {
const listProviders = [
Expand All @@ -16,6 +17,7 @@ export const getListProviderNames = (): string[] => {
'aws-anthropic',
'perplexity',
'mistral',
'openrouter',
].sort();
return listProviders;
};
Expand All @@ -37,6 +39,8 @@ export async function getLLMProvider(providerName: string): Promise<LLMProvider>
return new PerplexityProvider();
case 'mistral':
return new MistralProvider();
case 'openrouter':
return new OpenRouterProvider();
default:
throw new Error(`Provider "${providerName}" not found.`);
}
Expand Down
Loading

0 comments on commit f6a2b46

Please sign in to comment.