From 07a02c32ab2e9888c0efa2ad55db2bfc76ab66ae Mon Sep 17 00:00:00 2001 From: Raphael MANSUY Date: Thu, 5 Sep 2024 06:57:20 +0800 Subject: [PATCH 1/3] feat: Add QLLM CLI training prompts YAML file --- docs/training/content.md | 6227 +++++++++++++++++++++++++++ docs/training/qllm-cli.yaml | 106 + docs/training/qllm_training.md | 96 + docs/training/qllm_tuto-V0.md | 840 ++++ docs/training/qllm_tuto.md | 7212 ++++++++++++++++++++++++++++++++ 5 files changed, 14481 insertions(+) create mode 100644 docs/training/content.md create mode 100644 docs/training/qllm-cli.yaml create mode 100644 docs/training/qllm_training.md create mode 100644 docs/training/qllm_tuto-V0.md create mode 100644 docs/training/qllm_tuto.md diff --git a/docs/training/content.md b/docs/training/content.md new file mode 100644 index 0000000..6a0bd10 --- /dev/null +++ b/docs/training/content.md @@ -0,0 +1,6227 @@ +# Table of Contents +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/README.md +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/qllm.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/index.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/types/configure-command-options.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/types/run-command-options.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/types/ask-command-options.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/types/chat-command-options.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/utils.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/chat.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/command-processor.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/image-manager.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/chat-config.ts +- 
/Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/config-manager.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/message-handler.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/constants/config-constants.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/constants/index.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/template-utils.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/clipboard.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/write-file.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/validate-options.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/common.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/image-utils.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/variable-utils.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/cli-config-manager.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/input-validator.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/commands/configure-command.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/commands/run-command.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/commands/chat-command.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/commands/ask-command.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/commands/list-command.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/commands/list-models.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/commands/list-providers.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/commands/display-current-options.ts +- 
/Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/commands/display-conversation.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/chat/commands/show-help.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/windows-screenshot-capture.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/utils.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/types.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/index.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/macos-screenshot-capture.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/screenshot/linux-screenshot-capture.ts +- /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/src/utils/io-manager/index.ts + +## File: /Users/raphaelmansuy/Github/03-working/qllm/packages/qllm-cli/README.md + +- Extension: .md +- Language: markdown +- Size: 16667 bytes +- Created: 2024-09-04 13:22:45 +- Modified: 2024-09-04 13:22:45 + +### Code + +```markdown +# QLLM: Quantalogic Large Language Model CLI & AI Toolbox 🚀 + +## Table of Contents + +1. [Introduction](#1-introduction) +2. [Features](#2-features) +3. [Installation](#3-installation) +4. [Configuration](#4-configuration) +5. [Usage](#5-usage) +6. [Advanced Features](#6-advanced-features) +7. [Command Reference](#7-command-reference) +8. [Examples](#8-examples) +9. [Troubleshooting](#9-troubleshooting) +10. [Contributing](#10-contributing) +11. [License](#11-license) +12. [Acknowledgements](#12-acknowledgements) + +## 1. Introduction + +Welcome to QLLM CLI, the ultimate command-line interface for seamless interaction with Large Language Models (LLMs). 
Crafted with passion by @quantalogic, QLLM CLI revolutionizes the way you engage with AI, offering a unified platform that supports multiple providers and empowers users with unparalleled flexibility. + +In today's AI-driven landscape, QLLM CLI emerges as a game-changing solution for developers, researchers, and AI enthusiasts alike. Whether you're integrating AI into your workflow, exploring the frontiers of language models, or simply engaging in thought-provoking conversations with AI, QLLM CLI provides the robust toolkit you need. + +Key Highlights: + +- Multi-provider support (OpenAI, Anthropic, and more) +- Rich, interactive chat experiences with advanced conversation management +- Efficient one-time question answering for quick insights +- Cutting-edge image input capabilities for visual analysis tasks +- Fine-grained control over model parameters for tailored responses +- Comprehensive configuration options for a personalized experience + +Embrace the future of AI interaction with QLLM CLI – your gateway to boundless possibilities in the world of language models. + +## 2. Features + +QLLM CLI boasts an impressive array of features designed to elevate your AI interaction experience: + +1. **🌐 Multi-provider Support**: Effortlessly switch between LLM providers like OpenAI and Anthropic, leveraging the unique strengths of various models and facilitating comparative analysis. + +2. **💬 Interactive Chat Sessions**: Immerse yourself in dynamic, context-aware conversations with LLMs, complete with robust conversation history management. + +3. **❓ One-time Question Answering**: Quickly obtain answers to standalone queries without the need for a full chat session. + +4. **🖼️ Image Input Support**: Analyze images from multiple sources: + + - Local files on your system + - URLs pointing to online images + - Images from your clipboard + - Screenshots captured directly through the CLI + +5. 
**🎛️ Customizable Model Parameters**: Fine-tune AI behavior with adjustable settings: + + - Temperature + - Max tokens + - Top P + - Frequency penalty + - Presence penalty + - Stop sequences + +6. **🗂️ Conversation Management**: Efficiently save, list, load, and delete chat histories for easy reference and continuation of previous interactions. + +7. **📋 Provider and Model Listing**: Easily view available providers and their associated models to make informed choices. + +8. **🔄 Streaming Responses**: Experience real-time output for long-form content, enhancing interactivity. + +9. **💾 Output to File**: Save AI responses directly to your filesystem for future reference or processing. + +10. **⚙️ Configurable Settings**: Tailor your QLLM CLI experience with a robust configuration system for managing defaults and API keys. + +11. **🖥️ Cross-platform Compatibility**: Enjoy a consistent experience across Windows, macOS, and Linux. + +12. **📸 Screenshot Capture**: Take screenshots directly from the CLI for immediate AI analysis. + +13. **🎨 Syntax Highlighting**: Benefit from colorized output for improved readability and a more pleasant user experience. + +14. **🛡️ Error Handling and Validation**: Robust error checking and input validation ensure smooth operation and helpful error messages. + +15. **🧩 Extensible Architecture**: Modular design facilitates easy addition of new features and providers in the future. + +## 3. Installation + +To embark on your QLLM CLI journey, ensure you have Node.js (version 14 or higher) installed on your system. Then, execute the following command: + +```bash +npm install -g qllm +``` + +This global installation makes the `qllm` command readily available in your terminal. + +Verify the installation with: + +```bash +qllm --version +``` + +You should see the version number (e.g., 1.8.0) displayed, confirming a successful installation. + +## 4. 
Configuration + +Before diving into the world of AI interactions, configure QLLM CLI with your API keys for the desired LLM providers. QLLM CLI offers flexible configuration management: + +### Interactive Configuration + +Initiate the interactive configuration mode: + +```bash +qllm configure +``` + +This guided process helps you set up API keys and default preferences across several sections: + +1. Provider Settings + - Default Provider + - Default Model +2. Model Parameters + - Temperature + - Max Tokens + - Top P + - Frequency Penalty + - Presence Penalty +3. Other Settings + - Log Level + - Custom Prompt Directory + - Stop Sequence + +### Command-line Configuration + +Usage: `qllm configure [options]` + +Configure QLLM CLI settings. + +Options: +-l, --list List all configuration settings +-s, --set <key=value> Set a configuration value +-g, --get <key> Get a configuration value +-h, --help Display help for command + +This command allows you to manage configuration settings for the QLLM CLI. + +Examples: + +```bash +qllm configure --set provider=openai +qllm configure --set model=gpt-4 +``` + +### Viewing Current Configuration + +Display your current settings at any time: + +```bash +qllm configure --list +``` + +This command shows all current settings, with API keys masked for security. + +### Configuration File + +QLLM CLI stores its configuration in a JSON file located at `~/.qllmrc`. While manual editing is possible, using the `configure` command is recommended for proper formatting and validation. + +## 5. Usage + +QLLM CLI offers a variety of commands for interacting with LLMs. Here's an overview of the primary usage patterns: + +### Running Templates + +QLLM CLI allows you to run templates directly. 
This is now the default behavior when no specific command is provided: + +```bash +qllm +``` + +For example: + +```bash +qllm https://raw.githubusercontent.com/quantalogic/qllm/main/prompts/chain_of_tought_leader.yaml +``` + +The `run` command supports various options: + +- `-p, --provider <provider>`: Specify the LLM provider (default: openai) +- `-m, --model <model>`: Choose a specific model +- `-t, --max-tokens <max_tokens>`: Set maximum tokens for the response +- `--temperature <temperature>`: Adjust output randomness (0.0 to 1.0) +- `-s, --stream`: Stream the response in real-time +- `-o, --output <file>`: Save the response to a file +- `-i, --image <path>`: Include image files or URLs (can be used multiple times) +- `--use-clipboard`: Use an image from your clipboard +- `--screenshot <display>`: Capture and include a screenshot +- `--system-message <message>`: Prepend a system message to the conversation + +#### Using with Piped Input + +```bash +echo "Explain quantum computing" | qllm ask +``` + +or + +```bash +cat article.txt | qllm ask "Summarize this text" +``` + +#### Image Analysis + +```bash +qllm ask "Describe this image" -i path/to/image.jpg +``` + +#### Streaming Responses + +```bash +qllm ask "Write a short story about AI" -s +``` + +#### Saving Output to File + +```bash +qllm ask "Explain the theory of relativity" -o relativity_explanation.txt +``` + +### Interactive Chat + +Start an interactive chat session: + +```bash +qllm chat +``` + +In chat mode, utilize various commands to manage your conversation: + +- `/help`: Display available commands +- `/stop`: End the chat session +- `/new`: Start a new conversation +- `/save`: Save the current conversation +- `/load`: Load a saved conversation +- `/list`: Show all messages in the current conversation +- `/clear`: Clear the current conversation +- `/models`: List available models for the current provider +- `/providers`: List available providers +- `/options`: Display current chat options +- `/set