Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,10 @@ ask "List all Python files" | ask "Generate a script to check syntax of these fi
# Use with other tools
docker ps -a | ask "Which containers are using the most memory?"

# Use custom LLM server (e.g., local llama.cpp/Ollama)
export LLM_SERVER_URL="http://localhost:11434"
ask -m "llama3.2:3b" "Explain quantum computing"

```

## Requirements
Expand Down
22 changes: 15 additions & 7 deletions ask
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,6 @@

set -euo pipefail

# Check for API key
# Hard requirement in this version: every request goes to OpenRouter,
# so fail fast (diagnostic on stderr, exit 1) before doing any work.
if [ -z "${OPENROUTER_API_KEY:-}" ]; then
echo "Error: OPENROUTER_API_KEY environment variable is not set" >&2
exit 1
fi

# Model shortcuts function
get_model() {
Expand All @@ -29,6 +24,7 @@ PROMPT=""
STREAMING=false
NO_SYSTEM=false
PROVIDER_ORDER=""
# Base URL of the OpenAI-compatible API; users may override it to point at a
# local server (e.g. Ollama at http://localhost:11434).
# NOTE(review): a trailing slash in the override would produce "//v1/..."
# when API_URL is built below — confirm documented usage omits it.
LLM_SERVER_URL="${LLM_SERVER_URL:-https://openrouter.ai/api}"

# Default system prompt (direct answers)
DEFAULT_PROMPT="You are a direct answer engine. Output ONLY the requested information.
Expand Down Expand Up @@ -114,6 +110,13 @@ if [ -z "$PROMPT" ]; then
PROMPT=$(cat)
fi


# The API key is mandatory only when the target server is OpenRouter itself;
# local/self-hosted endpoints are allowed to run without one.
case "$LLM_SERVER_URL" in
  *"openrouter.ai"*)
    if [ -z "${OPENROUTER_API_KEY:-}" ]; then
      printf '%s\n' "Error: OPENROUTER_API_KEY environment variable is not set" >&2
      exit 1
    fi
    ;;
esac

# Apply default system prompt unless disabled or custom prompt provided
if [ "$NO_SYSTEM" = false ] && [ -z "$SYSTEM_PROMPT" ]; then
SYSTEM_PROMPT="$DEFAULT_PROMPT"
Expand Down Expand Up @@ -141,7 +144,8 @@ JSON_PAYLOAD='{
"stream": '$([ "$STREAMING" = true ] && echo true || echo false)"$PROVIDER_JSON"'
}'

API_URL="https://openrouter.ai/api/v1/chat/completions"
# Build the chat-completions endpoint from the (possibly user-overridden)
# base URL; the /v1 path matches both OpenRouter and OpenAI-compatible
# local servers such as Ollama.
API_URL="${LLM_SERVER_URL}/v1/chat/completions"

# Add newline before answer
echo
Expand Down Expand Up @@ -197,5 +201,9 @@ else
TPS=$(echo "scale=1; $TOKENS / $ELAPSED" | bc 2>/dev/null || echo "0.0")

echo
echo "[$MODEL via $PROVIDER - ${ELAPSED}s - ${TPS} tok/s]" >&2
# Footer on stderr: OpenRouter responses carry model/provider metadata,
# while custom servers are identified by their URL instead.
if [[ "$LLM_SERVER_URL" != *"openrouter.ai"* ]]; then
  echo "[$LLM_SERVER_URL - ${ELAPSED}s - ${TPS} tok/s]" >&2
else
  echo "[$MODEL via $PROVIDER - ${ELAPSED}s - ${TPS} tok/s]" >&2
fi
fi