From d26000081613464c5afb60776aae562c5a867fdb Mon Sep 17 00:00:00 2001
From: MiaoDX
Date: Mon, 7 Apr 2025 20:05:07 +0800
Subject: [PATCH] feat: Add user defined openai model provider (#124)

Change-Id: Ice3c42381021e474d751528a0b5c4726998a6acb
Signed-off-by: MiaoDX
---
 .env.example         |  4 +++-
 tools/llm_api.py     | 10 ++++++----
 tools/web_scraper.py |  4 ++--
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/.env.example b/.env.example
index fb8290b..e23c326 100644
--- a/.env.example
+++ b/.env.example
@@ -1,7 +1,9 @@
 OPENAI_API_KEY=your_openai_api_key_here
+OPENAI_BASE_URL=https://api.openai.com/v1
+OPENAI_MODEL_DEPLOYMENT=gpt-4o
 ANTHROPIC_API_KEY=your_anthropic_api_key_here
 DEEPSEEK_API_KEY=your_deepseek_api_key_here
 GOOGLE_API_KEY=your_google_api_key_here
 AZURE_OPENAI_API_KEY=your_azure_openai_api_key_here
 AZURE_OPENAI_MODEL_DEPLOYMENT=gpt-4o-ms
-SILICONFLOW_API_KEY=your_siliconflow_api_key_here
\ No newline at end of file
+SILICONFLOW_API_KEY=your_siliconflow_api_key_here
diff --git a/tools/llm_api.py b/tools/llm_api.py
index 598f44d..4f70eb1 100644
--- a/tools/llm_api.py
+++ b/tools/llm_api.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env /workspace/tmp_windsurf/venv/bin/python3
+#!/usr/bin/env python3
 
 import google.generativeai as genai
 from openai import OpenAI, AzureOpenAI
@@ -68,10 +68,12 @@ def encode_image_file(image_path: str) -> tuple[str, str]:
 def create_llm_client(provider="openai"):
     if provider == "openai":
         api_key = os.getenv('OPENAI_API_KEY')
+        base_url = os.getenv('OPENAI_BASE_URL', "https://api.openai.com/v1")
         if not api_key:
             raise ValueError("OPENAI_API_KEY not found in environment variables")
         return OpenAI(
-            api_key=api_key
+            api_key=api_key,
+            base_url=base_url
         )
     elif provider == "azure":
         api_key = os.getenv('AZURE_OPENAI_API_KEY')
@@ -140,7 +142,7 @@ def query_llm(prompt: str, client=None, model=None, provider="openai", image_pat
     # Set default model
     if model is None:
         if provider == "openai":
-            model = "gpt-4o"
+            model = os.getenv('OPENAI_MODEL_DEPLOYMENT', 'gpt-4o')
         elif provider == "azure":
             model = os.getenv('AZURE_OPENAI_MODEL_DEPLOYMENT', 'gpt-4o-ms')  # Get from env with fallback
         elif provider == "deepseek":
@@ -269,4 +271,4 @@ def main():
         print("Failed to get response from LLM")
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()
diff --git a/tools/web_scraper.py b/tools/web_scraper.py
index 80d7a73..f476d77 100755
--- a/tools/web_scraper.py
+++ b/tools/web_scraper.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env /workspace/tmp_windsurf/venv/bin/python3
+#!/usr/bin/env python3
 
 import asyncio
 import argparse
@@ -204,4 +204,4 @@ def main():
         sys.exit(1)
 
 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()
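
Usage sketch (reviewer note appended after the patch, not part of it): a minimal
illustration of how the new variables could point the default "openai" provider
at any OpenAI-compatible endpoint. Only OPENAI_BASE_URL, OPENAI_MODEL_DEPLOYMENT,
query_llm() and create_llm_client() come from this patch; the endpoint URL, model
name and import path below are hypothetical and assume the repository root is on
PYTHONPATH.

    import os

    # Hypothetical values for an OpenAI-compatible gateway (assumptions, not from the patch).
    os.environ["OPENAI_API_KEY"] = "sk-example"
    os.environ["OPENAI_BASE_URL"] = "https://my-gateway.example.com/v1"
    os.environ["OPENAI_MODEL_DEPLOYMENT"] = "my-custom-gpt-4o"

    from tools.llm_api import query_llm

    # With no explicit model argument, the patched default should resolve to
    # OPENAI_MODEL_DEPLOYMENT, and create_llm_client() should pass
    # OPENAI_BASE_URL through to the OpenAI client.
    print(query_llm("Say hello", provider="openai"))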