# Base URL for the OpenAI instance (default is https://api.openai.com/v1)
# OpenAI: https://api.openai.com/v1
# Ollama (example): http://localhost:11434/v1
# OpenRouter: https://openrouter.ai/api/v1
BASE_URL=
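# For example, to use the hosted OpenAI API you would set:
# BASE_URL=https://api.openai.com/v1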

# Get your OpenAI API key by following these instructions -
# https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# Even if you are using OpenRouter or Ollama, you still need to set this for the embedding model.
# Future versions of Archon will be more flexible with this.
OPENAI_API_KEY=
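# OpenAI keys typically start with "sk-" (the value below is a format example, not a real key):
# OPENAI_API_KEY=sk-proj-XXXXXXXXXXXXXXXXXXXX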

# API key for the LLM provider you configured in BASE_URL.
# For OpenAI: https://help.openai.com/en/articles/4936850-where-do-i-find-my-openai-api-key
# For OpenRouter: https://openrouter.ai/keys
LLM_API_KEY=
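# If BASE_URL points at a local Ollama instance, the key is typically ignored,
# so a placeholder such as LLM_API_KEY=ollama is usually sufficient.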

# For the Supabase version (sample_supabase_agent.py), set your Supabase URL and Service Key.
# Get your SUPABASE_URL from the API section of your Supabase project settings -
# https://supabase.com/dashboard/project/<your project ID>/settings/api
SUPABASE_URL=
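# Supabase project URLs follow the pattern https://<your project ID>.supabase.co, for example (hypothetical project ID):
# SUPABASE_URL=https://abcdefghijklmnop.supabase.co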

# Get your SUPABASE_SERVICE_KEY from the API section of your Supabase project settings -
# https://supabase.com/dashboard/project/<your project ID>/settings/api
# On this page it is called the service_role secret.
SUPABASE_SERVICE_KEY=
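# The service_role secret is a long JWT (it typically starts with "eyJ"). Keep it private and never commit it.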

# The LLM you want to use for the reasoner (o3-mini, R1, QwQ, etc.).
# Example: o3-mini
REASONER_MODEL=
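# If you use OpenRouter, supply its full model ID (typically provider/model), e.g. REASONER_MODEL=deepseek/deepseek-r1 for R1.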

# The LLM you want to use for the primary agent/coder.
# Example: gpt-4o-mini
PRIMARY_MODEL=
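# As with the reasoner, OpenRouter expects its full model ID here (e.g. PRIMARY_MODEL=openai/gpt-4o-mini),
# while Ollama expects the name of a model you have pulled locally (e.g. PRIMARY_MODEL=qwen2.5:14b).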