-
Notifications
You must be signed in to change notification settings - Fork 8
Expand file tree
/
Copy path.env.example
More file actions
34 lines (26 loc) · 1.24 KB
/
.env.example
File metadata and controls
34 lines (26 loc) · 1.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
# Second Opinion MCP Server Configuration
# Copy this file to .env and fill in the keys for the providers you use.
# NOTE: comments are full-line only — inline comments after a value are not
# portable across dotenv parsers (e.g. docker compose env_file treats them
# as part of the value).

# OpenAI Configuration
# OpenAI uses a fixed endpoint: https://api.openai.com/v1
OPENAI_API_KEY=

# Anthropic Configuration
# Anthropic uses a fixed endpoint: https://api.anthropic.com
ANTHROPIC_API_KEY=

# DeepSeek Configuration
# DeepSeek uses a fixed endpoint: https://api.deepseek.com
DEEPSEEK_API_KEY=

# Google AI Configuration
# Google uses a fixed endpoint: https://generativelanguage.googleapis.com/v1beta
GOOGLE_API_KEY=

# OpenRouter Configuration
# OpenRouter doesn't require a base URL as it's fixed
OPENROUTER_API_KEY=

# OpenAI-Compatible API Configuration
# (for third-party services that use OpenAI's API format)
# Optional: some services don't require an API key
OPENAI_COMPATIBLE_API_KEY=
# Required: full URL to the API endpoint
OPENAI_COMPATIBLE_API_BASE_URL=
# Optional: comma-separated list of available models
OPENAI_COMPATIBLE_API_MODELS=

# Ollama Configuration
# Change if running Ollama on a different host
OLLAMA_BASE_URL=http://localhost:11434

# Optional: Default parameters
# Default temperature for non-reasoning models
DEFAULT_TEMPERATURE=0.7
# Note: max tokens are model-specific and should be set per request
# Default reasoning effort for supported models
DEFAULT_REASONING_EFFORT=medium