-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathbamboo_env_example.sh
More file actions
196 lines (151 loc) · 7.14 KB
/
bamboo_env_example.sh
File metadata and controls
196 lines (151 loc) · 7.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
#!/usr/bin/env bash
#
# Example environment configuration for AskPanDA LLM support
# Copy this file, remove `_example`, and fill in the API keys as needed.
# Remember to add this file to your .gitignore to avoid committing sensitive information.
########################################
# PANDA RELATED
########################################
# Base URL of the BigPanDA server.
export PANDA_BASE_URL='https://bigpanda.cern.ch'
# Retry count and backoff delay (seconds) for PanDA requests.
export ASKPANDA_PANDA_RETRIES='2'
export ASKPANDA_PANDA_BACKOFF_SECONDS='0.8'
# DuckDB file produced by the ingestion agent and read by the
# panda_jobs_query tool (atlas.jobs_query). Falls back to "jobs.duckdb"
# in the current working directory when unset.
export PANDA_DUCKDB_PATH='jobs.duckdb'
# Optional cap on rows returned by panda_jobs_query (default: 500).
# export PANDA_JOBS_QUERY_MAX_ROWS="500"
# CRIC queuedata DuckDB file produced by the cric_agent and read by the
# cric_query tool (atlas.cric_query). Falls back to "cric.duckdb" in the
# current working directory when unset.
export CRIC_DUCKDB_PATH="${HOME}/.askpanda/cric.duckdb"
# Optional cap on rows returned by cric_query (default: 200).
# export CRIC_QUERY_MAX_ROWS="200"
########################################
# LLM PROFILE SELECTION
########################################
# Profile names the selector resolves against the per-profile
# provider/model settings defined below.
export LLM_DEFAULT_PROFILE='default'
export LLM_FAST_PROFILE='fast'
export LLM_REASONING_PROFILE='reasoning'
########################################
# DEFAULT PROFILE (used if nothing else matches)
########################################
export LLM_DEFAULT_PROVIDER='mistral'
export LLM_DEFAULT_MODEL='mistral-large-latest'
########################################
# FAST PROFILE (classification, routing, lightweight tasks)
########################################
export LLM_FAST_PROVIDER='mistral'
export LLM_FAST_MODEL='mistral-large-latest'
########################################
# REASONING PROFILE (log analysis, synthesis, RAG answers)
########################################
# In this example all three profiles point at the same Mistral model;
# adjust per profile to route workloads to different providers/models.
export LLM_REASONING_PROVIDER='mistral'
export LLM_REASONING_MODEL='mistral-large-latest'
########################################
# MISTRAL CONFIGURATION
########################################
# Required when using provider="mistral".
# ${VAR:-} keeps a key that is already exported in the environment
# instead of clobbering it with an empty string when this file is sourced.
export MISTRAL_API_KEY="${MISTRAL_API_KEY:-}"
# Optional concurrency / retry tuning.
export ASKPANDA_MISTRAL_CONCURRENCY="4"
export ASKPANDA_MISTRAL_RETRIES="3"
export ASKPANDA_MISTRAL_BACKOFF_SECONDS="1.0"
########################################
# OPENAI CONFIGURATION
########################################
# Required when using provider="openai" or provider="openai_compat".
# Install: pip install -r requirements-openai.txt
# ${VAR:-} keeps a key that is already exported in the environment
# instead of clobbering it with an empty string when this file is sourced.
export OPENAI_API_KEY="${OPENAI_API_KEY:-}"
# Optional tuning for the OpenAI provider.
# export ASKPANDA_OPENAI_CONCURRENCY="8"
# export ASKPANDA_OPENAI_RETRIES="3"
# export ASKPANDA_OPENAI_BACKOFF_SECONDS="1.0"
########################################
# ANTHROPIC CONFIGURATION
########################################
# Required when using provider="anthropic".
# Install: pip install -r requirements-anthropic.txt
# ${VAR:-} keeps a key that is already exported in the environment
# instead of clobbering it with an empty string when this file is sourced.
export ANTHROPIC_API_KEY="${ANTHROPIC_API_KEY:-}"
# Optional tuning for the Anthropic provider.
# export ASKPANDA_ANTHROPIC_CONCURRENCY="4"
# export ASKPANDA_ANTHROPIC_RETRIES="3"
# export ASKPANDA_ANTHROPIC_BACKOFF_SECONDS="1.0"
########################################
# GEMINI CONFIGURATION
########################################
# Required when using provider="gemini".
# Install: pip install -r requirements-gemini.txt
# ${VAR:-} keeps a key that is already exported in the environment
# instead of clobbering it with an empty string when this file is sourced.
export GEMINI_API_KEY="${GEMINI_API_KEY:-}"
# Optional tuning for the Gemini provider.
# export ASKPANDA_GEMINI_CONCURRENCY="4"
# export ASKPANDA_GEMINI_RETRIES="3"
# export ASKPANDA_GEMINI_BACKOFF_SECONDS="1.0"
########################################
# OPENAI-COMPATIBLE ENDPOINT (Llama / Mistral via vLLM, Ollama, etc.)
########################################
# Required when using provider="openai_compat".
# Uses the same openai SDK as the OpenAI provider.
# Install: pip install -r requirements-openai.txt
# ${VAR:-} keeps values that are already exported in the environment
# instead of clobbering them with empty strings when this file is sourced.
export ASKPANDA_OPENAI_COMPAT_BASE_URL="${ASKPANDA_OPENAI_COMPAT_BASE_URL:-}"
export OPENAI_COMPAT_API_KEY="${OPENAI_COMPAT_API_KEY:-}"
# Optional tuning.
# export ASKPANDA_OPENAI_COMPAT_CONCURRENCY="8"
# export ASKPANDA_OPENAI_COMPAT_RETRIES="3"
# export ASKPANDA_OPENAI_COMPAT_BACKOFF_SECONDS="1.0"
########################################
# RAG / CHROMADB (panda_doc_search tool)
########################################
# Persistent ChromaDB directory created by the ingestion script.
export BAMBOO_CHROMA_PATH='./chroma_db'
# ChromaDB collection queried by the panda_doc_search tool.
export BAMBOO_CHROMA_COLLECTION='document_monitor_agent'
########################################
# DEBUG / SAFETY
########################################
# Uncomment for verbose debug logs if needed
# export ASKPANDA_DEBUG="1"
########################################
# TRACING
########################################
# Set to 1 to enable structured request/response tracing.
# When BAMBOO_TRACE_FILE is set, spans are written only to that file (stderr
# is left clean — required when running under the Textual TUI).
# When BAMBOO_TRACE_FILE is not set, spans are written to stderr instead.
# See docs/tracing.md for the full event schema and jq recipes.
# export BAMBOO_TRACE="1"
# export BAMBOO_TRACE_FILE="/tmp/bamboo_trace.jsonl"
# OpenTelemetry export (optional — requires pip install -r requirements-otel.txt).
# When set, spans are also exported via OTLP/gRPC to the given endpoint
# (Jaeger, Grafana Tempo, Honeycomb, Datadog, etc.) as a parent/child tree.
# export BAMBOO_OTEL_ENDPOINT="http://localhost:4317"
# export BAMBOO_OTEL_SERVICE_NAME="bamboo" # default: bamboo
# export BAMBOO_OTEL_INSECURE="1" # set to 0 to enable TLS
# Set to 1 to redirect the server's stderr to /dev/null.
# The Textual TUI sets this automatically when launching via stdio transport.
# Useful if running the server as a background subprocess in other contexts.
# export BAMBOO_QUIET="1"
# ---------------------------------------------------------------------------
# Context memory (multi-turn chat history)
# ---------------------------------------------------------------------------
# Maximum number of user+assistant turn *pairs* to keep in context per session.
# Each pair = 1 user message + 1 assistant reply (2 messages total).
# Default: 10 pairs (20 messages). Set lower to reduce LLM token usage.
# History is held in-memory in the TUI only; the server is always stateless.
# export BAMBOO_HISTORY_TURNS="10"
# Maximum tokens for LLM synthesis responses.
# Raise these for longer, more detailed answers — at the cost of higher latency.
# export BAMBOO_SYNTHESIS_MAX_TOKENS="2048" # fresh questions (default: 2048)
# export BAMBOO_FOLLOWUP_MAX_TOKENS="600" # follow-up expansions (default: 600)
########################################
# STREAMLIT / HTTP CLIENT
########################################
# Default MCP server URL for the Streamlit app and TUI in HTTP transport mode.
# export MCP_URL="http://localhost:8000/mcp"
# Bearer token for authenticating to a Bamboo HTTP server.
# export MCP_BEARER_TOKEN=""
# Timeout in seconds for MCP tool calls in the Streamlit sync client.
# Large task status fetches can take 60-90 s for tasks with thousands of jobs.
# export BAMBOO_MCP_CLIENT_TIMEOUT="120"
# Emit the confirmation last, after every section above has been processed,
# so the message accurately signals that the whole file was sourced.
echo "AskPanDA LLM environment variables loaded (example configuration)."