Building Lumen Apps¶
Copy-paste examples for common Lumen AI configurations.
Start with 4 lines¶
import lumen.ai as lmai

ui = lmai.ExplorerUI(data='data.csv')
ui.servable()
Run with: panel serve app.py --show
Full configuration example¶
import lumen.ai as lmai
import pandas as pd

from lumen.ai.agents import SourceAgent
from lumen.ai.analyses import Analysis
from lumen.ai.controls import CodeSourceControls, UploadSourceControls
from lumen.ai.coordinator import Planner
from lumen.ai.embeddings import OpenAIEmbeddings
from lumen.ai.vector_store import DuckDBVectorStore
# Custom tool
def calculate_growth(current: float, previous: float) -> float:
    """Calculate percentage growth between two values.

    Returns 0.0 when ``previous`` is zero to avoid division by zero.
    """
    if not previous:
        return 0.0
    return ((current - previous) / previous) * 100
# Custom analysis
class CorrelationAnalysis(Analysis):
    """Correlation matrix over the numeric columns of a pipeline."""

    async def analyze(self, pipeline, **kwargs):
        # Non-numeric columns are excluded before computing correlations
        numeric_only = pipeline.data.select_dtypes(include='number')
        return numeric_only.corr()
# Custom data fetcher
def fetch_sales(region: str = "US", year: int = 2024):
    """Fetch sales data for a region and year.

    Reads ``sales_<region>_<year>.csv`` from the working directory and
    returns it as a pandas DataFrame.
    """
    return pd.read_csv(f"sales_{region}_{year}.csv")
# LLM with per-agent model overrides; "default" is the fallback for any
# agent without a more specific key
llm = lmai.llm.OpenAI(
    model_kwargs={
        "default": {"model": "gpt-4.1-mini", "temperature": 0.2},
        "sql": {"model": "gpt-4.1", "temperature": 0.1},
    }
)
# Semantic search (optional) — embeddings persisted on disk in 'embeddings.db'
vector_store = DuckDBVectorStore(uri='embeddings.db', embeddings=OpenAIEmbeddings())
ui = lmai.ExplorerUI(
    # Data sources (file paths, Source, or Pipeline objects)
    data=['customers.csv', 'orders.csv'],
    # LLM configuration
    llm=llm,
    # Agents (extend default agents: TableListAgent, ChatAgent, SQLAgent, etc.)
    agents=[SourceAgent()],
    # Source controls for fetching external data
    source_controls=[
        CodeSourceControls(functions={"Fetch Sales": fetch_sales}),
        UploadSourceControls(),
    ],
    # Custom tools the LLM can invoke
    tools=[calculate_growth],
    # Custom analyses for the AnalysisAgent (passed as classes, not instances)
    analyses=[CorrelationAnalysis],
    # Coordinator for planning responses
    coordinator=Planner,  # Mostly unnecessary unless you want to customize the planning algorithm
    coordinator_params={},
    # Vector stores for semantic search
    vector_store=vector_store,
    document_vector_store=None,  # Separate store for document search, else uses same vector store
    # UI customization
    title='Sales Analytics',
    suggestions=[
        ('search', 'What data is available?'),
        ('bar_chart', 'Show sales by region'),
    ],  # Predefined suggestions for users to click on
    # Code execution: 'hidden', 'disabled', 'prompt', 'llm', 'allow'
    code_execution='prompt',
    # Export configuration
    notebook_preamble='# Generated by Sales Analytics App',
    export_functions={},
    # Logging
    log_level='INFO',  # 'DEBUG', 'INFO', 'WARNING', 'ERROR'
    logfire_tags=['sales', 'analytics'],  # Tags for logfire tracing
    logs_db_path=None,  # Path to store LLM message logs
    # File upload settings
    filedropper_kwargs={'max_file_size': '100MB'},
    upload_handlers={},  # Custom handlers by file extension
    # Page configuration (panel-material-ui Page component)
    page_config={},
)
# Expose the app when run with `panel serve app.py`
ui.servable()
Configure the LLM¶
Set different models per agent type:
# Keys map prompt types to model settings; "default" is the fallback for
# any agent without a more specific entry (see table below)
llm = lmai.llm.OpenAI(
    model_kwargs={
        "default": {"model": "gpt-4.1-mini", "temperature": 0.2},
        "sql": {"model": "gpt-4.1", "temperature": 0.1},
        "vega_lite": {"model": "gpt-4.1"},
    }
)
| Key | Agent |
|---|---|
| default | Fallback for all agents |
| sql | SQLAgent |
| vega_lite | VegaLiteAgent |
| deck_gl | DeckGLAgent |
| chat | ChatAgent |
| source | SourceAgent |
Providers: OpenAI, Anthropic, Google, Ollama, AnthropicBedrock, Bedrock, LiteLLM, Mistral, LlamaCpp
See LLM Providers for setup and model recommendations.
Fetch data from APIs¶
Let users fetch data from external sources with source controls.
Upload files:
from lumen.ai.controls import UploadSourceControls

# Adds a drag-and-drop panel for uploading local files
ui = lmai.ExplorerUI(source_controls=[UploadSourceControls()])
Wrap Python functions:
import param

from lumen.ai.agents import SourceAgent
from lumen.ai.controls import CodeSourceControls, UploadSourceControls

# NOTE(review): this snippet also requires `import pandas as pd`
def fetch_sales(region: str = "US", year: int = 2024):
    """Fetch sales data for a region and year."""
    return pd.read_csv(f"sales_{region}_{year}.csv")

ui = lmai.ExplorerUI(
    agents=[SourceAgent()],
    source_controls=[
        CodeSourceControls(
            # Display name -> callable exposed in the UI
            functions={"Fetch Sales": fetch_sales},
            # Constrain function arguments to fixed choices via Selectors
            param_overrides={
                "Fetch Sales": {
                    "region": param.Selector(default="US", objects=["US", "EU", "APAC"]),
                    "year": param.Selector(default=2024, objects=[2024, 2023, 2022]),
                },
            },
        ),
        UploadSourceControls(),
    ],
)
See Source Controls for URL-based fetching and API client wrapping.
Add custom agents¶
from lumen.ai.agents import Agent
class SummaryAgent(Agent):
    """Agent that produces executive summaries of analysis results."""

    purpose = "Creates executive summaries of data analysis results"

    async def respond(self, messages, context, **kwargs):
        # Render this agent's "main" prompt template, then let the LLM
        # summarize the conversation against it.
        prompt = await self._render_prompt("main", messages, context)
        reply = await self.llm.invoke(messages, system=prompt)
        # Return the chat output plus a context entry for downstream agents
        return [reply], {"summary": str(reply)}

ui = lmai.ExplorerUI(data='data.csv', agents=[SummaryAgent()])
See Agents.
Add custom tools¶
def calculate_growth(current: float, previous: float) -> float:
    """Calculate percentage growth between two values.

    A zero ``previous`` value yields 0.0 instead of dividing by zero.
    """
    if previous != 0:
        return ((current - previous) / previous) * 100
    return 0.0
ui = lmai.ExplorerUI(data='data.csv', tools=[calculate_growth])
See Tools.
Add custom analyses¶
from lumen.ai.analyses import Analysis

class CorrelationAnalysis(Analysis):
    """Correlation matrix of the pipeline's numeric columns."""

    async def analyze(self, pipeline, **kwargs):
        df = pipeline.data
        # Non-numeric columns are dropped before computing correlations
        return df.select_dtypes(include='number').corr()

# Analyses are passed as classes, not instances
ui = lmai.ExplorerUI(data='data.csv', analyses=[CorrelationAnalysis])
See Analyses.
Customize agent prompts¶
from lumen.ai.agents import SQLAgent

sql_agent = SQLAgent(
    template_overrides={
        # {{ super() }} keeps the default instructions and appends to them
        "main": {"instructions": "{{ super() }}\nAlways use explicit JOIN syntax."}
    }
)
ui = lmai.ExplorerUI(data='data.csv', agents=[sql_agent])
See Prompts.
Enable semantic search¶
from lumen.ai.embeddings import OpenAIEmbeddings
from lumen.ai.vector_store import DuckDBVectorStore

# Embeddings are persisted to 'embeddings.db' so they survive restarts
vector_store = DuckDBVectorStore(uri='embeddings.db', embeddings=OpenAIEmbeddings())
ui = lmai.ExplorerUI(data='data.csv', vector_store=vector_store)
Embeddings: OpenAIEmbeddings, NumpyEmbeddings (default)
Vector stores: DuckDBVectorStore (persistent), NumpyVectorStore (in-memory)
See Embeddings and Vector Stores.
Enable code execution¶
By default, Lumen generates safe declarative specs. Enable code execution for more sophisticated visualizations:
ui = lmai.ExplorerUI(
    data='data.csv',
    # See the Security Warning below before enabling outside local use
    code_execution='prompt',  # Options: 'hidden', 'disabled', 'prompt', 'llm', 'allow'
)
Security Warning
Code execution runs LLM-generated Python in-process. Never enable in production with secrets or untrusted users.
CLI reference¶
lumen-ai serve data.csv # Basic
lumen-ai serve data.csv --provider anthropic # Change provider
lumen-ai serve data.csv --model gpt-4.1 # Change model
lumen-ai serve data.csv --agents sql vega_lite # Limit agents
lumen-ai serve data.csv --code-execution prompt # Enable code execution
lumen-ai serve data.csv --temperature 0.1 # Set temperature
lumen-ai serve --help # All options
Tutorials¶
- Census Data Explorer — Reactive source controls
- Mesonet Weather Explorer — URL-based controls
- Weather Data Explorer — Custom agents
- SaaS Dashboard — Reports