# Data Formulator Configuration
# Copy this file to .env and fill in your values.
# cp .env.template .env

# -------------------------------------------------------------------
# Server settings
# -------------------------------------------------------------------
# You can also override these via CLI flags (run `data_formulator --help`).

DISABLE_DISPLAY_KEYS = false # if true, API keys will not be shown in the frontend
SANDBOX = local # code execution backend: 'local' (default) or 'docker'

# -------------------------------------------------------------------
# LLM provider API keys
# -------------------------------------------------------------------
# Enable providers and set API keys / models below.
# For details see: https://docs.litellm.ai/docs#litellm-python-sdk

# OpenAI
OPENAI_ENABLED = true
OPENAI_API_KEY = #your-openai-api-key
OPENAI_MODELS = gpt-5.2,gpt-5.1 # comma separated list of models

# Azure OpenAI
AZURE_ENABLED = true
AZURE_API_KEY = #your-azure-openai-api-key
AZURE_API_BASE = https://your-azure-openai-endpoint.openai.azure.com/
AZURE_MODELS = gpt-5.1

# Anthropic
ANTHROPIC_ENABLED = true
ANTHROPIC_API_KEY = #your-anthropic-api-key
ANTHROPIC_MODELS = claude-sonnet-4-20250514

# Ollama
OLLAMA_ENABLED = true
OLLAMA_API_BASE = http://localhost:11434
OLLAMA_MODELS = deepseek-v3.1:latest # models with good code generation capabilities recommended

# Add other LiteLLM-supported providers with PROVIDER_API_KEY, PROVIDER_MODELS, etc.

# -------------------------------------------------------------------
# Azure Blob Storage Workspace (optional)
# -------------------------------------------------------------------
# Set WORKSPACE_BACKEND=azure_blob to store workspace data in Azure Blob Storage
# instead of the local filesystem.
#
# Authentication — choose ONE of the following:
# Option A: Connection string (shared key / SAS)
# AZURE_BLOB_CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=...
# Option B: Entra ID (Managed Identity / az login / workload identity)
# AZURE_BLOB_ACCOUNT_URL=https://<account>.blob.core.windows.net
#
# WORKSPACE_BACKEND=local
# AZURE_BLOB_CONNECTION_STRING=
# AZURE_BLOB_ACCOUNT_URL=
# AZURE_BLOB_CONTAINER=data-formulator