# requirements.txt — forked from BerriAI/litellm
# LITELLM PROXY DEPENDENCIES #
anyio==4.8.0 # openai + http req.
httpx==0.28.1
openai==2.9.0 # openai req.
fastapi==0.120.1 # server dep
starlette==0.49.1 # starlette fastapi dep
backoff==2.2.1 # server dep
pyyaml==6.0.2 # server dep
uvicorn==0.31.1 # server dep
gunicorn==23.0.0 # server dep
fastuuid==0.13.5 # for uuid4
uvloop==0.21.0 # uvicorn dep, gives us much better performance under load
boto3==1.36.0 # aws bedrock/sagemaker calls
redis==5.2.1 # redis caching
prisma==0.11.0 # for db
mangum==0.17.0 # for aws lambda functions
pynacl==1.5.0 # for encrypting keys
google-cloud-aiplatform==1.47.0 # for vertex ai calls
google-cloud-iam==2.19.1 # for GCP IAM Redis authentication
google-genai==1.22.0
anthropic[vertex]==0.54.0
mcp==1.21.2 ; python_version >= "3.10" # for MCP server
google-generativeai==0.5.0 # for vertex ai calls
async_generator==1.10.0 # for async ollama calls
langfuse==2.59.7 # for langfuse self-hosted logging
prometheus_client==0.20.0 # for /metrics endpoint on proxy
ddtrace==2.19.0 # for advanced DD tracing / profiling
orjson==3.11.2 # fast /embedding responses
polars==1.31.0 # for data processing
apscheduler==3.10.4 # for resetting budget in background
fastapi-sso==0.16.0 # admin UI, SSO
pyjwt[crypto]==2.10.1 ; python_version >= "3.9"
python-multipart==0.0.18 # admin UI
Pillow==11.0.0
azure-ai-contentsafety==1.0.0 # for azure content safety
azure-identity==1.16.1 ; python_version >= "3.9" # for azure content safety
azure-keyvault==4.2.0 # for azure KMS integration
azure-storage-file-datalake==12.20.0 # for azure blob storage logging
opentelemetry-api==1.25.0
opentelemetry-sdk==1.25.0
opentelemetry-exporter-otlp==1.25.0
# grpcio: 1.68.0-1.68.1 has reconnect bug (#38290), 1.75+ has Python 3.14 wheels + fix
grpcio>=1.62.3,<1.68.0; python_version < "3.14"
grpcio>=1.75.0; python_version >= "3.14"
sentry_sdk==2.21.0 # for sentry error handling
detect-secrets==1.5.0 # Enterprise - secret detection / masking in LLM requests
cryptography==44.0.1
tzdata==2025.1 # IANA time zone database
litellm-proxy-extras==0.4.14 # for proxy extras - e.g. prisma migrations
### LITELLM PACKAGE DEPENDENCIES
python-dotenv==1.0.1 # for env
tiktoken==0.8.0 # for calculating usage
importlib-metadata==6.8.0 # for random utils
tokenizers==0.20.2 # for calculating usage
click==8.1.7 # for proxy cli
rich==13.7.1 # for litellm proxy cli
jinja2==3.1.6 # for prompt templates
aiohttp==3.12.14 # for network calls
aioboto3==13.4.0 # for async sagemaker calls
tenacity==8.5.0 # for retrying requests, when litellm.num_retries set
pydantic>=2.11,<3 # proxy + openai req. + mcp
jsonschema==4.22.0 # validating json schema
websockets==13.1.0 # for realtime API
soundfile==0.12.1 # for audio file processing
########################
# LITELLM ENTERPRISE DEPENDENCIES
########################
litellm-enterprise==0.1.25