# litellm — Python SDK / Proxy Server (AI Gateway) to call 100+ LLM APIs in
# OpenAI (or native) format, with cost tracking, guardrails, load balancing
# and logging. [Bedrock, Azure, OpenAI, VertexAI, Cohere, Anthropic,
# Sagemaker, HuggingFace, VLLM, NVIDIA NIM]
# Core package metadata (Poetry).
[tool.poetry]
name = "litellm"
version = "1.15.7"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
# SPDX identifier — Poetry expects "MIT", not the free-form "MIT License".
license = "MIT"
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.8"
openai = ">=1.0.0"
python-dotenv = ">=0.2.0"
tiktoken = ">=0.4.0"
importlib-metadata = ">=6.8.0"
tokenizers = "*"
click = "*"
jinja2 = "^3.1.2"
certifi = "^2023.7.22"
appdirs = "^1.4.4"
aiohttp = "*"

# Optional dependencies — activated via the extras in [tool.poetry.extras].
uvicorn = {version = "^0.24.0.post1", optional = true}
fastapi = {version = "^0.104.1", optional = true}
backoff = {version = "*", optional = true}
rq = {version = "*", optional = true}
# Extras: install with e.g. `pip install "litellm[proxy]"`.
# NOTE(review): several names below (orjson, importlib, prisma, azure-*,
# opentelemetry-*) are not declared as optional dependencies under
# [tool.poetry.dependencies]; Poetry requires every extras entry to match a
# declared optional dependency — confirm and declare the missing ones.
[tool.poetry.extras]
proxy = [
    "uvicorn",
    "fastapi",
    "backoff",
    "rq",
    "orjson",
    "importlib",
]

extra_proxy = [
    "prisma",
    "azure-identity",
    "azure-keyvault-secrets",
]

proxy_otel = [
    "opentelemetry-api",
    "opentelemetry-sdk",
    "opentelemetry-exporter-otlp",
    "opentelemetry-sdk-resources",
]
# Console entry point: the `litellm` command invokes litellm.run_server.
[tool.poetry.scripts]
litellm = "litellm:run_server"
# PEP 517 build configuration: build with the poetry-core backend.
[build-system]
requires = ["poetry-core", "wheel"]
build-backend = "poetry.core.masonry.api"
# Commitizen release automation. The `version_files` pattern rewrites the
# `version = ...` lines in this file on bump, keeping this value in sync
# with [tool.poetry] version.
[tool.commitizen]
version = "1.15.7"
version_files = [
    "pyproject.toml:^version"
]