# pyproject.toml — BerriAI/litellm
# Python SDK, Proxy Server (AI Gateway) to call 100+ LLM APIs in OpenAI (or
# native) format, with cost tracking, guardrails, loadbalancing and logging.
# [Bedrock, Azure, OpenAI, VertexAI, Cohere, Anthropic, Sagemaker, HuggingFace,
# VLLM, NVIDIA NIM]
# Package metadata (Poetry-managed project, pre-PEP 621 layout).
[tool.poetry]
name = "litellm"
version = "1.9.3"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"
readme = "README.md"

# Runtime dependencies. Caret (^) pins to a compatible SemVer range;
# ">=" sets only a floor; "*" accepts any published version.
[tool.poetry.dependencies]
python = "^3.8"
openai = ">=1.0.0"
python-dotenv = ">=0.2.0"
tiktoken = ">=0.4.0"
importlib-metadata = ">=6.8.0"
tokenizers = "*"
click = "*"
jinja2 = "^3.1.2"
certifi = "^2023.7.22"
appdirs = "^1.4.4"
aiohttp = "*"

# Console entry point: `litellm` invokes litellm.run_server.
[tool.poetry.scripts]
litellm = "litellm:run_server"

[build-system]
requires = ["poetry-core", "wheel"]
build-backend = "poetry.core.masonry.api"

# Commitizen bump configuration; keeps its `version` in sync with the
# `version` key matched by the regex below in pyproject.toml.
[tool.commitizen]
version = "1.9.3"
version_files = [
    "pyproject.toml:^version",
]