# pyproject.toml for litellm — Python SDK / proxy server to call 100+ LLM APIs
# in OpenAI (or native) format, with cost tracking, guardrails, load balancing
# and logging.

[tool.poetry]
name = "litellm"
version = "0.2.5"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
# NOTE(review): Poetry expects an SPDX identifier here (e.g. "MIT"), not the
# full license name — confirm before changing, as PyPI metadata reflects this.
license = "MIT License"
readme = "README.md"

[tool.poetry.dependencies]
python = "^3.8"
# Union constraint: either the 0.27.x or 0.28.x line of the openai SDK.
openai = "^0.28.0 || ^0.27.0"
python-dotenv = ">=0.2.0"
tiktoken = ">=0.4.0"
importlib-metadata = ">=6.8.0"
tokenizers = "*"
click = "*"
appdirs = "^1.4.4"
jinja2 = "^3.1.2"

[tool.poetry.scripts]
# Installs the `litellm` console command, dispatching to litellm.run_server.
litellm = "litellm:run_server"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.commitizen]
# NOTE(review): duplicates [tool.poetry].version and must be kept in sync
# manually unless commitizen's version_files/version_provider is configured.
version = "0.2.5"