[project]
name = "llm-guard-api"
description = "LLM Guard API is a deployment of LLM Guard as an API."
authors = [
    { name = "Protect AI", email = "[email protected]" }
]
readme = "README.md"
dynamic = ["version"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
]
requires-python = ">=3.9"
dependencies = [
    "asyncio==3.4.3",
    "fastapi==0.110.0",
    "llm-guard==0.3.10",
    "pydantic==1.10.14",
    "pyyaml==6.0.1",
    "uvicorn[standard]==0.29.0",
    "structlog>=24",
    "slowapi==0.1.9",
    "opentelemetry-instrumentation-fastapi==0.44b0",
    "opentelemetry-api==1.23.0",
    "opentelemetry-sdk==1.23.0",
    "opentelemetry-exporter-otlp-proto-http==1.23.0",
    "opentelemetry-exporter-prometheus==0.44b0",
    "opentelemetry-sdk-extension-aws==2.0.1",
    "opentelemetry-propagator-aws-xray==1.0.1",
]

[project.optional-dependencies]
cpu = [
    "llm-guard[onnxruntime]==0.3.10",
]
gpu = [
    "llm-guard[onnxruntime-gpu]==0.3.10",
]
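
# Assumed usage: the cpu/gpu extras above select the ONNX Runtime backend of llm-guard, e.g.
#   pip install "llm-guard-api[cpu]"
#   pip install "llm-guard-api[gpu]"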

[tool.setuptools]
packages = ["app"]

[tool.setuptools.dynamic]
version = {attr = "app.version.__version__"}
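# The package version is resolved at build time from the `__version__` attribute in
# app/version.py; a minimal sketch of that module (illustrative value only):
#   __version__ = "0.3.10"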

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

[project.scripts]
llm_guard_api = "app.app:run_app"
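# Installing the package exposes an `llm_guard_api` console command that invokes run_app()
# in app/app.py. A hypothetical sketch of such an entry point, assuming a FastAPI instance
# named `app` in that module and the pinned uvicorn dependency:
#   def run_app() -> None:
#       import uvicorn
#       uvicorn.run("app.app:app", host="0.0.0.0", port=8000)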