[project]
name = "llm-guard-api"
description = "LLM Guard API is a deployment of LLM Guard as an API."
authors = [
    { name = "Protect AI", email = "[email protected]"}
]
readme = "README.md"
dynamic = ["version"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
]
requires-python = ">=3.9"
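# Core runtime dependencies. The OpenTelemetry packages provide FastAPI
# instrumentation plus OTLP/HTTP and Prometheus exporters and AWS X-Ray propagation.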
dependencies = [
    "asyncio==3.4.3",
    "fastapi==0.110.0",
    "llm-guard==0.3.10",
    "pydantic==1.10.14",
    "pyyaml==6.0.1",
    "uvicorn[standard]==0.29.0",
    "structlog>=24",
    "slowapi==0.1.9",
    "opentelemetry-instrumentation-fastapi==0.44b0",
    "opentelemetry-api==1.23.0",
    "opentelemetry-sdk==1.23.0",
    "opentelemetry-exporter-otlp-proto-http==1.23.0",
    "opentelemetry-exporter-prometheus==0.44b0",
    "opentelemetry-sdk-extension-aws==2.0.1",
    "opentelemetry-propagator-aws-xray==1.0.1"
]

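# Optional extras selecting the ONNX Runtime backend (CPU or GPU) used by llm-guard.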
[project.optional-dependencies]
cpu = [
    "llm-guard[onnxruntime]==0.3.10",
]
gpu = [
    "llm-guard[onnxruntime-gpu]==0.3.10",
]

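# Packaging: only the `app` package is shipped; the version is resolved at build
# time from app.version.__version__ (declared dynamic above).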
[tool.setuptools]
packages = ["app"]

[tool.setuptools.dynamic]
version = {attr = "app.version.__version__"}

[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"

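# Installing the project exposes an `llm_guard_api` console command that invokes app.app:run_app.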
[project.scripts]
llm_guard_api = "app.app:run_app"
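# Example (assumed invocation): `pip install ".[cpu]"`, then run `llm_guard_api` to start the API.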