# pyproject.toml — PrivateGPT packaging and tool configuration.
# (Removed web-scrape residue — commit hash, CI status lines, and file-size
# banners — that was not part of the original TOML document.)
# Project metadata consumed by Poetry.
[tool.poetry]
name = "private-gpt"
version = "0.6.2"
description = "Private GPT"
authors = ["Zylon <hi@zylon.ai>"]
[tool.poetry.dependencies]
python = ">=3.11,<3.12"

# PrivateGPT
fastapi = { extras = ["all"], version = "^0.111.0" }
python-multipart = "^0.0.9"
injector = "^0.21.0"
pyyaml = "^6.0.1"
watchdog = "^4.0.1"
transformers = "^4.42.3"
docx2txt = "^0.8"
cryptography = "^3.1"

# LlamaIndex core libs
llama-index-core = "^0.10.52"
llama-index-readers-file = "^0.1.27"

# Optional LlamaIndex integration libs (enabled via [tool.poetry.extras])
llama-index-llms-llama-cpp = { version = "^0.1.4", optional = true }
llama-index-llms-openai = { version = "^0.1.25", optional = true }
llama-index-llms-openai-like = { version = "^0.1.3", optional = true }
llama-index-llms-ollama = { version = "^0.2.2", optional = true }
llama-index-llms-azure-openai = { version = "^0.1.8", optional = true }
llama-index-llms-gemini = { version = "^0.1.11", optional = true }
llama-index-embeddings-ollama = { version = "^0.1.2", optional = true }
llama-index-embeddings-huggingface = { version = "^0.2.2", optional = true }
llama-index-embeddings-openai = { version = "^0.1.10", optional = true }
llama-index-embeddings-azure-openai = { version = "^0.1.10", optional = true }
llama-index-embeddings-gemini = { version = "^0.1.8", optional = true }
llama-index-vector-stores-qdrant = { version = "^0.2.10", optional = true }
llama-index-vector-stores-milvus = { version = "^0.1.20", optional = true }
llama-index-vector-stores-chroma = { version = "^0.1.10", optional = true }
llama-index-vector-stores-postgres = { version = "^0.1.11", optional = true }
llama-index-vector-stores-clickhouse = { version = "^0.1.3", optional = true }
llama-index-storage-docstore-postgres = { version = "^0.1.3", optional = true }
llama-index-storage-index-store-postgres = { version = "^0.1.4", optional = true }

# Postgres
psycopg2-binary = { version = "^2.9.9", optional = true }
asyncpg = { version = "^0.29.0", optional = true }

# ClickHouse
clickhouse-connect = { version = "^0.7.15", optional = true }

# Optional Sagemaker dependency
boto3 = { version = "^1.34.139", optional = true }

# Optional Qdrant client
qdrant-client = { version = "^1.9.0", optional = true }

# Optional Reranker dependencies
torch = { version = "^2.3.1", optional = true }
sentence-transformers = { version = "^3.0.1", optional = true }

# Optional UI
gradio = { version = "^4.37.2", optional = true }
# NOTE(review): ffmpy is pinned and NOT marked optional, so it is always
# installed even though it is listed in the "ui" extra — confirm intentional.
ffmpy = "0.4.0"

# Optional Google Gemini dependency
google-generativeai = { version = "^0.5.4", optional = true }

# Optional Ollama client
ollama = { version = "^0.3.0", optional = true }

# Optional HF Transformers
einops = { version = "^0.8.0", optional = true }
retry-async = "^0.1.4"
elasticsearch = "^8.15.1"
# Install groups: `poetry install --extras "<name> ..."`.
# Every package referenced here (except ffmpy, see dependencies) is declared
# `optional = true` in [tool.poetry.dependencies].
[tool.poetry.extras]
ui = ["gradio", "ffmpy"]
llms-llama-cpp = ["llama-index-llms-llama-cpp"]
llms-openai = ["llama-index-llms-openai"]
llms-openai-like = ["llama-index-llms-openai-like"]
llms-ollama = ["llama-index-llms-ollama", "ollama"]
llms-sagemaker = ["boto3"]
llms-azopenai = ["llama-index-llms-azure-openai"]
llms-gemini = ["llama-index-llms-gemini", "google-generativeai"]
embeddings-ollama = ["llama-index-embeddings-ollama", "ollama"]
embeddings-huggingface = ["llama-index-embeddings-huggingface", "einops"]
embeddings-openai = ["llama-index-embeddings-openai"]
embeddings-sagemaker = ["boto3"]
embeddings-azopenai = ["llama-index-embeddings-azure-openai"]
embeddings-gemini = ["llama-index-embeddings-gemini"]
vector-stores-qdrant = ["llama-index-vector-stores-qdrant"]
# Package name normalized to the hyphenated form used in the dependency
# declaration (pip/Poetry treat "clickhouse_connect" and "clickhouse-connect"
# as the same project per PEP 503 normalization).
vector-stores-clickhouse = ["llama-index-vector-stores-clickhouse", "clickhouse-connect"]
vector-stores-chroma = ["llama-index-vector-stores-chroma"]
vector-stores-postgres = ["llama-index-vector-stores-postgres"]
vector-stores-milvus = ["llama-index-vector-stores-milvus"]
storage-nodestore-postgres = [
    "llama-index-storage-docstore-postgres",
    "llama-index-storage-index-store-postgres",
    "psycopg2-binary",
    "asyncpg",
]
rerank-sentence-transformers = ["torch", "sentence-transformers"]
# Development-only tooling; not installed for end users.
[tool.poetry.group.dev.dependencies]
black = "^22"
mypy = "^1.2"
pre-commit = "^2"
pytest = "^7"
pytest-cov = "^3"
# NOTE(review): "^0" accepts any 0.x release of ruff; consider pinning a
# minor version so lint results are reproducible across environments.
ruff = "^0"
pytest-asyncio = "^0.21.1"
types-pyyaml = "^6.0.12.12"
# PEP 517 build backend.
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
# Packages configs

## coverage

[tool.coverage.run]
branch = true
[tool.coverage.report]
skip_empty = true
precision = 2
## black

[tool.black]
target-version = ['py311']
## ruff
# Recommended ruff config for now, to be updated as we go along.
[tool.ruff]
target-version = 'py311'

# See all rules at https://beta.ruff.rs/docs/rules/
lint.select = [
    "E",    # pycodestyle
    "W",    # pycodestyle
    "F",    # Pyflakes
    "B",    # flake8-bugbear
    "C4",   # flake8-comprehensions
    "D",    # pydocstyle
    "I",    # isort
    "SIM",  # flake8-simplify
    "TCH",  # flake8-type-checking
    "TID",  # flake8-tidy-imports
    "Q",    # flake8-quotes
    "UP",   # pyupgrade
    "PT",   # flake8-pytest-style
    "RUF",  # Ruff-specific rules
]

lint.ignore = [
    "E501",    # "Line too long"
               # -> line length already regulated by black
    "PT011",   # "pytest.raises() should specify expected exception"
               # -> would imply to update tests every time you update exception message
    "SIM102",  # "Use a single `if` statement instead of nested `if` statements"
               # -> too restrictive
    "D100",
    "D101",
    "D102",
    "D103",
    "D104",
    "D105",
    "D106",
    "D107",
    # -> "Missing docstring in public function" too restrictive
]
[tool.ruff.lint.pydocstyle]
# Automatically disable rules that are incompatible with Google docstring convention
convention = "google"
[tool.ruff.lint.pycodestyle]
max-doc-length = 88
[tool.ruff.lint.flake8-tidy-imports]
ban-relative-imports = "all"
[tool.ruff.lint.flake8-type-checking]
strict = true
# Pydantic needs to be able to evaluate types at runtime
# see https://pypi.org/project/flake8-type-checking/ for flake8-type-checking documentation
# see https://beta.ruff.rs/docs/settings/#flake8-type-checking-runtime-evaluated-base-classes for ruff documentation
runtime-evaluated-base-classes = ["pydantic.BaseModel"]
[tool.ruff.lint.per-file-ignores]
# Allow missing docstrings for tests
"tests/**/*.py" = ["D1"]
## mypy

[tool.mypy]
python_version = "3.11"
strict = true
check_untyped_defs = false
explicit_package_bases = true
warn_unused_ignores = false
exclude = ["tests"]
# Per-module mypy override. The previous `[tool.mypy-llama-index]` section
# name is not recognized by mypy and was silently ignored: in pyproject.toml,
# per-module options must be expressed as [[tool.mypy.overrides]] entries
# with an explicit `module` pattern.
[[tool.mypy.overrides]]
module = "llama_index.*"
ignore_missing_imports = true
[tool.pytest.ini_options]
asyncio_mode = "auto"
testpaths = ["tests"]
addopts = [
    "--import-mode=importlib",
]