[project]
name = "nvidia-pipecat"
version = "0.2.0"
description = "NVIDIA ACE Pipecat SDK"
readme = "NVIDIA_PIPECAT.md"
license = { file = "LICENSE" }
authors = [{ name = "NVIDIA ACE", email = "ace-dev@exchange.nvidia.com" }]
requires-python = ">=3.12"
dependencies = [
    "av>=13.0.0",
    "fastapi>=0.115.7",
    "hatchling>=1.27.0",
    "onnxruntime>=1.20.1",
    "openai>=1.58.1",
    "sentence-transformers>=3.3.1",
    "torch>=2.5.1",
    "python-dotenv>=1.0.1",
    "requests>=2.32.3",
    "transitions>=0.9.2",
    "uvicorn>=0.34.0",
    "websockets==13.1",
    "wheel>=0.45.1",
    "redis>=5.2.1",
    "langchain-nvidia-ai-endpoints>=0.3.8",
    "nvidia-audio2face-3d==1.3.0",
    "nvidia-animation-graph==1.1.0",
    "opentelemetry-sdk>=1.31.0",
    "opentelemetry-instrumentation-grpc>=0.52b0",
    "nvidia-riva-client==2.20.0",
    "pipecat-ai==0.0.68",
    "blingfire>=0.1.8",
]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/nvidia_pipecat"]

[tool.uv.sources]
torch = { index = "pytorch", marker = "sys_platform != 'darwin'" }

[[tool.uv.index]]
name = "pytorch"
url = "https://download.pytorch.org/whl/cpu"
explicit = true

[project.urls]
GitHub = "https://github.com/NVIDIA/ace-controller"
Documentation = "https://docs.nvidia.com/ace/ace-controller-microservice/latest/index.html"

[dependency-groups]
dev = [
    "ruff>=0.8.2",
    "pytest>=8.3.4",
    "pytest-asyncio>=0.24.0",
    "pytest-mock>=3.14.0",
    "pytest-cov>=6.0.0",
]
testing = [
    "pyyaml>=6.0.2",
    "types-pyyaml>=6.0.12.20241230",
]
examples = [
    "opentelemetry-distro>=0.52b0",
    "opentelemetry-exporter-otlp-proto-grpc>=1.31.0",
]

[[tool.mypy.overrides]]
module = ["pipecat.*"]
ignore_missing_imports = true

[tool.pytest.ini_options]
asyncio_mode = "auto"
# Set the default event loop scope of asynchronous fixtures
# (see https://pytest-asyncio.readthedocs.io/en/latest/reference/configuration.html)
asyncio_default_fixture_loop_scope = "function"
# The flag below should only be activated in special debug sessions,
# i.e. when a test hangs and we need to see what happened up to that point.
# There are some race conditions with how the logging streams are closed in the teardown
# phase, which will cause tests to fail or be "magically" ignored.
log_cli = true
filterwarnings = [
    "ignore:'audioop' is deprecated:DeprecationWarning",
]

[tool.ruff]
line-length = 120
exclude = ["nvidia_ace"]

###############################################################################
# Test Coverage configuration

[tool.coverage.run]
branch = true
source = ["src/nvidia_pipecat"]
omit = ["tests/*"]

[tool.coverage.report]
# Regexes for lines to exclude from consideration
exclude_also = [
    # Don't complain about missing debug-only code:
    "def __repr__",
    "if self\\.debug",
    # Don't complain if tests don't hit defensive assertion code:
    "raise AssertionError",
    "raise NotImplementedError",
    # Don't complain if non-runnable code isn't run:
    "if 0:",
    "if __name__ == .__main__.:",
    # Don't complain about abstract methods, they aren't run:
    "@(abc\\.)?abstractmethod",
]
ignore_errors = true

###############################################################################
# Linting Rules

[tool.ruff.lint]
select = [
    # pycodestyle
    "E",
    # Pyflakes
    "F",
    # pyupgrade
    "UP",
    # flake8-bugbear
    "B",
    # flake8-simplify
    "SIM",
    # isort
    "I",
    # docstring checks
    "D",
]
ignore = ["SIM108"]

[tool.ruff.lint.pydocstyle]
convention = "google"
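
###############################################################################
# Usage note (comments only, to keep this file valid TOML).
# A minimal sketch of how the [dependency-groups] above are typically consumed,
# assuming the project is managed with uv (which [tool.uv.sources] suggests);
# the exact commands depend on the repo's own developer docs:
#
#   uv sync                  # install the core dependencies from [project]
#   uv sync --group dev      # additionally install the "dev" group (ruff, pytest, ...)
#   uv sync --group examples # additionally install the "examples" group
#
# The PyTorch entry is pinned to the CPU wheel index via [tool.uv.sources]
# on non-macOS platforms; on macOS the default PyPI wheel is used.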