-
Notifications
You must be signed in to change notification settings - Fork 6
Expand file tree
/
Copy path pyproject.toml
More file actions
86 lines (80 loc) · 2.23 KB
/
pyproject.toml
File metadata and controls
86 lines (80 loc) · 2.23 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
# Packaging metadata for InferNode (PEP 621). Section order follows the
# conventional pyproject layout: [build-system], [project], [project.*],
# then [tool.*].

[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"

[project]
name = "infernode"
# Version is supplied at build time by setuptools from
# InferenceNode._version.__version__ (see [tool.setuptools.dynamic]).
dynamic = ["version"]
description = "Scalable inference platform with multi-node management and control"
readme = "readme.md"
license = {text = "Apache-2.0"}
requires-python = ">=3.10"
# NOTE(review): authors was left empty in the original; add name/email
# entries when maintainership is settled.
authors = [
]
keywords = [
    "ai", "ml", "inference", "computer-vision", "yolo", "distributed",
    "pipeline", "automation", "iot", "edge-computing",
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "Intended Audience :: Science/Research",
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Scientific/Engineering :: Artificial Intelligence",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
# Core runtime dependencies (PEP 508 specifiers), sorted alphabetically.
# FrameSource is a direct git reference, which blocks publishing to PyPI;
# pin to a released version if/when one exists.
dependencies = [
    "flask>=2.0.0",
    "FrameSource[full] @ git+https://github.com/olkham/FrameSource.git",
    "numpy>=1.21.0",
    "opencv-python>=4.5.0",
    "paho-mqtt>=1.6.0",
    "Pillow>=8.0.0",
    "psutil>=5.8.0",
    "requests>=2.25.0",
    "waitress>=2.1.0",
]

[project.urls]
Homepage = "https://github.com/olkham/inference_node"
Repository = "https://github.com/olkham/inference_node"
# NOTE(review): GitHub serves in-repo docs under /tree/<branch>/docs, not
# /docs — confirm this URL resolves as intended.
Documentation = "https://github.com/olkham/inference_node/docs"
Issues = "https://github.com/olkham/inference_node/issues"

[project.optional-dependencies]
# Full AI inference stack (YOLO via ultralytics, PyTorch, Intel Geti).
ai = [
    "geti-sdk>=2.11.0",
    "torch>=1.11.0",
    "torchvision>=0.12.0",
    "ultralytics>=8.0.0",
]
# ONNX Runtime backends are mutually exclusive — install exactly one of
# onnx / onnx-openvino / onnx-gpu.
onnx = [
    "onnxruntime>=1.16.0",
]
onnx-openvino = [
    "onnxruntime-openvino>=1.16.0",
]
onnx-gpu = [
    "onnxruntime-gpu[cuda12,cudnn]>=1.16.0",
]
# NVIDIA GPU telemetry (NVML bindings).
gpu = [
    "nvidia-ml-py>=12.0.0",
]
serial = [
    "pyserial>=3.5",
]
dev = [
    "black>=21.0.0",
    "flake8>=3.9.0",
    "pytest>=6.0.0",
]

[project.scripts]
# Console entry point: `infernode` runs main() from top-level main.py.
infernode = "main:main"

[tool.setuptools.packages.find]
where = ["."]
include = ["InferenceNode*", "InferenceEngine*", "ResultPublisher*"]

[tool.setuptools.dynamic]
version = {attr = "InferenceNode._version.__version__"}