diff --git a/docs/advanced/configuration.mdx b/docs/advanced/configuration.mdx
index 9c94630..cf8eb93 100644
--- a/docs/advanced/configuration.mdx
+++ b/docs/advanced/configuration.mdx
@@ -46,9 +46,37 @@ Configure Strix using environment variables or a config file.
- Enable/disable anonymous telemetry. Set to `0`, `false`, `no`, or `off` to disable.
+ Global telemetry default toggle. Set to `0`, `false`, `no`, or `off` to disable both PostHog and OTEL unless overridden by per-channel flags below.
+
+ Enable/disable OpenTelemetry run observability independently. When unset, falls back to `STRIX_TELEMETRY`.
+
+
+
+ Enable/disable PostHog product telemetry independently. When unset, falls back to `STRIX_TELEMETRY`.
+
+
+
+ OTLP/Traceloop base URL for remote OpenTelemetry export. If unset, Strix keeps traces local only.
+
+
+
+ API key used for remote trace export. Remote export is enabled only when both `TRACELOOP_BASE_URL` and `TRACELOOP_API_KEY` are set.
+
+
+
+ Optional custom OTEL headers (JSON object or `key=value,key2=value2`). Useful for Langfuse or custom/self-hosted OTLP gateways.
+
+
+When remote OTEL vars are not set, Strix still writes complete run telemetry locally to:
+
+```bash
+strix_runs/<run-name>/events.jsonl
+```
+
+When remote vars are set, Strix dual-writes telemetry to both local JSONL and the remote OTEL endpoint.
+
## Docker Configuration
@@ -106,4 +134,5 @@ export PERPLEXITY_API_KEY="pplx-..."
# Optional: Custom timeouts
export LLM_TIMEOUT="600"
export STRIX_SANDBOX_EXECUTION_TIMEOUT="300"
+
```
diff --git a/poetry.lock b/poetry.lock
index ffae0a0..5930b42 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.3.2 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -220,6 +220,34 @@ files = [
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
]
+[[package]]
+name = "anthropic"
+version = "0.84.0"
+description = "The official Python library for the anthropic API"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "anthropic-0.84.0-py3-none-any.whl", hash = "sha256:861c4c50f91ca45f942e091d83b60530ad6d4f98733bfe648065364da05d29e7"},
+ {file = "anthropic-0.84.0.tar.gz", hash = "sha256:72f5f90e5aebe62dca316cb013629cfa24996b0f5a4593b8c3d712bc03c43c37"},
+]
+
+[package.dependencies]
+anyio = ">=3.5.0,<5"
+distro = ">=1.7.0,<2"
+docstring-parser = ">=0.15,<1"
+httpx = ">=0.25.0,<1"
+jiter = ">=0.4.0,<1"
+pydantic = ">=1.9.0,<3"
+sniffio = "*"
+typing-extensions = ">=4.10,<5"
+
+[package.extras]
+aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.9)"]
+bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"]
+mcp = ["mcp (>=1.0) ; python_version >= \"3.10\""]
+vertex = ["google-auth[requests] (>=2,<3)"]
+
[[package]]
name = "anyio"
version = "4.10.0"
@@ -628,6 +656,18 @@ files = [
{file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"},
]
+[[package]]
+name = "catalogue"
+version = "2.0.10"
+description = "Super lightweight function registries for your library"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+ {file = "catalogue-2.0.10-py3-none-any.whl", hash = "sha256:58c2de0020aa90f4a2da7dfad161bf7b3b054c86a5f09fcedc0b2b740c109a9f"},
+ {file = "catalogue-2.0.10.tar.gz", hash = "sha256:4f56daa940913d3f09d589c191c74e5a6d51762b3a9e37dd53b7437afd6cda15"},
+]
+
[[package]]
name = "certifi"
version = "2025.8.3"
@@ -890,7 +930,7 @@ files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
-markers = {main = "sys_platform == \"win32\" and extra == \"sandbox\" or platform_system == \"Windows\"", dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
+markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""}
[[package]]
name = "contourpy"
@@ -1174,6 +1214,17 @@ ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
+[[package]]
+name = "cuid"
+version = "0.4"
+description = "Fast, scalable unique ID generation"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "cuid-0.4.tar.gz", hash = "sha256:74eaba154916a2240405c3631acee708c263ef8fa05a86820b87d0f59f84e978"},
+]
+
[[package]]
name = "cvss"
version = "3.6"
@@ -1203,6 +1254,29 @@ files = [
docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
tests = ["pytest", "pytest-cov", "pytest-xdist"]
+[[package]]
+name = "dateparser"
+version = "1.3.0"
+description = "Date parsing library designed to parse dates from HTML pages"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "dateparser-1.3.0-py3-none-any.whl", hash = "sha256:8dc678b0a526e103379f02ae44337d424bd366aac727d3c6cf52ce1b01efbb5a"},
+ {file = "dateparser-1.3.0.tar.gz", hash = "sha256:5bccf5d1ec6785e5be71cc7ec80f014575a09b4923e762f850e57443bddbf1a5"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7.0"
+pytz = ">=2024.2"
+regex = ">=2024.9.11"
+tzlocal = ">=0.2"
+
+[package.extras]
+calendars = ["convertdate (>=2.2.1)", "hijridate"]
+fasttext = ["fasttext (>=0.9.1)", "numpy (>=1.22.0,<2)"]
+langdetect = ["langdetect (>=1.0.0)"]
+
[[package]]
name = "decorator"
version = "5.2.1"
@@ -1228,6 +1302,24 @@ files = [
{file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
]
+[[package]]
+name = "deprecated"
+version = "1.3.1"
+description = "Python @deprecated decorator to deprecate old python classes, functions or methods."
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7"
+groups = ["main"]
+files = [
+ {file = "deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f"},
+ {file = "deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223"},
+]
+
+[package.dependencies]
+wrapt = ">=1.10,<3"
+
+[package.extras]
+dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "setuptools ; python_version >= \"3.12\"", "tox"]
+
[[package]]
name = "dill"
version = "0.4.0"
@@ -1316,10 +1408,9 @@ websockets = ["websocket-client (>=1.3.0)"]
name = "docstring-parser"
version = "0.17.0"
description = "Parse Python docstrings in reST, Google and Numpydoc format"
-optional = true
+optional = false
python-versions = ">=3.8"
groups = ["main"]
-markers = "extra == \"vertex\""
files = [
{file = "docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708"},
{file = "docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912"},
@@ -1388,6 +1479,24 @@ files = [
[package.extras]
tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""]
+[[package]]
+name = "faker"
+version = "40.8.0"
+description = "Faker is a Python package that generates fake data for you."
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "faker-40.8.0-py3-none-any.whl", hash = "sha256:eb21bdba18f7a8375382eb94fb436fce07046893dc94cb20817d28deb0c3d579"},
+ {file = "faker-40.8.0.tar.gz", hash = "sha256:936a3c9be6c004433f20aa4d99095df5dec82b8c7ad07459756041f8c1728875"},
+]
+
+[package.dependencies]
+tzdata = {version = "*", markers = "platform_system == \"Windows\""}
+
+[package.extras]
+tzdata = ["tzdata"]
+
[[package]]
name = "fastapi"
version = "0.121.0"
@@ -2142,10 +2251,9 @@ requests = ["requests (>=2.18.0,<3.0.0)"]
name = "googleapis-common-protos"
version = "1.72.0"
description = "Common protobufs used in Google APIs"
-optional = true
+optional = false
python-versions = ">=3.7"
groups = ["main"]
-markers = "extra == \"vertex\""
files = [
{file = "googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038"},
{file = "googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5"},
@@ -2635,6 +2743,18 @@ perf = ["ipython"]
test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
+[[package]]
+name = "inflection"
+version = "0.5.1"
+description = "A port of Ruby on Rails inflector to Python"
+optional = false
+python-versions = ">=3.5"
+groups = ["main"]
+files = [
+ {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"},
+ {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"},
+]
+
[[package]]
name = "iniconfig"
version = "2.1.0"
@@ -2862,6 +2982,18 @@ files = [
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
]
+[[package]]
+name = "joblib"
+version = "1.5.3"
+description = "Lightweight pipelining with Python functions"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "joblib-1.5.3-py3-none-any.whl", hash = "sha256:5fc3c5039fc5ca8c0276333a188bbd59d6b7ab37fe6632daa76bc7f9ec18e713"},
+ {file = "joblib-1.5.3.tar.gz", hash = "sha256:8561a3269e6801106863fd0d6d84bb737be9e7631e33aaed3fb9ce5953688da3"},
+]
+
[[package]]
name = "jsonschema"
version = "4.25.1"
@@ -2876,7 +3008,7 @@ files = [
[package.dependencies]
attrs = ">=22.2.0"
-jsonschema-specifications = ">=2023.03.6"
+jsonschema-specifications = ">=2023.3.6"
referencing = ">=0.28.4"
rpds-py = ">=0.7.1"
@@ -3863,6 +3995,32 @@ extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.1
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)", "pytest-xdist (>=3.0)"]
test-extras = ["pytest-mpl", "pytest-randomly"]
+[[package]]
+name = "nltk"
+version = "3.9.3"
+description = "Natural Language Toolkit"
+optional = false
+python-versions = ">=3.10"
+groups = ["main"]
+files = [
+ {file = "nltk-3.9.3-py3-none-any.whl", hash = "sha256:60b3db6e9995b3dd976b1f0fa7dec22069b2677e759c28eb69b62ddd44870522"},
+ {file = "nltk-3.9.3.tar.gz", hash = "sha256:cb5945d6424a98d694c2b9a0264519fab4363711065a46aa0ae7a2195b92e71f"},
+]
+
+[package.dependencies]
+click = "*"
+joblib = "*"
+regex = ">=2021.8.3"
+tqdm = "*"
+
+[package.extras]
+all = ["matplotlib", "numpy", "pyparsing", "python-crfsuite", "requests", "scikit-learn", "scipy", "twython"]
+corenlp = ["requests"]
+machine-learning = ["numpy", "python-crfsuite", "scikit-learn", "scipy"]
+plot = ["matplotlib"]
+tgrep = ["pyparsing"]
+twitter = ["twython"]
+
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -3879,10 +4037,9 @@ files = [
name = "numpy"
version = "2.3.2"
description = "Fundamental package for array computing in Python"
-optional = true
+optional = false
python-versions = ">=3.11"
groups = ["main"]
-markers = "extra == \"sandbox\""
files = [
{file = "numpy-2.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:852ae5bed3478b92f093e30f785c98e0cb62fa0a939ed057c31716e18a7a22b9"},
{file = "numpy-2.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a0e27186e781a69959d0230dd9909b5e26024f8da10683bd6344baea1885168"},
@@ -4084,6 +4241,957 @@ files = [
[package.dependencies]
et-xmlfile = "*"
+[[package]]
+name = "opentelemetry-api"
+version = "1.40.0"
+description = "OpenTelemetry Python API"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_api-1.40.0-py3-none-any.whl", hash = "sha256:82dd69331ae74b06f6a874704be0cfaa49a1650e1537d4a813b86ecef7d0ecf9"},
+ {file = "opentelemetry_api-1.40.0.tar.gz", hash = "sha256:159be641c0b04d11e9ecd576906462773eb97ae1b657730f0ecf64d32071569f"},
+]
+
+[package.dependencies]
+importlib-metadata = ">=6.0,<8.8.0"
+typing-extensions = ">=4.5.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-common"
+version = "1.40.0"
+description = "OpenTelemetry Protobuf encoding"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_common-1.40.0-py3-none-any.whl", hash = "sha256:7081ff453835a82417bf38dccf122c827c3cbc94f2079b03bba02a3165f25149"},
+ {file = "opentelemetry_exporter_otlp_proto_common-1.40.0.tar.gz", hash = "sha256:1cbee86a4064790b362a86601ee7934f368b81cd4cc2f2e163902a6e7818a0fa"},
+]
+
+[package.dependencies]
+opentelemetry-proto = "1.40.0"
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-grpc"
+version = "1.40.0"
+description = "OpenTelemetry Collector Protobuf over gRPC Exporter"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.40.0-py3-none-any.whl", hash = "sha256:2aa0ca53483fe0cf6405087a7491472b70335bc5c7944378a0a8e72e86995c52"},
+ {file = "opentelemetry_exporter_otlp_proto_grpc-1.40.0.tar.gz", hash = "sha256:bd4015183e40b635b3dab8da528b27161ba83bf4ef545776b196f0fb4ec47740"},
+]
+
+[package.dependencies]
+googleapis-common-protos = ">=1.57,<2.0"
+grpcio = [
+ {version = ">=1.63.2,<2.0.0", markers = "python_version < \"3.13\""},
+ {version = ">=1.66.2,<2.0.0", markers = "python_version == \"3.13\""},
+ {version = ">=1.75.1,<2.0.0", markers = "python_version >= \"3.14\""},
+]
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.40.0"
+opentelemetry-proto = "1.40.0"
+opentelemetry-sdk = ">=1.40.0,<1.41.0"
+typing-extensions = ">=4.6.0"
+
+[package.extras]
+gcp-auth = ["opentelemetry-exporter-credential-provider-gcp (>=0.59b0)"]
+
+[[package]]
+name = "opentelemetry-exporter-otlp-proto-http"
+version = "1.40.0"
+description = "OpenTelemetry Collector Protobuf over HTTP Exporter"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_exporter_otlp_proto_http-1.40.0-py3-none-any.whl", hash = "sha256:a8d1dab28f504c5d96577d6509f80a8150e44e8f45f82cdbe0e34c99ab040069"},
+ {file = "opentelemetry_exporter_otlp_proto_http-1.40.0.tar.gz", hash = "sha256:db48f5e0f33217588bbc00274a31517ba830da576e59503507c839b38fa0869c"},
+]
+
+[package.dependencies]
+googleapis-common-protos = ">=1.52,<2.0"
+opentelemetry-api = ">=1.15,<2.0"
+opentelemetry-exporter-otlp-proto-common = "1.40.0"
+opentelemetry-proto = "1.40.0"
+opentelemetry-sdk = ">=1.40.0,<1.41.0"
+requests = ">=2.7,<3.0"
+typing-extensions = ">=4.5.0"
+
+[package.extras]
+gcp-auth = ["opentelemetry-exporter-credential-provider-gcp (>=0.59b0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation"
+version = "0.61b0"
+description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation-0.61b0-py3-none-any.whl", hash = "sha256:92a93a280e69788e8f88391247cc530fd81f16f2b011979d4d6398f805cfbc63"},
+ {file = "opentelemetry_instrumentation-0.61b0.tar.gz", hash = "sha256:cb21b48db738c9de196eba6b805b4ff9de3b7f187e4bbf9a466fa170514f1fc7"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.4,<2.0"
+opentelemetry-semantic-conventions = "0.61b0"
+packaging = ">=18.0"
+wrapt = ">=1.0.0,<2.0.0"
+
+[[package]]
+name = "opentelemetry-instrumentation-agno"
+version = "0.53.0"
+description = "OpenTelemetry Agno instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_agno-0.53.0-py3-none-any.whl", hash = "sha256:bab72e73e12dfcfae6440d6d47f124d6cdd9d6a5ef391ef896b79742696595d1"},
+ {file = "opentelemetry_instrumentation_agno-0.53.0.tar.gz", hash = "sha256:67ff165475ca1c48ea41fe9db2d9f89d72430b8e995ea1aa8b329f04473b7a0c"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.28.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["agno"]
+
+[[package]]
+name = "opentelemetry-instrumentation-alephalpha"
+version = "0.53.0"
+description = "OpenTelemetry Aleph Alpha instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_alephalpha-0.53.0-py3-none-any.whl", hash = "sha256:905d97267097c4d35426fda6893590908a4f15c58f50fdfbe9b59f8cfef266ea"},
+ {file = "opentelemetry_instrumentation_alephalpha-0.53.0.tar.gz", hash = "sha256:e558d0c5aa17c4278619242d06792f272a32297ab1bb6dce61498863f40ee270"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["aleph-alpha-client"]
+
+[[package]]
+name = "opentelemetry-instrumentation-anthropic"
+version = "0.53.0"
+description = "OpenTelemetry Anthropic instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_anthropic-0.53.0-py3-none-any.whl", hash = "sha256:e89f19457cb697fd94d63f29883f38d640603a7a0351c25052f3674f41af1c99"},
+ {file = "opentelemetry_instrumentation_anthropic-0.53.0.tar.gz", hash = "sha256:de8d405f5ed2f6af5f368e028e6ad07504acecd20b133b84a9fa45827deaba15"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.14,<0.5.0"
+
+[package.extras]
+instruments = ["anthropic"]
+
+[[package]]
+name = "opentelemetry-instrumentation-bedrock"
+version = "0.53.0"
+description = "OpenTelemetry Bedrock instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_bedrock-0.53.0-py3-none-any.whl", hash = "sha256:1e13877d1bcf31e4617b0801f0369f2c2aa42fca17e9174d3cbf23b0c1a63315"},
+ {file = "opentelemetry_instrumentation_bedrock-0.53.0.tar.gz", hash = "sha256:0bf17a81fdeddeeee2baf567b30ea42853c9dfd2ba8dca55fcbdb7c306aa0825"},
+]
+
+[package.dependencies]
+anthropic = ">=0.17.0"
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+tokenizers = ">=0.13.0"
+
+[package.extras]
+instruments = ["boto3"]
+
+[[package]]
+name = "opentelemetry-instrumentation-chromadb"
+version = "0.53.0"
+description = "OpenTelemetry Chroma DB instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_chromadb-0.53.0-py3-none-any.whl", hash = "sha256:5c1c17dc07ae94b4dec01022e2c5f9c51d31c8912d9ddde7ac392dd97094d317"},
+ {file = "opentelemetry_instrumentation_chromadb-0.53.0.tar.gz", hash = "sha256:131495c56fdc6131abb8d8a31addcf86e9ab10e63e86927bb74380da351f1b5a"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["chromadb"]
+
+[[package]]
+name = "opentelemetry-instrumentation-cohere"
+version = "0.53.0"
+description = "OpenTelemetry Cohere instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_cohere-0.53.0-py3-none-any.whl", hash = "sha256:7a1483c99db7f30c4dde1763834ee6844f0d2ba1a986b52eb740c5c4e68ed926"},
+ {file = "opentelemetry_instrumentation_cohere-0.53.0.tar.gz", hash = "sha256:51a128e317d0ec09c1b42fb1b955258c2bb337150e55c23a70dbad627dac5097"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["cohere"]
+
+[[package]]
+name = "opentelemetry-instrumentation-crewai"
+version = "0.53.0"
+description = "OpenTelemetry crewAI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_crewai-0.53.0-py3-none-any.whl", hash = "sha256:348b9214f2557f33057a49fb648402cb46a231a063a9ffa7469047c1b2383afe"},
+ {file = "opentelemetry_instrumentation_crewai-0.53.0.tar.gz", hash = "sha256:9b50cd375ca0b366f1f23e8f7e8d8a8baac61792fe1d3f515e41ef45a7dc360f"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["crewai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-google-generativeai"
+version = "0.53.0"
+description = "OpenTelemetry Google Generative AI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_google_generativeai-0.53.0-py3-none-any.whl", hash = "sha256:8f3b14ac2bcf348502f039f9b0a1440b9e8a041280c4ee8c6e7ffb79e35f7bd8"},
+ {file = "opentelemetry_instrumentation_google_generativeai-0.53.0.tar.gz", hash = "sha256:c30ed87c3ebb9b52558c97e465a36451e5dc6f40e18d1dbfef482ecbdadcf42f"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["google-genai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-groq"
+version = "0.53.0"
+description = "OpenTelemetry Groq instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_groq-0.53.0-py3-none-any.whl", hash = "sha256:40efe9df236e785ae31a498f3fe5b2287afa7465b4b7786f2ca36cfa70943aa3"},
+ {file = "opentelemetry_instrumentation_groq-0.53.0.tar.gz", hash = "sha256:19065150a7236a2c99f1bcea6056456922a6997102198642285d3c7e80b011e4"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["groq"]
+
+[[package]]
+name = "opentelemetry-instrumentation-haystack"
+version = "0.53.0"
+description = "OpenTelemetry Haystack instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_haystack-0.53.0-py3-none-any.whl", hash = "sha256:782daac342840f3c63194c6655258fc2c80b03b399458a30b6b332727e5a9d57"},
+ {file = "opentelemetry_instrumentation_haystack-0.53.0.tar.gz", hash = "sha256:62307cf41d613b69fe1495e233ff4ec0f86e83fd9b5c8fe208eefc229ebde010"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["haystack-ai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-lancedb"
+version = "0.53.0"
+description = "OpenTelemetry Lancedb instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_lancedb-0.53.0-py3-none-any.whl", hash = "sha256:30e6b1b4b83c3513101931531919b650ea61ab65b8594f9966159f4eeaf436a8"},
+ {file = "opentelemetry_instrumentation_lancedb-0.53.0.tar.gz", hash = "sha256:e646e8e850e4f646199dbf2c62d3bb3e495c00ab093303e5b4dbbd4c76f0738f"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["lancedb"]
+
+[[package]]
+name = "opentelemetry-instrumentation-langchain"
+version = "0.53.0"
+description = "OpenTelemetry Langchain instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_langchain-0.53.0-py3-none-any.whl", hash = "sha256:5426917b76ffc5e9765c0b2eaac516ac7b30f70bd53bbbee51d65364ae668276"},
+ {file = "opentelemetry_instrumentation_langchain-0.53.0.tar.gz", hash = "sha256:47d9ad0baa6b3f2e44b9b31bd655b87eac2d86794dc38079d61a2eb24b747f51"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["langchain"]
+
+[[package]]
+name = "opentelemetry-instrumentation-llamaindex"
+version = "0.53.0"
+description = "OpenTelemetry LlamaIndex instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_llamaindex-0.53.0-py3-none-any.whl", hash = "sha256:c4a0043bc0305b860b0da4840466ffb5fae83595a52a49212a85fb46ddbb6617"},
+ {file = "opentelemetry_instrumentation_llamaindex-0.53.0.tar.gz", hash = "sha256:c7b0bd1fe818002286d0122f6a57c516c6a4b248813ca3a4adff61a547f83050"},
+]
+
+[package.dependencies]
+inflection = ">=0.5.1,<0.6.0"
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["llama-index"]
+llamaparse = ["llama-parse"]
+
+[[package]]
+name = "opentelemetry-instrumentation-logging"
+version = "0.61b0"
+description = "OpenTelemetry Logging instrumentation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_logging-0.61b0-py3-none-any.whl", hash = "sha256:6d87e5ded6a0128d775d41511f8380910a1b610671081d16efb05ac3711c0074"},
+ {file = "opentelemetry_instrumentation_logging-0.61b0.tar.gz", hash = "sha256:feaa30b700acd2a37cc81db5f562ab0c3a5b6cc2453595e98b72c01dcf649584"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+
+[[package]]
+name = "opentelemetry-instrumentation-marqo"
+version = "0.53.0"
+description = "OpenTelemetry Marqo instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_marqo-0.53.0-py3-none-any.whl", hash = "sha256:7e3ffb849d45ffade704a24118d4f05df13217a13bb421489a2765dd8996df9a"},
+ {file = "opentelemetry_instrumentation_marqo-0.53.0.tar.gz", hash = "sha256:c2756ca5f2dbdbb48140174119e7e6637d7b6af84ae8125aba4fbf58915cd08b"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["marqo"]
+
+[[package]]
+name = "opentelemetry-instrumentation-mcp"
+version = "0.53.0"
+description = "OpenTelemetry mcp instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_mcp-0.53.0-py3-none-any.whl", hash = "sha256:39172f541a9f74035a1e3108fd1760921962a2e8627f01ba3b9e4822e4d25f37"},
+ {file = "opentelemetry_instrumentation_mcp-0.53.0.tar.gz", hash = "sha256:95bb08cd628ea8d347fb243a831a1ddc104cd4b5d88401885da327345b8e890f"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["mcp"]
+
+[[package]]
+name = "opentelemetry-instrumentation-milvus"
+version = "0.53.0"
+description = "OpenTelemetry Milvus instrumentation"
+optional = false
+python-versions = "<4,>=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_milvus-0.53.0-py3-none-any.whl", hash = "sha256:26e74998bd735cea4d31d02137a65b8dbc15dd857acdeea2a23af020f2e4cbe6"},
+ {file = "opentelemetry_instrumentation_milvus-0.53.0.tar.gz", hash = "sha256:613b32bee958dacb05ff3325050b87eedb4697eda9c75c304d1438bbb47f929c"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["pymilvus"]
+
+[[package]]
+name = "opentelemetry-instrumentation-mistralai"
+version = "0.53.0"
+description = "OpenTelemetry Mistral AI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_mistralai-0.53.0-py3-none-any.whl", hash = "sha256:f23c892366262be6c0011105167e7db455a73a72675ce4529258f66aa24f7fb3"},
+ {file = "opentelemetry_instrumentation_mistralai-0.53.0.tar.gz", hash = "sha256:1d05ab9b303efe32dc3e6fb7c7cc844b32b33355535b3a5f03d0d5100b0db36e"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["mistralai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-ollama"
+version = "0.53.0"
+description = "OpenTelemetry Ollama instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_ollama-0.53.0-py3-none-any.whl", hash = "sha256:44aa9e53b9359b9571e2f84ee5313ea39cb49626db42fa0a27c77441b6f7fe1b"},
+ {file = "opentelemetry_instrumentation_ollama-0.53.0.tar.gz", hash = "sha256:2039ac601ff68f2a1fa97e8af5de94f00ccae67797d07c04a3cc706979bcb4cb"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["ollama"]
+
+[[package]]
+name = "opentelemetry-instrumentation-openai"
+version = "0.53.0"
+description = "OpenTelemetry OpenAI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_openai-0.53.0-py3-none-any.whl", hash = "sha256:91d9f69673636f5f7d50e5a4782e4526d6df3a1ddfd6ac2d9e15a957f8fd9ad8"},
+ {file = "opentelemetry_instrumentation_openai-0.53.0.tar.gz", hash = "sha256:c0cd83d223d138309af3cc5f53c9c6d22136374bfa00e8f66dff31cd322ef547"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["openai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-openai-agents"
+version = "0.53.0"
+description = "OpenTelemetry OpenAI Agents instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_openai_agents-0.53.0-py3-none-any.whl", hash = "sha256:2f19e3348359de73cef8a97865cad82f6ba3820ab52bba671e83e091b1dca6d4"},
+ {file = "opentelemetry_instrumentation_openai_agents-0.53.0.tar.gz", hash = "sha256:f8877927da7de87bafc9757173ff3ce63b487f952260017299678d290c1c432f"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["openai-agents"]
+
+[[package]]
+name = "opentelemetry-instrumentation-pinecone"
+version = "0.53.0"
+description = "OpenTelemetry Pinecone instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_pinecone-0.53.0-py3-none-any.whl", hash = "sha256:b972992b8dae9af5fb811c52333c54d4ac5d0eff0a71e6a9220b4905aa94eee3"},
+ {file = "opentelemetry_instrumentation_pinecone-0.53.0.tar.gz", hash = "sha256:c7918da22d719d15ad6c0148d79f2d25bfeef3ddb3a10800222d8d8491575fd4"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["pinecone (>=5.1.0,<9)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-qdrant"
+version = "0.53.0"
+description = "OpenTelemetry Qdrant instrumentation"
+optional = false
+python-versions = "<4,>=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_qdrant-0.53.0-py3-none-any.whl", hash = "sha256:448bca5e4ce4061fbb760a51a9732dbb91c07193bb1774a3eb6579d79007e2b3"},
+ {file = "opentelemetry_instrumentation_qdrant-0.53.0.tar.gz", hash = "sha256:4a739516f3864963cab42f8c67c632cb276861b590b852df91124585031e07dc"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["qdrant-client"]
+
+[[package]]
+name = "opentelemetry-instrumentation-redis"
+version = "0.61b0"
+description = "OpenTelemetry Redis instrumentation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_redis-0.61b0-py3-none-any.whl", hash = "sha256:8d4e850bbb5f8eeafa44c0eac3a007990c7125de187bc9c3659e29ff7e091172"},
+ {file = "opentelemetry_instrumentation_redis-0.61b0.tar.gz", hash = "sha256:ae0fbb56be9a641e621d55b02a7d62977a2c77c5ee760addd79b9b266e46e523"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+opentelemetry-semantic-conventions = "0.61b0"
+wrapt = ">=1.12.1"
+
+[package.extras]
+instruments = ["redis (>=2.6)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-replicate"
+version = "0.53.0"
+description = "OpenTelemetry Replicate instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_replicate-0.53.0-py3-none-any.whl", hash = "sha256:318b9f59acb6b83b51075d1fbdc5fee1a79867fb24268a030c4e27953ed283b2"},
+ {file = "opentelemetry_instrumentation_replicate-0.53.0.tar.gz", hash = "sha256:ca348b6dd57267d15e715d27eaf33c52113bbb9c27875c479fd868228a812941"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["replicate"]
+
+[[package]]
+name = "opentelemetry-instrumentation-requests"
+version = "0.61b0"
+description = "OpenTelemetry requests instrumentation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_requests-0.61b0-py3-none-any.whl", hash = "sha256:cce19b379949fe637eb73ba39b02c57d2d0805447ca6d86534aa33fcb141f683"},
+ {file = "opentelemetry_instrumentation_requests-0.61b0.tar.gz", hash = "sha256:15f879ce8fb206bd7e6fdc61663ea63481040a845218c0cf42902ce70bd7e9d9"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+opentelemetry-semantic-conventions = "0.61b0"
+opentelemetry-util-http = "0.61b0"
+
+[package.extras]
+instruments = ["requests (>=2.0,<3.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-sagemaker"
+version = "0.53.0"
+description = "OpenTelemetry SageMaker instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_sagemaker-0.53.0-py3-none-any.whl", hash = "sha256:d20e07fe7765908bbd58a6e00ac970a38482bf05ac7bd737027abd92507fc367"},
+ {file = "opentelemetry_instrumentation_sagemaker-0.53.0.tar.gz", hash = "sha256:08d34be9f9cf6a12457b90713c8589ec5cbc3c87ddff862543f5590549fd202a"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["boto3"]
+
+[[package]]
+name = "opentelemetry-instrumentation-sqlalchemy"
+version = "0.61b0"
+description = "OpenTelemetry SQLAlchemy instrumentation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_sqlalchemy-0.61b0-py3-none-any.whl", hash = "sha256:f115e0be54116ba4c327b8d7b68db4045ee18d44439d888ab8130a549c50d1c1"},
+ {file = "opentelemetry_instrumentation_sqlalchemy-0.61b0.tar.gz", hash = "sha256:13a3a159a2043a52f0180b3757fbaa26741b0e08abb50deddce4394c118956e6"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+opentelemetry-semantic-conventions = "0.61b0"
+packaging = ">=21.0"
+wrapt = ">=1.11.2"
+
+[package.extras]
+instruments = ["sqlalchemy (>=1.0.0,<2.1.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-threading"
+version = "0.61b0"
+description = "Thread context propagation support for OpenTelemetry"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_threading-0.61b0-py3-none-any.whl", hash = "sha256:735f4a1dc964202fc8aff475efc12bb64e6566f22dff52d5cb5de864b3fe1a70"},
+ {file = "opentelemetry_instrumentation_threading-0.61b0.tar.gz", hash = "sha256:38e0263c692d15a7a458b3fa0286d29290448fa4ac4c63045edac438c6113433"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+wrapt = ">=1.0.0,<2.0.0"
+
+[[package]]
+name = "opentelemetry-instrumentation-together"
+version = "0.53.0"
+description = "OpenTelemetry Together AI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_together-0.53.0-py3-none-any.whl", hash = "sha256:686ebf9b181aa942355f44fed2fbb2c7e04174f0622127f7a80c41730fe1bc8c"},
+ {file = "opentelemetry_instrumentation_together-0.53.0.tar.gz", hash = "sha256:f34c411bdc0ed1f72d33ca05ef4d16fcd8935b2ce18b6d9f625cec91a290b3b9"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["together"]
+
+[[package]]
+name = "opentelemetry-instrumentation-transformers"
+version = "0.53.0"
+description = "OpenTelemetry transformers instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_transformers-0.53.0-py3-none-any.whl", hash = "sha256:c2dff5f32579f702842d98dd53b626f25e859a6d9cb9e46f4807a46647f8d6a5"},
+ {file = "opentelemetry_instrumentation_transformers-0.53.0.tar.gz", hash = "sha256:c29c2fd97b01e0ca111996e22a4d4fa5da023b61c643e385e6ce62f2a46b18a1"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["transformers"]
+
+[[package]]
+name = "opentelemetry-instrumentation-urllib3"
+version = "0.61b0"
+description = "OpenTelemetry urllib3 instrumentation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_urllib3-0.61b0-py3-none-any.whl", hash = "sha256:9644f8c07870266e52f129e6226859ff3a35192555abe46fa0ef9bbbf5b6b46d"},
+ {file = "opentelemetry_instrumentation_urllib3-0.61b0.tar.gz", hash = "sha256:f00037bc8ff813153c4b79306f55a14618c40469a69c6c03a3add29dc7e8b928"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.12,<2.0"
+opentelemetry-instrumentation = "0.61b0"
+opentelemetry-semantic-conventions = "0.61b0"
+opentelemetry-util-http = "0.61b0"
+wrapt = ">=1.0.0,<2.0.0"
+
+[package.extras]
+instruments = ["urllib3 (>=1.0.0,<3.0.0)"]
+
+[[package]]
+name = "opentelemetry-instrumentation-vertexai"
+version = "0.53.0"
+description = "OpenTelemetry Vertex AI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_vertexai-0.53.0-py3-none-any.whl", hash = "sha256:8f2d610e3da3e717069a439d61a3adfa2b375d4658de03f2e05131a3cbbd4681"},
+ {file = "opentelemetry_instrumentation_vertexai-0.53.0.tar.gz", hash = "sha256:436ebbb284af8c067d5ea98e349c53692d801989f61769481b45b75774756fc8"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["google-cloud-aiplatform"]
+
+[[package]]
+name = "opentelemetry-instrumentation-voyageai"
+version = "0.53.0"
+description = "OpenTelemetry Voyage AI instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_voyageai-0.53.0-py3-none-any.whl", hash = "sha256:43342c73dc6cafe4e7d7c6ce66fc5964481d43d1dd71de55ef1fcd5d6c72c6e3"},
+ {file = "opentelemetry_instrumentation_voyageai-0.53.0.tar.gz", hash = "sha256:8382bbbf00d32dcf38d6b0faabff6bd933163d46a5a4de3e86c49114bb00c9b5"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["voyageai"]
+
+[[package]]
+name = "opentelemetry-instrumentation-watsonx"
+version = "0.53.0"
+description = "OpenTelemetry IBM Watsonx Instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_watsonx-0.53.0-py3-none-any.whl", hash = "sha256:d7567f1f58fb78e37aee04a154f5aedd116628930835d10e78267e122f7f5589"},
+ {file = "opentelemetry_instrumentation_watsonx-0.53.0.tar.gz", hash = "sha256:e0064eb9f173cd06e685c2a55f8afc12a603306ca22d946864ba7db34920edd3"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["ibm-watson-machine-learning"]
+
+[[package]]
+name = "opentelemetry-instrumentation-weaviate"
+version = "0.53.0"
+description = "OpenTelemetry Weaviate instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_weaviate-0.53.0-py3-none-any.whl", hash = "sha256:2d825fe52e83db0c3db8cc5536ea8cede80844e51d2c64a88eb4b3531c55731a"},
+ {file = "opentelemetry_instrumentation_weaviate-0.53.0.tar.gz", hash = "sha256:f843fdac67d07ac99039d889f4f20e36e69358df26de943f490cccaa47da79bd"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+
+[package.extras]
+instruments = ["weaviate-client"]
+
+[[package]]
+name = "opentelemetry-instrumentation-writer"
+version = "0.53.0"
+description = "OpenTelemetry Writer instrumentation"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_instrumentation_writer-0.53.0-py3-none-any.whl", hash = "sha256:04a1c1840ba170fae53b48d80462cb572166ad1e3434969a1293a1dfc68f9dfe"},
+ {file = "opentelemetry_instrumentation_writer-0.53.0.tar.gz", hash = "sha256:802598df8ba6a131fdd2912aa0b7fc4082f541e2d79a57a0ef7fbec78691158d"},
+]
+
+[package.dependencies]
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-instrumentation = ">=0.59b0"
+opentelemetry-semantic-conventions = ">=0.59b0"
+opentelemetry-semantic-conventions-ai = ">=0.4.11"
+
+[package.extras]
+instruments = ["writer"]
+
+[[package]]
+name = "opentelemetry-proto"
+version = "1.40.0"
+description = "OpenTelemetry Python Proto"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_proto-1.40.0-py3-none-any.whl", hash = "sha256:266c4385d88923a23d63e353e9761af0f47a6ed0d486979777fe4de59dc9b25f"},
+ {file = "opentelemetry_proto-1.40.0.tar.gz", hash = "sha256:03f639ca129ba513f5819810f5b1f42bcb371391405d99c168fe6937c62febcd"},
+]
+
+[package.dependencies]
+protobuf = ">=5.0,<7.0"
+
+[[package]]
+name = "opentelemetry-sdk"
+version = "1.40.0"
+description = "OpenTelemetry Python SDK"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_sdk-1.40.0-py3-none-any.whl", hash = "sha256:787d2154a71f4b3d81f20524a8ce061b7db667d24e46753f32a7bc48f1c1f3f1"},
+ {file = "opentelemetry_sdk-1.40.0.tar.gz", hash = "sha256:18e9f5ec20d859d268c7cb3c5198c8d105d073714db3de50b593b8c1345a48f2"},
+]
+
+[package.dependencies]
+opentelemetry-api = "1.40.0"
+opentelemetry-semantic-conventions = "0.61b0"
+typing-extensions = ">=4.5.0"
+
+[[package]]
+name = "opentelemetry-semantic-conventions"
+version = "0.61b0"
+description = "OpenTelemetry Semantic Conventions"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_semantic_conventions-0.61b0-py3-none-any.whl", hash = "sha256:fa530a96be229795f8cef353739b618148b0fe2b4b3f005e60e262926c4d38e2"},
+ {file = "opentelemetry_semantic_conventions-0.61b0.tar.gz", hash = "sha256:072f65473c5d7c6dc0355b27d6c9d1a679d63b6d4b4b16a9773062cb7e31192a"},
+]
+
+[package.dependencies]
+opentelemetry-api = "1.40.0"
+typing-extensions = ">=4.5.0"
+
+[[package]]
+name = "opentelemetry-semantic-conventions-ai"
+version = "0.4.15"
+description = "OpenTelemetry Semantic Conventions Extension for Large Language Models"
+optional = false
+python-versions = "<4,>=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_semantic_conventions_ai-0.4.15-py3-none-any.whl", hash = "sha256:011461f1fba30f27035c49ab3b8344367adc72da0a6c8d3c7428303c6779edc9"},
+ {file = "opentelemetry_semantic_conventions_ai-0.4.15.tar.gz", hash = "sha256:12de172d1e11d21c6e82bbf578c7e8a713589a7fda76af9ed785632564a28b81"},
+]
+
+[package.dependencies]
+opentelemetry-sdk = ">=1.38.0,<2"
+opentelemetry-semantic-conventions = ">=0.59b0"
+
+[[package]]
+name = "opentelemetry-util-http"
+version = "0.61b0"
+description = "Web util for OpenTelemetry"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "opentelemetry_util_http-0.61b0-py3-none-any.whl", hash = "sha256:8e715e848233e9527ea47e275659ea60a57a75edf5206a3b937e236a6da5fc33"},
+ {file = "opentelemetry_util_http-0.61b0.tar.gz", hash = "sha256:1039cb891334ad2731affdf034d8fb8b48c239af9b6dd295e5fabd07f1c95572"},
+]
+
[[package]]
name = "orjson"
version = "3.11.2"
@@ -4369,6 +5477,18 @@ files = [
[package.dependencies]
ptyprocess = ">=0.5"
+[[package]]
+name = "phonenumbers"
+version = "9.0.25"
+description = "Python version of Google's common library for parsing, formatting, storing and validating international phone numbers."
+optional = false
+python-versions = ">=2.5"
+groups = ["main"]
+files = [
+ {file = "phonenumbers-9.0.25-py2.py3-none-any.whl", hash = "sha256:b1fd6c20d588f5bcd40af3899d727a9f536364211ec6eac554fcd75ca58992a3"},
+ {file = "phonenumbers-9.0.25.tar.gz", hash = "sha256:a5f236fa384c6a77378d7836c8e486ade5f984ad2e8e6cc0dbe5124315cdc81b"},
+]
+
[[package]]
name = "pillow"
version = "12.1.1"
@@ -4745,10 +5865,9 @@ testing = ["google-api-core (>=1.31.5)"]
name = "protobuf"
version = "6.33.5"
description = ""
-optional = true
+optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "extra == \"vertex\""
files = [
{file = "protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b"},
{file = "protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c"},
@@ -5239,9 +6358,10 @@ diagrams = ["jinja2", "railroad-diagrams"]
name = "pypdf"
version = "6.7.5"
description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
-optional = false
+optional = true
python-versions = ">=3.9"
groups = ["main"]
+markers = "extra == \"sandbox\""
files = [
{file = "pypdf-6.7.5-py3-none-any.whl", hash = "sha256:07ba7f1d6e6d9aa2a17f5452e320a84718d4ce863367f7ede2fd72280349ab13"},
{file = "pypdf-6.7.5.tar.gz", hash = "sha256:40bb2e2e872078655f12b9b89e2f900888bb505e88a82150b64f9f34fa25651d"},
@@ -5452,6 +6572,21 @@ Pillow = ">=3.3.2"
typing-extensions = ">=4.9.0"
XlsxWriter = ">=0.5.7"
+[[package]]
+name = "python-stdnum"
+version = "2.2"
+description = "Python module to handle standardized numbers and codes"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "python_stdnum-2.2-py3-none-any.whl", hash = "sha256:bdf98fd117a0ca152e4047aa8ad254bae63853d4e915ddd4e0effb33ba0e9260"},
+ {file = "python_stdnum-2.2.tar.gz", hash = "sha256:e95fcfa858a703d4a40130cb3eaac133c60d8808a7f3c98efeedac968c2479b9"},
+]
+
+[package.extras]
+soap = ["zeep"]
+
[[package]]
name = "pytz"
version = "2025.2"
@@ -6123,10 +7258,173 @@ files = [
]
[package.dependencies]
-botocore = ">=1.37.4,<2.0a.0"
+botocore = ">=1.37.4,<2.0a0"
[package.extras]
-crt = ["botocore[crt] (>=1.37.4,<2.0a.0)"]
+crt = ["botocore[crt] (>=1.37.4,<2.0a0)"]
+
+[[package]]
+name = "scikit-learn"
+version = "1.8.0"
+description = "A set of python modules for machine learning and data mining"
+optional = false
+python-versions = ">=3.11"
+groups = ["main"]
+files = [
+ {file = "scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da"},
+ {file = "scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1"},
+ {file = "scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b"},
+ {file = "scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1"},
+ {file = "scikit_learn-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:c57b1b610bd1f40ba43970e11ce62821c2e6569e4d74023db19c6b26f246cb3b"},
+ {file = "scikit_learn-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:2838551e011a64e3053ad7618dda9310175f7515f1742fa2d756f7c874c05961"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:35c007dedb2ffe38fe3ee7d201ebac4a2deccd2408e8621d53067733e3c74809"},
+ {file = "scikit_learn-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:8c497fff237d7b4e07e9ef1a640887fa4fb765647f86fbe00f969ff6280ce2bb"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:2de443b9373b3b615aec1bb57f9baa6bb3a9bd093f1269ba95c17d870422b271"},
+ {file = "scikit_learn-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:eddde82a035681427cbedded4e6eff5e57fa59216c2e3e90b10b19ab1d0a65c3"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ee787491dbfe082d9c3013f01f5991658b0f38aa8177e4cd4bf434c58f551702"},
+ {file = "scikit_learn-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf97c10a3f5a7543f9b88cbf488d33d175e9146115a451ae34568597ba33dcde"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:56079a99c20d230e873ea40753102102734c5953366972a71d5cb39a32bc40c6"},
+ {file = "scikit_learn-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:3bad7565bc9cf37ce19a7c0d107742b320c1285df7aab1a6e2d28780df167242"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:00d6f1d66fbcf4eba6e356e1420d33cc06c70a45bb1363cd6f6a8e4ebbbdece2"},
+ {file = "scikit_learn-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:f28dd15c6bb0b66ba09728cf09fd8736c304be29409bd8445a080c1280619e8c"},
+ {file = "scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd"},
+]
+
+[package.dependencies]
+joblib = ">=1.3.0"
+numpy = ">=1.24.1"
+scipy = ">=1.10.0"
+threadpoolctl = ">=3.2.0"
+
+[package.extras]
+benchmark = ["matplotlib (>=3.6.1)", "memory_profiler (>=0.57.0)", "pandas (>=1.5.0)"]
+build = ["cython (>=3.1.2)", "meson-python (>=0.17.1)", "numpy (>=1.24.1)", "scipy (>=1.10.0)"]
+docs = ["Pillow (>=10.1.0)", "matplotlib (>=3.6.1)", "memory_profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.5.0)", "plotly (>=5.18.0)", "polars (>=0.20.30)", "pooch (>=1.8.0)", "pydata-sphinx-theme (>=0.15.3)", "scikit-image (>=0.22.0)", "seaborn (>=0.13.0)", "sphinx (>=7.3.7)", "sphinx-copybutton (>=0.5.2)", "sphinx-design (>=0.6.0)", "sphinx-gallery (>=0.17.1)", "sphinx-prompt (>=1.4.0)", "sphinx-remove-toctrees (>=1.0.0.post1)", "sphinxcontrib-sass (>=0.3.4)", "sphinxext-opengraph (>=0.9.1)", "towncrier (>=24.8.0)"]
+examples = ["matplotlib (>=3.6.1)", "pandas (>=1.5.0)", "plotly (>=5.18.0)", "pooch (>=1.8.0)", "scikit-image (>=0.22.0)", "seaborn (>=0.13.0)"]
+install = ["joblib (>=1.3.0)", "numpy (>=1.24.1)", "scipy (>=1.10.0)", "threadpoolctl (>=3.2.0)"]
+maintenance = ["conda-lock (==3.0.1)"]
+tests = ["matplotlib (>=3.6.1)", "mypy (>=1.15)", "numpydoc (>=1.2.0)", "pandas (>=1.5.0)", "polars (>=0.20.30)", "pooch (>=1.8.0)", "pyamg (>=5.0.0)", "pyarrow (>=12.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.11.7)"]
+
+[[package]]
+name = "scipy"
+version = "1.17.1"
+description = "Fundamental algorithms for scientific computing in Python"
+optional = false
+python-versions = ">=3.11"
+groups = ["main"]
+files = [
+ {file = "scipy-1.17.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:1f95b894f13729334fb990162e911c9e5dc1ab390c58aa6cbecb389c5b5e28ec"},
+ {file = "scipy-1.17.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e18f12c6b0bc5a592ed23d3f7b891f68fd7f8241d69b7883769eb5d5dfb52696"},
+ {file = "scipy-1.17.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:a3472cfbca0a54177d0faa68f697d8ba4c80bbdc19908c3465556d9f7efce9ee"},
+ {file = "scipy-1.17.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:766e0dc5a616d026a3a1cffa379af959671729083882f50307e18175797b3dfd"},
+ {file = "scipy-1.17.1-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:744b2bf3640d907b79f3fd7874efe432d1cf171ee721243e350f55234b4cec4c"},
+ {file = "scipy-1.17.1-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43af8d1f3bea642559019edfe64e9b11192a8978efbd1539d7bc2aaa23d92de4"},
+ {file = "scipy-1.17.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cd96a1898c0a47be4520327e01f874acfd61fb48a9420f8aa9f6483412ffa444"},
+ {file = "scipy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4eb6c25dd62ee8d5edf68a8e1c171dd71c292fdae95d8aeb3dd7d7de4c364082"},
+ {file = "scipy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:d30e57c72013c2a4fe441c2fcb8e77b14e152ad48b5464858e07e2ad9fbfceff"},
+ {file = "scipy-1.17.1-cp311-cp311-win_arm64.whl", hash = "sha256:9ecb4efb1cd6e8c4afea0daa91a87fbddbce1b99d2895d151596716c0b2e859d"},
+ {file = "scipy-1.17.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:35c3a56d2ef83efc372eaec584314bd0ef2e2f0d2adb21c55e6ad5b344c0dcb8"},
+ {file = "scipy-1.17.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:fcb310ddb270a06114bb64bbe53c94926b943f5b7f0842194d585c65eb4edd76"},
+ {file = "scipy-1.17.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:cc90d2e9c7e5c7f1a482c9875007c095c3194b1cfedca3c2f3291cdc2bc7c086"},
+ {file = "scipy-1.17.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:c80be5ede8f3f8eded4eff73cc99a25c388ce98e555b17d31da05287015ffa5b"},
+ {file = "scipy-1.17.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e19ebea31758fac5893a2ac360fedd00116cbb7628e650842a6691ba7ca28a21"},
+ {file = "scipy-1.17.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02ae3b274fde71c5e92ac4d54bc06c42d80e399fec704383dcd99b301df37458"},
+ {file = "scipy-1.17.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8a604bae87c6195d8b1045eddece0514d041604b14f2727bbc2b3020172045eb"},
+ {file = "scipy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f590cd684941912d10becc07325a3eeb77886fe981415660d9265c4c418d0bea"},
+ {file = "scipy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:41b71f4a3a4cab9d366cd9065b288efc4d4f3c0b37a91a8e0947fb5bd7f31d87"},
+ {file = "scipy-1.17.1-cp312-cp312-win_arm64.whl", hash = "sha256:f4115102802df98b2b0db3cce5cb9b92572633a1197c77b7553e5203f284a5b3"},
+ {file = "scipy-1.17.1-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:5e3c5c011904115f88a39308379c17f91546f77c1667cea98739fe0fccea804c"},
+ {file = "scipy-1.17.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6fac755ca3d2c3edcb22f479fceaa241704111414831ddd3bc6056e18516892f"},
+ {file = "scipy-1.17.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:7ff200bf9d24f2e4d5dc6ee8c3ac64d739d3a89e2326ba68aaf6c4a2b838fd7d"},
+ {file = "scipy-1.17.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4b400bdc6f79fa02a4d86640310dde87a21fba0c979efff5248908c6f15fad1b"},
+ {file = "scipy-1.17.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b64ca7d4aee0102a97f3ba22124052b4bd2152522355073580bf4845e2550b6"},
+ {file = "scipy-1.17.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:581b2264fc0aa555f3f435a5944da7504ea3a065d7029ad60e7c3d1ae09c5464"},
+ {file = "scipy-1.17.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:beeda3d4ae615106d7094f7e7cef6218392e4465cc95d25f900bebabfded0950"},
+ {file = "scipy-1.17.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6609bc224e9568f65064cfa72edc0f24ee6655b47575954ec6339534b2798369"},
+ {file = "scipy-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:37425bc9175607b0268f493d79a292c39f9d001a357bebb6b88fdfaff13f6448"},
+ {file = "scipy-1.17.1-cp313-cp313-win_arm64.whl", hash = "sha256:5cf36e801231b6a2059bf354720274b7558746f3b1a4efb43fcf557ccd484a87"},
+ {file = "scipy-1.17.1-cp313-cp313t-macosx_10_14_x86_64.whl", hash = "sha256:d59c30000a16d8edc7e64152e30220bfbd724c9bbb08368c054e24c651314f0a"},
+ {file = "scipy-1.17.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:010f4333c96c9bb1a4516269e33cb5917b08ef2166d5556ca2fd9f082a9e6ea0"},
+ {file = "scipy-1.17.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:2ceb2d3e01c5f1d83c4189737a42d9cb2fc38a6eeed225e7515eef71ad301dce"},
+ {file = "scipy-1.17.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:844e165636711ef41f80b4103ed234181646b98a53c8f05da12ca5ca289134f6"},
+ {file = "scipy-1.17.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:158dd96d2207e21c966063e1635b1063cd7787b627b6f07305315dd73d9c679e"},
+ {file = "scipy-1.17.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:74cbb80d93260fe2ffa334efa24cb8f2f0f622a9b9febf8b483c0b865bfb3475"},
+ {file = "scipy-1.17.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:dbc12c9f3d185f5c737d801da555fb74b3dcfa1a50b66a1a93e09190f41fab50"},
+ {file = "scipy-1.17.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:94055a11dfebe37c656e70317e1996dc197e1a15bbcc351bcdd4610e128fe1ca"},
+ {file = "scipy-1.17.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e30bdeaa5deed6bc27b4cc490823cd0347d7dae09119b8803ae576ea0ce52e4c"},
+ {file = "scipy-1.17.1-cp313-cp313t-win_arm64.whl", hash = "sha256:a720477885a9d2411f94a93d16f9d89bad0f28ca23c3f8daa521e2dcc3f44d49"},
+ {file = "scipy-1.17.1-cp314-cp314-macosx_10_14_x86_64.whl", hash = "sha256:a48a72c77a310327f6a3a920092fa2b8fd03d7deaa60f093038f22d98e096717"},
+ {file = "scipy-1.17.1-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:45abad819184f07240d8a696117a7aacd39787af9e0b719d00285549ed19a1e9"},
+ {file = "scipy-1.17.1-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:3fd1fcdab3ea951b610dc4cef356d416d5802991e7e32b5254828d342f7b7e0b"},
+ {file = "scipy-1.17.1-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:7bdf2da170b67fdf10bca777614b1c7d96ae3ca5794fd9587dce41eb2966e866"},
+ {file = "scipy-1.17.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:adb2642e060a6549c343603a3851ba76ef0b74cc8c079a9a58121c7ec9fe2350"},
+ {file = "scipy-1.17.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eee2cfda04c00a857206a4330f0c5e3e56535494e30ca445eb19ec624ae75118"},
+ {file = "scipy-1.17.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d2650c1fb97e184d12d8ba010493ee7b322864f7d3d00d3f9bb97d9c21de4068"},
+ {file = "scipy-1.17.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:08b900519463543aa604a06bec02461558a6e1cef8fdbb8098f77a48a83c8118"},
+ {file = "scipy-1.17.1-cp314-cp314-win_amd64.whl", hash = "sha256:3877ac408e14da24a6196de0ddcace62092bfc12a83823e92e49e40747e52c19"},
+ {file = "scipy-1.17.1-cp314-cp314-win_arm64.whl", hash = "sha256:f8885db0bc2bffa59d5c1b72fad7a6a92d3e80e7257f967dd81abb553a90d293"},
+ {file = "scipy-1.17.1-cp314-cp314t-macosx_10_14_x86_64.whl", hash = "sha256:1cc682cea2ae55524432f3cdff9e9a3be743d52a7443d0cba9017c23c87ae2f6"},
+ {file = "scipy-1.17.1-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:2040ad4d1795a0ae89bfc7e8429677f365d45aa9fd5e4587cf1ea737f927b4a1"},
+ {file = "scipy-1.17.1-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:131f5aaea57602008f9822e2115029b55d4b5f7c070287699fe45c661d051e39"},
+ {file = "scipy-1.17.1-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9cdc1a2fcfd5c52cfb3045feb399f7b3ce822abdde3a193a6b9a60b3cb5854ca"},
+ {file = "scipy-1.17.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e3dcd57ab780c741fde8dc68619de988b966db759a3c3152e8e9142c26295ad"},
+ {file = "scipy-1.17.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a9956e4d4f4a301ebf6cde39850333a6b6110799d470dbbb1e25326ac447f52a"},
+ {file = "scipy-1.17.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:a4328d245944d09fd639771de275701ccadf5f781ba0ff092ad141e017eccda4"},
+ {file = "scipy-1.17.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a77cbd07b940d326d39a1d1b37817e2ee4d79cb30e7338f3d0cddffae70fcaa2"},
+ {file = "scipy-1.17.1-cp314-cp314t-win_amd64.whl", hash = "sha256:eb092099205ef62cd1782b006658db09e2fed75bffcae7cc0d44052d8aa0f484"},
+ {file = "scipy-1.17.1-cp314-cp314t-win_arm64.whl", hash = "sha256:200e1050faffacc162be6a486a984a0497866ec54149a01270adc8a59b7c7d21"},
+ {file = "scipy-1.17.1.tar.gz", hash = "sha256:95d8e012d8cb8816c226aef832200b1d45109ed4464303e997c5b13122b297c0"},
+]
+
+[package.dependencies]
+numpy = ">=1.26.4,<2.7"
+
+[package.extras]
+dev = ["click (<8.3.0)", "cython-lint (>=0.12.2)", "mypy (==1.10.0)", "pycodestyle", "ruff (>=0.12.0)", "spin", "types-psutil", "typing_extensions"]
+doc = ["intersphinx_registry", "jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.19.1)", "jupytext", "linkify-it-py", "matplotlib (>=3.5)", "myst-nb (>=1.2.0)", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<8.2.0)", "sphinx-copybutton", "sphinx-design (>=0.4.0)", "tabulate"]
+test = ["Cython", "array-api-strict (>=2.3.1)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja ; sys_platform != \"emscripten\"", "pooch", "pytest (>=8.0.0)", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+
+[[package]]
+name = "scrubadub"
+version = "2.0.1"
+description = "Clean personally identifiable information from dirty dirty text."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "scrubadub-2.0.1-py3-none-any.whl", hash = "sha256:44b9004998a03aff4c6b5d9073a52895081742f994470083a7be610b373e62b7"},
+ {file = "scrubadub-2.0.1.tar.gz", hash = "sha256:52a1fb8aa9bc0226043e02c3ec22d450bd4ebeede9e7e8db2def7c89b37c5aad"},
+]
+
+[package.dependencies]
+catalogue = "*"
+dateparser = "*"
+faker = "*"
+phonenumbers = "*"
+python-stdnum = "*"
+scikit-learn = "*"
+textblob = "0.15.3"
+typing-extensions = "*"
[[package]]
name = "setuptools"
@@ -6530,6 +7828,21 @@ files = [
doc = ["reno", "sphinx"]
test = ["pytest", "tornado (>=4.5)", "typeguard"]
+[[package]]
+name = "textblob"
+version = "0.15.3"
+description = "Simple, Pythonic text processing. Sentiment analysis, part-of-speech tagging, noun phrase parsing, and more."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+ {file = "textblob-0.15.3-py2.py3-none-any.whl", hash = "sha256:b0eafd8b129c9b196c8128056caed891d64b7fa20ba570e1fcde438f4f7dd312"},
+ {file = "textblob-0.15.3.tar.gz", hash = "sha256:7ff3c00cb5a85a30132ee6768b8c68cb2b9d76432fec18cd1b3ffe2f8594ec8c"},
+]
+
+[package.dependencies]
+nltk = ">=3.1"
+
[[package]]
name = "textual"
version = "4.0.0"
@@ -6551,6 +7864,18 @@ typing-extensions = ">=4.4.0,<5.0.0"
[package.extras]
syntax = ["tree-sitter (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-bash (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-css (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-go (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-html (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-java (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-javascript (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-json (>=0.24.0) ; python_version >= \"3.9\"", "tree-sitter-markdown (>=0.3.0) ; python_version >= \"3.9\"", "tree-sitter-python (>=0.23.0) ; python_version >= \"3.9\"", "tree-sitter-regex (>=0.24.0) ; python_version >= \"3.9\"", "tree-sitter-rust (>=0.23.0,<=0.23.2) ; python_version >= \"3.9\"", "tree-sitter-sql (>=0.3.0,<0.3.8) ; python_version >= \"3.9\"", "tree-sitter-toml (>=0.6.0) ; python_version >= \"3.9\"", "tree-sitter-xml (>=0.7.0) ; python_version >= \"3.9\"", "tree-sitter-yaml (>=0.6.0) ; python_version >= \"3.9\""]
+[[package]]
+name = "threadpoolctl"
+version = "3.6.0"
+description = "threadpoolctl"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb"},
+ {file = "threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e"},
+]
+
[[package]]
name = "tiktoken"
version = "0.11.0"
@@ -6666,6 +7991,72 @@ notebook = ["ipywidgets (>=6)"]
slack = ["slack-sdk"]
telegram = ["requests"]
+[[package]]
+name = "traceloop-sdk"
+version = "0.53.0"
+description = "Traceloop Software Development Kit (SDK) for Python"
+optional = false
+python-versions = "<4,>=3.10"
+groups = ["main"]
+files = [
+ {file = "traceloop_sdk-0.53.0-py3-none-any.whl", hash = "sha256:29cee493dda92c872b4578a7f570794669a64f51ab09d61a0893749d616bfcfd"},
+ {file = "traceloop_sdk-0.53.0.tar.gz", hash = "sha256:3cd761733eea055d0dc87b5a22c8cc8a6350eca896a80acb5a7e11d089aee3fb"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.11.11,<4"
+colorama = ">=0.4.6,<0.5.0"
+cuid = ">=0.4,<0.5"
+deprecated = ">=1.2.14,<2"
+jinja2 = ">=3.1.5,<4"
+opentelemetry-api = ">=1.38.0,<2"
+opentelemetry-exporter-otlp-proto-grpc = ">=1.38.0,<2"
+opentelemetry-exporter-otlp-proto-http = ">=1.38.0,<2"
+opentelemetry-instrumentation-agno = "*"
+opentelemetry-instrumentation-alephalpha = "*"
+opentelemetry-instrumentation-anthropic = "*"
+opentelemetry-instrumentation-bedrock = "*"
+opentelemetry-instrumentation-chromadb = "*"
+opentelemetry-instrumentation-cohere = "*"
+opentelemetry-instrumentation-crewai = "*"
+opentelemetry-instrumentation-google-generativeai = "*"
+opentelemetry-instrumentation-groq = "*"
+opentelemetry-instrumentation-haystack = "*"
+opentelemetry-instrumentation-lancedb = "*"
+opentelemetry-instrumentation-langchain = "*"
+opentelemetry-instrumentation-llamaindex = "*"
+opentelemetry-instrumentation-logging = ">=0.59b0"
+opentelemetry-instrumentation-marqo = "*"
+opentelemetry-instrumentation-mcp = "*"
+opentelemetry-instrumentation-milvus = "*"
+opentelemetry-instrumentation-mistralai = "*"
+opentelemetry-instrumentation-ollama = "*"
+opentelemetry-instrumentation-openai = "*"
+opentelemetry-instrumentation-openai-agents = "*"
+opentelemetry-instrumentation-pinecone = "*"
+opentelemetry-instrumentation-qdrant = "*"
+opentelemetry-instrumentation-redis = ">=0.59b0"
+opentelemetry-instrumentation-replicate = "*"
+opentelemetry-instrumentation-requests = ">=0.59b0"
+opentelemetry-instrumentation-sagemaker = "*"
+opentelemetry-instrumentation-sqlalchemy = ">=0.59b0"
+opentelemetry-instrumentation-threading = ">=0.59b0"
+opentelemetry-instrumentation-together = "*"
+opentelemetry-instrumentation-transformers = "*"
+opentelemetry-instrumentation-urllib3 = ">=0.59b0"
+opentelemetry-instrumentation-vertexai = "*"
+opentelemetry-instrumentation-voyageai = "*"
+opentelemetry-instrumentation-watsonx = "*"
+opentelemetry-instrumentation-weaviate = "*"
+opentelemetry-instrumentation-writer = "*"
+opentelemetry-sdk = ">=1.38.0,<2"
+opentelemetry-semantic-conventions-ai = ">=0.4.13,<0.5.0"
+pydantic = ">=1"
+tenacity = ">=8.2.3,<10.0"
+
+[package.extras]
+datasets = ["pandas"]
+
[[package]]
name = "traitlets"
version = "5.14.3"
@@ -7104,6 +8495,97 @@ files = [
{file = "whatthepatch-1.0.7.tar.gz", hash = "sha256:9eefb4ebea5200408e02d413d2b4bc28daea6b78bb4b4d53431af7245f7d7edf"},
]
+[[package]]
+name = "wrapt"
+version = "1.17.3"
+description = "Module for decorators, wrappers and monkey patching."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+ {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"},
+ {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"},
+ {file = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"},
+ {file = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"},
+ {file = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"},
+ {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"},
+ {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"},
+ {file = "wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"},
+ {file = "wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"},
+ {file = "wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"},
+ {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"},
+ {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"},
+ {file = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"},
+ {file = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"},
+ {file = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"},
+ {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"},
+ {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"},
+ {file = "wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"},
+ {file = "wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"},
+ {file = "wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"},
+ {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"},
+ {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"},
+ {file = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"},
+ {file = "wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"},
+ {file = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"},
+ {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"},
+ {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"},
+ {file = "wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"},
+ {file = "wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"},
+ {file = "wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"},
+ {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"},
+ {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"},
+ {file = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"},
+ {file = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"},
+ {file = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"},
+ {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"},
+ {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"},
+ {file = "wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"},
+ {file = "wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"},
+ {file = "wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"},
+ {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"},
+ {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"},
+ {file = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"},
+ {file = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"},
+ {file = "wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"},
+ {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"},
+ {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"},
+ {file = "wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"},
+ {file = "wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"},
+ {file = "wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"},
+ {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"},
+ {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"},
+ {file = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"},
+ {file = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"},
+ {file = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"},
+ {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"},
+ {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"},
+ {file = "wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"},
+ {file = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"},
+ {file = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"},
+ {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:70d86fa5197b8947a2fa70260b48e400bf2ccacdcab97bb7de47e3d1e6312225"},
+ {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df7d30371a2accfe4013e90445f6388c570f103d61019b6b7c57e0265250072a"},
+ {file = "wrapt-1.17.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:caea3e9c79d5f0d2c6d9ab96111601797ea5da8e6d0723f77eabb0d4068d2b2f"},
+ {file = "wrapt-1.17.3-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:758895b01d546812d1f42204bd443b8c433c44d090248bf22689df673ccafe00"},
+ {file = "wrapt-1.17.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b551d101f31694fc785e58e0720ef7d9a10c4e62c1c9358ce6f63f23e30a56"},
+ {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:656873859b3b50eeebe6db8b1455e99d90c26ab058db8e427046dbc35c3140a5"},
+ {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a9a2203361a6e6404f80b99234fe7fb37d1fc73487b5a78dc1aa5b97201e0f22"},
+ {file = "wrapt-1.17.3-cp38-cp38-win32.whl", hash = "sha256:55cbbc356c2842f39bcc553cf695932e8b30e30e797f961860afb308e6b1bb7c"},
+ {file = "wrapt-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad85e269fe54d506b240d2d7b9f5f2057c2aa9a2ea5b32c66f8902f768117ed2"},
+ {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc"},
+ {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9"},
+ {file = "wrapt-1.17.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d"},
+ {file = "wrapt-1.17.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a"},
+ {file = "wrapt-1.17.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139"},
+ {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df"},
+ {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b"},
+ {file = "wrapt-1.17.3-cp39-cp39-win32.whl", hash = "sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81"},
+ {file = "wrapt-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f"},
+ {file = "wrapt-1.17.3-cp39-cp39-win_arm64.whl", hash = "sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f"},
+ {file = "wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"},
+ {file = "wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"},
+]
+
[[package]]
name = "xlrd"
version = "2.0.2"
@@ -7309,4 +8791,4 @@ vertex = ["google-cloud-aiplatform"]
[metadata]
lock-version = "2.1"
python-versions = "^3.12"
-content-hash = "4a67311f830ccf488e636a127723741d5de84d7368131ccb99afb065ca4a12b1"
+content-hash = "d6a1cc4aac053c720cd224f72c4bac24371559ab0725a1fb9eb6ab4ed8d64b06"
diff --git a/pyproject.toml b/pyproject.toml
index ab08983..48f9196 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -56,6 +56,9 @@ textual = "^4.0.0"
xmltodict = "^0.13.0"
requests = "^2.32.0"
cvss = "^3.2"
+traceloop-sdk = "^0.53.0"
+opentelemetry-exporter-otlp-proto-http = "^1.40.0"
+scrubadub = "^2.0.1"
# Optional LLM provider dependencies
google-cloud-aiplatform = { version = ">=1.38", optional = true }
@@ -148,6 +151,9 @@ module = [
"libtmux.*",
"pytest.*",
"cvss.*",
+ "opentelemetry.*",
+ "scrubadub.*",
+ "traceloop.*",
]
ignore_missing_imports = true
@@ -155,6 +161,7 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = ["tests.*"]
disallow_untyped_decorators = false
+disallow_untyped_defs = false
# ============================================================================
# Ruff Configuration (Fast Python Linter & Formatter)
diff --git a/strix/config/config.py b/strix/config/config.py
index 7578b61..bad994a 100644
--- a/strix/config/config.py
+++ b/strix/config/config.py
@@ -47,6 +47,11 @@ class Config:
# Telemetry
strix_telemetry = "1"
+ strix_otel_telemetry = None
+ strix_posthog_telemetry = None
+ traceloop_base_url = None
+ traceloop_api_key = None
+ traceloop_headers = None
# Config file override (set via --config CLI arg)
_config_file_override: Path | None = None
diff --git a/strix/interface/main.py b/strix/interface/main.py
index 33785e6..7d340df 100644
--- a/strix/interface/main.py
+++ b/strix/interface/main.py
@@ -413,8 +413,6 @@ def display_completion_message(args: argparse.Namespace, results_path: Path) ->
if tracer and tracer.scan_results:
scan_completed = tracer.scan_results.get("scan_completed", False)
- has_vulnerabilities = tracer and len(tracer.vulnerability_reports) > 0
-
completion_text = Text()
if scan_completed:
completion_text.append("Penetration test completed", style="bold #22c55e")
@@ -439,13 +437,12 @@ def display_completion_message(args: argparse.Namespace, results_path: Path) ->
if stats_text.plain:
panel_parts.extend(["\n", stats_text])
- if scan_completed or has_vulnerabilities:
- results_text = Text()
- results_text.append("\n")
- results_text.append("Output", style="dim")
- results_text.append(" ")
- results_text.append(str(results_path), style="#60a5fa")
- panel_parts.extend(["\n", results_text])
+ results_text = Text()
+ results_text.append("\n")
+ results_text.append("Output", style="dim")
+ results_text.append(" ")
+ results_text.append(str(results_path), style="#60a5fa")
+ panel_parts.extend(["\n", results_text])
panel_content = Text.assemble(*panel_parts)
diff --git a/strix/telemetry/flags.py b/strix/telemetry/flags.py
new file mode 100644
index 0000000..bae9427
--- /dev/null
+++ b/strix/telemetry/flags.py
@@ -0,0 +1,23 @@
+from strix.config import Config
+
+
+_DISABLED_VALUES = {"0", "false", "no", "off"}
+
+
+def _is_enabled(raw_value: str | None, default: str = "1") -> bool:
+ value = (raw_value if raw_value is not None else default).strip().lower()
+ return value not in _DISABLED_VALUES
+
+
+def is_otel_enabled() -> bool:
+ explicit = Config.get("strix_otel_telemetry")
+ if explicit is not None:
+ return _is_enabled(explicit)
+ return _is_enabled(Config.get("strix_telemetry"), default="1")
+
+
+def is_posthog_enabled() -> bool:
+ explicit = Config.get("strix_posthog_telemetry")
+ if explicit is not None:
+ return _is_enabled(explicit)
+ return _is_enabled(Config.get("strix_telemetry"), default="1")
diff --git a/strix/telemetry/posthog.py b/strix/telemetry/posthog.py
index fd66bcc..aa534d2 100644
--- a/strix/telemetry/posthog.py
+++ b/strix/telemetry/posthog.py
@@ -6,7 +6,7 @@ from pathlib import Path
from typing import TYPE_CHECKING, Any
from uuid import uuid4
-from strix.config import Config
+from strix.telemetry.flags import is_posthog_enabled
if TYPE_CHECKING:
@@ -19,7 +19,7 @@ _SESSION_ID = uuid4().hex[:16]
def _is_enabled() -> bool:
- return (Config.get("strix_telemetry") or "1").lower() not in ("0", "false", "no", "off")
+ return is_posthog_enabled()
def _is_first_run() -> bool:
diff --git a/strix/telemetry/tracer.py b/strix/telemetry/tracer.py
index ef97ab6..bde9750 100644
--- a/strix/telemetry/tracer.py
+++ b/strix/telemetry/tracer.py
@@ -1,20 +1,40 @@
+import json
import logging
+import threading
from datetime import UTC, datetime
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Optional
+from typing import Any, Callable, Optional
from uuid import uuid4
+from opentelemetry import trace
+from opentelemetry.trace import SpanContext, SpanKind
+
+from strix.config import Config
from strix.telemetry import posthog
+from strix.telemetry.flags import is_otel_enabled
+from strix.telemetry.utils import (
+ TelemetrySanitizer,
+ append_jsonl_record,
+ bootstrap_otel,
+ format_span_id,
+ format_trace_id,
+ get_events_write_lock,
+)
-if TYPE_CHECKING:
- from collections.abc import Callable
+try:
+ from traceloop.sdk import Traceloop
+except ImportError: # pragma: no cover - exercised when dependency is absent
+ Traceloop = None # type: ignore[assignment,unused-ignore]
logger = logging.getLogger(__name__)
_global_tracer: Optional["Tracer"] = None
+_OTEL_BOOTSTRAP_LOCK = threading.Lock()
+_OTEL_BOOTSTRAPPED = False
+_OTEL_REMOTE_ENABLED = False
def get_global_tracer() -> Optional["Tracer"]:
return _global_tracer
@@ -52,16 +72,225 @@ class Tracer:
"status": "running",
}
self._run_dir: Path | None = None
+ self._events_file_path: Path | None = None
self._next_execution_id = 1
self._next_message_id = 1
self._saved_vuln_ids: set[str] = set()
+ self._run_completed_emitted = False
+ self._telemetry_enabled = is_otel_enabled()
+ self._sanitizer = TelemetrySanitizer()
+
+ self._otel_tracer: Any = None
+ self._remote_export_enabled = False
self.caido_url: str | None = None
self.vulnerability_found_callback: Callable[[dict[str, Any]], None] | None = None
+ self._setup_telemetry()
+ self._emit_run_started_event()
+
+ @property
+ def events_file_path(self) -> Path:
+ if self._events_file_path is None:
+ self._events_file_path = self.get_run_dir() / "events.jsonl"
+ return self._events_file_path
+
+ def _active_events_file_path(self) -> Path:
+ active = get_global_tracer()
+ if active and active._events_file_path is not None:
+ return active._events_file_path
+ return self.events_file_path
+
+ def _get_events_write_lock(self, output_path: Path | None = None) -> threading.Lock:
+ path = output_path or self.events_file_path
+ return get_events_write_lock(path)
+
+ def _active_run_metadata(self) -> dict[str, Any]:
+ active = get_global_tracer()
+ if active:
+ return active.run_metadata
+ return self.run_metadata
+
+ def _setup_telemetry(self) -> None:
+ global _OTEL_BOOTSTRAPPED, _OTEL_REMOTE_ENABLED
+
+ if not self._telemetry_enabled:
+ self._otel_tracer = None
+ self._remote_export_enabled = False
+ return
+
+ run_dir = self.get_run_dir()
+ self._events_file_path = run_dir / "events.jsonl"
+ base_url = (Config.get("traceloop_base_url") or "").strip()
+ api_key = (Config.get("traceloop_api_key") or "").strip()
+ headers_raw = Config.get("traceloop_headers") or ""
+
+ (
+ self._otel_tracer,
+ self._remote_export_enabled,
+ _OTEL_BOOTSTRAPPED,
+ _OTEL_REMOTE_ENABLED,
+ ) = bootstrap_otel(
+ bootstrapped=_OTEL_BOOTSTRAPPED,
+ remote_enabled_state=_OTEL_REMOTE_ENABLED,
+ bootstrap_lock=_OTEL_BOOTSTRAP_LOCK,
+ traceloop=Traceloop,
+ base_url=base_url,
+ api_key=api_key,
+ headers_raw=headers_raw,
+ output_path_getter=self._active_events_file_path,
+ run_metadata_getter=self._active_run_metadata,
+ sanitizer=self._sanitize_data,
+ write_lock_getter=self._get_events_write_lock,
+ tracer_name="strix.telemetry.tracer",
+ )
+
+ def _set_association_properties(self, properties: dict[str, Any]) -> None:
+ if Traceloop is None:
+ return
+ sanitized = self._sanitize_data(properties)
+ try:
+ Traceloop.set_association_properties(sanitized)
+ except Exception: # noqa: BLE001
+ logger.debug("Failed to set Traceloop association properties")
+
+ def _sanitize_data(self, data: Any, key_hint: str | None = None) -> Any:
+ return self._sanitizer.sanitize(data, key_hint=key_hint)
+
+ def _append_event_record(self, record: dict[str, Any]) -> None:
+ try:
+ append_jsonl_record(self.events_file_path, record)
+ except OSError:
+ logger.exception("Failed to append JSONL event record")
+
+ def _enrich_actor(self, actor: dict[str, Any] | None) -> dict[str, Any] | None:
+ if not actor:
+ return None
+
+ enriched = dict(actor)
+ if "agent_name" in enriched:
+ return enriched
+
+ agent_id = enriched.get("agent_id")
+ if not isinstance(agent_id, str):
+ return enriched
+
+ agent_data = self.agents.get(agent_id, {})
+ agent_name = agent_data.get("name")
+ if isinstance(agent_name, str) and agent_name:
+ enriched["agent_name"] = agent_name
+
+ return enriched
+
+ def _emit_event(
+ self,
+ event_type: str,
+ actor: dict[str, Any] | None = None,
+ payload: Any | None = None,
+ status: str | None = None,
+ error: Any | None = None,
+ source: str = "strix.tracer",
+ include_run_metadata: bool = False,
+ ) -> None:
+ if not self._telemetry_enabled:
+ return
+
+ enriched_actor = self._enrich_actor(actor)
+ sanitized_actor = self._sanitize_data(enriched_actor) if enriched_actor else None
+ sanitized_payload = self._sanitize_data(payload) if payload is not None else None
+ sanitized_error = self._sanitize_data(error) if error is not None else None
+
+ trace_id: str | None = None
+ span_id: str | None = None
+ parent_span_id: str | None = None
+
+ current_context = trace.get_current_span().get_span_context()
+ if isinstance(current_context, SpanContext) and current_context.is_valid:
+ parent_span_id = format_span_id(current_context.span_id)
+
+ if self._otel_tracer is not None:
+ try:
+ with self._otel_tracer.start_as_current_span(
+ f"strix.{event_type}",
+ kind=SpanKind.INTERNAL,
+ ) as span:
+ span_context = span.get_span_context()
+ trace_id = format_trace_id(span_context.trace_id)
+ span_id = format_span_id(span_context.span_id)
+
+ span.set_attribute("strix.event_type", event_type)
+ span.set_attribute("strix.source", source)
+ span.set_attribute("strix.run_id", self.run_id)
+ span.set_attribute("strix.run_name", self.run_name or "")
+
+ if status:
+ span.set_attribute("strix.status", status)
+ if sanitized_actor is not None:
+ span.set_attribute(
+ "strix.actor",
+ json.dumps(sanitized_actor, ensure_ascii=False),
+ )
+ if sanitized_payload is not None:
+ span.set_attribute(
+ "strix.payload",
+ json.dumps(sanitized_payload, ensure_ascii=False),
+ )
+ if sanitized_error is not None:
+ span.set_attribute(
+ "strix.error",
+ json.dumps(sanitized_error, ensure_ascii=False),
+ )
+ except Exception: # noqa: BLE001
+ logger.debug("Failed to create OTEL span for event type '%s'", event_type)
+
+ if trace_id is None:
+ trace_id = format_trace_id(uuid4().int & ((1 << 128) - 1)) or uuid4().hex
+ if span_id is None:
+ span_id = format_span_id(uuid4().int & ((1 << 64) - 1)) or uuid4().hex[:16]
+
+ record = {
+ "timestamp": datetime.now(UTC).isoformat(),
+ "event_type": event_type,
+ "run_id": self.run_id,
+ "trace_id": trace_id,
+ "span_id": span_id,
+ "parent_span_id": parent_span_id,
+ "actor": sanitized_actor,
+ "payload": sanitized_payload,
+ "status": status,
+ "error": sanitized_error,
+ "source": source,
+ }
+ if include_run_metadata:
+ record["run_metadata"] = self._sanitize_data(self.run_metadata)
+ self._append_event_record(record)
+
def set_run_name(self, run_name: str) -> None:
self.run_name = run_name
self.run_id = run_name
+ self.run_metadata["run_name"] = run_name
+ self.run_metadata["run_id"] = run_name
+ self._run_dir = None
+ self._events_file_path = None
+ self._run_completed_emitted = False
+ self._set_association_properties({"run_id": self.run_id, "run_name": self.run_name or ""})
+ self._emit_run_started_event()
+
+ def _emit_run_started_event(self) -> None:
+ if not self._telemetry_enabled:
+ return
+
+ self._emit_event(
+ "run.started",
+ payload={
+ "run_name": self.run_name,
+ "start_time": self.start_time,
+ "local_jsonl_path": str(self.events_file_path),
+ "remote_export_enabled": self._remote_export_enabled,
+ },
+ status="running",
+ include_run_metadata=True,
+ )
def get_run_dir(self) -> Path:
if self._run_dir is None:
@@ -134,6 +363,12 @@ class Tracer:
self.vulnerability_reports.append(report)
logger.info(f"Added vulnerability report: {report_id} - {title}")
posthog.finding(severity)
+ self._emit_event(
+ "finding.created",
+ payload={"report": report},
+ status=report["severity"],
+ source="strix.findings",
+ )
if self.vulnerability_found_callback:
self.vulnerability_found_callback(report)
@@ -178,11 +413,24 @@ class Tracer:
"""
logger.info("Updated scan final fields")
+ self._emit_event(
+ "finding.reviewed",
+ payload={
+ "scan_completed": True,
+ "vulnerability_count": len(self.vulnerability_reports),
+ },
+ status="completed",
+ source="strix.findings",
+ )
self.save_run_data(mark_complete=True)
posthog.end(self, exit_reason="finished_by_tool")
def log_agent_creation(
- self, agent_id: str, name: str, task: str, parent_id: str | None = None
+ self,
+ agent_id: str,
+ name: str,
+ task: str,
+ parent_id: str | None = None,
) -> None:
agent_data: dict[str, Any] = {
"id": agent_id,
@@ -196,6 +444,13 @@ class Tracer:
}
self.agents[agent_id] = agent_data
+ self._emit_event(
+ "agent.created",
+ actor={"agent_id": agent_id, "agent_name": name},
+ payload={"task": task, "parent_id": parent_id},
+ status="running",
+ source="strix.agents",
+ )
def log_chat_message(
self,
@@ -217,9 +472,21 @@ class Tracer:
}
self.chat_messages.append(message_data)
+ self._emit_event(
+ "chat.message",
+ actor={"agent_id": agent_id, "role": role},
+ payload={"message_id": message_id, "content": content, "metadata": metadata or {}},
+ status="logged",
+ source="strix.chat",
+ )
return message_id
- def log_tool_execution_start(self, agent_id: str, tool_name: str, args: dict[str, Any]) -> int:
+ def log_tool_execution_start(
+ self,
+ agent_id: str,
+ tool_name: str,
+ args: dict[str, Any],
+ ) -> int:
execution_id = self._next_execution_id
self._next_execution_id += 1
@@ -241,18 +508,67 @@ class Tracer:
if agent_id in self.agents:
self.agents[agent_id]["tool_executions"].append(execution_id)
+ self._emit_event(
+ "tool.execution.started",
+ actor={
+ "agent_id": agent_id,
+ "tool_name": tool_name,
+ "execution_id": execution_id,
+ },
+ payload={"args": args},
+ status="running",
+ source="strix.tools",
+ )
+
return execution_id
def update_tool_execution(
- self, execution_id: int, status: str, result: Any | None = None
+ self,
+ execution_id: int,
+ status: str,
+ result: Any | None = None,
) -> None:
- if execution_id in self.tool_executions:
- self.tool_executions[execution_id]["status"] = status
- self.tool_executions[execution_id]["result"] = result
- self.tool_executions[execution_id]["completed_at"] = datetime.now(UTC).isoformat()
+ if execution_id not in self.tool_executions:
+ return
+
+ tool_data = self.tool_executions[execution_id]
+ tool_data["status"] = status
+ tool_data["result"] = result
+ tool_data["completed_at"] = datetime.now(UTC).isoformat()
+
+ tool_name = str(tool_data.get("tool_name", "unknown"))
+ agent_id = str(tool_data.get("agent_id", "unknown"))
+ error_payload = result if status in {"error", "failed"} else None
+
+ self._emit_event(
+ "tool.execution.updated",
+ actor={
+ "agent_id": agent_id,
+ "tool_name": tool_name,
+ "execution_id": execution_id,
+ },
+ payload={"result": result},
+ status=status,
+ error=error_payload,
+ source="strix.tools",
+ )
+
+ if tool_name == "create_vulnerability_report":
+ finding_status = "reviewed" if status == "completed" else "rejected"
+ self._emit_event(
+ "finding.reviewed",
+ actor={"agent_id": agent_id, "tool_name": tool_name},
+ payload={"execution_id": execution_id, "result": result},
+ status=finding_status,
+ error=error_payload,
+ source="strix.findings",
+ )
def update_agent_status(
- self, agent_id: str, status: str, error_message: str | None = None
+ self,
+ agent_id: str,
+ status: str,
+ error_message: str | None = None,
) -> None:
if agent_id in self.agents:
self.agents[agent_id]["status"] = status
@@ -260,6 +576,15 @@ class Tracer:
if error_message:
self.agents[agent_id]["error_message"] = error_message
+ self._emit_event(
+ "agent.status.updated",
+ actor={"agent_id": agent_id},
+ payload={"error_message": error_message},
+ status=status,
+ error=error_message,
+ source="strix.agents",
+ )
+
def set_scan_config(self, config: dict[str, Any]) -> None:
self.scan_config = config
self.run_metadata.update(
@@ -269,13 +594,29 @@ class Tracer:
"max_iterations": config.get("max_iterations", 200),
}
)
- self.get_run_dir()
+ self._set_association_properties(
+ {
+ "run_id": self.run_id,
+ "run_name": self.run_name or "",
+ "targets": config.get("targets", []),
+ "max_iterations": config.get("max_iterations", 200),
+ }
+ )
+ self._emit_event(
+ "run.configured",
+ payload={"scan_config": config},
+ status="configured",
+ source="strix.run",
+ )
- def save_run_data(self, mark_complete: bool = False) -> None: # noqa: PLR0912, PLR0915
+ def save_run_data(self, mark_complete: bool = False) -> None:
try:
run_dir = self.get_run_dir()
if mark_complete:
- self.end_time = datetime.now(UTC).isoformat()
+ if self.end_time is None:
+ self.end_time = datetime.now(UTC).isoformat()
+ self.run_metadata["end_time"] = self.end_time
+ self.run_metadata["status"] = "completed"
if self.final_scan_result:
penetration_test_report_file = run_dir / "penetration_test_report.md"
@@ -286,7 +627,8 @@ class Tracer:
)
f.write(f"{self.final_scan_result}\n")
logger.info(
- f"Saved final penetration test report to: {penetration_test_report_file}"
+ "Saved final penetration test report to: %s",
+ penetration_test_report_file,
)
if self.vulnerability_reports:
@@ -302,7 +644,10 @@ class Tracer:
severity_order = {"critical": 0, "high": 1, "medium": 2, "low": 3, "info": 4}
sorted_reports = sorted(
self.vulnerability_reports,
- key=lambda x: (severity_order.get(x["severity"], 5), x["timestamp"]),
+ key=lambda report: (
+ severity_order.get(report["severity"], 5),
+ report["timestamp"],
+ ),
)
for report in new_reports:
@@ -329,8 +674,8 @@ class Tracer:
f.write(f"**{label}:** {value}\n")
f.write("\n## Description\n\n")
- desc = report.get("description") or "No description provided."
- f.write(f"{desc}\n\n")
+ description = report.get("description") or "No description provided."
+ f.write(f"{description}\n\n")
if report.get("impact"):
f.write("## Impact\n\n")
@@ -404,11 +749,25 @@ class Tracer:
if new_reports:
logger.info(
- f"Saved {len(new_reports)} new vulnerability report(s) to: {vuln_dir}"
+ "Saved %d new vulnerability report(s) to: %s",
+ len(new_reports),
+ vuln_dir,
)
- logger.info(f"Updated vulnerability index: {vuln_csv_file}")
+ logger.info("Updated vulnerability index: %s", vuln_csv_file)
- logger.info(f"📊 Essential scan data saved to: {run_dir}")
+ logger.info("📊 Essential scan data saved to: %s", run_dir)
+ if mark_complete and not self._run_completed_emitted:
+ self._emit_event(
+ "run.completed",
+ payload={
+ "duration_seconds": self._calculate_duration(),
+ "vulnerability_count": len(self.vulnerability_reports),
+ },
+ status="completed",
+ source="strix.run",
+ include_run_metadata=True,
+ )
+ self._run_completed_emitted = True
except (OSError, RuntimeError):
logger.exception("Failed to save scan data")
diff --git a/strix/telemetry/utils.py b/strix/telemetry/utils.py
new file mode 100644
index 0000000..85e49f3
--- /dev/null
+++ b/strix/telemetry/utils.py
@@ -0,0 +1,413 @@
+import json
+import logging
+import re
+import threading
+from collections.abc import Callable, Sequence
+from datetime import UTC, datetime
+from pathlib import Path
+from typing import Any
+
+from opentelemetry import trace
+from opentelemetry.sdk.trace import ReadableSpan, TracerProvider
+from opentelemetry.sdk.trace.export import (
+ BatchSpanProcessor,
+ SimpleSpanProcessor,
+ SpanExporter,
+ SpanExportResult,
+)
+from scrubadub import Scrubber
+from scrubadub.detectors import RegexDetector
+from scrubadub.filth import Filth
+
+
+logger = logging.getLogger(__name__)
+
+_REDACTED = "[REDACTED]"
+_SCREENSHOT_OMITTED = "[SCREENSHOT_OMITTED]"
+_SCREENSHOT_KEY_PATTERN = re.compile(r"screenshot", re.IGNORECASE)
+_SENSITIVE_KEY_PATTERN = re.compile(
+ r"(api[_-]?key|token|secret|password|authorization|cookie|session|credential|private[_-]?key)",
+ re.IGNORECASE,
+)
+_SENSITIVE_TOKEN_PATTERN = re.compile(
+ r"(?i)\b("
+ r"bearer\s+[a-z0-9._-]+|"
+ r"sk-[a-z0-9_-]{8,}|"
+ r"gh[pousr]_[a-z0-9_-]{12,}|"
+ r"xox[baprs]-[a-z0-9-]{12,}"
+ r")\b"
+)
+_SCRUBADUB_PLACEHOLDER_PATTERN = re.compile(r"\{\{[^}]+\}\}")
+_EVENTS_FILE_LOCKS_LOCK = threading.Lock()
+_EVENTS_FILE_LOCKS: dict[str, threading.Lock] = {}
+_NOISY_OTEL_CONTENT_PREFIXES = (
+ "gen_ai.prompt.",
+ "gen_ai.completion.",
+ "llm.input_messages.",
+ "llm.output_messages.",
+)
+_NOISY_OTEL_EXACT_KEYS = {
+ "llm.input",
+ "llm.output",
+ "llm.prompt",
+ "llm.completion",
+}
+
+
+class _SecretFilth(Filth): # type: ignore[misc]
+ type = "secret"
+
+
+class _SecretTokenDetector(RegexDetector): # type: ignore[misc]
+ name = "strix_secret_token_detector"
+ filth_cls = _SecretFilth
+ regex = _SENSITIVE_TOKEN_PATTERN
+
+
+class TelemetrySanitizer:
+ def __init__(self) -> None:
+ self._scrubber = Scrubber(detector_list=[_SecretTokenDetector])
+
+ def sanitize(self, data: Any, key_hint: str | None = None) -> Any: # noqa: PLR0911
+ if data is None:
+ return None
+
+ if isinstance(data, dict):
+ sanitized: dict[str, Any] = {}
+ for key, value in data.items():
+ key_str = str(key)
+ if _SCREENSHOT_KEY_PATTERN.search(key_str):
+ sanitized[key_str] = _SCREENSHOT_OMITTED
+ elif _SENSITIVE_KEY_PATTERN.search(key_str):
+ sanitized[key_str] = _REDACTED
+ else:
+ sanitized[key_str] = self.sanitize(value, key_hint=key_str)
+ return sanitized
+
+ if isinstance(data, list):
+ return [self.sanitize(item, key_hint=key_hint) for item in data]
+
+ if isinstance(data, tuple):
+ return [self.sanitize(item, key_hint=key_hint) for item in data]
+
+ if isinstance(data, str):
+ if key_hint and _SENSITIVE_KEY_PATTERN.search(key_hint):
+ return _REDACTED
+
+ cleaned = self._scrubber.clean(data)
+ return _SCRUBADUB_PLACEHOLDER_PATTERN.sub(_REDACTED, cleaned)
+
+ if isinstance(data, int | float | bool):
+ return data
+
+ return str(data)
+
+
+def format_trace_id(trace_id: int | None) -> str | None:
+ if trace_id is None or trace_id == 0:
+ return None
+ return f"{trace_id:032x}"
+
+
+def format_span_id(span_id: int | None) -> str | None:
+ if span_id is None or span_id == 0:
+ return None
+ return f"{span_id:016x}"
+
+
+def iso_from_unix_ns(unix_ns: int | None) -> str | None:
+ if unix_ns is None:
+ return None
+ try:
+ return datetime.fromtimestamp(unix_ns / 1_000_000_000, tz=UTC).isoformat()
+ except (OSError, OverflowError, ValueError):
+ return None
+
+
+
+def get_events_write_lock(output_path: Path) -> threading.Lock:
+ path_key = str(output_path.resolve(strict=False))
+ with _EVENTS_FILE_LOCKS_LOCK:
+ lock = _EVENTS_FILE_LOCKS.get(path_key)
+ if lock is None:
+ lock = threading.Lock()
+ _EVENTS_FILE_LOCKS[path_key] = lock
+ return lock
+
+
+def reset_events_write_locks() -> None:
+ with _EVENTS_FILE_LOCKS_LOCK:
+ _EVENTS_FILE_LOCKS.clear()
+
+
+def append_jsonl_record(output_path: Path, record: dict[str, Any]) -> None:
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ with get_events_write_lock(output_path), output_path.open("a", encoding="utf-8") as f:
+ f.write(json.dumps(record, ensure_ascii=False) + "\n")
+
+
+def default_resource_attributes() -> dict[str, str]:
+ return {
+ "service.name": "strix-agent",
+ "service.namespace": "strix",
+ }
+
+
+def parse_traceloop_headers(raw_headers: str) -> dict[str, str]:
+ headers = raw_headers.strip()
+ if not headers:
+ return {}
+
+ if headers.startswith("{"):
+ try:
+ parsed = json.loads(headers)
+ except json.JSONDecodeError:
+ logger.warning("Invalid TRACELOOP_HEADERS JSON, ignoring custom headers")
+ return {}
+ if isinstance(parsed, dict):
+ return {str(key): str(value) for key, value in parsed.items() if value is not None}
+ logger.warning("TRACELOOP_HEADERS JSON must be an object, ignoring custom headers")
+ return {}
+
+ result: dict[str, str] = {}
+ for part in headers.split(","):
+ key, sep, value = part.partition("=")
+ if not sep:
+ continue
+ key = key.strip()
+ value = value.strip()
+ if key and value:
+ result[key] = value
+ return result
+
+
+def prune_otel_span_attributes(attributes: dict[str, Any]) -> dict[str, Any]:
+ """Drop high-volume LLM payload attributes to keep JSONL event files compact."""
+ filtered: dict[str, Any] = {}
+ filtered_count = 0
+
+ for key, value in attributes.items():
+ key_str = str(key)
+ if key_str in _NOISY_OTEL_EXACT_KEYS:
+ filtered_count += 1
+ continue
+
+ if key_str.endswith(".content") and key_str.startswith(_NOISY_OTEL_CONTENT_PREFIXES):
+ filtered_count += 1
+ continue
+
+ filtered[key_str] = value
+
+ if filtered_count:
+ filtered["strix.filtered_attributes_count"] = filtered_count
+
+ return filtered
+
+
+class JsonlSpanExporter(SpanExporter): # type: ignore[misc]
+ """Append OTEL spans to JSONL for local run artifacts."""
+
+ def __init__(
+ self,
+ output_path_getter: Callable[[], Path],
+ run_metadata_getter: Callable[[], dict[str, Any]],
+ sanitizer: Callable[[Any], Any],
+ write_lock_getter: Callable[[Path], threading.Lock],
+ ):
+ self._output_path_getter = output_path_getter
+ self._run_metadata_getter = run_metadata_getter
+ self._sanitize = sanitizer
+ self._write_lock_getter = write_lock_getter
+
+ def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+ records: list[dict[str, Any]] = []
+ for span in spans:
+ attributes = prune_otel_span_attributes(dict(span.attributes or {}))
+ if "strix.event_type" in attributes:
+ # Tracer events are written directly in Tracer._emit_event.
+ continue
+ records.append(self._span_to_record(span, attributes))
+
+ if not records:
+ return SpanExportResult.SUCCESS
+
+ try:
+ output_path = self._output_path_getter()
+ output_path.parent.mkdir(parents=True, exist_ok=True)
+ with self._write_lock_getter(output_path), output_path.open("a", encoding="utf-8") as f:
+ for record in records:
+ f.write(json.dumps(record, ensure_ascii=False) + "\n")
+ except OSError:
+ logger.exception("Failed to write OTEL span records to JSONL")
+ return SpanExportResult.FAILURE
+
+ return SpanExportResult.SUCCESS
+
+ def shutdown(self) -> None:
+ return None
+
+ def force_flush(self, timeout_millis: int = 30_000) -> bool: # noqa: ARG002
+ return True
+
+ def _span_to_record(
+ self,
+ span: ReadableSpan,
+ attributes: dict[str, Any],
+ ) -> dict[str, Any]:
+ span_context = span.get_span_context()
+ parent_context = span.parent
+
+ status = None
+ if span.status and span.status.status_code.name != "UNSET":
+ status = span.status.status_code.name.lower()
+
+ event_type = str(attributes.get("gen_ai.operation.name", span.name))
+ run_metadata = self._run_metadata_getter()
+ run_id_attr = (
+ attributes.get("strix.run_id")
+ or attributes.get("strix_run_id")
+ or run_metadata.get("run_id")
+ or span.resource.attributes.get("strix.run_id")
+ )
+
+ record: dict[str, Any] = {
+ "timestamp": iso_from_unix_ns(span.end_time) or datetime.now(UTC).isoformat(),
+ "event_type": event_type,
+ "run_id": str(run_id_attr or run_metadata.get("run_id") or ""),
+ "trace_id": format_trace_id(span_context.trace_id),
+ "span_id": format_span_id(span_context.span_id),
+ "parent_span_id": format_span_id(parent_context.span_id if parent_context else None),
+ "actor": None,
+ "payload": None,
+ "status": status,
+ "error": None,
+ "source": "otel.span",
+ "span_name": span.name,
+ "span_kind": span.kind.name.lower(),
+ "attributes": self._sanitize(attributes),
+ }
+
+ if span.events:
+ record["otel_events"] = self._sanitize(
+ [
+ {
+ "name": event.name,
+ "timestamp": iso_from_unix_ns(event.timestamp),
+ "attributes": dict(event.attributes or {}),
+ }
+ for event in span.events
+ ]
+ )
+
+ return record
+
+
+def bootstrap_otel(
+ *,
+ bootstrapped: bool,
+ remote_enabled_state: bool,
+ bootstrap_lock: threading.Lock,
+ traceloop: Any,
+ base_url: str,
+ api_key: str,
+ headers_raw: str,
+ output_path_getter: Callable[[], Path],
+ run_metadata_getter: Callable[[], dict[str, Any]],
+ sanitizer: Callable[[Any], Any],
+ write_lock_getter: Callable[[Path], threading.Lock],
+ tracer_name: str = "strix.telemetry.tracer",
+) -> tuple[Any, bool, bool, bool]:
+ with bootstrap_lock:
+ if bootstrapped:
+ return (
+ trace.get_tracer(tracer_name),
+ remote_enabled_state,
+ bootstrapped,
+ remote_enabled_state,
+ )
+
+ local_exporter = JsonlSpanExporter(
+ output_path_getter=output_path_getter,
+ run_metadata_getter=run_metadata_getter,
+ sanitizer=sanitizer,
+ write_lock_getter=write_lock_getter,
+ )
+ local_processor = SimpleSpanProcessor(local_exporter)
+
+ headers = parse_traceloop_headers(headers_raw)
+ remote_enabled = bool(base_url and api_key)
+ otlp_headers = headers
+ if remote_enabled:
+ otlp_headers = {"Authorization": f"Bearer {api_key}"}
+ otlp_headers.update(headers)
+
+ otel_init_ok = False
+ if traceloop:
+ try:
+ from traceloop.sdk.instruments import Instruments
+
+ init_kwargs: dict[str, Any] = {
+ "app_name": "strix-agent",
+ "processor": local_processor,
+ "telemetry_enabled": False,
+ "resource_attributes": default_resource_attributes(),
+ "block_instruments": {
+ Instruments.URLLIB3,
+ Instruments.REQUESTS,
+ },
+ }
+ if remote_enabled:
+ init_kwargs.update(
+ {
+ "api_endpoint": base_url,
+ "api_key": api_key,
+ "headers": headers,
+ }
+ )
+ import io
+ import sys
+
+ _stdout = sys.stdout
+ sys.stdout = io.StringIO()
+ try:
+ traceloop.init(**init_kwargs)
+ finally:
+ sys.stdout = _stdout
+ otel_init_ok = True
+ except Exception:
+ logger.exception("Failed to initialize Traceloop/OpenLLMetry")
+ remote_enabled = False
+
+ if not otel_init_ok:
+ from opentelemetry.sdk.resources import Resource
+
+ provider = TracerProvider(resource=Resource.create(default_resource_attributes()))
+ provider.add_span_processor(local_processor)
+ if remote_enabled:
+ try:
+ from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
+ OTLPSpanExporter,
+ )
+
+ endpoint = base_url.rstrip("/") + "/v1/traces"
+ provider.add_span_processor(
+ BatchSpanProcessor(
+ OTLPSpanExporter(endpoint=endpoint, headers=otlp_headers)
+ )
+ )
+ except Exception:
+ logger.exception("Failed to configure OTLP HTTP exporter")
+ remote_enabled = False
+
+ try:
+ trace.set_tracer_provider(provider)
+ otel_init_ok = True
+ except Exception:
+ logger.exception("Failed to set OpenTelemetry tracer provider")
+ remote_enabled = False
+
+ otel_tracer = trace.get_tracer(tracer_name)
+ if otel_init_ok:
+ return otel_tracer, remote_enabled, True, remote_enabled
+
+ return otel_tracer, remote_enabled, bootstrapped, remote_enabled_state
diff --git a/tests/config/__init__.py b/tests/config/__init__.py
new file mode 100644
index 0000000..2edfe31
--- /dev/null
+++ b/tests/config/__init__.py
@@ -0,0 +1 @@
+"""Tests for strix.config module."""
diff --git a/tests/config/test_config_telemetry.py b/tests/config/test_config_telemetry.py
new file mode 100644
index 0000000..89af42f
--- /dev/null
+++ b/tests/config/test_config_telemetry.py
@@ -0,0 +1,55 @@
+import json
+
+from strix.config.config import Config
+
+
+def test_traceloop_vars_are_tracked() -> None:
+ tracked = Config.tracked_vars()
+
+ assert "STRIX_OTEL_TELEMETRY" in tracked
+ assert "STRIX_POSTHOG_TELEMETRY" in tracked
+ assert "TRACELOOP_BASE_URL" in tracked
+ assert "TRACELOOP_API_KEY" in tracked
+ assert "TRACELOOP_HEADERS" in tracked
+
+
+def test_apply_saved_uses_saved_traceloop_vars(monkeypatch, tmp_path) -> None:
+ config_path = tmp_path / "cli-config.json"
+ config_path.write_text(
+ json.dumps(
+ {
+ "env": {
+ "TRACELOOP_BASE_URL": "https://otel.example.com",
+ "TRACELOOP_API_KEY": "api-key",
+ "TRACELOOP_HEADERS": "x-test=value",
+ }
+ }
+ ),
+ encoding="utf-8",
+ )
+
+ monkeypatch.setattr(Config, "_config_file_override", config_path)
+ monkeypatch.delenv("TRACELOOP_BASE_URL", raising=False)
+ monkeypatch.delenv("TRACELOOP_API_KEY", raising=False)
+ monkeypatch.delenv("TRACELOOP_HEADERS", raising=False)
+
+ applied = Config.apply_saved()
+
+ assert applied["TRACELOOP_BASE_URL"] == "https://otel.example.com"
+ assert applied["TRACELOOP_API_KEY"] == "api-key"
+ assert applied["TRACELOOP_HEADERS"] == "x-test=value"
+
+
+def test_apply_saved_respects_existing_env_traceloop_vars(monkeypatch, tmp_path) -> None:
+ config_path = tmp_path / "cli-config.json"
+ config_path.write_text(
+ json.dumps({"env": {"TRACELOOP_BASE_URL": "https://otel.example.com"}}),
+ encoding="utf-8",
+ )
+
+ monkeypatch.setattr(Config, "_config_file_override", config_path)
+ monkeypatch.setenv("TRACELOOP_BASE_URL", "https://env.example.com")
+
+ applied = Config.apply_saved(force=False)
+
+ assert "TRACELOOP_BASE_URL" not in applied
diff --git a/tests/llm/test_llm_otel.py b/tests/llm/test_llm_otel.py
new file mode 100644
index 0000000..58ee89e
--- /dev/null
+++ b/tests/llm/test_llm_otel.py
@@ -0,0 +1,15 @@
+import litellm
+
+from strix.llm.config import LLMConfig
+from strix.llm.llm import LLM
+
+
+def test_llm_does_not_modify_litellm_callbacks(monkeypatch) -> None:
+ monkeypatch.setenv("STRIX_TELEMETRY", "1")
+ monkeypatch.setenv("STRIX_OTEL_TELEMETRY", "1")
+ monkeypatch.setattr(litellm, "callbacks", ["custom-callback"])
+
+ llm = LLM(LLMConfig(model_name="openai/gpt-5"), agent_name=None)
+
+ assert llm is not None
+ assert litellm.callbacks == ["custom-callback"]
diff --git a/tests/telemetry/test_flags.py b/tests/telemetry/test_flags.py
new file mode 100644
index 0000000..a7f8e43
--- /dev/null
+++ b/tests/telemetry/test_flags.py
@@ -0,0 +1,28 @@
+from strix.telemetry.flags import is_otel_enabled, is_posthog_enabled
+
+
+def test_flags_fallback_to_strix_telemetry(monkeypatch) -> None:
+ monkeypatch.delenv("STRIX_OTEL_TELEMETRY", raising=False)
+ monkeypatch.delenv("STRIX_POSTHOG_TELEMETRY", raising=False)
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+
+ assert is_otel_enabled() is False
+ assert is_posthog_enabled() is False
+
+
+def test_otel_flag_overrides_global_telemetry(monkeypatch) -> None:
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+ monkeypatch.setenv("STRIX_OTEL_TELEMETRY", "1")
+ monkeypatch.delenv("STRIX_POSTHOG_TELEMETRY", raising=False)
+
+ assert is_otel_enabled() is True
+ assert is_posthog_enabled() is False
+
+
+def test_posthog_flag_overrides_global_telemetry(monkeypatch) -> None:
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+ monkeypatch.setenv("STRIX_POSTHOG_TELEMETRY", "1")
+ monkeypatch.delenv("STRIX_OTEL_TELEMETRY", raising=False)
+
+ assert is_otel_enabled() is False
+ assert is_posthog_enabled() is True
diff --git a/tests/telemetry/test_tracer.py b/tests/telemetry/test_tracer.py
new file mode 100644
index 0000000..10f887e
--- /dev/null
+++ b/tests/telemetry/test_tracer.py
@@ -0,0 +1,379 @@
+import json
+import sys
+import types
+from pathlib import Path
+from typing import Any, ClassVar
+
+import pytest
+from opentelemetry.sdk.trace.export import SimpleSpanProcessor, SpanExportResult
+
+from strix.telemetry import tracer as tracer_module
+from strix.telemetry import utils as telemetry_utils
+from strix.telemetry.tracer import Tracer, set_global_tracer
+
+
+def _load_events(events_path: Path) -> list[dict[str, Any]]:
+ lines = events_path.read_text(encoding="utf-8").splitlines()
+ return [json.loads(line) for line in lines if line]
+
+
+@pytest.fixture(autouse=True)
+def _reset_tracer_globals(monkeypatch) -> None:
+ monkeypatch.setattr(tracer_module, "_global_tracer", None)
+ monkeypatch.setattr(tracer_module, "_OTEL_BOOTSTRAPPED", False)
+ monkeypatch.setattr(tracer_module, "_OTEL_REMOTE_ENABLED", False)
+ telemetry_utils.reset_events_write_locks()
+ monkeypatch.delenv("STRIX_TELEMETRY", raising=False)
+ monkeypatch.delenv("STRIX_OTEL_TELEMETRY", raising=False)
+ monkeypatch.delenv("STRIX_POSTHOG_TELEMETRY", raising=False)
+ monkeypatch.delenv("TRACELOOP_BASE_URL", raising=False)
+ monkeypatch.delenv("TRACELOOP_API_KEY", raising=False)
+ monkeypatch.delenv("TRACELOOP_HEADERS", raising=False)
+
+
+def test_tracer_local_mode_writes_jsonl_with_correlation(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer("local-observability")
+ set_global_tracer(tracer)
+ tracer.set_scan_config({"targets": ["https://example.com"], "user_instructions": "focus auth"})
+ tracer.log_agent_creation("agent-1", "Root Agent", "scan auth")
+ tracer.log_chat_message("starting scan", "user", "agent-1")
+ execution_id = tracer.log_tool_execution_start(
+ "agent-1",
+ "send_request",
+ {"url": "https://example.com/login"},
+ )
+ tracer.update_tool_execution(execution_id, "completed", {"status_code": 200, "body": "ok"})
+
+ events_path = tmp_path / "strix_runs" / "local-observability" / "events.jsonl"
+ assert events_path.exists()
+
+ events = _load_events(events_path)
+ assert any(event["event_type"] == "tool.execution.updated" for event in events)
+ assert not any(event["event_type"] == "traffic.intercepted" for event in events)
+
+ for event in events:
+ assert event["run_id"] == "local-observability"
+ assert event["trace_id"]
+ assert event["span_id"]
+
+
+def test_tracer_redacts_sensitive_payloads(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer("redaction-run")
+ set_global_tracer(tracer)
+ execution_id = tracer.log_tool_execution_start(
+ "agent-1",
+ "send_request",
+ {
+ "url": "https://example.com",
+ "api_key": "sk-secret-token-value",
+ "authorization": "Bearer super-secret-token",
+ },
+ )
+ tracer.update_tool_execution(
+ execution_id,
+ "error",
+ {"error": "request failed with token sk-secret-token-value"},
+ )
+
+ events_path = tmp_path / "strix_runs" / "redaction-run" / "events.jsonl"
+ events = _load_events(events_path)
+ serialized = json.dumps(events)
+
+ assert "sk-secret-token-value" not in serialized
+ assert "super-secret-token" not in serialized
+ assert "[REDACTED]" in serialized
+
+
+def test_tracer_remote_mode_configures_traceloop_export(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ class FakeTraceloop:
+ init_calls: ClassVar[list[dict[str, Any]]] = []
+
+ @staticmethod
+ def init(**kwargs: Any) -> None:
+ FakeTraceloop.init_calls.append(kwargs)
+
+ @staticmethod
+ def set_association_properties(properties: dict[str, Any]) -> None: # noqa: ARG004
+ return None
+
+ monkeypatch.setattr(tracer_module, "Traceloop", FakeTraceloop)
+ monkeypatch.setenv("TRACELOOP_BASE_URL", "https://otel.example.com")
+ monkeypatch.setenv("TRACELOOP_API_KEY", "test-api-key")
+ monkeypatch.setenv("TRACELOOP_HEADERS", '{"x-custom":"header"}')
+
+ tracer = Tracer("remote-observability")
+ set_global_tracer(tracer)
+ tracer.log_chat_message("hello", "user", "agent-1")
+
+ assert tracer._remote_export_enabled is True
+ assert FakeTraceloop.init_calls
+ init_kwargs = FakeTraceloop.init_calls[-1]
+ assert init_kwargs["api_endpoint"] == "https://otel.example.com"
+ assert init_kwargs["api_key"] == "test-api-key"
+ assert init_kwargs["headers"] == {"x-custom": "header"}
+ assert isinstance(init_kwargs["processor"], SimpleSpanProcessor)
+ assert "strix.run_id" not in init_kwargs["resource_attributes"]
+ assert "strix.run_name" not in init_kwargs["resource_attributes"]
+
+ events_path = tmp_path / "strix_runs" / "remote-observability" / "events.jsonl"
+ events = _load_events(events_path)
+ run_started = next(event for event in events if event["event_type"] == "run.started")
+ assert run_started["payload"]["remote_export_enabled"] is True
+
+
+def test_tracer_local_mode_avoids_traceloop_remote_endpoint(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ class FakeTraceloop:
+ init_calls: ClassVar[list[dict[str, Any]]] = []
+
+ @staticmethod
+ def init(**kwargs: Any) -> None:
+ FakeTraceloop.init_calls.append(kwargs)
+
+ @staticmethod
+ def set_association_properties(properties: dict[str, Any]) -> None: # noqa: ARG004
+ return None
+
+ monkeypatch.setattr(tracer_module, "Traceloop", FakeTraceloop)
+
+ tracer = Tracer("local-traceloop")
+ set_global_tracer(tracer)
+ tracer.log_chat_message("hello", "user", "agent-1")
+
+ assert FakeTraceloop.init_calls
+ init_kwargs = FakeTraceloop.init_calls[-1]
+ assert "api_endpoint" not in init_kwargs
+ assert "api_key" not in init_kwargs
+ assert "headers" not in init_kwargs
+ assert isinstance(init_kwargs["processor"], SimpleSpanProcessor)
+ assert tracer._remote_export_enabled is False
+
+
+def test_otlp_fallback_includes_auth_and_custom_headers(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+ monkeypatch.setattr(tracer_module, "Traceloop", None)
+ monkeypatch.setenv("TRACELOOP_BASE_URL", "https://otel.example.com")
+ monkeypatch.setenv("TRACELOOP_API_KEY", "test-api-key")
+ monkeypatch.setenv("TRACELOOP_HEADERS", '{"x-custom":"header"}')
+
+ captured: dict[str, Any] = {}
+
+ class FakeOTLPSpanExporter:
+ def __init__(self, endpoint: str, headers: dict[str, str] | None = None, **kwargs: Any):
+ captured["endpoint"] = endpoint
+ captured["headers"] = headers or {}
+ captured["kwargs"] = kwargs
+
+ def export(self, spans: Any) -> SpanExportResult: # noqa: ARG002
+ return SpanExportResult.SUCCESS
+
+ def shutdown(self) -> None:
+ return None
+
+ def force_flush(self, timeout_millis: int = 30_000) -> bool: # noqa: ARG002
+ return True
+
+ fake_module = types.ModuleType("opentelemetry.exporter.otlp.proto.http.trace_exporter")
+ fake_module.OTLPSpanExporter = FakeOTLPSpanExporter
+ monkeypatch.setitem(
+ sys.modules,
+ "opentelemetry.exporter.otlp.proto.http.trace_exporter",
+ fake_module,
+ )
+
+ tracer = Tracer("otlp-fallback")
+ set_global_tracer(tracer)
+
+ assert tracer._remote_export_enabled is True
+ assert captured["endpoint"] == "https://otel.example.com/v1/traces"
+ assert captured["headers"]["Authorization"] == "Bearer test-api-key"
+ assert captured["headers"]["x-custom"] == "header"
+
+
+def test_traceloop_init_failure_does_not_mark_bootstrapped_on_provider_failure(
+ monkeypatch, tmp_path
+) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ class FakeTraceloop:
+ @staticmethod
+ def init(**kwargs: Any) -> None: # noqa: ARG004
+ raise RuntimeError("traceloop init failed")
+
+ @staticmethod
+ def set_association_properties(properties: dict[str, Any]) -> None: # noqa: ARG004
+ return None
+
+ monkeypatch.setattr(tracer_module, "Traceloop", FakeTraceloop)
+
+ def _raise_provider_error(provider: Any) -> None:
+ raise RuntimeError("provider setup failed")
+
+ monkeypatch.setattr(tracer_module.trace, "set_tracer_provider", _raise_provider_error)
+
+ tracer = Tracer("bootstrap-failure")
+ set_global_tracer(tracer)
+
+ assert tracer_module._OTEL_BOOTSTRAPPED is False
+ assert tracer._remote_export_enabled is False
+
+
+def test_run_completed_event_emitted_once(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer("single-complete")
+ set_global_tracer(tracer)
+ tracer.save_run_data(mark_complete=True)
+ tracer.save_run_data(mark_complete=True)
+
+ events_path = tmp_path / "strix_runs" / "single-complete" / "events.jsonl"
+ events = _load_events(events_path)
+ run_completed = [event for event in events if event["event_type"] == "run.completed"]
+ assert len(run_completed) == 1
+
+
+def test_events_with_agent_id_include_agent_name(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer("agent-name-enrichment")
+ set_global_tracer(tracer)
+ tracer.log_agent_creation("agent-1", "Root Agent", "scan auth")
+ tracer.log_chat_message("hello", "assistant", "agent-1")
+
+ events_path = tmp_path / "strix_runs" / "agent-name-enrichment" / "events.jsonl"
+ events = _load_events(events_path)
+ chat_event = next(event for event in events if event["event_type"] == "chat.message")
+
+ assert chat_event["actor"]["agent_id"] == "agent-1"
+ assert chat_event["actor"]["agent_name"] == "Root Agent"
+
+
+def test_run_metadata_is_only_on_run_lifecycle_events(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer("metadata-scope")
+ set_global_tracer(tracer)
+ tracer.log_chat_message("hello", "assistant", "agent-1")
+ tracer.save_run_data(mark_complete=True)
+
+ events_path = tmp_path / "strix_runs" / "metadata-scope" / "events.jsonl"
+ events = _load_events(events_path)
+
+ run_started = next(event for event in events if event["event_type"] == "run.started")
+ run_completed = next(event for event in events if event["event_type"] == "run.completed")
+ chat_event = next(event for event in events if event["event_type"] == "chat.message")
+
+ assert "run_metadata" in run_started
+ assert "run_metadata" in run_completed
+ assert "run_metadata" not in chat_event
+
+
+def test_set_run_name_resets_cached_paths(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer()
+ set_global_tracer(tracer)
+ old_events_path = tracer.events_file_path
+
+ tracer.set_run_name("renamed-run")
+ tracer.log_chat_message("hello", "assistant", "agent-1")
+
+ new_events_path = tracer.events_file_path
+ assert new_events_path != old_events_path
+ assert new_events_path == tmp_path / "strix_runs" / "renamed-run" / "events.jsonl"
+
+ events = _load_events(new_events_path)
+ assert any(event["event_type"] == "run.started" for event in events)
+ assert any(event["event_type"] == "chat.message" for event in events)
+
+
+def test_set_run_name_resets_run_completed_flag(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ tracer = Tracer()
+ set_global_tracer(tracer)
+
+ tracer.save_run_data(mark_complete=True)
+ tracer.set_run_name("renamed-complete")
+ tracer.save_run_data(mark_complete=True)
+
+ events_path = tmp_path / "strix_runs" / "renamed-complete" / "events.jsonl"
+ events = _load_events(events_path)
+ run_completed = [event for event in events if event["event_type"] == "run.completed"]
+
+ assert any(event["event_type"] == "run.started" for event in events)
+ assert len(run_completed) == 1
+
+
+def test_set_run_name_updates_traceloop_association_properties(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+
+ class FakeTraceloop:
+ associations: ClassVar[list[dict[str, Any]]] = []
+
+ @staticmethod
+ def init(**kwargs: Any) -> None: # noqa: ARG004
+ return None
+
+ @staticmethod
+ def set_association_properties(properties: dict[str, Any]) -> None:
+ FakeTraceloop.associations.append(properties)
+
+ monkeypatch.setattr(tracer_module, "Traceloop", FakeTraceloop)
+
+ tracer = Tracer()
+ set_global_tracer(tracer)
+ tracer.set_run_name("renamed-run")
+
+ assert FakeTraceloop.associations
+ assert FakeTraceloop.associations[-1]["run_id"] == "renamed-run"
+ assert FakeTraceloop.associations[-1]["run_name"] == "renamed-run"
+
+
+def test_events_write_locks_are_scoped_by_events_file(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+
+ tracer_one = Tracer("lock-run-a")
+ tracer_two = Tracer("lock-run-b")
+
+ lock_a_from_one = tracer_one._get_events_write_lock(tracer_one.events_file_path)
+ lock_a_from_two = tracer_two._get_events_write_lock(tracer_one.events_file_path)
+ lock_b = tracer_two._get_events_write_lock(tracer_two.events_file_path)
+
+ assert lock_a_from_one is lock_a_from_two
+ assert lock_a_from_one is not lock_b
+
+
+def test_tracer_skips_jsonl_when_telemetry_disabled(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+
+ tracer = Tracer("telemetry-disabled")
+ set_global_tracer(tracer)
+ tracer.log_chat_message("hello", "assistant", "agent-1")
+ tracer.save_run_data(mark_complete=True)
+
+ events_path = tmp_path / "strix_runs" / "telemetry-disabled" / "events.jsonl"
+ assert not events_path.exists()
+
+
+def test_tracer_otel_flag_overrides_global_telemetry(monkeypatch, tmp_path) -> None:
+ monkeypatch.chdir(tmp_path)
+ monkeypatch.setenv("STRIX_TELEMETRY", "0")
+ monkeypatch.setenv("STRIX_OTEL_TELEMETRY", "1")
+
+ tracer = Tracer("otel-enabled")
+ set_global_tracer(tracer)
+ tracer.log_chat_message("hello", "assistant", "agent-1")
+ tracer.save_run_data(mark_complete=True)
+
+ events_path = tmp_path / "strix_runs" / "otel-enabled" / "events.jsonl"
+ assert events_path.exists()
diff --git a/tests/telemetry/test_utils.py b/tests/telemetry/test_utils.py
new file mode 100644
index 0000000..3e039ac
--- /dev/null
+++ b/tests/telemetry/test_utils.py
@@ -0,0 +1,39 @@
+from strix.telemetry.utils import prune_otel_span_attributes
+
+
+def test_prune_otel_span_attributes_drops_high_volume_prompt_content() -> None:
+ attributes = {
+ "gen_ai.operation.name": "openai.chat",
+ "gen_ai.request.model": "gpt-5.2",
+ "gen_ai.prompt.0.role": "system",
+ "gen_ai.prompt.0.content": "a" * 20_000,
+ "gen_ai.completion.0.content": "b" * 10_000,
+ "llm.input_messages.0.content": "c" * 5_000,
+ "llm.output_messages.0.content": "d" * 5_000,
+ "llm.input": "x" * 3_000,
+ "llm.output": "y" * 3_000,
+ }
+
+ pruned = prune_otel_span_attributes(attributes)
+
+ assert "gen_ai.prompt.0.content" not in pruned
+ assert "gen_ai.completion.0.content" not in pruned
+ assert "llm.input_messages.0.content" not in pruned
+ assert "llm.output_messages.0.content" not in pruned
+ assert "llm.input" not in pruned
+ assert "llm.output" not in pruned
+ assert pruned["gen_ai.operation.name"] == "openai.chat"
+ assert pruned["gen_ai.prompt.0.role"] == "system"
+ assert pruned["strix.filtered_attributes_count"] == 6
+
+
+def test_prune_otel_span_attributes_keeps_metadata_when_nothing_is_dropped() -> None:
+ attributes = {
+ "gen_ai.operation.name": "openai.chat",
+ "gen_ai.request.model": "gpt-5.2",
+ "gen_ai.prompt.0.role": "user",
+ }
+
+ pruned = prune_otel_span_attributes(attributes)
+
+ assert pruned == attributes