diff --git a/poetry.lock b/poetry.lock index 5d34d47..1e8ce2b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -170,6 +170,24 @@ typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] trio = ["trio (>=0.31.0)"] +[[package]] +name = "asgiref" +version = "3.10.0" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734"}, + {file = "asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e"}, +] + +[package.dependencies] +typing_extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=1.14.0)", "pytest", "pytest-asyncio"] + [[package]] name = "async-timeout" version = "5.0.1" @@ -746,7 +764,7 @@ version = "8.6.1" description = "Read metadata from Python packages" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, @@ -1084,6 +1102,93 @@ files = [ {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] +[[package]] +name = "opentelemetry-api" +version = "1.38.0" +description = "OpenTelemetry Python API" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "opentelemetry_api-1.38.0-py3-none-any.whl", hash = "sha256:2891b0197f47124454ab9f0cf58f3be33faca394457ac3e09daba13ff50aa582"}, + {file = "opentelemetry_api-1.38.0.tar.gz", hash = "sha256:f4c193b5e8acb0912b06ac5b16321908dd0843d75049c091487322284a3eea12"}, +] +markers = {main = "extra == \"opentelemetry\""} + +[package.dependencies] +importlib-metadata = ">=6.0,<8.8.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.59b0" +description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" +optional = true +python-versions = ">=3.9" +groups = ["main"] +markers = "extra == \"opentelemetry\"" +files = [ + {file = "opentelemetry_instrumentation-0.59b0-py3-none-any.whl", hash = "sha256:44082cc8fe56b0186e87ee8f7c17c327c4c2ce93bdbe86496e600985d74368ee"}, + {file = "opentelemetry_instrumentation-0.59b0.tar.gz", hash = "sha256:6010f0faaacdaf7c4dff8aac84e226d23437b331dcda7e70367f6d73a7db1adc"}, +] + +[package.dependencies] +opentelemetry-api = ">=1.4,<2.0" +opentelemetry-semantic-conventions = "0.59b0" +packaging = ">=18.0" +wrapt = ">=1.0.0,<2.0.0" + +[[package]] +name = "opentelemetry-sdk" +version = "1.38.0" +description = "OpenTelemetry Python SDK" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "opentelemetry_sdk-1.38.0-py3-none-any.whl", hash = "sha256:1c66af6564ecc1553d72d811a01df063ff097cdc82ce188da9951f93b8d10f6b"}, + {file = "opentelemetry_sdk-1.38.0.tar.gz", hash = "sha256:93df5d4d871ed09cb4272305be4d996236eedb232253e3ab864c8620f051cebe"}, +] + +[package.dependencies] +opentelemetry-api = "1.38.0" +opentelemetry-semantic-conventions = "0.59b0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.59b0" +description = "OpenTelemetry Semantic Conventions" 
+optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "opentelemetry_semantic_conventions-0.59b0-py3-none-any.whl", hash = "sha256:35d3b8833ef97d614136e253c1da9342b4c3c083bbaf29ce31d572a1c3825eed"}, + {file = "opentelemetry_semantic_conventions-0.59b0.tar.gz", hash = "sha256:7a6db3f30d70202d5bf9fa4b69bc866ca6a30437287de6c510fb594878aed6b0"}, +] +markers = {main = "extra == \"opentelemetry\""} + +[package.dependencies] +opentelemetry-api = "1.38.0" +typing-extensions = ">=4.5.0" + +[[package]] +name = "opentelemetry-test-utils" +version = "0.59b0" +description = "Test utilities for OpenTelemetry unit tests" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "opentelemetry_test_utils-0.59b0-py3-none-any.whl", hash = "sha256:c7f5b6ce4423509b4ae9ec4f0fd892746f2d26dce69b6f842e8f05ed3206730f"}, + {file = "opentelemetry_test_utils-0.59b0.tar.gz", hash = "sha256:e16c3b6012cb3eacaca73fc9105b2a927b7eb1c8b6ad9f01a00f7ee464dd156c"}, +] + +[package.dependencies] +asgiref = ">=3.0,<4.0" +opentelemetry-api = "1.38.0" +opentelemetry-sdk = "1.38.0" + [[package]] name = "orjson" version = "3.10.16" @@ -2218,6 +2323,98 @@ files = [ [package.extras] watchmedo = ["PyYAML (>=3.10)"] +[[package]] +name = "wrapt" +version = "1.17.3" +description = "Module for decorators, wrappers and monkey patching." +optional = true +python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"opentelemetry\"" +files = [ + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88bbae4d40d5a46142e70d58bf664a89b6b4befaea7b2ecc14e03cedb8e06c04"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b13af258d6a9ad602d57d889f83b9d5543acd471eee12eb51f5b01f8eb1bc2"}, + {file = "wrapt-1.17.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd341868a4b6714a5962c1af0bd44f7c404ef78720c7de4892901e540417111c"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f9b2601381be482f70e5d1051a5965c25fb3625455a2bf520b5a077b22afb775"}, + {file = "wrapt-1.17.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:343e44b2a8e60e06a7e0d29c1671a0d9951f59174f3709962b5143f60a2a98bd"}, + {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33486899acd2d7d3066156b03465b949da3fd41a5da6e394ec49d271baefcf05"}, + {file = "wrapt-1.17.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e6f40a8aa5a92f150bdb3e1c44b7e98fb7113955b2e5394122fa5532fec4b418"}, + {file = "wrapt-1.17.3-cp310-cp310-win32.whl", hash = "sha256:a36692b8491d30a8c75f1dfee65bef119d6f39ea84ee04d9f9311f83c5ad9390"}, + {file = "wrapt-1.17.3-cp310-cp310-win_amd64.whl", hash = "sha256:afd964fd43b10c12213574db492cb8f73b2f0826c8df07a68288f8f19af2ebe6"}, + {file = "wrapt-1.17.3-cp310-cp310-win_arm64.whl", hash = "sha256:af338aa93554be859173c39c85243970dc6a289fa907402289eeae7543e1ae18"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:273a736c4645e63ac582c60a56b0acb529ef07f78e08dc6bfadf6a46b19c0da7"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5531d911795e3f935a9c23eb1c8c03c211661a5060aab167065896bbf62a5f85"}, + {file = "wrapt-1.17.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0610b46293c59a3adbae3dee552b648b984176f8562ee0dba099a56cfbe4df1f"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:b32888aad8b6e68f83a8fdccbf3165f5469702a7544472bdf41f582970ed3311"}, + {file = "wrapt-1.17.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cccf4f81371f257440c88faed6b74f1053eef90807b77e31ca057b2db74edb1"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8a210b158a34164de8bb68b0e7780041a903d7b00c87e906fb69928bf7890d5"}, + {file = "wrapt-1.17.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:79573c24a46ce11aab457b472efd8d125e5a51da2d1d24387666cd85f54c05b2"}, + {file = "wrapt-1.17.3-cp311-cp311-win32.whl", hash = "sha256:c31eebe420a9a5d2887b13000b043ff6ca27c452a9a22fa71f35f118e8d4bf89"}, + {file = "wrapt-1.17.3-cp311-cp311-win_amd64.whl", hash = "sha256:0b1831115c97f0663cb77aa27d381237e73ad4f721391a9bfb2fe8bc25fa6e77"}, + {file = "wrapt-1.17.3-cp311-cp311-win_arm64.whl", hash = "sha256:5a7b3c1ee8265eb4c8f1b7d29943f195c00673f5ab60c192eba2d4a7eae5f46a"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba"}, + {file = "wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd"}, + {file = "wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828"}, + {file = "wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396"}, + {file = "wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc"}, + {file = "wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe"}, + {file = "wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c"}, + {file = "wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77"}, + {file = "wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277"}, + {file = "wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa"}, + {file = "wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050"}, + {file = "wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8"}, + {file = "wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb"}, + {file = "wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235"}, + {file = "wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c"}, + {file = "wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b"}, + {file = "wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7"}, + {file = "wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4"}, + {file = "wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10"}, + {file = "wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6"}, + {file = "wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067"}, + {file = "wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e"}, + {file = "wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056"}, + {file = "wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804"}, + {file = "wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116"}, + {file = "wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6"}, + {file = 
"wrapt-1.17.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:70d86fa5197b8947a2fa70260b48e400bf2ccacdcab97bb7de47e3d1e6312225"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df7d30371a2accfe4013e90445f6388c570f103d61019b6b7c57e0265250072a"}, + {file = "wrapt-1.17.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:caea3e9c79d5f0d2c6d9ab96111601797ea5da8e6d0723f77eabb0d4068d2b2f"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:758895b01d546812d1f42204bd443b8c433c44d090248bf22689df673ccafe00"}, + {file = "wrapt-1.17.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b551d101f31694fc785e58e0720ef7d9a10c4e62c1c9358ce6f63f23e30a56"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:656873859b3b50eeebe6db8b1455e99d90c26ab058db8e427046dbc35c3140a5"}, + {file = "wrapt-1.17.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a9a2203361a6e6404f80b99234fe7fb37d1fc73487b5a78dc1aa5b97201e0f22"}, + {file = "wrapt-1.17.3-cp38-cp38-win32.whl", hash = "sha256:55cbbc356c2842f39bcc553cf695932e8b30e30e797f961860afb308e6b1bb7c"}, + {file = "wrapt-1.17.3-cp38-cp38-win_amd64.whl", hash = "sha256:ad85e269fe54d506b240d2d7b9f5f2057c2aa9a2ea5b32c66f8902f768117ed2"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:30ce38e66630599e1193798285706903110d4f057aab3168a34b7fdc85569afc"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:65d1d00fbfb3ea5f20add88bbc0f815150dbbde3b026e6c24759466c8b5a9ef9"}, + {file = "wrapt-1.17.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7c06742645f914f26c7f1fa47b8bc4c91d222f76ee20116c43d5ef0912bba2d"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7e18f01b0c3e4a07fe6dfdb00e29049ba17eadbc5e7609a2a3a4af83ab7d710a"}, + {file = "wrapt-1.17.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f5f51a6466667a5a356e6381d362d259125b57f059103dd9fdc8c0cf1d14139"}, + {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:59923aa12d0157f6b82d686c3fd8e1166fa8cdfb3e17b42ce3b6147ff81528df"}, + {file = "wrapt-1.17.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:46acc57b331e0b3bcb3e1ca3b421d65637915cfcd65eb783cb2f78a511193f9b"}, + {file = "wrapt-1.17.3-cp39-cp39-win32.whl", hash = "sha256:3e62d15d3cfa26e3d0788094de7b64efa75f3a53875cdbccdf78547aed547a81"}, + {file = "wrapt-1.17.3-cp39-cp39-win_amd64.whl", hash = "sha256:1f23fa283f51c890eda8e34e4937079114c74b4c81d2b2f1f1d94948f5cc3d7f"}, + {file = "wrapt-1.17.3-cp39-cp39-win_arm64.whl", hash = "sha256:24c2ed34dc222ed754247a2702b1e1e89fdbaa4016f324b4b8f1a802d4ffe87f"}, + {file = "wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22"}, + {file = "wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0"}, +] + [[package]] name = "yarl" version = "1.20.1" @@ -2343,7 +2540,7 @@ version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.9" -groups = ["main"] +groups = ["main", "dev"] files = [ {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, {file = "zipp-3.21.0.tar.gz", hash = 
"sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, @@ -2361,6 +2558,7 @@ type = ["pytest-mypy"] cbor = ["cbor2"] metrics = ["prometheus_client"] msgpack = ["msgpack"] +opentelemetry = ["opentelemetry-api", "opentelemetry-instrumentation", "opentelemetry-semantic-conventions"] orjson = ["orjson"] reload = ["gitignore-parser", "watchdog"] uv = ["uvloop"] @@ -2369,4 +2567,4 @@ zmq = ["pyzmq"] [metadata] lock-version = "2.1" python-versions = "^3.9" -content-hash = "2b3feaf434e86a3923bb75a66daf60dff0d43d64a1e27d6d2f269a891621af01" +content-hash = "08d1a303942bca932a307e27b1510da7a7b71ad4f4d55f766310de874657886b" diff --git a/pyproject.toml b/pyproject.toml index c151745..53ea3cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,6 +36,10 @@ pycron = "^3.0.0" taskiq_dependencies = ">=1.3.1,<2" anyio = ">=4" packaging = ">=19" +# For opentelemetry instrumentation +opentelemetry-api = { version = "^1.38.0", optional = true } +opentelemetry-instrumentation = { version = "^0.59b0", optional = true} +opentelemetry-semantic-conventions = { version = "^0.59b0", optional = true} # For prometheus metrics prometheus_client = { version = "^0", optional = true } # For ZMQBroker @@ -69,10 +73,12 @@ pytest-mock = "^3.11.1" tzlocal = "^5.0.1" types-tzlocal = "^5.0.1.1" types-pytz = "^2023.3.1.1" +opentelemetry-test-utils = "^0.59b0" [tool.poetry.extras] zmq = ["pyzmq"] uv = ["uvloop"] +opentelemetry = ["opentelemetry-api", "opentelemetry-instrumentation", "opentelemetry-semantic-conventions"] metrics = ["prometheus_client"] reload = ["watchdog", "gitignore-parser"] orjson = ["orjson"] @@ -86,6 +92,9 @@ taskiq = "taskiq.__main__:main" worker = "taskiq.cli.worker.cmd:WorkerCMD" scheduler = "taskiq.cli.scheduler.cmd:SchedulerCMD" +[tool.poetry.plugins.opentelemetry_instrumentor] +taskiq = "taskiq.instrumentation:TaskiqInstrumentor" + [tool.mypy] strict = true ignore_missing_imports = true diff --git a/taskiq/instrumentation.py b/taskiq/instrumentation.py new file mode 100644 index 0000000..b123311 --- /dev/null +++ b/taskiq/instrumentation.py @@ -0,0 +1,134 @@ +""" +Instrument `taskiq`_ to trace Taskiq applications. + +.. _taskiq: https://pypi.org/project/taskiq/ + +Usage +----- + +* Run instrumented task + +.. code:: python + + import asyncio + + from taskiq import InMemoryBroker, TaskiqEvents, TaskiqState + from taskiq.instrumentation import TaskiqInstrumentor + + broker = InMemoryBroker() + + @broker.on_event(TaskiqEvents.WORKER_STARTUP) + async def startup(state: TaskiqState) -> None: + TaskiqInstrumentor().instrument() + + @broker.task + async def add(x, y): + return x + y + + async def main(): + await broker.startup() + await my_task.kiq(1, 2) + await broker.shutdown() + + if __name__ == "__main__": + asyncio.run(main()) + +API +--- +""" + + +import logging +from typing import Any, Callable, Collection, Optional +from weakref import WeakSet as _WeakSet + +try: + import opentelemetry # noqa: F401 +except ImportError as exc: + raise ImportError( + "Cannot instrument. 
+    ) from exc
+
+
+from opentelemetry.instrumentation.instrumentor import (  # type: ignore[attr-defined]
+    BaseInstrumentor,
+)
+from opentelemetry.instrumentation.utils import unwrap
+from opentelemetry.metrics import MeterProvider
+from opentelemetry.trace import TracerProvider
+from wrapt import wrap_function_wrapper
+
+from taskiq import AsyncBroker
+from taskiq.middlewares.opentelemetry_middleware import OpenTelemetryMiddleware
+
+logger = logging.getLogger("taskiq.opentelemetry")
+
+
+class TaskiqInstrumentor(BaseInstrumentor):
+    """OpenTelemetry instrumentor for Taskiq."""
+
+    _instrumented_brokers: _WeakSet[AsyncBroker] = _WeakSet()
+
+    def __init__(self) -> None:
+        super().__init__()
+        self._middleware = None
+
+    def instrument_broker(
+        self,
+        broker: AsyncBroker,
+        tracer_provider: Optional[TracerProvider] = None,
+        meter_provider: Optional[MeterProvider] = None,
+    ) -> None:
+        """Instrument broker."""
+        if not hasattr(broker, "_is_instrumented_by_opentelemetry"):
+            broker._is_instrumented_by_opentelemetry = False  # type: ignore[attr-defined] # noqa: SLF001
+
+        if not getattr(broker, "_is_instrumented_by_opentelemetry", False):
+            broker.middlewares.insert(
+                0,
+                OpenTelemetryMiddleware(
+                    tracer_provider=tracer_provider,
+                    meter_provider=meter_provider,
+                ),
+            )
+            broker._is_instrumented_by_opentelemetry = True  # type: ignore[attr-defined] # noqa: SLF001
+            if broker not in self._instrumented_brokers:
+                self._instrumented_brokers.add(broker)
+        else:
+            logger.warning(
+                "Attempting to instrument taskiq broker while already instrumented",
+            )
+
+    def uninstrument_broker(self, broker: AsyncBroker) -> None:
+        """Uninstrument broker."""
+        broker.middlewares = [
+            middleware
+            for middleware in broker.middlewares
+            if not isinstance(middleware, OpenTelemetryMiddleware)
+        ]
+        broker._is_instrumented_by_opentelemetry = False  # type: ignore[attr-defined] # noqa: SLF001
+        self._instrumented_brokers.discard(broker)
+
+    def instrumentation_dependencies(self) -> Collection[str]:
+        """This function tells which library this instrumentor instruments."""
+        return ("taskiq >= 0.0.1",)
+
+    def _instrument(self, **kwargs: Any) -> None:
+        def broker_init(
+            init: Callable[..., Any],
+            broker: AsyncBroker,
+            args: Any,
+            kwargs: Any,
+        ) -> Any:
+            result = init(*args, **kwargs)
+            self.instrument_broker(broker)
+            return result
+
+        wrap_function_wrapper("taskiq", "AsyncBroker.__init__", broker_init)
+
+    def _uninstrument(self, **kwargs: Any) -> None:
+        instances_to_uninstrument = list(self._instrumented_brokers)
+        for broker in instances_to_uninstrument:
+            self.uninstrument_broker(broker)
+        self._instrumented_brokers.clear()
+        unwrap(AsyncBroker, "__init__")
diff --git a/taskiq/middlewares/opentelemetry_middleware.py b/taskiq/middlewares/opentelemetry_middleware.py
new file mode 100644
index 0000000..291f9f7
--- /dev/null
+++ b/taskiq/middlewares/opentelemetry_middleware.py
@@ -0,0 +1,299 @@
+import logging
+from contextlib import AbstractContextManager
+from typing import Any, Dict, Optional, Tuple, TypeVar
+
+try:
+    import opentelemetry  # noqa: F401
+except ImportError as exc:
+    raise ImportError(
+        "Cannot import opentelemetry_middleware. "
" + "Please install 'taskiq[opentelemetry]'.", + ) from exc + +from opentelemetry import context as context_api +from opentelemetry import trace +from opentelemetry.metrics import Meter, MeterProvider, get_meter +from opentelemetry.propagate import extract, inject +from opentelemetry.semconv.trace import SpanAttributes +from opentelemetry.trace import Span, Tracer, TracerProvider +from opentelemetry.trace.status import Status, StatusCode + +from taskiq import TaskiqMessage, TaskiqMiddleware, TaskiqResult, __version__ + +logger = logging.getLogger("taskiq.opentelemetry") + +T = TypeVar("T") + +# Taskiq Context key +CTX_KEY = "__otel_task_span" + +# Taskiq Context attributes +TASKIQ_CONTEXT_ATTRIBUTES = [ + "_retries", + "delay", + "max_retries", + "retry_on_error", + "timeout", + "X-Taskiq-requeue", +] + +# Task operations +_TASK_TAG_KEY = "taskiq.action" +_TASK_SEND = "send" +_TASK_EXECUTE = "execute" + +_TASK_RETRY_REASON_KEY = "taskiq.retry.reason" +_TASK_NAME_KEY = "taskiq.task_name" + + +def set_attributes_from_context(span: Span, context: Dict[str, Any]) -> None: + """Helper to extract meta values from a Taskiq Context.""" + if not span.is_recording(): + return + + for key in TASKIQ_CONTEXT_ATTRIBUTES: + value = context.get(key) + + # Skip this key if it is not set + if value is None: + continue + + # Skip `retries` if it's value is `0` + if key == "_retries" and value == "0": + continue + + attribute_name = f"taskiq.{key}" + + span.set_attribute(attribute_name, value) + + +def attach_context( + message: Optional[TaskiqMessage], + span: Span, + activation: AbstractContextManager[Span], + token: Optional[object], + is_publish: bool = False, +) -> None: + """ + Propagate context to `TaskiqMessage`. + + Helper to propagate a `Span`, `ContextManager` and context token + for the given `Task` instance. This function uses a `dict` that stores + the Span using the `(task_id, is_publish)` as a key. This is useful + when information must be propagated from one Celery signal to another. + + We use (task_id, is_publish) for the key to ensure that publishing a + task from within another task does not cause any conflicts. + + This mostly happens when either a task fails and a retry policy is in place, + we end up trying to publish a task with the same id as the task currently running. 
+ """ + if message is None: + return + + ctx_dict = getattr(message, CTX_KEY, None) + + if ctx_dict is None: + ctx_dict = {} + setattr(message, CTX_KEY, ctx_dict) + + ctx_dict[(message.task_id, is_publish)] = (span, activation, token) + + +def detach_context(message: TaskiqMessage, is_publish: bool = False) -> None: + """Remove context from `TaskiqMessage`.""" + span_dict = getattr(message, CTX_KEY, None) + if span_dict is None: + return + + # See note in `attach_context` for key info + span_dict.pop((message.task_id, is_publish), None) + + +def retrieve_context( + message: TaskiqMessage, + is_publish: bool = False, +) -> Optional[Tuple[Span, AbstractContextManager[Span], Optional[object]]]: + """Retrieve context from `TaskiqMessage`.""" + span_dict = getattr(message, CTX_KEY, None) + if span_dict is None: + return None + + # See note in `attach_context` for key info + return span_dict.get((message.task_id, is_publish), None) + + +class OpenTelemetryMiddleware(TaskiqMiddleware): + """Middleware to instrument Taskiq with OpenTelemetry.""" + + def __init__( + self, + tracer_provider: Optional[TracerProvider] = None, + meter_provider: Optional[MeterProvider] = None, + tracer: Optional[Tracer] = None, + meter: Optional[Meter] = None, + ) -> None: + super().__init__() + self._tracer = ( + trace.get_tracer( + __name__, + __version__, + tracer_provider, + schema_url="https://opentelemetry.io/schemas/1.11.0", + ) + if tracer is None + else tracer + ) + self._meter = ( + get_meter( + __name__, + __version__, + meter_provider, + schema_url="https://opentelemetry.io/schemas/1.11.0", + ) + if meter is None + else meter + ) + + def pre_send(self, message: TaskiqMessage) -> TaskiqMessage: + """ + This function starts new span and propagates opentelemetry state to labels. + + :param message: current message. + :return: message + """ + logger.debug("pre_send task_id=%s", message.task_id) + + operation_name = f"{_TASK_SEND}/{message.task_name}" + span = self._tracer.start_span(operation_name, kind=trace.SpanKind.PRODUCER) + + if span.is_recording(): + span.set_attribute(_TASK_TAG_KEY, _TASK_SEND) + span.set_attribute(SpanAttributes.MESSAGING_MESSAGE_ID, message.task_id) + span.set_attribute(_TASK_NAME_KEY, message.task_name) + set_attributes_from_context(span, message.labels) + + activation = trace.use_span(span, end_on_exit=True) + activation.__enter__() + attach_context(message, span, activation, None, is_publish=True) + inject(message.labels) + + return message + + def post_send(self, message: TaskiqMessage) -> None: + """ + This function closes span from `pre_send`. + + :param message: current message. + """ + logger.debug("post_send task_id=%s", message.task_id) + # retrieve and finish the Span + ctx = retrieve_context(message, is_publish=True) + + if ctx is None: + logger.warning("no existing span found for task_id=%s", message.task_id) + return + + _, activation, _ = ctx + + activation.__exit__(None, None, None) + detach_context(message, is_publish=True) + + def pre_execute(self, message: TaskiqMessage) -> TaskiqMessage: + """ + This function starts new span and propagates opentelemetry state to labels. + + :param message: current message. 
+ :return: message + """ + logger.debug("pre_execute task_id=%s", message.task_id) + tracectx = extract(message.labels) or None + token = context_api.attach(tracectx) if tracectx is not None else None + + operation_name = f"{_TASK_EXECUTE}/{message.task_name}" + span = self._tracer.start_span( + operation_name, + context=tracectx, + kind=trace.SpanKind.CONSUMER, + ) + + activation = trace.use_span(span, end_on_exit=True) + activation.__enter__() # pylint: disable=E1101 + attach_context(message, span, activation, token) + return message + + def post_execute( # pylint: disable=R6301 + self, + message: TaskiqMessage, + result: TaskiqResult[T], + ) -> None: + """ + This function closes span from `pre_execute`. + + :param message: received message. + :param result: result of the execution. + """ + logger.debug("post_execute task_id=%s", message.task_id) + + # retrieve and finish the Span + ctx = retrieve_context(message) + + if ctx is None: + logger.warning("no existing span found for task_id=%s", message.task_id) + return + + span, activation, token = ctx + + if span.is_recording(): + span.set_attribute(_TASK_TAG_KEY, _TASK_EXECUTE) + set_attributes_from_context(span, message.labels) + span.set_attribute(_TASK_NAME_KEY, message.task_name) + + activation.__exit__(None, None, None) + detach_context(message) + # if the process sending the task is not instrumented + # there's no incoming context and no token to detach + if token is not None: + context_api.detach(token) # type: ignore[arg-type] + + def on_error( + self, + message: TaskiqMessage, + result: TaskiqResult[T], + exception: BaseException, + ) -> None: + """ + This function closes span from `pre_execute` with error. + + :param message: Message that caused the error. + :param result: execution result. + :param exception: found exception. 
+ """ + ctx = retrieve_context(message) + + if ctx is None: + return + + span, _, _ = ctx + + if not span.is_recording(): + return + + retry_on_error = message.labels.get("retry_on_error") + if isinstance(retry_on_error, str): + retry_on_error = retry_on_error.lower() == "true" + + if retry_on_error is None: + retry_on_error = False + + if retry_on_error: + # Add retry reason metadata to span + span.set_attribute(_TASK_RETRY_REASON_KEY, str(exception)) + return + + status_kwargs = { + "status_code": StatusCode.ERROR, + "description": str(exception), + } + span.record_exception(exception) + span.set_status(Status(**status_kwargs)) # type: ignore[arg-type] diff --git a/taskiq/package.py b/taskiq/package.py new file mode 100644 index 0000000..72e9d73 --- /dev/null +++ b/taskiq/package.py @@ -0,0 +1,2 @@ +# for compatibility with opentelemetry-instrumentation +_instruments = ("taskiq >= 0.11.20",) diff --git a/tests/opentelemetry/__init__.py b/tests/opentelemetry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/opentelemetry/taskiq_test_tasks.py b/tests/opentelemetry/taskiq_test_tasks.py new file mode 100644 index 0000000..d910313 --- /dev/null +++ b/tests/opentelemetry/taskiq_test_tasks.py @@ -0,0 +1,28 @@ +from typing import Any + +from opentelemetry import baggage + +from taskiq import InMemoryBroker + +broker = InMemoryBroker( + await_inplace=True, # used to sort spans in a deterministic way +) + + +class CustomError(Exception): + pass + + +@broker.task +async def task_add(num_a: float, num_b: float) -> float: + return num_a + num_b + + +@broker.task +async def task_raises() -> None: + raise CustomError("The task failed!") + + +@broker.task +async def task_returns_baggage() -> Any: + return dict(baggage.get_all()) diff --git a/tests/opentelemetry/test_auto_instrumentation.py b/tests/opentelemetry/test_auto_instrumentation.py new file mode 100644 index 0000000..65d5660 --- /dev/null +++ b/tests/opentelemetry/test_auto_instrumentation.py @@ -0,0 +1,59 @@ +from opentelemetry.test.test_base import TestBase +from opentelemetry.trace import SpanKind, StatusCode + +from taskiq import InMemoryBroker +from taskiq.instrumentation import TaskiqInstrumentor + + +class TestTaskiqAutoInstrumentation(TestBase): + async def test_auto_instrument(self) -> None: + TaskiqInstrumentor().instrument() + + broker = InMemoryBroker(await_inplace=True) + + @broker.task + async def task_add(a: float, b: float) -> float: + return a + b + + await task_add.kiq(1, 2) + await broker.wait_all() + + spans = self.sorted_spans(self.memory_exporter.get_finished_spans()) + self.assertEqual(len(spans), 2) + + consumer, producer = spans + + self.assertEqual( + consumer.name, + "execute/tests.test_auto_instrumentation:task_add", + f"{consumer._end_time}:{producer._end_time}", + ) + self.assertEqual(consumer.kind, SpanKind.CONSUMER) + self.assertSpanHasAttributes( + consumer, + { + "taskiq.action": "execute", + "taskiq.task_name": "tests.test_auto_instrumentation:task_add", + }, + ) + + self.assertEqual(consumer.status.status_code, StatusCode.UNSET) + + self.assertEqual(0, len(consumer.events)) + + self.assertEqual( + producer.name, + "send/tests.test_auto_instrumentation:task_add", + ) + self.assertEqual(producer.kind, SpanKind.PRODUCER) + self.assertSpanHasAttributes( + producer, + { + "taskiq.action": "send", + "taskiq.task_name": "tests.test_auto_instrumentation:task_add", + }, + ) + + self.assertNotEqual(consumer.parent, producer.context) + self.assertEqual(consumer.parent.span_id, 
producer.context.span_id) + self.assertEqual(consumer.context.trace_id, producer.context.trace_id) diff --git a/tests/opentelemetry/test_duplicate.py b/tests/opentelemetry/test_duplicate.py new file mode 100644 index 0000000..b03ff8f --- /dev/null +++ b/tests/opentelemetry/test_duplicate.py @@ -0,0 +1,15 @@ +import unittest + +from taskiq.instrumentation import TaskiqInstrumentor + +from .taskiq_test_tasks import broker + + +class TestUtils(unittest.TestCase): + @staticmethod + def test_duplicate_instrumentation() -> None: + first = TaskiqInstrumentor() + first.instrument_broker(broker) + second = TaskiqInstrumentor() + second.instrument_broker(broker) + TaskiqInstrumentor().uninstrument() diff --git a/tests/opentelemetry/test_helpers.py b/tests/opentelemetry/test_helpers.py new file mode 100644 index 0000000..ba7fa85 --- /dev/null +++ b/tests/opentelemetry/test_helpers.py @@ -0,0 +1,116 @@ +# mypy: disable-error-code="arg-type, union-attr" +import unittest +from unittest import mock + +from opentelemetry import trace as trace_api +from opentelemetry.sdk import trace + +from taskiq import TaskiqMessage +from taskiq.middlewares import opentelemetry_middleware + +from .taskiq_test_tasks import broker + + +class TestUtils(unittest.TestCase): + def setUp(self) -> None: + self.broker = broker + + def test_set_attributes_from_context(self) -> None: + # it should extract only relevant keys + context = { + "_retries": "4", + "delay": "30", + "max_retries": "6", + "retry_on_error": "true", + "timeout": "60", + "X-Taskiq-requeue": "4", + "custom_meta": "custom_value", + } + + span = trace._Span("name", mock.Mock(spec=trace_api.SpanContext)) + opentelemetry_middleware.set_attributes_from_context(span, context) + + self.assertEqual(span.attributes.get("taskiq._retries"), "4") + self.assertEqual(span.attributes.get("taskiq.delay"), "30") + self.assertEqual(span.attributes.get("taskiq.max_retries"), "6") + self.assertEqual(span.attributes.get("taskiq.retry_on_error"), "true") + self.assertEqual(span.attributes.get("taskiq.timeout"), "60") + self.assertEqual(span.attributes.get("taskiq.X-Taskiq-requeue"), "4") + + self.assertNotIn("custom_meta", span.attributes) + + def test_set_attributes_not_recording(self) -> None: + # it should extract only relevant keys + context = { + "_retries": "4", + "delay": "30", + "max_retries": "6", + "retry_on_error": "true", + "timeout": "60", + "X-Taskiq-requeue": "4", + "custom_meta": "custom_value", + } + + mock_span = mock.Mock() + mock_span.is_recording.return_value = False + opentelemetry_middleware.set_attributes_from_context(mock_span, context) + self.assertFalse(mock_span.is_recording()) + self.assertTrue(mock_span.is_recording.called) + self.assertFalse(mock_span.set_attribute.called) + self.assertFalse(mock_span.set_status.called) + + def test_set_attributes_from_context_empty_keys(self) -> None: + # it should not extract empty keys + context = { + "retries": 0, + } + + span = trace._Span("name", mock.Mock(spec=trace_api.SpanContext)) + opentelemetry_middleware.set_attributes_from_context(span, context) + + self.assertEqual(len(span.attributes), 0) + + def test_span_propagation(self) -> None: + # propagate and retrieve a Span + message = mock.Mock( + task_id="7c6731af-9533-40c3-83a9-25b58f0d837f", + spec=TaskiqMessage, + ) + span = trace._Span("name", mock.Mock(spec=trace_api.SpanContext)) + opentelemetry_middleware.attach_context(message, span, mock.Mock(), "") + ctx = opentelemetry_middleware.retrieve_context(message) + self.assertIsNotNone(ctx) + span_after, 
_, _ = ctx  # type: ignore[misc]
+        self.assertIs(span, span_after)
+
+    def test_span_delete(self) -> None:
+        # propagate a Span
+        message = mock.Mock(
+            task_id="7c6731af-9533-40c3-83a9-25b58f0d837f",
+            spec=TaskiqMessage,
+        )
+        span = trace._Span("name", mock.Mock(spec=trace_api.SpanContext))
+        opentelemetry_middleware.attach_context(message, span, mock.Mock(), "")
+        # delete the Span
+        opentelemetry_middleware.detach_context(message)
+        self.assertEqual(opentelemetry_middleware.retrieve_context(message), None)
+
+    def test_optional_message_span_attach(self) -> None:
+        span = trace._Span("name", mock.Mock(spec=trace_api.SpanContext))
+
+        # assert this is a no-op
+        self.assertIsNone(
+            opentelemetry_middleware.attach_context(None, span, mock.Mock(), ""),  # type: ignore[func-returns-value]
+        )
+
+    def test_span_delete_empty(self) -> None:
+        # delete the Span
+        message = mock.Mock(
+            task_id="7c6731af-9533-40c3-83a9-25b58f0d837f",
+            spec=TaskiqMessage,
+        )
+        try:
+            opentelemetry_middleware.detach_context(message)
+            self.assertEqual(opentelemetry_middleware.retrieve_context(message), None)
+        except Exception as ex:  # pylint: disable=broad-except
+            self.fail(f"Exception was raised: {ex}")
diff --git a/tests/opentelemetry/test_tasks.py b/tests/opentelemetry/test_tasks.py
new file mode 100644
index 0000000..d9524e7
--- /dev/null
+++ b/tests/opentelemetry/test_tasks.py
@@ -0,0 +1,190 @@
+from contextlib import AbstractContextManager
+from typing import Any, Callable, Optional, Tuple
+
+from opentelemetry import baggage, context
+from opentelemetry.instrumentation.utils import unwrap
+from opentelemetry.semconv.trace import SpanAttributes
+from opentelemetry.test.test_base import TestBase
+from opentelemetry.trace import Span, SpanKind, StatusCode
+from wrapt import wrap_function_wrapper
+
+from taskiq.instrumentation import TaskiqInstrumentor
+from taskiq.middlewares import opentelemetry_middleware
+
+from .taskiq_test_tasks import (
+    broker,
+    task_add,
+    task_raises,
+    task_returns_baggage,
+)
+
+
+class TestTaskiqInstrumentation(TestBase):
+    def tearDown(self) -> None:
+        super().tearDown()
+        TaskiqInstrumentor().uninstrument_broker(broker)
+
+    async def test_task(self) -> None:
+        TaskiqInstrumentor().instrument_broker(broker)
+
+        await task_add.kiq(1, 2)
+        await broker.wait_all()
+
+        spans = self.sorted_spans(self.memory_exporter.get_finished_spans())
+        self.assertEqual(len(spans), 2)
+
+        consumer, producer = spans
+
+        self.assertEqual(
+            consumer.name,
+            "execute/tests.opentelemetry.taskiq_test_tasks:task_add",
+            f"{consumer._end_time}:{producer._end_time}",
+        )
+        self.assertEqual(consumer.kind, SpanKind.CONSUMER)
+        self.assertSpanHasAttributes(
+            consumer,
+            {
+                "taskiq.action": "execute",
+                "taskiq.task_name": "tests.opentelemetry.taskiq_test_tasks:task_add",
+            },
+        )
+
+        self.assertEqual(consumer.status.status_code, StatusCode.UNSET)
+
+        self.assertEqual(0, len(consumer.events))
+
+        self.assertEqual(
+            producer.name,
+            "send/tests.opentelemetry.taskiq_test_tasks:task_add",
+        )
+        self.assertEqual(producer.kind, SpanKind.PRODUCER)
+        self.assertSpanHasAttributes(
+            producer,
+            {
+                "taskiq.action": "send",
+                "taskiq.task_name": "tests.opentelemetry.taskiq_test_tasks:task_add",
+            },
+        )
+
+        self.assertNotEqual(consumer.parent, producer.context)
+        self.assertEqual(consumer.parent.span_id, producer.context.span_id)
+        self.assertEqual(consumer.context.trace_id, producer.context.trace_id)
+
+    async def test_task_raises(self) -> None:
+        TaskiqInstrumentor().instrument_broker(broker)
+
+        await task_raises.kiq()
+        await broker.wait_all()
+
+        spans = self.sorted_spans(self.memory_exporter.get_finished_spans())
+        self.assertEqual(len(spans), 2)
+
+        consumer, producer = spans
+
+        self.assertEqual(
+            consumer.name,
+            "execute/tests.opentelemetry.taskiq_test_tasks:task_raises",
+        )
+        self.assertEqual(consumer.kind, SpanKind.CONSUMER)
+        self.assertSpanHasAttributes(
+            consumer,
+            {
+                "taskiq.action": "execute",
+                "taskiq.task_name": "tests.opentelemetry.taskiq_test_tasks:task_raises",
+            },
+        )
+
+        self.assertEqual(consumer.status.status_code, StatusCode.ERROR)
+
+        self.assertEqual(1, len(consumer.events))
+        event = consumer.events[0]
+
+        self.assertIn(SpanAttributes.EXCEPTION_STACKTRACE, event.attributes)
+
+        self.assertEqual(
+            event.attributes[SpanAttributes.EXCEPTION_TYPE],
+            "tests.opentelemetry.taskiq_test_tasks.CustomError",
+        )
+
+        self.assertEqual(
+            event.attributes[SpanAttributes.EXCEPTION_MESSAGE],
+            "The task failed!",
+        )
+
+        self.assertEqual(
+            producer.name,
+            "send/tests.opentelemetry.taskiq_test_tasks:task_raises",
+        )
+        self.assertEqual(producer.kind, SpanKind.PRODUCER)
+        self.assertSpanHasAttributes(
+            producer,
+            {
+                "taskiq.action": "send",
+                "taskiq.task_name": "tests.opentelemetry.taskiq_test_tasks:task_raises",
+            },
+        )
+
+        self.assertNotEqual(consumer.parent, producer.context)
+        self.assertEqual(consumer.parent.span_id, producer.context.span_id)
+        self.assertEqual(consumer.context.trace_id, producer.context.trace_id)
+
+    async def test_uninstrument(self) -> None:
+        TaskiqInstrumentor().instrument_broker(broker)
+        TaskiqInstrumentor().uninstrument_broker(broker)
+
+        await task_add.kiq(1, 2)
+        await broker.wait_all()
+
+        spans = self.memory_exporter.get_finished_spans()
+        self.assertEqual(len(spans), 0)
+
+    async def test_baggage(self) -> None:
+        TaskiqInstrumentor().instrument_broker(broker)
+
+        ctx = baggage.set_baggage("key", "value")
+        context.attach(ctx)
+
+        task = await task_returns_baggage.kiq()
+        result = await task.wait_result(timeout=2)
+
+        self.assertEqual(result.return_value, {"key": "value"})
+
+    async def test_task_not_instrumented_does_not_raise(self) -> None:
+        def _retrieve_context_wrapper_none_token(
+            wrapped: Callable[
+                [Any],
+                Optional[
+                    Tuple[
+                        Span,
+                        AbstractContextManager[Span],
+                        Optional[object],
+                    ]
+                ],
+            ],
+            instance: Any,
+            args: Any,
+            kwargs: Any,
+        ) -> Optional[Tuple[Span, AbstractContextManager[Span], None]]:
+            ctx = wrapped(*args, **kwargs)
+            if ctx is None:
+                return ctx
+            span, activation, _ = ctx
+            return span, activation, None
+
+        wrap_function_wrapper(
+            opentelemetry_middleware,
+            "retrieve_context",
+            _retrieve_context_wrapper_none_token,
+        )
+
+        TaskiqInstrumentor().instrument_broker(broker)
+
+        task = await task_add.kiq(1, 2)
+        result = await task.wait_result(timeout=2)
+
+        spans = self.sorted_spans(self.memory_exporter.get_finished_spans())
+        self.assertEqual(len(spans), 2)
+
+        self.assertTrue(result.return_value)
+
+        unwrap(opentelemetry_middleware, "retrieve_context")