diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..f39751c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,14 @@ +exclude: "^docs/conf.py|^docs/build/" + +repos: + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + name: isort (python) + + - repo: https://github.com/psf/black + rev: 24.8.0 + hooks: + - id: black + language_version: python3 diff --git a/.vscode/launch.json b/.vscode/launch.json index 97519f8..587e986 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -13,7 +13,7 @@ "name": "Launch_kit", "type": "debugpy", "request": "launch", - "program": "implementing_kit.py", + "program": "components/frontend/main.py", "console": "integratedTerminal", "justMyCode": false, } diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..23b8ea3 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,17 @@ +# Use an official Python runtime as a parent image +FROM python:3.12-alpine + +# Set the working directory in the container +WORKDIR /kit + +# Copy only the necessary files to install dependencies +COPY setup.py setup.cfg ./ + +# Install dependencies +RUN pip install --no-cache-dir . + +# Copy the rest of the application code +COPY . . + +# Install the local package +RUN pip install --no-cache-dir . 
diff --git a/Pipfile b/Pipfile index 7b1bb4a..59a54e2 100644 --- a/Pipfile +++ b/Pipfile @@ -4,9 +4,10 @@ verify_ssl = true name = "pypi" [packages] -kubefox = {file = ".", editable = true, path = "."} +kubefox-sdk = {file = ".", editable = true, path = "."} [dev-packages] +kubefox-sdk = {file = ".", editable = true, path = ".", extras = ["dev"]} [requires] python_version = "3.12" diff --git a/Pipfile.lock b/Pipfile.lock index be1cb0f..3033b37 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "f5ea3c72ada26aaa5ba85cc31040f6d5e5613b42dedaa42b90a3428f565a85df" + "sha256": "94bf157c12b3acfec913ff367eb60e6c4b7c5774963d00f15faca0b6246938e5" }, "pipfile-spec": 6, "requires": { @@ -24,69 +24,102 @@ "markers": "python_version >= '3.7' and python_version < '4.0'", "version": "==0.6.7" }, + "deprecated": { + "hashes": [ + "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", + "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.2.14" + }, + "googleapis-common-protos": { + "hashes": [ + "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63", + "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0" + ], + "markers": "python_version >= '3.7'", + "version": "==1.65.0" + }, "grpcio": { "hashes": [ - "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040", - "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122", - "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9", - "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f", - "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd", - "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d", - "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33", - 
"sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762", - "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294", - "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650", - "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b", - "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad", - "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1", - "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff", - "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59", - "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4", - "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027", - "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502", - "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae", - "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61", - "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb", - "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa", - "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5", - "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1", - "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9", - "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90", - "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b", - "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179", - "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e", - "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a", - "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489", - "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d", - "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a", 
- "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2", - "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd", - "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb", - "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61", - "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca", - "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6", - "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602", - "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367", - "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62", - "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d", - "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd", - "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22", - "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309" - ], - "markers": "python_version >= '3.8'", - "version": "==1.64.1" - }, - "kubefox": { + "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd", + "sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604", + "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73", + "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3", + "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50", + "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6", + "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34", + "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249", + "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75", + "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8", + "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453", + 
"sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8", + "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d", + "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c", + "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c", + "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c", + "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39", + "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01", + "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231", + "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae", + "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a", + "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d", + "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987", + "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a", + "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7", + "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7", + "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3", + "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b", + "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf", + "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8", + "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf", + "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7", + "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839", + "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e", + "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b", + "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3", + "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee", 
+ "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54", + "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e", + "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc", + "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd", + "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d", + "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed", + "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7", + "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4", + "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a", + "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec", + "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8", + "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd", + "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c", + "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46", + "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e", + "sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf", + "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa", + "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679" + ], + "markers": "python_version >= '3.8'", + "version": "==1.66.2" + }, + "importlib-metadata": { + "hashes": [ + "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", + "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5" + ], + "markers": "python_version >= '3.8'", + "version": "==8.4.0" + }, + "kubefox-sdk": { "editable": true, "path": "." 
}, "marshmallow": { "hashes": [ - "sha256:4f57c5e050a54d66361e826f94fba213eb10b67b2fdb02c3e0343ce207ba1662", - "sha256:86ce7fb914aa865001a4b2092c4c2872d13bc347f3d42673272cabfdbad386f1" + "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e", + "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9" ], "markers": "python_version >= '3.8'", - "version": "==3.21.3" + "version": "==3.22.0" }, "mypy-extensions": { "hashes": [ @@ -96,13 +129,53 @@ "markers": "python_version >= '3.5'", "version": "==1.0.0" }, + "opentelemetry-api": { + "hashes": [ + "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", + "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-exporter-otlp-proto-common": { + "hashes": [ + "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", + "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-exporter-otlp-proto-grpc": { + "hashes": [ + "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", + "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, "opentelemetry-proto": { "hashes": [ - "sha256:35b6ef9dc4a9f7853ecc5006738ad40443701e52c26099e197895cbda8b815a3", - "sha256:f07e3341c78d835d9b86665903b199893befa5e98866f63d22b00d0b7ca4972f" + "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", + "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-sdk": { + "hashes": [ + "sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", + "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f" ], 
"markers": "python_version >= '3.8'", - "version": "==1.25.0" + "version": "==1.27.0" + }, + "opentelemetry-semantic-conventions": { + "hashes": [ + "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", + "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f" + ], + "markers": "python_version >= '3.8'", + "version": "==0.48b0" }, "packaging": { "hashes": [ @@ -114,20 +187,20 @@ }, "protobuf": { "hashes": [ - "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4", - "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8", - "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c", - "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d", - "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4", - "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa", - "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c", - "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019", - "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9", - "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c", - "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2" + "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", + "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea", + "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8", + "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45", + "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584", + "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d", + "sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1", + "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f", + "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a", + 
"sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173", + "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331" ], "markers": "python_version >= '3.8'", - "version": "==4.25.3" + "version": "==4.25.5" }, "typing-extensions": { "hashes": [ @@ -143,7 +216,548 @@ "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78" ], "version": "==0.9.0" + }, + "wrapt": { + "hashes": [ + "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc", + "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", + "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", + "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e", + "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca", + "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0", + "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb", + "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487", + "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40", + "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", + "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", + "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202", + "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41", + "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", + "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", + "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664", + "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", + "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", + "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00", + "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", + 
"sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", + "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267", + "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", + "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966", + "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", + "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228", + "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72", + "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", + "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292", + "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0", + "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0", + "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", + "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c", + "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5", + "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f", + "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", + "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", + "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2", + "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593", + "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39", + "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", + "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf", + "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf", + "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", + "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c", + "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c", 
+ "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f", + "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440", + "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465", + "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136", + "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b", + "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8", + "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", + "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8", + "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6", + "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e", + "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f", + "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c", + "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e", + "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", + "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2", + "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020", + "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35", + "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", + "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3", + "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537", + "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", + "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d", + "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a", + "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4" + ], + "markers": "python_version >= '3.6'", + "version": "==1.16.0" + }, + "zipp": { + "hashes": [ + 
"sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", + "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29" + ], + "markers": "python_version >= '3.8'", + "version": "==3.20.2" } }, - "develop": {} + "develop": { + "black": { + "hashes": [ + "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6", + "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e", + "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f", + "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018", + "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e", + "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd", + "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4", + "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed", + "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2", + "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42", + "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af", + "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb", + "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368", + "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb", + "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af", + "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed", + "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47", + "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2", + "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a", + "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c", + "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920", + "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1" + ], + "version": 
"==24.8.0" + }, + "cfgv": { + "hashes": [ + "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", + "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560" + ], + "markers": "python_version >= '3.8'", + "version": "==3.4.0" + }, + "click": { + "hashes": [ + "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", + "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.7" + }, + "dataclasses-json": { + "hashes": [ + "sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", + "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0" + ], + "markers": "python_version >= '3.7' and python_version < '4.0'", + "version": "==0.6.7" + }, + "deprecated": { + "hashes": [ + "sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c", + "sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.2.14" + }, + "distlib": { + "hashes": [ + "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784", + "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64" + ], + "version": "==0.3.8" + }, + "filelock": { + "hashes": [ + "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", + "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435" + ], + "markers": "python_version >= '3.8'", + "version": "==3.16.1" + }, + "googleapis-common-protos": { + "hashes": [ + "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63", + "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0" + ], + "markers": "python_version >= '3.7'", + "version": "==1.65.0" + }, + "grpcio": { + "hashes": [ + "sha256:02697eb4a5cbe5a9639f57323b4c37bcb3ab2d48cec5da3dc2f13334d72790dd", + 
"sha256:03b0b307ba26fae695e067b94cbb014e27390f8bc5ac7a3a39b7723fed085604", + "sha256:05bc2ceadc2529ab0b227b1310d249d95d9001cd106aa4d31e8871ad3c428d73", + "sha256:06de8ec0bd71be123eec15b0e0d457474931c2c407869b6c349bd9bed4adbac3", + "sha256:0be4e0490c28da5377283861bed2941d1d20ec017ca397a5df4394d1c31a9b50", + "sha256:12fda97ffae55e6526825daf25ad0fa37483685952b5d0f910d6405c87e3adb6", + "sha256:1caa38fb22a8578ab8393da99d4b8641e3a80abc8fd52646f1ecc92bcb8dee34", + "sha256:2018b053aa15782db2541ca01a7edb56a0bf18c77efed975392583725974b249", + "sha256:20657d6b8cfed7db5e11b62ff7dfe2e12064ea78e93f1434d61888834bc86d75", + "sha256:2335c58560a9e92ac58ff2bc5649952f9b37d0735608242973c7a8b94a6437d8", + "sha256:31fd163105464797a72d901a06472860845ac157389e10f12631025b3e4d0453", + "sha256:38b68498ff579a3b1ee8f93a05eb48dc2595795f2f62716e797dc24774c1aaa8", + "sha256:3b00efc473b20d8bf83e0e1ae661b98951ca56111feb9b9611df8efc4fe5d55d", + "sha256:3ed71e81782966ffead60268bbda31ea3f725ebf8aa73634d5dda44f2cf3fb9c", + "sha256:45a3d462826f4868b442a6b8fdbe8b87b45eb4f5b5308168c156b21eca43f61c", + "sha256:49f0ca7ae850f59f828a723a9064cadbed90f1ece179d375966546499b8a2c9c", + "sha256:4e504572433f4e72b12394977679161d495c4c9581ba34a88d843eaf0f2fbd39", + "sha256:4ea1d062c9230278793820146c95d038dc0f468cbdd172eec3363e42ff1c7d01", + "sha256:563588c587b75c34b928bc428548e5b00ea38c46972181a4d8b75ba7e3f24231", + "sha256:6001e575b8bbd89eee11960bb640b6da6ae110cf08113a075f1e2051cc596cae", + "sha256:66a0cd8ba6512b401d7ed46bb03f4ee455839957f28b8d61e7708056a806ba6a", + "sha256:6851de821249340bdb100df5eacfecfc4e6075fa85c6df7ee0eb213170ec8e5d", + "sha256:728bdf36a186e7f51da73be7f8d09457a03061be848718d0edf000e709418987", + "sha256:73e3b425c1e155730273f73e419de3074aa5c5e936771ee0e4af0814631fb30a", + "sha256:73fc8f8b9b5c4a03e802b3cd0c18b2b06b410d3c1dcbef989fdeb943bd44aff7", + "sha256:78fa51ebc2d9242c0fc5db0feecc57a9943303b46664ad89921f5079e2e4ada7", + "sha256:7b2c86457145ce14c38e5bf6bdc19ef88e66c5fee2c3d83285c5aef026ba93b3", 
+ "sha256:7d69ce1f324dc2d71e40c9261d3fdbe7d4c9d60f332069ff9b2a4d8a257c7b2b", + "sha256:802d84fd3d50614170649853d121baaaa305de7b65b3e01759247e768d691ddf", + "sha256:80fd702ba7e432994df208f27514280b4b5c6843e12a48759c9255679ad38db8", + "sha256:8ac475e8da31484efa25abb774674d837b343afb78bb3bcdef10f81a93e3d6bf", + "sha256:950da58d7d80abd0ea68757769c9db0a95b31163e53e5bb60438d263f4bed7b7", + "sha256:99a641995a6bc4287a6315989ee591ff58507aa1cbe4c2e70d88411c4dcc0839", + "sha256:9c3a99c519f4638e700e9e3f83952e27e2ea10873eecd7935823dab0c1c9250e", + "sha256:9c509a4f78114cbc5f0740eb3d7a74985fd2eff022971bc9bc31f8bc93e66a3b", + "sha256:a18e20d8321c6400185b4263e27982488cb5cdd62da69147087a76a24ef4e7e3", + "sha256:a917d26e0fe980b0ac7bfcc1a3c4ad6a9a4612c911d33efb55ed7833c749b0ee", + "sha256:a9539f01cb04950fd4b5ab458e64a15f84c2acc273670072abe49a3f29bbad54", + "sha256:ad2efdbe90c73b0434cbe64ed372e12414ad03c06262279b104a029d1889d13e", + "sha256:b672abf90a964bfde2d0ecbce30f2329a47498ba75ce6f4da35a2f4532b7acbc", + "sha256:bbd27c24a4cc5e195a7f56cfd9312e366d5d61b86e36d46bbe538457ea6eb8dd", + "sha256:c400ba5675b67025c8a9f48aa846f12a39cf0c44df5cd060e23fda5b30e9359d", + "sha256:c408f5ef75cfffa113cacd8b0c0e3611cbfd47701ca3cdc090594109b9fcbaed", + "sha256:c806852deaedee9ce8280fe98955c9103f62912a5b2d5ee7e3eaa284a6d8d8e7", + "sha256:ce89f5876662f146d4c1f695dda29d4433a5d01c8681fbd2539afff535da14d4", + "sha256:d25a14af966438cddf498b2e338f88d1c9706f3493b1d73b93f695c99c5f0e2a", + "sha256:d8d4732cc5052e92cea2f78b233c2e2a52998ac40cd651f40e398893ad0d06ec", + "sha256:d9a9724a156c8ec6a379869b23ba3323b7ea3600851c91489b871e375f710bc8", + "sha256:e636ce23273683b00410f1971d209bf3689238cf5538d960adc3cdfe80dd0dbd", + "sha256:e88264caad6d8d00e7913996030bac8ad5f26b7411495848cc218bd3a9040b6c", + "sha256:f145cc21836c332c67baa6fc81099d1d27e266401565bf481948010d6ea32d46", + "sha256:fb57870449dfcfac428afbb5a877829fcb0d6db9d9baa1148705739e9083880e", + 
"sha256:fb70487c95786e345af5e854ffec8cb8cc781bcc5df7930c4fbb7feaa72e1cdf", + "sha256:fe96281713168a3270878255983d2cb1a97e034325c8c2c25169a69289d3ecfa", + "sha256:ff1f7882e56c40b0d33c4922c15dfa30612f05fb785074a012f7cda74d1c3679" + ], + "markers": "python_version >= '3.8'", + "version": "==1.66.2" + }, + "identify": { + "hashes": [ + "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0", + "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98" + ], + "markers": "python_version >= '3.8'", + "version": "==2.6.1" + }, + "importlib-metadata": { + "hashes": [ + "sha256:66f342cc6ac9818fc6ff340576acd24d65ba0b3efabb2b4ac08b598965a4a2f1", + "sha256:9a547d3bc3608b025f93d403fdd1aae741c24fbb8314df4b155675742ce303c5" + ], + "markers": "python_version >= '3.8'", + "version": "==8.4.0" + }, + "isort": { + "hashes": [ + "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", + "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6" + ], + "version": "==5.13.2" + }, + "kubefox-sdk": { + "editable": true, + "path": "." 
+ }, + "marshmallow": { + "hashes": [ + "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e", + "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9" + ], + "markers": "python_version >= '3.8'", + "version": "==3.22.0" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version >= '3.5'", + "version": "==1.0.0" + }, + "nodeenv": { + "hashes": [ + "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", + "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", + "version": "==1.9.1" + }, + "opentelemetry-api": { + "hashes": [ + "sha256:953d5871815e7c30c81b56d910c707588000fff7a3ca1c73e6531911d53065e7", + "sha256:ed673583eaa5f81b5ce5e86ef7cdaf622f88ef65f0b9aab40b843dcae5bef342" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-exporter-otlp-proto-common": { + "hashes": [ + "sha256:159d27cf49f359e3798c4c3eb8da6ef4020e292571bd8c5604a2a573231dd5c8", + "sha256:675db7fffcb60946f3a5c43e17d1168a3307a94a930ecf8d2ea1f286f3d4f79a" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-exporter-otlp-proto-grpc": { + "hashes": [ + "sha256:56b5bbd5d61aab05e300d9d62a6b3c134827bbd28d0b12f2649c2da368006c9e", + "sha256:af6f72f76bcf425dfb5ad11c1a6d6eca2863b91e63575f89bb7b4b55099d968f" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-proto": { + "hashes": [ + "sha256:33c9345d91dafd8a74fc3d7576c5a38f18b7fdf8d02983ac67485386132aedd6", + "sha256:b133873de5581a50063e1e4b29cdcf0c5e253a8c2d8dc1229add20a4c3830ace" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-sdk": { + "hashes": [ + 
"sha256:365f5e32f920faf0fd9e14fdfd92c086e317eaa5f860edba9cdc17a380d9197d", + "sha256:d525017dea0ccce9ba4e0245100ec46ecdc043f2d7b8315d56b19aff0904fa6f" + ], + "markers": "python_version >= '3.8'", + "version": "==1.27.0" + }, + "opentelemetry-semantic-conventions": { + "hashes": [ + "sha256:12d74983783b6878162208be57c9effcb89dc88691c64992d70bb89dc00daa1a", + "sha256:a0de9f45c413a8669788a38569c7e0a11ce6ce97861a628cca785deecdc32a1f" + ], + "markers": "python_version >= '3.8'", + "version": "==0.48b0" + }, + "packaging": { + "hashes": [ + "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002", + "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124" + ], + "markers": "python_version >= '3.8'", + "version": "==24.1" + }, + "pathspec": { + "hashes": [ + "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", + "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712" + ], + "markers": "python_version >= '3.8'", + "version": "==0.12.1" + }, + "platformdirs": { + "hashes": [ + "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", + "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb" + ], + "markers": "python_version >= '3.8'", + "version": "==4.3.6" + }, + "pre-commit": { + "hashes": [ + "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af", + "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f" + ], + "version": "==3.8.0" + }, + "protobuf": { + "hashes": [ + "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", + "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea", + "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8", + "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45", + "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584", + "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d", + 
"sha256:98d8d8aa50de6a2747efd9cceba361c9034050ecce3e09136f90de37ddba66e1", + "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f", + "sha256:b0234dd5a03049e4ddd94b93400b67803c823cfc405689688f59b34e0742381a", + "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173", + "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331" + ], + "markers": "python_version >= '3.8'", + "version": "==4.25.5" + }, + "pyyaml": { + "hashes": [ + "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff", + "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", + "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", + "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", + "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", + "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", + "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", + "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", + "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", + "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", + "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a", + "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", + "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", + "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", + "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", + "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", + "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", + "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a", + "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", + 
"sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", + "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", + "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", + "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", + "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", + "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", + "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", + "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", + "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", + "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", + "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706", + "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", + "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", + "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", + "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083", + "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", + "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", + "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", + "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", + "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", + "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", + "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", + "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", + "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", + "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", + "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5", 
+ "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d", + "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", + "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", + "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", + "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", + "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", + "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", + "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4" + ], + "markers": "python_version >= '3.8'", + "version": "==6.0.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", + "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8" + ], + "markers": "python_version >= '3.8'", + "version": "==4.12.2" + }, + "typing-inspect": { + "hashes": [ + "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", + "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78" + ], + "version": "==0.9.0" + }, + "virtualenv": { + "hashes": [ + "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48", + "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2" + ], + "markers": "python_version >= '3.7'", + "version": "==20.26.6" + }, + "wrapt": { + "hashes": [ + "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc", + "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81", + "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09", + "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e", + "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca", + "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0", + 
"sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb", + "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487", + "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40", + "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c", + "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060", + "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202", + "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41", + "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9", + "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b", + "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664", + "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d", + "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362", + "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00", + "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc", + "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1", + "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267", + "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956", + "sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966", + "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1", + "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228", + "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72", + "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d", + "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292", + "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0", + "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0", + "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36", 
+ "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c", + "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5", + "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f", + "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73", + "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b", + "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2", + "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593", + "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39", + "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389", + "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf", + "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf", + "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89", + "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c", + "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c", + "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f", + "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440", + "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465", + "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136", + "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b", + "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8", + "sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3", + "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8", + "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6", + "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e", + "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f", + 
"sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c", + "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e", + "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8", + "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2", + "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020", + "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35", + "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d", + "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3", + "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537", + "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809", + "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d", + "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a", + "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4" + ], + "markers": "python_version >= '3.6'", + "version": "==1.16.0" + }, + "zipp": { + "hashes": [ + "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350", + "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29" + ], + "markers": "python_version >= '3.8'", + "version": "==3.20.2" + } + } } diff --git a/README.md b/README.md index 07f06f5..92721df 100644 --- a/README.md +++ b/README.md @@ -25,4 +25,5 @@ Auto-generate files from .proto: Running the python script locally and taking the hello-world creds: - sudo mkdir -p /var/run/secrets/kubernetes.io/serviceaccount/ && sudo chown ${USER}:${USER} /var/run/secrets/kubernetes.io/serviceaccount/ -- kubectl create token -n kubefox-debug hello-world-frontend-976e059 > /var/run/secrets/kubernetes.io/serviceaccount/token \ No newline at end of file +- kubectl create token -n kubefox-debug hello-world-frontend-976e059 > /var/run/secrets/kubernetes.io/serviceaccount/token +- kubectl create token -n 
kubefox-debug hello-world-frontend-976e059 >/tmp/kubefox/hello-world-token \ No newline at end of file diff --git a/app.yaml b/app.yaml new file mode 100644 index 0000000..b1bc3ec --- /dev/null +++ b/app.yaml @@ -0,0 +1,6 @@ +# This is only for testing +name: hello-world +title: Hello World +description: A simple App demonstrating the use of KubeFox in python. +environment: + KUBEFOX_LOG_LEVEL: debug diff --git a/components/frontend/Dockerfile b/components/frontend/Dockerfile new file mode 100644 index 0000000..2885cf6 --- /dev/null +++ b/components/frontend/Dockerfile @@ -0,0 +1,8 @@ +# This image is built from the Dockerfile at the root of the project +FROM kubefox-sdk:0.0.1 + +WORKDIR /app +COPY . . + +# TODO: Install new dependencies that are required for the component + diff --git a/components/frontend/main.py b/components/frontend/main.py new file mode 100644 index 0000000..01be7b0 --- /dev/null +++ b/components/frontend/main.py @@ -0,0 +1,58 @@ +"""Test running the Python Kit SDK""" + +import asyncio +import logging +import random + +from opentelemetry import trace + +from kit import Kit +from kit.proto.protobuf_msgs_pb2 import Category, Event, EventContext, MatchedEvent + +logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)s [%(module)s:%(lineno)d - %(levelname)s]: %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", +) + +tracer = trace.get_tracer(__name__) + +# TODO: Extract this out into a "nice to use" interface that exposes the same functionality as the golang client + + +def return_response(event: MatchedEvent) -> Event: + my_context = EventContext( + platform="debug", + virtual_environment="qa", + app_deployment="hello-world-main", + release_manifest="", + ) + target_component = event.event.target + target_component.id = "1234" + target_component.hash = "976e059" + my_event = Event( + context=my_context, + type="io.kubefox.kubefox", + content_type="text/plain; charset=UTF-8", + ttl=event.event.ttl, + source=target_component, + 
target=event.event.source, + content=b"hello world", + category=Category.RESPONSE, + parent_id=event.event.id, + parent_span=event.event.parent_span, + ) + return my_event + + +async def my_cool_function(event: MatchedEvent) -> Event: + with tracer.start_as_current_span("my_cool_function"): + await asyncio.sleep(random.uniform(0.01, 1)) + return return_response(event) + + +if __name__ == "__main__": + instance = Kit.new() + instance.export = False + instance.route("Path(`/{{.Vars.subPath}}/hello`)", my_cool_function) + asyncio.run(instance.start()) diff --git a/implementing_kit.py b/implementing_kit.py deleted file mode 100644 index 857b5a8..0000000 --- a/implementing_kit.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Test running the Python Kit SDK""" - - -from kit import Kit -import logging - -logging.basicConfig( - level=logging.DEBUG, - format="%(asctime)s [%(module)s:%(lineno)d - %(levelname)s]: %(message)s", - datefmt='%Y-%m-%d %H:%M:%S' -) - -if __name__ == "__main__": - instance = Kit.new() - instance.start() diff --git a/kit/api/__init__.py b/kit/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/api/component.py b/kit/api/component.py new file mode 100644 index 0000000..23bef21 --- /dev/null +++ b/kit/api/component.py @@ -0,0 +1,31 @@ +from dataclasses import dataclass +from typing import Self + +from kit.proto.protobuf_msgs_pb2 import Component as ProtoObject +from kit.utils import utils + + +@dataclass +class Component: + proto_object: ProtoObject + + @classmethod + def new_component(cls, typ, app, name, component_hash) -> Self: + return cls( + ProtoObject( + Type=str(typ), + App=utils.clean_name(app), + Name=utils.clean_name(name), + Hash=component_hash, + ) + ) + + @classmethod + def new_target_component(cls, typ, name) -> Self: + return cls(ProtoObject(Type=str(typ), Name=utils.clean_name(name))) + + @classmethod + def new_platform_component(cls, typ, name, component_hash) -> Self: + return cls( + ProtoObject(Type=str(typ), 
Name=utils.clean_name(name), Hash=component_hash) + ) diff --git a/vars.py b/kit/api/constants.py similarity index 99% rename from vars.py rename to kit/api/constants.py index 4fc7d77..e4bcb9f 100644 --- a/vars.py +++ b/kit/api/constants.py @@ -1,5 +1,5 @@ -import re import os +import re # Misc SECRET_MASK = "••••••" @@ -216,7 +216,8 @@ def is_adapter(c): REGEXP_IMAGE = re.compile(r"^.*:[a-z0-9-]{40}$") REGEXP_NAME = re.compile(r"^[a-z0-9][a-z0-9-]{0,28}[a-z0-9]$") REGEXP_UUID = re.compile( - r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$") + r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" +) KUBEFOX_HOME = os.getenv("KUBEFOX_HOME", os.path.join("/", "tmp", "kubefox")) diff --git a/kit/api/env_template.py b/kit/api/env_template.py new file mode 100644 index 0000000..731ae62 --- /dev/null +++ b/kit/api/env_template.py @@ -0,0 +1,156 @@ +import re +import string +from dataclasses import dataclass, field +from typing import Dict, List, Optional + +from dataclasses_json import LetterCase, dataclass_json + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvVarDefinition: + type: str + required: bool + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvSchema: + vars: Dict[str, EnvVarDefinition] = field(default_factory=dict) + secrets: Dict[str, EnvVarDefinition] = field(default_factory=dict) + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvTemplate: + name: str + template: str + env_schema: EnvSchema = field(default_factory=EnvSchema) + parse_err: Optional[Exception] = None + + def __post_init__(self): + resolved = " ".join(self.template.split()) + try: + self.tree = string.Template(resolved) + except Exception as e: + self.parse_err = e + return + + # Simulate parsing the template and extracting variables + for match in re.finditer(r"\{\{(\w+)\.(\w+)\}\}", self.template): + section, name = match.groups() + if section in ["Vars", "Env"]: + self.env_schema.vars[name] = 
EnvVarDefinition(type="", required=True) + elif section == "Secrets": + self.env_schema.secrets[name] = EnvVarDefinition(type="", required=True) + + def parse_error(self) -> Optional[Exception]: + return self.parse_err + + def resolve(self, data: "Data", include_secrets: bool) -> str: + if data is None: + data = Data() + + env_var_data = { + "Vars": {k: v for k, v in data.vars.items()}, + "Env": {k: v for k, v in data.vars.items()}, + "Secrets": ( + {k: v for k, v in data.secrets.items()} if include_secrets else {} + ), + } + + try: + result = self.tree.safe_substitute(env_var_data) + return result.replace("", "") + except KeyError as e: + raise ValueError(f"Missing key in template: {e}") + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Data: + vars: Dict[str, "Val"] = field(default_factory=dict) + secrets: Dict[str, "Val"] = field(default_factory=dict) + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class ProblemSource: + kind: str + name: str + observed_generation: int + path: str + value: str + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Problem: + type: str + message: str + causes: List[ProblemSource] + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Val: + type: str + value: str + + def env_var_type(self) -> str: + return self.type + + def array_string(self) -> List[str]: + return self.value.split(",") + + +@dataclass +class EnvVar: + val: Val + + def __str__(self) -> str: + if self.val.type in ["ArrayNumber", "ArrayString"]: + return ( + "{" + + "|".join(f"^{re.escape(s)}$" for s in self.val.array_string()) + + "}" + ) + return self.val.value + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvVarSchema: + vars: Dict[str, EnvVarDefinition] = field(default_factory=dict) + secrets: Dict[str, EnvVarDefinition] = field(default_factory=dict) + + def validate( + self, typ: str, vars: Dict[str, Val], src: ProblemSource, append_name: bool + ) -> List[Problem]: + 
problems = [] + for var_name, var_def in self.vars.items(): + val = vars.get(var_name) + if not val and var_def.required: + src_copy = src + if append_name: + src_copy.path = f"{src.path}.{var_name}" + problems.append( + Problem( + type="VarNotFound", + message=f'{typ} "{var_name}" not found but is required.', + causes=[src_copy], + ) + ) + elif val and var_def.type and val.env_var_type() != var_def.type: + src_copy = src + src_copy.path = f"{src.path}.{var_name}.type" + src_copy.value = var_def.type + problems.append( + Problem( + type="VarWrongType", + message=f'{typ} "{var_name}" has wrong type; wanted "{ + var_def.type}" got "{val.env_var_type()}".', + causes=[src_copy], + ) + ) + return problems diff --git a/kit/api/event_reader.py b/kit/api/event_reader.py new file mode 100644 index 0000000..a7839c1 --- /dev/null +++ b/kit/api/event_reader.py @@ -0,0 +1,78 @@ +from dataclasses import dataclass +from typing import Any, Optional + +from kit.api.kit_types import EventType +from kit.proto.protobuf_msgs_pb2 import Event + + +@dataclass +class EventReader: + # params: dict[str, str] = None + # param_vs: dict[str, Any] = None + # url: str = None + # path_suffix: str = None + # queries: dict[str, str] = None + # query_vs: dict[str, Any] = None + # headers: dict[str, str] = None + # header_vs: dict[str, Any] = None + # status: int = None + # status_v: Optional[Any] = None + event: Event = None + + @property + def event_type(self) -> EventType: + return EventType(self.event.type) + + def param(self, key: str) -> str: + raise NotImplementedError + + def param_v(self, key: str) -> Any: + raise NotImplementedError + + def param_def(self, key: str, default: str) -> str: + raise NotImplementedError + + def url(self) -> str: + raise NotImplementedError + + def path_suffix(self) -> str: + raise NotImplementedError + + def query(self, key: str) -> str: + raise NotImplementedError + + def query_v(self, key: str) -> Any: + raise NotImplementedError + + def query_def(self, key: 
str, default: str) -> str: + raise NotImplementedError + + def query_all(self, key: str) -> list[str]: + raise NotImplementedError + + def header(self, key: str) -> str: + raise NotImplementedError + + def header_v(self, key: str) -> Any: + raise NotImplementedError + + def header_def(self, key: str, default: str) -> str: + raise NotImplementedError + + def header_all(self, key: str) -> list[str]: + raise NotImplementedError + + def status(self) -> int: + raise NotImplementedError + + def status_v(self) -> Optional[Any]: + raise NotImplementedError + + def bind(self, v: Any) -> None: + raise NotImplementedError + + def str(self) -> str: + raise NotImplementedError + + def bytes(self) -> bytes: + raise NotImplementedError diff --git a/kit/api/event_writer.py b/kit/api/event_writer.py new file mode 100644 index 0000000..0c63ac0 --- /dev/null +++ b/kit/api/event_writer.py @@ -0,0 +1,60 @@ +from dataclasses import dataclass +from typing import Any, Optional, Union +from urllib.parse import urlparse, urlunparse + +from kit.api.event_reader import EventReader + + +@dataclass +class EventWriter(EventReader): + def SetParam(self, key: str, value: str) -> None: + """Set a parameter with a string value.""" + raise NotImplementedError + + def SetParamV(self, key: str, value: Any) -> None: + """Set a parameter with a value of any type.""" + raise NotImplementedError + + def SetURL(self, u: str) -> None: + """Set the URL.""" + raise NotImplementedError + + def RewritePath(self, path: str) -> None: + """Rewrite the path.""" + raise NotImplementedError + + def SetQuery(self, key: str, value: str) -> None: + """Set a query parameter with a string value.""" + raise NotImplementedError + + def SetQueryV(self, key: str, value: Any) -> None: + """Set a query parameter with a value of any type.""" + raise NotImplementedError + + def DelQuery(self, key: str) -> None: + """Delete a query parameter.""" + raise NotImplementedError + + def SetHeader(self, key: str, value: str) -> None: + 
"""Set a header with a string value.""" + raise NotImplementedError + + def SetHeaderV(self, key: str, value: Any) -> None: + """Set a header with a value of any type.""" + raise NotImplementedError + + def AddHeader(self, key: str, value: str) -> None: + """Add a header with a string value.""" + raise NotImplementedError + + def DelHeader(self, key: str) -> None: + """Delete a header.""" + raise NotImplementedError + + def SetStatus(self, code: int) -> None: + """Set the status code.""" + raise NotImplementedError + + def SetStatusV(self, val: Any) -> None: + """Set the status code with a value of any type.""" + raise NotImplementedError diff --git a/kit/api/exceptions.py b/kit/api/exceptions.py new file mode 100644 index 0000000..a0e6efd --- /dev/null +++ b/kit/api/exceptions.py @@ -0,0 +1,157 @@ +import json +import traceback +from enum import Enum +from typing import Optional + +# Define the error codes as an enumeration + + +class Code(Enum): + UNEXPECTED = 0 + BROKER_MISMATCH = 1 + BROKER_UNAVAILABLE = 2 + COMPONENT_GONE = 3 + COMPONENT_MISMATCH = 4 + CONTENT_TOO_LARGE = 5 + INVALID = 6 + NOT_FOUND = 7 + PORT_UNAVAILABLE = 8 + ROUTE_INVALID = 9 + ROUTE_NOT_FOUND = 10 + TIMEOUT = 11 + UNAUTHORIZED = 12 + UNKNOWN_CONTENT_TYPE = 13 + UNSUPPORTED_ADAPTER = 14 + + +# Global variable to control stack trace recording +RECORD_STACK_TRACES = False + + +class KubeFoxError(Exception): + def __init__( + self, + msg: str, + code: Code, + grpc_code: int, + http_code: int, + cause: Optional[str] = None, + ) -> None: + self.msg = msg + self.code = code + self.grpc_code = grpc_code + self.http_code = http_code + self.cause = cause + self.stack = ( + traceback.format_stack() + if RECORD_STACK_TRACES or code == Code.UNEXPECTED + else None + ) + + def __str__(self): + if self.cause: + return f"{self.msg}: {self.cause}" + return self.msg + + def to_dict(self): + return { + "msg": self.msg, + "code": self.code.value, + "grpc_code": self.grpc_code, + "http_code": self.http_code, + 
"cause": self.cause, + "stack": self.stack, + } + + def to_json(self): + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str): + data = json.loads(json_str) + return cls( + msg=data["msg"], + code=Code(data["code"]), + grpc_code=data["grpc_code"], + http_code=data["http_code"], + cause=data["cause"], + ) + + +class KubeFoxErrorBrokerMismatch(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("broker mismatch", Code.BROKER_MISMATCH, 9, 502, cause) + + +class KubeFoxErrorBrokerUnavailable(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("broker unavailable", Code.BROKER_UNAVAILABLE, 14, 502, cause) + + +class KubeFoxErrorComponentGone(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("component gone", Code.COMPONENT_GONE, 9, 502, cause) + + +class KubeFoxErrorComponentMismatch(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("component mismatch", Code.COMPONENT_MISMATCH, 9, 502, cause) + + +class KubeFoxErrorContentTooLarge(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("content too large", Code.CONTENT_TOO_LARGE, 8, 413, cause) + + +class KubeFoxErrorInvalid(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("invalid", Code.INVALID, 3, 400, cause) + + +class KubeFoxErrorNotFound(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("not found", Code.NOT_FOUND, 12, 404, cause) + + +class KubeFoxErrorPortUnavailable(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("port unavailable", Code.PORT_UNAVAILABLE, 14, 409, cause) + + +class KubeFoxErrorRouteInvalid(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("route invalid", Code.ROUTE_INVALID, 3, 400, 
cause) + + +class KubeFoxErrorRouteNotFound(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("route not found", Code.ROUTE_NOT_FOUND, 12, 404, cause) + + +class KubeFoxErrorTimeout(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("time out", Code.TIMEOUT, 4, 504, cause) + + +class KubeFoxErrorUnauthorized(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("component unauthorized", Code.UNAUTHORIZED, 7, 403, cause) + + +class KubeFoxErrorUnexpected(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__("unexpected error", Code.UNEXPECTED, 2, 500, cause) + + +class KubeFoxErrorUnknownContentType(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__( + "unknown content type", Code.UNKNOWN_CONTENT_TYPE, 3, 400, cause + ) + + +class KubeFoxErrorUnsupportedAdapter(KubeFoxError): + def __init__(self, cause: Optional[str] = None) -> None: + super().__init__( + "unsupported adapter", Code.UNSUPPORTED_ADAPTER, 12, 400, cause + ) diff --git a/kit/api/kit_types.py b/kit/api/kit_types.py new file mode 100644 index 0000000..c9ae93a --- /dev/null +++ b/kit/api/kit_types.py @@ -0,0 +1,170 @@ +from dataclasses import dataclass +from enum import Enum +from typing import Any, Dict, List + +from dataclasses_json import LetterCase, dataclass_json + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Object: + def get_namespace(self) -> str: + pass + + def get_name(self) -> str: + pass + + def get_resource_version(self) -> str: + pass + + def get_generation(self) -> int: + pass + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvVarDefinition: + type: str + required: bool + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class EnvSchema: + vars: Dict[str, EnvVarDefinition] + secrets: Dict[str, EnvVarDefinition] + + 
+@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class ComponentDefinition: + type: str + routes: List["RouteSpec"] + default_handler: bool + env_var_schema: Dict[str, EnvVarDefinition] + dependencies: Dict[str, "Dependency"] + hash: str + image: str + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class RouteSpec: + id: int + rule: str + env_var_schema: Dict[str, EnvVarDefinition] + priority: int = None + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Dependency: + type: str + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Details: + title: str + description: str + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class ProblemSource: + kind: str + name: str + observed_generation: int + path: str + value: str + + +@dataclass_json(letter_case=LetterCase.CAMEL) +@dataclass +class Problem: + type: str + message: str + causes: List[ProblemSource] + + +class EventType(str, Enum): + Cron = "io.kubefox.cron" + Dapr = "io.kubefox.dapr" + HTTP = "io.kubefox.http" + KubeFox = "io.kubefox.kubefox" + Kubernetes = "io.kubefox.kubernetes" + Ack = "io.kubefox.ack" + Bootstrap = "io.kubefox.bootstrap" + Error = "io.kubefox.error" + Health = "io.kubefox.health" + Metrics = "io.kubefox.metrics" + Nack = "io.kubefox.nack" + Register = "io.kubefox.register" + Rejected = "io.kubefox.rejected" + Telemetry = "io.kubefox.telemetry" + Unknown = "io.kubefox.unknown" + + def __str__(self) -> str: + return self.value + + +class ComponentType(Enum): + Broker = "Broker" + HTTPAdapter = "HTTPAdapter" + KubeFox = "KubeFox" + NATS = "NATS" + + +class EnvVarType(Enum): + Array = "Array" + Boolean = "Boolean" + Number = "Number" + String = "String" + + +@dataclass +class EnvVarDep: + name: str + type: EnvVarType + + +@dataclass +class ComponentDep: + name: str + app: str + type: ComponentType + event_type: EventType + + +# @dataclass +# class RouteSpec: +# pass + + +@dataclass +class Route: + route_spec: RouteSpec + # 
Swap this out for a better type + handler: Any + + +# @dataclass +# class Dependency: +# type: ComponentType +# app: str +# name: str + +# def name(self) -> str: +# return self.name + +# def app(self) -> str: +# return self.app + +# def type(self) -> ComponentType: +# return self.type + +# def event_type(self) -> EventType: +# if self.type == ComponentType.HTTPAdapter: +# return EventType.HTTP +# else: +# return EventType.KubeFox diff --git a/kit/api/request.py b/kit/api/request.py new file mode 100644 index 0000000..007128a --- /dev/null +++ b/kit/api/request.py @@ -0,0 +1,102 @@ +from io import IOBase +from typing import Any + + +class Req: + """ + Req interface. + + This interface provides methods for sending requests to a target Component and + returning the response. + """ + + def SendStr(self, s: str) -> tuple["EventReader", Exception]: + """ + SendStr sends the request to the target Component and returns the response. + The given string is used as the content of the request Event, content-type + is set to 'text/plain'. + + Args: + s (str): The string to be sent. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. + """ + raise NotImplementedError + + def SendHTML(self, h: str) -> tuple["EventReader", Exception]: + """ + SendHTML sends the request to the target Component and returns the response. + The given HTML is used as the content of the request Event, content-type + is set to 'text/html'. + + Args: + h (str): The HTML to be sent. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. + """ + raise NotImplementedError + + def SendJSON(self, v: Any) -> tuple["EventReader", Exception]: + """ + SendJSON sends the request to the target Component and returns the response. + The given object is marshalled to JSON and the output is used as the + content of the request Event, content-type is set to 'application/json'. 
+ + Args: + v (Any): The object to be sent. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. + """ + raise NotImplementedError + + def SendBytes( + self, content_type: str, content: bytes + ) -> tuple["EventReader", Exception]: + """ + SendBytes sends the request to the target Component using the given + content-type and content and returns the response. + + Args: + content_type (str): The content type of the request. + content (bytes): The content of the request. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. + """ + raise NotImplementedError + + def SendReader( + self, content_type: str, reader: IOBase + ) -> tuple["EventReader", Exception]: + """ + SendReader sends the request to the target Component and returns the response. + All data is read from the given reader and is used as the content of the + request Event. If the reader implements io.ReadCloser then it will be + automatically closed. + + Args: + content_type (str): The content type of the request. + reader (IOBase): The reader to read the content from. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. + """ + raise NotImplementedError + + def Send(self) -> tuple["EventReader", Exception]: + """ + Send sends the request to the target Component and returns the response. + + Returns: + tuple['EventReader', Exception]: A tuple containing the EventReader + and any error that occurred. 
+ """ + raise NotImplementedError diff --git a/kit/api/types.py b/kit/api/types.py deleted file mode 100644 index a562bda..0000000 --- a/kit/api/types.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Dict, List -from dataclasses import dataclass -from dataclasses_json import dataclass_json, LetterCase - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Object: - def get_namespace(self) -> str: - pass - - def get_name(self) -> str: - pass - - def get_resource_version(self) -> str: - pass - - def get_generation(self) -> int: - pass - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class EnvVarDefinition: - type: str - required: bool - - -EnvVarSchema = Dict[str, EnvVarDefinition] - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class EnvSchema: - vars: EnvVarSchema - secrets: EnvVarSchema - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class ComponentDefinition: - type: str - routes: List['RouteSpec'] - default_handler: bool - env_var_schema: EnvVarSchema - dependencies: Dict[str, 'Dependency'] - hash: str - image: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class RouteSpec: - id: int - rule: str - priority: int - env_var_schema: EnvVarSchema - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Dependency: - type: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Details: - title: str - description: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class ProblemSource: - kind: str - name: str - observed_generation: int - path: str - value: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Problem: - type: str - message: str - causes: List[ProblemSource] diff --git a/kit/api/vars.py b/kit/api/vars.py index 4fc7d77..e4bcb9f 100644 --- a/kit/api/vars.py +++ b/kit/api/vars.py @@ -1,5 +1,5 @@ -import re import os +import re # Misc SECRET_MASK = "••••••" @@ -216,7 +216,8 @@ def is_adapter(c): 
REGEXP_IMAGE = re.compile(r"^.*:[a-z0-9-]{40}$") REGEXP_NAME = re.compile(r"^[a-z0-9][a-z0-9-]{0,28}[a-z0-9]$") REGEXP_UUID = re.compile( - r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$") + r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" +) KUBEFOX_HOME = os.getenv("KUBEFOX_HOME", os.path.join("/", "tmp", "kubefox")) diff --git a/kit/kit.py b/kit/kit.py index 01ed2a7..47d6e4b 100644 --- a/kit/kit.py +++ b/kit/kit.py @@ -1,165 +1,233 @@ +import asyncio import logging -from dataclasses import dataclass import os -from typing import Any, Callable, Generator, List, Optional, Self, Union -from enum import Enum -from urllib.parse import urlparse -from enum import Enum - -from grpc import StatusCode, insecure_channel, secure_channel, ssl_channel_credentials -from kit.api.types import ComponentDefinition, Dependency, EnvVarDefinition, EnvVarSchema, RouteSpec -from dataclasses_json import dataclass_json, LetterCase -from kit.api.vars import DEFAULT_MAX_EVENT_SIZE_BYTES -from proto.broker_svc_pb2_grpc import BrokerStub -from proto.protobuf_msgs_pb2 import Event, MatchedEvent, EventContext - +import random +from dataclasses import dataclass, field +from typing import Any, AsyncGenerator, Coroutine, Dict, List, Self, Tuple + +from dataclasses_json import LetterCase, dataclass_json +from grpc import aio, ssl_channel_credentials +from opentelemetry import trace + +from kit.api import exceptions as KitExceptions +from kit.api import vars as KitVars +from kit.api.env_template import EnvTemplate +from kit.api.kit_types import ( + ComponentDefinition, + ComponentType, + EventType, + Route, + RouteSpec, +) +from kit.proto.broker_svc_pb2_grpc import BrokerStub +from kit.proto.protobuf_msgs_pb2 import ( + Category, + Component, + Event, + EventContext, + MatchedEvent, +) +from kit.telemetry.trace import ( + attach_event_attributes, + extract_otel_context, + setup_trace_provder, +) + +tracer = trace.get_tracer(__name__) logger = 
logging.getLogger(__name__) +max_attempts = 5 # TODO: https://www.python-httpx.org/advanced/transports/#custom-transports # Implement a custom transport for HTTPX to handle putting data into and pulling data out of an event -class EventType(Enum): - Cron = "io.kubefox.cron" - Dapr = "io.kubefox.dapr" - HTTP = "io.kubefox.http" - KubeFox = "io.kubefox.kubefox" - Kubernetes = "io.kubefox.kubernetes" - Ack = "io.kubefox.ack" - Bootstrap = "io.kubefox.bootstrap" - Error = "io.kubefox.error" - Health = "io.kubefox.health" - Metrics = "io.kubefox.metrics" - Nack = "io.kubefox.nack" - Register = "io.kubefox.register" - Rejected = "io.kubefox.rejected" - Telemetry = "io.kubefox.telemetry" - Unknown = "io.kubefox.unknown" - - -class ComponentType(Enum): - Broker = "Broker" - HTTPAdapter = "HTTPAdapter" - KubeFox = "KubeFox" - NATS = "NATS" - - -class EnvVarType(Enum): - Array = "Array" - Boolean = "Boolean" - Number = "Number" - String = "String" - - -class Val: - pass - - -class URL: - pass - - -class Logger: - pass - - -class FS: - pass - - -class Client: - pass - - -class RoundTripper: - pass - - -class Context: - pass - - -class Template: - pass - - -class HTMLTemplate: - pass - - -# EventHandler = Callable[['Kontext'], None] - - -@dataclass -class EnvVarDep: - name: str - type: EnvVarType - - -@dataclass -class ComponentDep: - name: str - app: str - type: ComponentType - event_type: EventType - - -# @dataclass -# class RouteSpec: -# pass - - -@dataclass -class Route: - route_spec: RouteSpec - # handler: EventHandler - - -# @dataclass -# class Dependency: -# type: ComponentType -# app: str -# name: str - -# def name(self) -> str: -# return self.name - -# def app(self) -> str: -# return self.app - -# def type(self) -> ComponentType: -# return self.type - -# def event_type(self) -> EventType: -# if self.type == ComponentType.HTTPAdapter: -# return EventType.HTTP -# else: -# return EventType.KubeFox - -def make_event() -> Generator[Event, None, None]: - my_context = 
EventContext(platform="debug", virtual_environment="virtual_environment", - app_deployment="hello-world", release_manifest="release_manifest") - yield Event(context=my_context, id="1234") +def set_json(event: Event, v) -> None: + if v is None: + v = {} + b = v.to_json().encode("utf-8") + + event.content_type = f"{ + KitVars.CONTENT_TYPE_JSON}; {KitVars.CHARSET_UTF8}" + event.content = b + + +def create_subscription() -> Event: + my_context = EventContext( + platform="debug", + virtual_environment="virtual_environment", + app_deployment="hello-world", + release_manifest="release_manifest", + ) + my_event = Event( + context=my_context, + id="1234", + type=EventType.Register.value, + content_type="application/json", + ttl=1, + ) + component_def = ComponentDefinition( + type=ComponentType.KubeFox, + default_handler=False, + hash="123", + image="abc", + # TODO: Pass along the routes to the initial subscription event + routes=[], + env_var_schema={}, + dependencies={}, + ) + set_json(event=my_event, v=component_def) + return my_event + + +async def yield_event_queue( + initial_subscription: Event, request_queue: asyncio.Queue +) -> AsyncGenerator[Event, Any]: + yield initial_subscription + while True: + new_request: Event = await request_queue.get() + with tracer.start_as_current_span( + name=f"Send {Category.Name(new_request.category)} to broker", + context=extract_otel_context(new_request.parent_span), + ) as root_span: + attach_event_attributes(event=new_request, span=root_span) + yield new_request + request_queue.task_done() @dataclass_json(letter_case=LetterCase.CAMEL) @dataclass class Kit: - routes: List[Route] - comp_def: Any max_event_size: int num_workers: int export: bool - log: Logger - brk: Any + log: logging.Logger + broker_component: Component = None + routes: Dict[int, Route] = field(default_factory=dict) + comp_def: ComponentDefinition = field(default_factory=ComponentDefinition) + # TODO: Update the typing here to be more specific than Any + 
default_request_handler: Coroutine[Any, Any, Any] = None + + to_broker_queue: asyncio.Queue = asyncio.Queue() + from_broker_queue: asyncio.Queue = asyncio.Queue() + + def __post_init__(self) -> None: + # TODO: Pull data to pass along to the initial setup of the tracer instead of hard coding + metadata = { + "id": "1234", + "hash": "976e059", + "name": "hello-world", + "component": "frontend", + } + setup_trace_provder( + component_id=metadata["id"], + component_hash=metadata["hash"], + name=metadata["name"], + component=metadata["component"], + ) + + def register_default_request_handler( + self, handler: Coroutine[Any, Any, Any] + ) -> None: + self.default_request_handler = handler + self.comp_def.default_handler = handler is not None + + def route(self, rule: str, handler: Coroutine[Any, Any, Any]) -> None: + r = EnvTemplate("route", rule) + kit_route_spec = RouteSpec( + id=len(self.routes), rule=rule, env_var_schema=r.env_schema.vars + ) + kit_route = Route(route_spec=kit_route_spec, handler=handler) + self.routes.update({len(self.routes): kit_route}) + self.comp_def.routes.append(kit_route_spec) + + async def handle_subscription( + self, + initial_sub: AsyncGenerator[Event, Any], + stub: BrokerStub, + metadata_sequence: List[Tuple[str, str]], + ) -> None: + response = stub.Subscribe(initial_sub, metadata=metadata_sequence) + subscribed = False + async for res in response: + res_event: MatchedEvent = res + if not subscribed: + subscribed = True + self.broker_component = res_event.event.source + print(f"Initial subscription event response: {res_event}") + else: + print("Got message from broker") + self.from_broker_queue.put_nowait(res_event) + + async def process_responses(self, worker_name: str) -> None: + """ + Handles the responses from the broker. This is one of the two main loops of + kit. 
+ + TODO: Add some more documentation here + TODO: Implement all categories and exception handling + """ + try: + while True: + # Wait until there is an event in the queue + matched_event: MatchedEvent = await self.from_broker_queue.get() + + with tracer.start_as_current_span( + name=f"Handle {Category.Name(matched_event.event.category)}", + context=extract_otel_context(matched_event.event.parent_span), + ) as root_span: + attach_event_attributes(matched_event=matched_event, span=root_span) + + print( + f"Worker: {worker_name}, " + + f"Processing event: {matched_event.event.id}" + ) + + if matched_event.event.category == Category.REQUEST: + route_id = matched_event.route_id + route = self.routes.get(route_id, None) + if route is None: + handler = self.default_request_handler + if handler is None: + raise KitExceptions.KubeFoxErrorNotFound( + "default handler not found" + ) + else: + handler = route.handler + result_of_response_to_request = await handler(matched_event) + print( + f"Got result from handler for worker: { + worker_name}" + ) + await self.to_broker_queue.put(result_of_response_to_request) + elif matched_event.event.category == Category.RESPONSE: + print("Response received") + + self.from_broker_queue.task_done() + + except Exception as e: + logger.exception(f"Error in process_responses") + finally: + raise KitExceptions.KubeFoxErrorUnexpected("Shutting down worker") + + async def start(self, attempt: int = 0): + if attempt >= max_attempts: + raise KitExceptions.KubeFoxErrorTimeout("broker subscription timed out") + + if self.export: + # TODO: Write out in JSON the component definition + print(self.comp_def.to_json()) + return + # TODO: Code for starting up broker in "dry-run" mode is needed. 
This is used to generate the ApplicationManifest k8s resource definition + # Extracting env vars from routes: https://github.com/xigxog/kubefox/blob/main/api/env_template.go#L53 - def start(self): - # Alternative to a context managed instance is to use `.close()` # open ca.crt and read into string: with open("/tmp/kubefox/ca.crt", "r") as file: root_ca = file.read() - creds = ssl_channel_credentials(root_certificates=root_ca.encode(), - private_key=None, certificate_chain=None) + with open("/tmp/kubefox/hello-world-token", "r") as file: + token = file.read() + creds = ssl_channel_credentials( + root_certificates=root_ca.encode(), private_key=None, certificate_chain=None + ) # TODO: Implement this config: # grpcCfg := `{ @@ -174,51 +242,55 @@ def start(self): # "RetryableStatusCodes": [ "UNAVAILABLE" ] # } # }]}` - with secure_channel("127.0.0.1:6060", credentials=creds) as channel: - # TODO: See if I can just hit the broker directly: - # This is the golang code to create a new client: - # broker := grpc.NewClient(grpc.ClientOpts{ - # Platform: adapter.Platform, - # Component: comp, - # Pod: pod, - # BrokerAddr: adapter.BrokerAddr, - # HealthSrvAddr: adapter.HealthSrvAddr, - # TokenPath: tokenPath, - # }) - print("-------------- BrokerStub --------------") - stub = BrokerStub(channel) - result = stub.Subscribe(make_event()) - if not result.is_active(): - logger.error(result.exception()) - raise RuntimeError(result.details) - logger.debug(result.details()) - - # def route(self, rule: str, handler: EventHandler): - # pass - - # def static(self, path_prefix: str, fs_prefix: str, fs: FS): - # pass - - # def default(self, handler: EventHandler): - # pass - - # def env_var(self, name: str, opts: List[Any]) -> EnvVarDep: - # pass - - # def component(self, name: str) -> ComponentDep: - # pass - # def http_adapter(self, name: str) -> ComponentDep: - # pass - - # def title(self, title: str): - # pass - - # def description(self, description: str): - # pass + 
logger.info("subscribing to broker, attempt %d/%d", attempt + 1, max_attempts) + async with aio.secure_channel( + target="127.0.0.1:6060", credentials=creds + ) as channel: + stub = BrokerStub(channel) - # def log(self) -> Logger: - # pass + # A CallCredentials has to be used with secure Channel, otherwise the metadata will not be transmitted to the server. + metadata = { + "id": "1234", + "hash": "976e059", + "name": "hello-world", + "app": "hello-world", + "type": "KubeFox", + "platform": "debug", + "pod": "hello-world", + "token": token, + "component": "frontend", + } + metadata_sequence = [(k, v) for k, v in metadata.items()] + my_event = create_subscription() + response_workers = [] + for i in range(os.cpu_count()): + response_workers.append(asyncio.create_task(self.process_responses(i))) + try: + await asyncio.gather( + *[ + self.handle_subscription( + yield_event_queue( + initial_subscription=my_event, + request_queue=self.to_broker_queue, + ), + stub, + metadata_sequence, + ), + *response_workers, + ] + ) + except (asyncio.exceptions.CancelledError, ConnectionRefusedError): + logger.warning("broker subscription closed", exc_info=True) + await asyncio.sleep(random.randint(1, 2)) + await self.start(attempt + 1) + except Exception as e: + print(f"Error: {e}") + raise + finally: + # Should probably make sure we empty out the event queues + # print("Done - To clean up anything here?") + pass @classmethod def new(cls) -> Self: @@ -246,7 +318,7 @@ def new(cls) -> Self: # return svc svc = cls( - routes=[], + # routes=[], comp_def=ComponentDefinition( type=ComponentType.KubeFox, default_handler=False, @@ -254,9 +326,9 @@ def new(cls) -> Self: image="abc", routes=[], env_var_schema={}, - dependencies={} + dependencies={}, ), - max_event_size=DEFAULT_MAX_EVENT_SIZE_BYTES, + max_event_size=KitVars.DEFAULT_MAX_EVENT_SIZE_BYTES, num_workers=os.cpu_count(), export=False, # log=logkf.Global, @@ -267,194 +339,6 @@ def new(cls) -> Self: # HealthSrvAddr=healthAddr # )) 
log=None, - brk=None ) return svc - - -# class Kontext: -# def env(self, v: EnvVarDep) -> str: -# pass - -# def env_v(self, v: EnvVarDep) -> Val: -# pass - -# def env_def(self, v: EnvVarDep, def: str) -> str: -# pass - -# def env_def_v(self, v: EnvVarDep, def: Val) -> Val: -# pass - -# def resp(self) -> 'Resp': -# pass - -# def req(self, target: ComponentDep) -> 'Req': -# pass - -# def forward(self, target: ComponentDep) -> 'Req': -# pass - -# def http(self, target: ComponentDep) -> Client: -# pass - -# def transport(self, target: ComponentDep) -> RoundTripper: -# pass - -# def context(self) -> Context: -# pass - -# def log(self) -> Logger: -# pass - - -# class Req: -# def send_str(self, s: str) -> ('EventReader', Optional[Exception]): -# pass - -# def send_html(self, h: str) -> ('EventReader', Optional[Exception]): -# pass - -# def send_json(self, v: Any) -> ('EventReader', Optional[Exception]): -# pass - -# def send_bytes(self, content_type: str, content: bytes) -> ('EventReader', Optional[Exception]): -# pass - -# def send_reader(self, content_type: str, reader: Any) -> ('EventReader', Optional[Exception]): -# pass - -# def send(self) -> ('EventReader', Optional[Exception]): -# pass - - -# class Resp: -# def forward(self, evt: 'EventReader') -> Optional[Exception]: -# pass - -# def send_str(self, s: str) -> Optional[Exception]: -# pass - -# def send_html(self, h: str) -> Optional[Exception]: -# pass - -# def send_json(self, v: Any) -> Optional[Exception]: -# pass - -# def send_accepts(self, json: Any, html: str, str: str) -> Optional[Exception]: -# pass - -# def send_bytes(self, content_type: str, b: bytes) -> Optional[Exception]: -# pass - -# def send_reader(self, content_type: str, reader: Any) -> Optional[Exception]: -# pass - -# def send_template(self, tpl: Template, name: str, data: Any) -> Optional[Exception]: -# pass - -# def send_html_template(self, tpl: HTMLTemplate, name: str, data: Any) -> Optional[Exception]: -# pass - -# def send(self) -> 
Optional[Exception]: -# pass - - -# class EventReader: -# def event_type(self) -> EventType: -# pass - -# def param(self, key: str) -> str: -# pass - -# def param_v(self, key: str) -> Val: -# pass - -# def param_def(self, key: str, def: str) -> str: -# pass - -# def url(self) -> (URL, Optional[Exception]): -# pass - -# def path_suffix(self) -> str: -# pass - -# def query(self, key: str) -> str: -# pass - -# def query_v(self, key: str) -> Val: -# pass - -# def query_def(self, key: str, def: str) -> str: -# pass - -# def query_all(self, key: str) -> List[str]: -# pass - -# def header(self, key: str) -> str: -# pass - -# def header_v(self, key: str) -> Val: -# pass - -# def header_def(self, key: str, def: str) -> str: -# pass - -# def header_all(self, key: str) -> List[str]: -# pass - -# def status(self) -> int: -# pass - -# def status_v(self) -> Val: -# pass - -# def bind(self, v: Any) -> Optional[Exception]: -# pass - -# def str(self) -> str: -# pass - -# def bytes(self) -> bytes: -# pass - - -# class EventWriter(EventReader): -# def set_param(self, key: str, value: str): -# pass - -# def set_param_v(self, key: str, value: Val): -# pass - -# def set_url(self, u: URL): -# pass - -# def rewrite_path(self, path: str): -# pass - -# def set_query(self, key: str, value: str): -# pass - -# def set_query_v(self, key: str, value: Val): -# pass - -# def del_query(self, key: str): -# pass - -# def set_header(self, key: str, value: str): -# pass - -# def set_header_v(self, key: str, value: Val): -# pass - -# def add_header(self, key: str, value: str): -# pass - -# def del_header(self, key: str): -# pass - -# def set_status(self, code: int): -# pass - -# def set_status_v(self, val: Val): -# pass diff --git a/kit/kit_types.py b/kit/kit_types.py deleted file mode 100644 index a562bda..0000000 --- a/kit/kit_types.py +++ /dev/null @@ -1,88 +0,0 @@ -from typing import Dict, List -from dataclasses import dataclass -from dataclasses_json import dataclass_json, LetterCase - - 
-@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Object: - def get_namespace(self) -> str: - pass - - def get_name(self) -> str: - pass - - def get_resource_version(self) -> str: - pass - - def get_generation(self) -> int: - pass - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class EnvVarDefinition: - type: str - required: bool - - -EnvVarSchema = Dict[str, EnvVarDefinition] - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class EnvSchema: - vars: EnvVarSchema - secrets: EnvVarSchema - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class ComponentDefinition: - type: str - routes: List['RouteSpec'] - default_handler: bool - env_var_schema: EnvVarSchema - dependencies: Dict[str, 'Dependency'] - hash: str - image: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class RouteSpec: - id: int - rule: str - priority: int - env_var_schema: EnvVarSchema - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Dependency: - type: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Details: - title: str - description: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class ProblemSource: - kind: str - name: str - observed_generation: int - path: str - value: str - - -@dataclass_json(letter_case=LetterCase.SNAKE) -@dataclass -class Problem: - type: str - message: str - causes: List[ProblemSource] diff --git a/kit/proto/__init__.py b/kit/proto/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/broker_svc_pb2.py b/kit/proto/broker_svc_pb2.py index e314d90..e0302e4 100644 --- a/kit/proto/broker_svc_pb2.py +++ b/kit/proto/broker_svc_pb2.py @@ -3,25 +3,28 @@ # source: broker_svc.proto # Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" -import protobuf_msgs_pb2 as protobuf__msgs__pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool 
from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() +import protobuf_msgs_pb2 as protobuf__msgs__pb2 + DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x10\x62roker_svc.proto\x12\x10kubefox.proto.v1\x1a\x13protobuf_msgs.proto2R\n\x06\x42roker\x12H\n\tSubscribe\x12\x17.kubefox.proto.v1.Event\x1a\x1e.kubefox.proto.v1.MatchedEvent(\x01\x30\x01\x42 Z\x1egithub.com/xigxog/kubefox/grpcb\x06proto3') + b"\n\x10\x62roker_svc.proto\x12\x10kubefox.proto.v1\x1a\x13protobuf_msgs.proto2R\n\x06\x42roker\x12H\n\tSubscribe\x12\x17.kubefox.proto.v1.Event\x1a\x1e.kubefox.proto.v1.MatchedEvent(\x01\x30\x01\x42 Z\x1egithub.com/xigxog/kubefox/grpcb\x06proto3" +) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'broker_svc_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "broker_svc_pb2", _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'Z\036github.com/xigxog/kubefox/grpc' - _globals['_BROKER']._serialized_start = 59 - _globals['_BROKER']._serialized_end = 141 + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = b"Z\036github.com/xigxog/kubefox/grpc" + _globals["_BROKER"]._serialized_start = 59 + _globals["_BROKER"]._serialized_end = 141 # @@protoc_insertion_point(module_scope) diff --git a/kit/proto/broker_svc_pb2_grpc.py b/kit/proto/broker_svc_pb2_grpc.py index 73afe3d..d58042e 100644 --- a/kit/proto/broker_svc_pb2_grpc.py +++ b/kit/proto/broker_svc_pb2_grpc.py @@ -1,33 +1,36 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings -import proto.protobuf_msgs_pb2 as protobuf__msgs__pb2 +import grpc + +import kit.proto.protobuf_msgs_pb2 as protobuf__msgs__pb2 -GRPC_GENERATED_VERSION = '1.64.1' +GRPC_GENERATED_VERSION = "1.64.1" GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = '1.65.0' -SCHEDULED_RELEASE_DATE = 'June 25, 2024' +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" _version_not_supported = False try: from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION) + GRPC_VERSION, GRPC_GENERATED_VERSION + ) except ImportError: _version_not_supported = True if _version_not_supported: warnings.warn( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in broker_svc_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' - + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', - RuntimeWarning + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in broker_svc_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, ) @@ -41,10 +44,11 @@ def __init__(self, channel): channel: A grpc.Channel. 
""" self.Subscribe = channel.stream_stream( - '/kubefox.proto.v1.Broker/Subscribe', + "/kubefox.proto.v1.Broker/Subscribe", request_serializer=protobuf__msgs__pb2.Event.SerializeToString, response_deserializer=protobuf__msgs__pb2.MatchedEvent.FromString, - _registered_method=True) + _registered_method=True, + ) class BrokerServicer(object): @@ -53,45 +57,50 @@ class BrokerServicer(object): def Subscribe(self, request_iterator, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_BrokerServicer_to_server(servicer, server): rpc_method_handlers = { - 'Subscribe': grpc.stream_stream_rpc_method_handler( + "Subscribe": grpc.stream_stream_rpc_method_handler( servicer.Subscribe, request_deserializer=protobuf__msgs__pb2.Event.FromString, response_serializer=protobuf__msgs__pb2.MatchedEvent.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( - 'kubefox.proto.v1.Broker', rpc_method_handlers) + "kubefox.proto.v1.Broker", rpc_method_handlers + ) server.add_generic_rpc_handlers((generic_handler,)) server.add_registered_method_handlers( - 'kubefox.proto.v1.Broker', rpc_method_handlers) + "kubefox.proto.v1.Broker", rpc_method_handlers + ) + - # This class is part of an EXPERIMENTAL API. +# This class is part of an EXPERIMENTAL API. 
class Broker(object): """Missing associated documentation comment in .proto file.""" @staticmethod - def Subscribe(request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - insecure=False, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None): + def Subscribe( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): return grpc.experimental.stream_stream( request_iterator, target, - '/kubefox.proto.v1.Broker/Subscribe', + "/kubefox.proto.v1.Broker/Subscribe", protobuf__msgs__pb2.Event.SerializeToString, protobuf__msgs__pb2.MatchedEvent.FromString, options, @@ -102,4 +111,5 @@ def Subscribe(request_iterator, wait_for_ready, timeout, metadata, - _registered_method=True) + _registered_method=True, + ) diff --git a/kit/proto/google/__init__.py b/kit/proto/google/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/google/protobuf/__init__.py b/kit/proto/google/protobuf/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/google/protobuf/struct_pb2.py b/kit/proto/google/protobuf/struct_pb2.py new file mode 100644 index 0000000..9e9ac4a --- /dev/null +++ b/kit/proto/google/protobuf/struct_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/struct.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.protobuf.struct_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + 
b"\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes" + ) + _globals["_STRUCT_FIELDSENTRY"]._loaded_options = None + _globals["_STRUCT_FIELDSENTRY"]._serialized_options = b"8\001" + _globals["_NULLVALUE"]._serialized_start = 474 + _globals["_NULLVALUE"]._serialized_end = 501 + _globals["_STRUCT"]._serialized_start = 50 + _globals["_STRUCT"]._serialized_end = 182 + _globals["_STRUCT_FIELDSENTRY"]._serialized_start = 113 + _globals["_STRUCT_FIELDSENTRY"]._serialized_end = 182 + _globals["_VALUE"]._serialized_start = 185 + _globals["_VALUE"]._serialized_end = 419 + _globals["_LISTVALUE"]._serialized_start = 421 + _globals["_LISTVALUE"]._serialized_end = 472 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/google/protobuf/struct_pb2.pyi b/kit/proto/google/protobuf/struct_pb2.pyi new file mode 100644 index 0000000..bf56db7 --- /dev/null +++ b/kit/proto/google/protobuf/struct_pb2.pyi @@ -0,0 +1,77 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf.internal import well_known_types as _well_known_types + +DESCRIPTOR: _descriptor.FileDescriptor + +class NullValue(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + NULL_VALUE: _ClassVar[NullValue] + +NULL_VALUE: NullValue + +class Struct(_message.Message, _well_known_types.Struct): + __slots__ = ("fields",) + + class FieldsEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + 
value: Value + def __init__( + self, + key: _Optional[str] = ..., + value: _Optional[_Union[Value, _Mapping]] = ..., + ) -> None: ... + + FIELDS_FIELD_NUMBER: _ClassVar[int] + fields: _containers.MessageMap[str, Value] + def __init__(self, fields: _Optional[_Mapping[str, Value]] = ...) -> None: ... + +class Value(_message.Message): + __slots__ = ( + "null_value", + "number_value", + "string_value", + "bool_value", + "struct_value", + "list_value", + ) + NULL_VALUE_FIELD_NUMBER: _ClassVar[int] + NUMBER_VALUE_FIELD_NUMBER: _ClassVar[int] + STRING_VALUE_FIELD_NUMBER: _ClassVar[int] + BOOL_VALUE_FIELD_NUMBER: _ClassVar[int] + STRUCT_VALUE_FIELD_NUMBER: _ClassVar[int] + LIST_VALUE_FIELD_NUMBER: _ClassVar[int] + null_value: NullValue + number_value: float + string_value: str + bool_value: bool + struct_value: Struct + list_value: ListValue + def __init__( + self, + null_value: _Optional[_Union[NullValue, str]] = ..., + number_value: _Optional[float] = ..., + string_value: _Optional[str] = ..., + bool_value: bool = ..., + struct_value: _Optional[_Union[Struct, _Mapping]] = ..., + list_value: _Optional[_Union[ListValue, _Mapping]] = ..., + ) -> None: ... + +class ListValue(_message.Message, _well_known_types.ListValue): + __slots__ = ("values",) + VALUES_FIELD_NUMBER: _ClassVar[int] + values: _containers.RepeatedCompositeFieldContainer[Value] + def __init__( + self, values: _Optional[_Iterable[_Union[Value, _Mapping]]] = ... + ) -> None: ... diff --git a/kit/proto/google/protobuf/timestamp_pb2.py b/kit/proto/google/protobuf/timestamp_pb2.py new file mode 100644 index 0000000..6c56e2d --- /dev/null +++ b/kit/proto/google/protobuf/timestamp_pb2.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/protobuf/timestamp.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "google.protobuf.timestamp_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes" + ) + _globals["_TIMESTAMP"]._serialized_start = 52 + _globals["_TIMESTAMP"]._serialized_end = 95 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/google/protobuf/timestamp_pb2.pyi b/kit/proto/google/protobuf/timestamp_pb2.pyi new file mode 100644 index 0000000..a4c4447 --- /dev/null +++ b/kit/proto/google/protobuf/timestamp_pb2.pyi @@ -0,0 +1,18 @@ +from typing import ClassVar as _ClassVar +from typing import Optional as _Optional + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import well_known_types as _well_known_types + 
+DESCRIPTOR: _descriptor.FileDescriptor + +class Timestamp(_message.Message, _well_known_types.Timestamp): + __slots__ = ("seconds", "nanos") + SECONDS_FIELD_NUMBER: _ClassVar[int] + NANOS_FIELD_NUMBER: _ClassVar[int] + seconds: int + nanos: int + def __init__( + self, seconds: _Optional[int] = ..., nanos: _Optional[int] = ... + ) -> None: ... diff --git a/kit/proto/opentelemetry/__init__.py b/kit/proto/opentelemetry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/collector/__init__.py b/kit/proto/opentelemetry/proto/collector/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/collector/logs/v1/__init__.py b/kit/proto/opentelemetry/proto/collector/logs/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py new file mode 100644 index 0000000..483f7dc --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/collector/logs/v1/logs_service.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.logs.v1 import ( + logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.collector.logs.v1.logs_service_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + 
b"\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1" + ) + _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_start = 139 + _globals["_EXPORTLOGSSERVICEREQUEST"]._serialized_end = 231 + _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_start = 233 + _globals["_EXPORTLOGSSERVICERESPONSE"]._serialized_end = 350 + _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_start = 352 + _globals["_EXPORTLOGSPARTIALSUCCESS"]._serialized_end = 431 + _globals["_LOGSSERVICE"]._serialized_start = 434 + _globals["_LOGSSERVICE"]._serialized_end = 591 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi new file mode 100644 index 0000000..e574a98 --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi @@ -0,0 +1,44 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from opentelemetry.proto.logs.v1 import logs_pb2 as _logs_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class ExportLogsServiceRequest(_message.Message): + __slots__ = ("resource_logs",) + RESOURCE_LOGS_FIELD_NUMBER: _ClassVar[int] + resource_logs: _containers.RepeatedCompositeFieldContainer[_logs_pb2.ResourceLogs] + def __init__( + self, + resource_logs: _Optional[ + _Iterable[_Union[_logs_pb2.ResourceLogs, _Mapping]] + ] = ..., + ) -> None: ... 
+ +class ExportLogsServiceResponse(_message.Message): + __slots__ = ("partial_success",) + PARTIAL_SUCCESS_FIELD_NUMBER: _ClassVar[int] + partial_success: ExportLogsPartialSuccess + def __init__( + self, + partial_success: _Optional[_Union[ExportLogsPartialSuccess, _Mapping]] = ..., + ) -> None: ... + +class ExportLogsPartialSuccess(_message.Message): + __slots__ = ("rejected_log_records", "error_message") + REJECTED_LOG_RECORDS_FIELD_NUMBER: _ClassVar[int] + ERROR_MESSAGE_FIELD_NUMBER: _ClassVar[int] + rejected_log_records: int + error_message: str + def __init__( + self, + rejected_log_records: _Optional[int] = ..., + error_message: _Optional[str] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py new file mode 100644 index 0000000..7864a85 --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py @@ -0,0 +1,125 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc +from opentelemetry.proto.collector.logs.v1 import ( + logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." 
+ + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) + + +class LogsServiceStub(object): + """Service that can be used to push logs between one Application instrumented with + OpenTelemetry and an collector, or between an collector and a central collector (in this + case logs are sent/received to/from multiple Applications). + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Export = channel.unary_unary( + "/opentelemetry.proto.collector.logs.v1.LogsService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, + _registered_method=True, + ) + + +class LogsServiceServicer(object): + """Service that can be used to push logs between one Application instrumented with + OpenTelemetry and an collector, or between an collector and a central collector (in this + case logs are sent/received to/from multiple Applications). + """ + + def Export(self, request, context): + """For performance reasons, it is recommended to keep this RPC + alive for the entire life of the application. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_LogsServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "opentelemetry.proto.collector.logs.v1.LogsService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers( + "opentelemetry.proto.collector.logs.v1.LogsService", rpc_method_handlers + ) + + +# This class is part of an EXPERIMENTAL API. +class LogsService(object): + """Service that can be used to push logs between one Application instrumented with + OpenTelemetry and an collector, or between an collector and a central collector (in this + case logs are sent/received to/from multiple Applications). 
+ """ + + @staticmethod + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/opentelemetry.proto.collector.logs.v1.LogsService/Export", + opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString, + opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/kit/proto/opentelemetry/proto/collector/metrics/v1/__init__.py b/kit/proto/opentelemetry/proto/collector/metrics/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py new file mode 100644 index 0000000..702708d --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/collector/metrics/v1/metrics_service.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.metrics.v1 import ( + metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.collector.metrics.v1.metrics_service_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + 
_globals["DESCRIPTOR"]._serialized_options = ( + b"\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1" + ) + _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_start = 154 + _globals["_EXPORTMETRICSSERVICEREQUEST"]._serialized_end = 258 + _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_start = 260 + _globals["_EXPORTMETRICSSERVICERESPONSE"]._serialized_end = 386 + _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_start = 388 + _globals["_EXPORTMETRICSPARTIALSUCCESS"]._serialized_end = 470 + _globals["_METRICSSERVICE"]._serialized_start = 473 + _globals["_METRICSSERVICE"]._serialized_end = 645 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi new file mode 100644 index 0000000..e004369 --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi @@ -0,0 +1,46 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from opentelemetry.proto.metrics.v1 import metrics_pb2 as _metrics_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class ExportMetricsServiceRequest(_message.Message): + __slots__ = ("resource_metrics",) + RESOURCE_METRICS_FIELD_NUMBER: _ClassVar[int] + resource_metrics: _containers.RepeatedCompositeFieldContainer[ + _metrics_pb2.ResourceMetrics + ] + def __init__( + self, + resource_metrics: _Optional[ + _Iterable[_Union[_metrics_pb2.ResourceMetrics, _Mapping]] + ] = ..., + ) -> None: ... 
+ +class ExportMetricsServiceResponse(_message.Message): + __slots__ = ("partial_success",) + PARTIAL_SUCCESS_FIELD_NUMBER: _ClassVar[int] + partial_success: ExportMetricsPartialSuccess + def __init__( + self, + partial_success: _Optional[_Union[ExportMetricsPartialSuccess, _Mapping]] = ..., + ) -> None: ... + +class ExportMetricsPartialSuccess(_message.Message): + __slots__ = ("rejected_data_points", "error_message") + REJECTED_DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + ERROR_MESSAGE_FIELD_NUMBER: _ClassVar[int] + rejected_data_points: int + error_message: str + def __init__( + self, + rejected_data_points: _Optional[int] = ..., + error_message: _Optional[str] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py new file mode 100644 index 0000000..a78d89b --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py @@ -0,0 +1,125 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc +from opentelemetry.proto.collector.metrics.v1 import ( + metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) + + +class MetricsServiceStub(object): + """Service that can be used to push metrics between one Application + instrumented with OpenTelemetry and a collector, or between a collector and a + central collector. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.Export = channel.unary_unary( + "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, + _registered_method=True, + ) + + +class MetricsServiceServicer(object): + """Service that can be used to push metrics between one Application + instrumented with OpenTelemetry and a collector, or between a collector and a + central collector. + """ + + def Export(self, request, context): + """For performance reasons, it is recommended to keep this RPC + alive for the entire life of the application. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_MetricsServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "opentelemetry.proto.collector.metrics.v1.MetricsService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers( + "opentelemetry.proto.collector.metrics.v1.MetricsService", rpc_method_handlers + ) + + +# This class is part of an EXPERIMENTAL API. 
+class MetricsService(object): + """Service that can be used to push metrics between one Application + instrumented with OpenTelemetry and a collector, or between a collector and a + central collector. + """ + + @staticmethod + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/opentelemetry.proto.collector.metrics.v1.MetricsService/Export", + opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString, + opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/kit/proto/opentelemetry/proto/collector/trace/v1/__init__.py b/kit/proto/opentelemetry/proto/collector/trace/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py new file mode 100644 index 0000000..e097f3f --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/collector/trace/v1/trace_service.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.trace.v1 import ( + trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.collector.trace.v1.trace_service_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + 
b"\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1" + ) + _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_start = 144 + _globals["_EXPORTTRACESERVICEREQUEST"]._serialized_end = 240 + _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_start = 242 + _globals["_EXPORTTRACESERVICERESPONSE"]._serialized_end = 362 + _globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_start = 364 + _globals["_EXPORTTRACEPARTIALSUCCESS"]._serialized_end = 438 + _globals["_TRACESERVICE"]._serialized_start = 441 + _globals["_TRACESERVICE"]._serialized_end = 603 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi new file mode 100644 index 0000000..edff7c0 --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi @@ -0,0 +1,44 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from opentelemetry.proto.trace.v1 import trace_pb2 as _trace_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class ExportTraceServiceRequest(_message.Message): + __slots__ = ("resource_spans",) + RESOURCE_SPANS_FIELD_NUMBER: _ClassVar[int] + resource_spans: _containers.RepeatedCompositeFieldContainer[ + _trace_pb2.ResourceSpans + ] + def __init__( + self, + resource_spans: _Optional[ + _Iterable[_Union[_trace_pb2.ResourceSpans, _Mapping]] + ] = ..., + ) -> None: ... 
+ +class ExportTraceServiceResponse(_message.Message): + __slots__ = ("partial_success",) + PARTIAL_SUCCESS_FIELD_NUMBER: _ClassVar[int] + partial_success: ExportTracePartialSuccess + def __init__( + self, + partial_success: _Optional[_Union[ExportTracePartialSuccess, _Mapping]] = ..., + ) -> None: ... + +class ExportTracePartialSuccess(_message.Message): + __slots__ = ("rejected_spans", "error_message") + REJECTED_SPANS_FIELD_NUMBER: _ClassVar[int] + ERROR_MESSAGE_FIELD_NUMBER: _ClassVar[int] + rejected_spans: int + error_message: str + def __init__( + self, rejected_spans: _Optional[int] = ..., error_message: _Optional[str] = ... + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py new file mode 100644 index 0000000..34f931f --- /dev/null +++ b/kit/proto/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py @@ -0,0 +1,125 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc +from opentelemetry.proto.collector.trace.v1 import ( + trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2, +) + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." 
+ + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) + + +class TraceServiceStub(object): + """Service that can be used to push spans between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector (in this + case spans are sent/received to/from multiple Applications). + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.Export = channel.unary_unary( + "/opentelemetry.proto.collector.trace.v1.TraceService/Export", + request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, + response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, + _registered_method=True, + ) + + +class TraceServiceServicer(object): + """Service that can be used to push spans between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector (in this + case spans are sent/received to/from multiple Applications). + """ + + def Export(self, request, context): + """For performance reasons, it is recommended to keep this RPC + alive for the entire life of the application. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_TraceServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "Export": grpc.unary_unary_rpc_method_handler( + servicer.Export, + request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString, + response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "opentelemetry.proto.collector.trace.v1.TraceService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers( + "opentelemetry.proto.collector.trace.v1.TraceService", rpc_method_handlers + ) + + +# This class is part of an EXPERIMENTAL API. +class TraceService(object): + """Service that can be used to push spans between one Application instrumented with + OpenTelemetry and a collector, or between a collector and a central collector (in this + case spans are sent/received to/from multiple Applications). 
+ """ + + @staticmethod + def Export( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/opentelemetry.proto.collector.trace.v1.TraceService/Export", + opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString, + opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True, + ) diff --git a/kit/proto/opentelemetry/proto/common/v1/__init__.py b/kit/proto/opentelemetry/proto/common/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/common/v1/common_pb2.py b/kit/proto/opentelemetry/proto/common/v1/common_pb2.py new file mode 100644 index 0000000..f55ae93 --- /dev/null +++ b/kit/proto/opentelemetry/proto/common/v1/common_pb2.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/common/v1/common.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\rB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.common.v1.common_pb2", 
_globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1" + ) + _globals["_ANYVALUE"]._serialized_start = 78 + _globals["_ANYVALUE"]._serialized_end = 346 + _globals["_ARRAYVALUE"]._serialized_start = 348 + _globals["_ARRAYVALUE"]._serialized_end = 417 + _globals["_KEYVALUELIST"]._serialized_start = 419 + _globals["_KEYVALUELIST"]._serialized_end = 490 + _globals["_KEYVALUE"]._serialized_start = 492 + _globals["_KEYVALUE"]._serialized_end = 571 + _globals["_INSTRUMENTATIONSCOPE"]._serialized_start = 574 + _globals["_INSTRUMENTATIONSCOPE"]._serialized_end = 722 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/common/v1/common_pb2.pyi b/kit/proto/opentelemetry/proto/common/v1/common_pb2.pyi new file mode 100644 index 0000000..839042f --- /dev/null +++ b/kit/proto/opentelemetry/proto/common/v1/common_pb2.pyi @@ -0,0 +1,92 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers + +DESCRIPTOR: _descriptor.FileDescriptor + +class AnyValue(_message.Message): + __slots__ = ( + "string_value", + "bool_value", + "int_value", + "double_value", + "array_value", + "kvlist_value", + "bytes_value", + ) + STRING_VALUE_FIELD_NUMBER: _ClassVar[int] + BOOL_VALUE_FIELD_NUMBER: _ClassVar[int] + INT_VALUE_FIELD_NUMBER: _ClassVar[int] + DOUBLE_VALUE_FIELD_NUMBER: _ClassVar[int] + ARRAY_VALUE_FIELD_NUMBER: _ClassVar[int] + KVLIST_VALUE_FIELD_NUMBER: _ClassVar[int] + BYTES_VALUE_FIELD_NUMBER: _ClassVar[int] + 
string_value: str + bool_value: bool + int_value: int + double_value: float + array_value: ArrayValue + kvlist_value: KeyValueList + bytes_value: bytes + def __init__( + self, + string_value: _Optional[str] = ..., + bool_value: bool = ..., + int_value: _Optional[int] = ..., + double_value: _Optional[float] = ..., + array_value: _Optional[_Union[ArrayValue, _Mapping]] = ..., + kvlist_value: _Optional[_Union[KeyValueList, _Mapping]] = ..., + bytes_value: _Optional[bytes] = ..., + ) -> None: ... + +class ArrayValue(_message.Message): + __slots__ = ("values",) + VALUES_FIELD_NUMBER: _ClassVar[int] + values: _containers.RepeatedCompositeFieldContainer[AnyValue] + def __init__( + self, values: _Optional[_Iterable[_Union[AnyValue, _Mapping]]] = ... + ) -> None: ... + +class KeyValueList(_message.Message): + __slots__ = ("values",) + VALUES_FIELD_NUMBER: _ClassVar[int] + values: _containers.RepeatedCompositeFieldContainer[KeyValue] + def __init__( + self, values: _Optional[_Iterable[_Union[KeyValue, _Mapping]]] = ... + ) -> None: ... + +class KeyValue(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: AnyValue + def __init__( + self, + key: _Optional[str] = ..., + value: _Optional[_Union[AnyValue, _Mapping]] = ..., + ) -> None: ... + +class InstrumentationScope(_message.Message): + __slots__ = ("name", "version", "attributes", "dropped_attributes_count") + NAME_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + name: str + version: str + attributes: _containers.RepeatedCompositeFieldContainer[KeyValue] + dropped_attributes_count: int + def __init__( + self, + name: _Optional[str] = ..., + version: _Optional[str] = ..., + attributes: _Optional[_Iterable[_Union[KeyValue, _Mapping]]] = ..., + dropped_attributes_count: _Optional[int] = ..., + ) -> None: ... 
diff --git a/kit/proto/opentelemetry/proto/common/v1/common_pb2_grpc.py b/kit/proto/opentelemetry/proto/common/v1/common_pb2_grpc.py new file mode 100644 index 0000000..09d01e5 --- /dev/null +++ b/kit/proto/opentelemetry/proto/common/v1/common_pb2_grpc.py @@ -0,0 +1,32 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/common/v1/common_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) diff --git a/kit/proto/opentelemetry/proto/logs/v1/__init__.py b/kit/proto/opentelemetry/proto/logs/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.py b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.py new file mode 100644 index 0000000..367d457 --- /dev/null +++ b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/logs/v1/logs.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xef\x02\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 
\n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0cJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.logs.v1.logs_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + 
b"\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1" + ) + _globals["_SEVERITYNUMBER"]._serialized_start = 941 + _globals["_SEVERITYNUMBER"]._serialized_end = 1648 + _globals["_LOGRECORDFLAGS"]._serialized_start = 1650 + _globals["_LOGRECORDFLAGS"]._serialized_end = 1739 + _globals["_LOGSDATA"]._serialized_start = 163 + _globals["_LOGSDATA"]._serialized_end = 239 + _globals["_RESOURCELOGS"]._serialized_start = 242 + _globals["_RESOURCELOGS"]._serialized_end = 405 + _globals["_SCOPELOGS"]._serialized_start = 408 + _globals["_SCOPELOGS"]._serialized_end = 568 + _globals["_LOGRECORD"]._serialized_start = 571 + _globals["_LOGRECORD"]._serialized_end = 938 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.pyi b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.pyi new file mode 100644 index 0000000..e4561c4 --- /dev/null +++ b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2.pyi @@ -0,0 +1,160 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from opentelemetry.proto.common.v1 import common_pb2 as _common_pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as _resource_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class SeverityNumber(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SEVERITY_NUMBER_UNSPECIFIED: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_TRACE: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_TRACE2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_TRACE3: _ClassVar[SeverityNumber] + 
SEVERITY_NUMBER_TRACE4: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_DEBUG: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_DEBUG2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_DEBUG3: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_DEBUG4: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_INFO: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_INFO2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_INFO3: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_INFO4: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_WARN: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_WARN2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_WARN3: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_WARN4: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_ERROR: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_ERROR2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_ERROR3: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_ERROR4: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_FATAL: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_FATAL2: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_FATAL3: _ClassVar[SeverityNumber] + SEVERITY_NUMBER_FATAL4: _ClassVar[SeverityNumber] + +class LogRecordFlags(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + LOG_RECORD_FLAGS_DO_NOT_USE: _ClassVar[LogRecordFlags] + LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: _ClassVar[LogRecordFlags] + +SEVERITY_NUMBER_UNSPECIFIED: SeverityNumber +SEVERITY_NUMBER_TRACE: SeverityNumber +SEVERITY_NUMBER_TRACE2: SeverityNumber +SEVERITY_NUMBER_TRACE3: SeverityNumber +SEVERITY_NUMBER_TRACE4: SeverityNumber +SEVERITY_NUMBER_DEBUG: SeverityNumber +SEVERITY_NUMBER_DEBUG2: SeverityNumber +SEVERITY_NUMBER_DEBUG3: SeverityNumber +SEVERITY_NUMBER_DEBUG4: SeverityNumber +SEVERITY_NUMBER_INFO: SeverityNumber +SEVERITY_NUMBER_INFO2: SeverityNumber +SEVERITY_NUMBER_INFO3: SeverityNumber +SEVERITY_NUMBER_INFO4: SeverityNumber +SEVERITY_NUMBER_WARN: SeverityNumber +SEVERITY_NUMBER_WARN2: SeverityNumber +SEVERITY_NUMBER_WARN3: SeverityNumber +SEVERITY_NUMBER_WARN4: SeverityNumber +SEVERITY_NUMBER_ERROR: 
SeverityNumber +SEVERITY_NUMBER_ERROR2: SeverityNumber +SEVERITY_NUMBER_ERROR3: SeverityNumber +SEVERITY_NUMBER_ERROR4: SeverityNumber +SEVERITY_NUMBER_FATAL: SeverityNumber +SEVERITY_NUMBER_FATAL2: SeverityNumber +SEVERITY_NUMBER_FATAL3: SeverityNumber +SEVERITY_NUMBER_FATAL4: SeverityNumber +LOG_RECORD_FLAGS_DO_NOT_USE: LogRecordFlags +LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: LogRecordFlags + +class LogsData(_message.Message): + __slots__ = ("resource_logs",) + RESOURCE_LOGS_FIELD_NUMBER: _ClassVar[int] + resource_logs: _containers.RepeatedCompositeFieldContainer[ResourceLogs] + def __init__( + self, resource_logs: _Optional[_Iterable[_Union[ResourceLogs, _Mapping]]] = ... + ) -> None: ... + +class ResourceLogs(_message.Message): + __slots__ = ("resource", "scope_logs", "schema_url") + RESOURCE_FIELD_NUMBER: _ClassVar[int] + SCOPE_LOGS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + resource: _resource_pb2.Resource + scope_logs: _containers.RepeatedCompositeFieldContainer[ScopeLogs] + schema_url: str + def __init__( + self, + resource: _Optional[_Union[_resource_pb2.Resource, _Mapping]] = ..., + scope_logs: _Optional[_Iterable[_Union[ScopeLogs, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... + +class ScopeLogs(_message.Message): + __slots__ = ("scope", "log_records", "schema_url") + SCOPE_FIELD_NUMBER: _ClassVar[int] + LOG_RECORDS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + scope: _common_pb2.InstrumentationScope + log_records: _containers.RepeatedCompositeFieldContainer[LogRecord] + schema_url: str + def __init__( + self, + scope: _Optional[_Union[_common_pb2.InstrumentationScope, _Mapping]] = ..., + log_records: _Optional[_Iterable[_Union[LogRecord, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... 
+ +class LogRecord(_message.Message): + __slots__ = ( + "time_unix_nano", + "observed_time_unix_nano", + "severity_number", + "severity_text", + "body", + "attributes", + "dropped_attributes_count", + "flags", + "trace_id", + "span_id", + ) + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + OBSERVED_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + SEVERITY_NUMBER_FIELD_NUMBER: _ClassVar[int] + SEVERITY_TEXT_FIELD_NUMBER: _ClassVar[int] + BODY_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + time_unix_nano: int + observed_time_unix_nano: int + severity_number: SeverityNumber + severity_text: str + body: _common_pb2.AnyValue + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + dropped_attributes_count: int + flags: int + trace_id: bytes + span_id: bytes + def __init__( + self, + time_unix_nano: _Optional[int] = ..., + observed_time_unix_nano: _Optional[int] = ..., + severity_number: _Optional[_Union[SeverityNumber, str]] = ..., + severity_text: _Optional[str] = ..., + body: _Optional[_Union[_common_pb2.AnyValue, _Mapping]] = ..., + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + dropped_attributes_count: _Optional[int] = ..., + flags: _Optional[int] = ..., + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/logs/v1/logs_pb2_grpc.py b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2_grpc.py new file mode 100644 index 0000000..61f3200 --- /dev/null +++ b/kit/proto/opentelemetry/proto/logs/v1/logs_pb2_grpc.py @@ -0,0 +1,32 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/logs/v1/logs_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) diff --git a/kit/proto/opentelemetry/proto/metrics/v1/__init__.py b/kit/proto/opentelemetry/proto/metrics/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.py b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.py new file mode 100644 index 0000000..77a7051 --- /dev/null +++ b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/metrics/v1/metrics.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"X\n\x0bMetricsData\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics"\xaf\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x43\n\rscope_metrics\x18\x02 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.ScopeMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x9f\x01\n\x0cScopeMetrics\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t"\xcd\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12>\n\thistogram\x18\t 
\x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12U\n\x15\x65xponential_histogram\x18\n \x01(\x0b\x32\x34.opentelemetry.proto.metrics.v1.ExponentialHistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x12\x39\n\x08metadata\x18\x0c \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValueB\x06\n\x04\x64\x61taJ\x04\x08\x04\x10\x05J\x04\x08\x06\x10\x07J\x04\x08\x08\x10\t"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"\xc3\x01\n\x14\x45xponentialHistogram\x12R\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint"\x86\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\x08 \x01(\rB\x07\n\x05valueJ\x04\x08\x01\x10\x02"\xe6\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t 
\x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\r\n\x05\x66lags\x18\n \x01(\r\x12\x10\n\x03min\x18\x0b \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\x0c \x01(\x01H\x02\x88\x01\x01\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_maxJ\x04\x08\x01\x10\x02"\xda\x04\n\x1d\x45xponentialHistogramDataPoint\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x10\n\x03sum\x18\x05 \x01(\x01H\x00\x88\x01\x01\x12\r\n\x05scale\x18\x06 \x01(\x11\x12\x12\n\nzero_count\x18\x07 \x01(\x06\x12W\n\x08positive\x18\x08 \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12W\n\x08negative\x18\t \x01(\x0b\x32\x45.opentelemetry.proto.metrics.v1.ExponentialHistogramDataPoint.Buckets\x12\r\n\x05\x66lags\x18\n \x01(\r\x12;\n\texemplars\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\x12\x10\n\x03min\x18\x0c \x01(\x01H\x01\x88\x01\x01\x12\x10\n\x03max\x18\r \x01(\x01H\x02\x88\x01\x01\x12\x16\n\x0ezero_threshold\x18\x0e \x01(\x01\x1a\x30\n\x07\x42uckets\x12\x0e\n\x06offset\x18\x01 \x01(\x11\x12\x15\n\rbucket_counts\x18\x02 \x03(\x04\x42\x06\n\x04_sumB\x06\n\x04_minB\x06\n\x04_max"\xc5\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 
\x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x12\r\n\x05\x66lags\x18\x08 \x01(\r\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01J\x04\x08\x01\x10\x02"\xc1\x01\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05valueJ\x04\x08\x01\x10\x02*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02*^\n\x0e\x44\x61taPointFlags\x12\x1f\n\x1b\x44\x41TA_POINT_FLAGS_DO_NOT_USE\x10\x00\x12+\n\'DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK\x10\x01\x42\x7f\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z)go.opentelemetry.io/proto/otlp/metrics/v1\xaa\x02\x1eOpenTelemetry.Proto.Metrics.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.metrics.v1.metrics_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z)go.opentelemetry.io/proto/otlp/metrics/v1\252\002\036OpenTelemetry.Proto.Metrics.V1" + ) + _globals["_AGGREGATIONTEMPORALITY"]._serialized_start = 3546 + _globals["_AGGREGATIONTEMPORALITY"]._serialized_end = 3686 + _globals["_DATAPOINTFLAGS"]._serialized_start = 3688 + _globals["_DATAPOINTFLAGS"]._serialized_end = 3782 + _globals["_METRICSDATA"]._serialized_start = 172 + _globals["_METRICSDATA"]._serialized_end = 260 + _globals["_RESOURCEMETRICS"]._serialized_start = 263 + 
_globals["_RESOURCEMETRICS"]._serialized_end = 438 + _globals["_SCOPEMETRICS"]._serialized_start = 441 + _globals["_SCOPEMETRICS"]._serialized_end = 600 + _globals["_METRIC"]._serialized_start = 603 + _globals["_METRIC"]._serialized_end = 1064 + _globals["_GAUGE"]._serialized_start = 1066 + _globals["_GAUGE"]._serialized_end = 1143 + _globals["_SUM"]._serialized_start = 1146 + _globals["_SUM"]._serialized_end = 1332 + _globals["_HISTOGRAM"]._serialized_start = 1335 + _globals["_HISTOGRAM"]._serialized_end = 1508 + _globals["_EXPONENTIALHISTOGRAM"]._serialized_start = 1511 + _globals["_EXPONENTIALHISTOGRAM"]._serialized_end = 1706 + _globals["_SUMMARY"]._serialized_start = 1708 + _globals["_SUMMARY"]._serialized_end = 1788 + _globals["_NUMBERDATAPOINT"]._serialized_start = 1791 + _globals["_NUMBERDATAPOINT"]._serialized_end = 2053 + _globals["_HISTOGRAMDATAPOINT"]._serialized_start = 2056 + _globals["_HISTOGRAMDATAPOINT"]._serialized_end = 2414 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_start = 2417 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT"]._serialized_end = 3019 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_start = 2947 + _globals["_EXPONENTIALHISTOGRAMDATAPOINT_BUCKETS"]._serialized_end = 2995 + _globals["_SUMMARYDATAPOINT"]._serialized_start = 3022 + _globals["_SUMMARYDATAPOINT"]._serialized_end = 3347 + _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_start = 3291 + _globals["_SUMMARYDATAPOINT_VALUEATQUANTILE"]._serialized_end = 3341 + _globals["_EXEMPLAR"]._serialized_start = 3350 + _globals["_EXEMPLAR"]._serialized_end = 3543 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.pyi b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.pyi new file mode 100644 index 0000000..c702a83 --- /dev/null +++ b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2.pyi @@ -0,0 +1,424 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable 
+from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from opentelemetry.proto.common.v1 import common_pb2 as _common_pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as _resource_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class AggregationTemporality(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + AGGREGATION_TEMPORALITY_UNSPECIFIED: _ClassVar[AggregationTemporality] + AGGREGATION_TEMPORALITY_DELTA: _ClassVar[AggregationTemporality] + AGGREGATION_TEMPORALITY_CUMULATIVE: _ClassVar[AggregationTemporality] + +class DataPointFlags(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + DATA_POINT_FLAGS_DO_NOT_USE: _ClassVar[DataPointFlags] + DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK: _ClassVar[DataPointFlags] + +AGGREGATION_TEMPORALITY_UNSPECIFIED: AggregationTemporality +AGGREGATION_TEMPORALITY_DELTA: AggregationTemporality +AGGREGATION_TEMPORALITY_CUMULATIVE: AggregationTemporality +DATA_POINT_FLAGS_DO_NOT_USE: DataPointFlags +DATA_POINT_FLAGS_NO_RECORDED_VALUE_MASK: DataPointFlags + +class MetricsData(_message.Message): + __slots__ = ("resource_metrics",) + RESOURCE_METRICS_FIELD_NUMBER: _ClassVar[int] + resource_metrics: _containers.RepeatedCompositeFieldContainer[ResourceMetrics] + def __init__( + self, + resource_metrics: _Optional[_Iterable[_Union[ResourceMetrics, _Mapping]]] = ..., + ) -> None: ... 
+ +class ResourceMetrics(_message.Message): + __slots__ = ("resource", "scope_metrics", "schema_url") + RESOURCE_FIELD_NUMBER: _ClassVar[int] + SCOPE_METRICS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + resource: _resource_pb2.Resource + scope_metrics: _containers.RepeatedCompositeFieldContainer[ScopeMetrics] + schema_url: str + def __init__( + self, + resource: _Optional[_Union[_resource_pb2.Resource, _Mapping]] = ..., + scope_metrics: _Optional[_Iterable[_Union[ScopeMetrics, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... + +class ScopeMetrics(_message.Message): + __slots__ = ("scope", "metrics", "schema_url") + SCOPE_FIELD_NUMBER: _ClassVar[int] + METRICS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + scope: _common_pb2.InstrumentationScope + metrics: _containers.RepeatedCompositeFieldContainer[Metric] + schema_url: str + def __init__( + self, + scope: _Optional[_Union[_common_pb2.InstrumentationScope, _Mapping]] = ..., + metrics: _Optional[_Iterable[_Union[Metric, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... 
+ +class Metric(_message.Message): + __slots__ = ( + "name", + "description", + "unit", + "gauge", + "sum", + "histogram", + "exponential_histogram", + "summary", + "metadata", + ) + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + UNIT_FIELD_NUMBER: _ClassVar[int] + GAUGE_FIELD_NUMBER: _ClassVar[int] + SUM_FIELD_NUMBER: _ClassVar[int] + HISTOGRAM_FIELD_NUMBER: _ClassVar[int] + EXPONENTIAL_HISTOGRAM_FIELD_NUMBER: _ClassVar[int] + SUMMARY_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + name: str + description: str + unit: str + gauge: Gauge + sum: Sum + histogram: Histogram + exponential_histogram: ExponentialHistogram + summary: Summary + metadata: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + def __init__( + self, + name: _Optional[str] = ..., + description: _Optional[str] = ..., + unit: _Optional[str] = ..., + gauge: _Optional[_Union[Gauge, _Mapping]] = ..., + sum: _Optional[_Union[Sum, _Mapping]] = ..., + histogram: _Optional[_Union[Histogram, _Mapping]] = ..., + exponential_histogram: _Optional[_Union[ExponentialHistogram, _Mapping]] = ..., + summary: _Optional[_Union[Summary, _Mapping]] = ..., + metadata: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + ) -> None: ... + +class Gauge(_message.Message): + __slots__ = ("data_points",) + DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + data_points: _containers.RepeatedCompositeFieldContainer[NumberDataPoint] + def __init__( + self, data_points: _Optional[_Iterable[_Union[NumberDataPoint, _Mapping]]] = ... + ) -> None: ... 
+ +class Sum(_message.Message): + __slots__ = ("data_points", "aggregation_temporality", "is_monotonic") + DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + AGGREGATION_TEMPORALITY_FIELD_NUMBER: _ClassVar[int] + IS_MONOTONIC_FIELD_NUMBER: _ClassVar[int] + data_points: _containers.RepeatedCompositeFieldContainer[NumberDataPoint] + aggregation_temporality: AggregationTemporality + is_monotonic: bool + def __init__( + self, + data_points: _Optional[_Iterable[_Union[NumberDataPoint, _Mapping]]] = ..., + aggregation_temporality: _Optional[_Union[AggregationTemporality, str]] = ..., + is_monotonic: bool = ..., + ) -> None: ... + +class Histogram(_message.Message): + __slots__ = ("data_points", "aggregation_temporality") + DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + AGGREGATION_TEMPORALITY_FIELD_NUMBER: _ClassVar[int] + data_points: _containers.RepeatedCompositeFieldContainer[HistogramDataPoint] + aggregation_temporality: AggregationTemporality + def __init__( + self, + data_points: _Optional[_Iterable[_Union[HistogramDataPoint, _Mapping]]] = ..., + aggregation_temporality: _Optional[_Union[AggregationTemporality, str]] = ..., + ) -> None: ... + +class ExponentialHistogram(_message.Message): + __slots__ = ("data_points", "aggregation_temporality") + DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + AGGREGATION_TEMPORALITY_FIELD_NUMBER: _ClassVar[int] + data_points: _containers.RepeatedCompositeFieldContainer[ + ExponentialHistogramDataPoint + ] + aggregation_temporality: AggregationTemporality + def __init__( + self, + data_points: _Optional[ + _Iterable[_Union[ExponentialHistogramDataPoint, _Mapping]] + ] = ..., + aggregation_temporality: _Optional[_Union[AggregationTemporality, str]] = ..., + ) -> None: ... 
+ +class Summary(_message.Message): + __slots__ = ("data_points",) + DATA_POINTS_FIELD_NUMBER: _ClassVar[int] + data_points: _containers.RepeatedCompositeFieldContainer[SummaryDataPoint] + def __init__( + self, + data_points: _Optional[_Iterable[_Union[SummaryDataPoint, _Mapping]]] = ..., + ) -> None: ... + +class NumberDataPoint(_message.Message): + __slots__ = ( + "attributes", + "start_time_unix_nano", + "time_unix_nano", + "as_double", + "as_int", + "exemplars", + "flags", + ) + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + START_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + AS_DOUBLE_FIELD_NUMBER: _ClassVar[int] + AS_INT_FIELD_NUMBER: _ClassVar[int] + EXEMPLARS_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + start_time_unix_nano: int + time_unix_nano: int + as_double: float + as_int: int + exemplars: _containers.RepeatedCompositeFieldContainer[Exemplar] + flags: int + def __init__( + self, + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + start_time_unix_nano: _Optional[int] = ..., + time_unix_nano: _Optional[int] = ..., + as_double: _Optional[float] = ..., + as_int: _Optional[int] = ..., + exemplars: _Optional[_Iterable[_Union[Exemplar, _Mapping]]] = ..., + flags: _Optional[int] = ..., + ) -> None: ... 
+ +class HistogramDataPoint(_message.Message): + __slots__ = ( + "attributes", + "start_time_unix_nano", + "time_unix_nano", + "count", + "sum", + "bucket_counts", + "explicit_bounds", + "exemplars", + "flags", + "min", + "max", + ) + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + START_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + COUNT_FIELD_NUMBER: _ClassVar[int] + SUM_FIELD_NUMBER: _ClassVar[int] + BUCKET_COUNTS_FIELD_NUMBER: _ClassVar[int] + EXPLICIT_BOUNDS_FIELD_NUMBER: _ClassVar[int] + EXEMPLARS_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + MIN_FIELD_NUMBER: _ClassVar[int] + MAX_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: float + bucket_counts: _containers.RepeatedScalarFieldContainer[int] + explicit_bounds: _containers.RepeatedScalarFieldContainer[float] + exemplars: _containers.RepeatedCompositeFieldContainer[Exemplar] + flags: int + min: float + max: float + def __init__( + self, + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + start_time_unix_nano: _Optional[int] = ..., + time_unix_nano: _Optional[int] = ..., + count: _Optional[int] = ..., + sum: _Optional[float] = ..., + bucket_counts: _Optional[_Iterable[int]] = ..., + explicit_bounds: _Optional[_Iterable[float]] = ..., + exemplars: _Optional[_Iterable[_Union[Exemplar, _Mapping]]] = ..., + flags: _Optional[int] = ..., + min: _Optional[float] = ..., + max: _Optional[float] = ..., + ) -> None: ... 
+ +class ExponentialHistogramDataPoint(_message.Message): + __slots__ = ( + "attributes", + "start_time_unix_nano", + "time_unix_nano", + "count", + "sum", + "scale", + "zero_count", + "positive", + "negative", + "flags", + "exemplars", + "min", + "max", + "zero_threshold", + ) + + class Buckets(_message.Message): + __slots__ = ("offset", "bucket_counts") + OFFSET_FIELD_NUMBER: _ClassVar[int] + BUCKET_COUNTS_FIELD_NUMBER: _ClassVar[int] + offset: int + bucket_counts: _containers.RepeatedScalarFieldContainer[int] + def __init__( + self, + offset: _Optional[int] = ..., + bucket_counts: _Optional[_Iterable[int]] = ..., + ) -> None: ... + + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + START_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + COUNT_FIELD_NUMBER: _ClassVar[int] + SUM_FIELD_NUMBER: _ClassVar[int] + SCALE_FIELD_NUMBER: _ClassVar[int] + ZERO_COUNT_FIELD_NUMBER: _ClassVar[int] + POSITIVE_FIELD_NUMBER: _ClassVar[int] + NEGATIVE_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + EXEMPLARS_FIELD_NUMBER: _ClassVar[int] + MIN_FIELD_NUMBER: _ClassVar[int] + MAX_FIELD_NUMBER: _ClassVar[int] + ZERO_THRESHOLD_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: float + scale: int + zero_count: int + positive: ExponentialHistogramDataPoint.Buckets + negative: ExponentialHistogramDataPoint.Buckets + flags: int + exemplars: _containers.RepeatedCompositeFieldContainer[Exemplar] + min: float + max: float + zero_threshold: float + def __init__( + self, + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + start_time_unix_nano: _Optional[int] = ..., + time_unix_nano: _Optional[int] = ..., + count: _Optional[int] = ..., + sum: _Optional[float] = ..., + scale: _Optional[int] = ..., + zero_count: _Optional[int] = ..., + positive: _Optional[ + 
_Union[ExponentialHistogramDataPoint.Buckets, _Mapping] + ] = ..., + negative: _Optional[ + _Union[ExponentialHistogramDataPoint.Buckets, _Mapping] + ] = ..., + flags: _Optional[int] = ..., + exemplars: _Optional[_Iterable[_Union[Exemplar, _Mapping]]] = ..., + min: _Optional[float] = ..., + max: _Optional[float] = ..., + zero_threshold: _Optional[float] = ..., + ) -> None: ... + +class SummaryDataPoint(_message.Message): + __slots__ = ( + "attributes", + "start_time_unix_nano", + "time_unix_nano", + "count", + "sum", + "quantile_values", + "flags", + ) + + class ValueAtQuantile(_message.Message): + __slots__ = ("quantile", "value") + QUANTILE_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + quantile: float + value: float + def __init__( + self, quantile: _Optional[float] = ..., value: _Optional[float] = ... + ) -> None: ... + + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + START_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + COUNT_FIELD_NUMBER: _ClassVar[int] + SUM_FIELD_NUMBER: _ClassVar[int] + QUANTILE_VALUES_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + start_time_unix_nano: int + time_unix_nano: int + count: int + sum: float + quantile_values: _containers.RepeatedCompositeFieldContainer[ + SummaryDataPoint.ValueAtQuantile + ] + flags: int + def __init__( + self, + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + start_time_unix_nano: _Optional[int] = ..., + time_unix_nano: _Optional[int] = ..., + count: _Optional[int] = ..., + sum: _Optional[float] = ..., + quantile_values: _Optional[ + _Iterable[_Union[SummaryDataPoint.ValueAtQuantile, _Mapping]] + ] = ..., + flags: _Optional[int] = ..., + ) -> None: ... 
+ +class Exemplar(_message.Message): + __slots__ = ( + "filtered_attributes", + "time_unix_nano", + "as_double", + "as_int", + "span_id", + "trace_id", + ) + FILTERED_ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + AS_DOUBLE_FIELD_NUMBER: _ClassVar[int] + AS_INT_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + filtered_attributes: _containers.RepeatedCompositeFieldContainer[ + _common_pb2.KeyValue + ] + time_unix_nano: int + as_double: float + as_int: int + span_id: bytes + trace_id: bytes + def __init__( + self, + filtered_attributes: _Optional[ + _Iterable[_Union[_common_pb2.KeyValue, _Mapping]] + ] = ..., + time_unix_nano: _Optional[int] = ..., + as_double: _Optional[float] = ..., + as_int: _Optional[int] = ..., + span_id: _Optional[bytes] = ..., + trace_id: _Optional[bytes] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2_grpc.py b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2_grpc.py new file mode 100644 index 0000000..d9573dd --- /dev/null +++ b/kit/proto/opentelemetry/proto/metrics/v1/metrics_pb2_grpc.py @@ -0,0 +1,32 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/metrics/v1/metrics_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) diff --git a/kit/proto/opentelemetry/proto/resource/v1/__init__.py b/kit/proto/opentelemetry/proto/resource/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.py b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.py new file mode 100644 index 0000000..bbc3ab1 --- /dev/null +++ b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/resource/v1/resource.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rB\x83\x01\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3' +) + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.resource.v1.resource_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b'\n"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1' + ) + _globals["_RESOURCE"]._serialized_start = 127 + _globals["_RESOURCE"]._serialized_end = 232 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.pyi b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.pyi new file mode 100644 index 0000000..0691ac8 --- /dev/null +++ b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2.pyi @@ -0,0 
+1,24 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from opentelemetry.proto.common.v1 import common_pb2 as _common_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class Resource(_message.Message): + __slots__ = ("attributes", "dropped_attributes_count") + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + dropped_attributes_count: int + def __init__( + self, + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + dropped_attributes_count: _Optional[int] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/resource/v1/resource_pb2_grpc.py b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2_grpc.py new file mode 100644 index 0000000..5f80990 --- /dev/null +++ b/kit/proto/opentelemetry/proto/resource/v1/resource_pb2_grpc.py @@ -0,0 +1,32 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/resource/v1/resource_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) diff --git a/kit/proto/opentelemetry/proto/trace/v1/__init__.py b/kit/proto/opentelemetry/proto/trace/v1/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.py b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.py new file mode 100644 index 0000000..1b01298 --- /dev/null +++ b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: opentelemetry/proto/trace/v1/trace.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from opentelemetry.proto.common.v1 import ( + common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2, +) +from opentelemetry.proto.resource.v1 import ( + resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 
\x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 \n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3' +) + 
+_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages( + DESCRIPTOR, "opentelemetry.proto.trace.v1.trace_pb2", _globals +) +if not _descriptor._USE_C_DESCRIPTORS: + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = ( + b"\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1" + ) + _globals["_SPANFLAGS"]._serialized_start = 1782 + _globals["_SPANFLAGS"]._serialized_end = 1938 + _globals["_TRACESDATA"]._serialized_start = 166 + _globals["_TRACESDATA"]._serialized_end = 247 + _globals["_RESOURCESPANS"]._serialized_start = 250 + _globals["_RESOURCESPANS"]._serialized_end = 417 + _globals["_SCOPESPANS"]._serialized_start = 420 + _globals["_SCOPESPANS"]._serialized_end = 571 + _globals["_SPAN"]._serialized_start = 574 + _globals["_SPAN"]._serialized_end = 1602 + _globals["_SPAN_EVENT"]._serialized_start = 1131 + _globals["_SPAN_EVENT"]._serialized_end = 1271 + _globals["_SPAN_LINK"]._serialized_start = 1274 + _globals["_SPAN_LINK"]._serialized_end = 1446 + _globals["_SPAN_SPANKIND"]._serialized_start = 1449 + _globals["_SPAN_SPANKIND"]._serialized_end = 1602 + _globals["_STATUS"]._serialized_start = 1605 + _globals["_STATUS"]._serialized_end = 1779 + _globals["_STATUS_STATUSCODE"]._serialized_start = 1695 + _globals["_STATUS_STATUSCODE"]._serialized_end = 1773 +# @@protoc_insertion_point(module_scope) diff --git a/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.pyi b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.pyi new file mode 100644 index 0000000..d26ebab --- /dev/null +++ b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2.pyi @@ -0,0 +1,228 @@ +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf 
import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from opentelemetry.proto.common.v1 import common_pb2 as _common_pb2 +from opentelemetry.proto.resource.v1 import resource_pb2 as _resource_pb2 + +DESCRIPTOR: _descriptor.FileDescriptor + +class SpanFlags(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SPAN_FLAGS_DO_NOT_USE: _ClassVar[SpanFlags] + SPAN_FLAGS_TRACE_FLAGS_MASK: _ClassVar[SpanFlags] + SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: _ClassVar[SpanFlags] + SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: _ClassVar[SpanFlags] + +SPAN_FLAGS_DO_NOT_USE: SpanFlags +SPAN_FLAGS_TRACE_FLAGS_MASK: SpanFlags +SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: SpanFlags +SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: SpanFlags + +class TracesData(_message.Message): + __slots__ = ("resource_spans",) + RESOURCE_SPANS_FIELD_NUMBER: _ClassVar[int] + resource_spans: _containers.RepeatedCompositeFieldContainer[ResourceSpans] + def __init__( + self, + resource_spans: _Optional[_Iterable[_Union[ResourceSpans, _Mapping]]] = ..., + ) -> None: ... + +class ResourceSpans(_message.Message): + __slots__ = ("resource", "scope_spans", "schema_url") + RESOURCE_FIELD_NUMBER: _ClassVar[int] + SCOPE_SPANS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + resource: _resource_pb2.Resource + scope_spans: _containers.RepeatedCompositeFieldContainer[ScopeSpans] + schema_url: str + def __init__( + self, + resource: _Optional[_Union[_resource_pb2.Resource, _Mapping]] = ..., + scope_spans: _Optional[_Iterable[_Union[ScopeSpans, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... 
+ +class ScopeSpans(_message.Message): + __slots__ = ("scope", "spans", "schema_url") + SCOPE_FIELD_NUMBER: _ClassVar[int] + SPANS_FIELD_NUMBER: _ClassVar[int] + SCHEMA_URL_FIELD_NUMBER: _ClassVar[int] + scope: _common_pb2.InstrumentationScope + spans: _containers.RepeatedCompositeFieldContainer[Span] + schema_url: str + def __init__( + self, + scope: _Optional[_Union[_common_pb2.InstrumentationScope, _Mapping]] = ..., + spans: _Optional[_Iterable[_Union[Span, _Mapping]]] = ..., + schema_url: _Optional[str] = ..., + ) -> None: ... + +class Span(_message.Message): + __slots__ = ( + "trace_id", + "span_id", + "trace_state", + "parent_span_id", + "flags", + "name", + "kind", + "start_time_unix_nano", + "end_time_unix_nano", + "attributes", + "dropped_attributes_count", + "events", + "dropped_events_count", + "links", + "dropped_links_count", + "status", + ) + + class SpanKind(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + SPAN_KIND_UNSPECIFIED: _ClassVar[Span.SpanKind] + SPAN_KIND_INTERNAL: _ClassVar[Span.SpanKind] + SPAN_KIND_SERVER: _ClassVar[Span.SpanKind] + SPAN_KIND_CLIENT: _ClassVar[Span.SpanKind] + SPAN_KIND_PRODUCER: _ClassVar[Span.SpanKind] + SPAN_KIND_CONSUMER: _ClassVar[Span.SpanKind] + + SPAN_KIND_UNSPECIFIED: Span.SpanKind + SPAN_KIND_INTERNAL: Span.SpanKind + SPAN_KIND_SERVER: Span.SpanKind + SPAN_KIND_CLIENT: Span.SpanKind + SPAN_KIND_PRODUCER: Span.SpanKind + SPAN_KIND_CONSUMER: Span.SpanKind + + class Event(_message.Message): + __slots__ = ("time_unix_nano", "name", "attributes", "dropped_attributes_count") + TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + time_unix_nano: int + name: str + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + dropped_attributes_count: int + def __init__( + self, + time_unix_nano: _Optional[int] = ..., + name: _Optional[str] = ..., + 
attributes: _Optional[ + _Iterable[_Union[_common_pb2.KeyValue, _Mapping]] + ] = ..., + dropped_attributes_count: _Optional[int] = ..., + ) -> None: ... + + class Link(_message.Message): + __slots__ = ( + "trace_id", + "span_id", + "trace_state", + "attributes", + "dropped_attributes_count", + "flags", + ) + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + TRACE_STATE_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + trace_id: bytes + span_id: bytes + trace_state: str + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + dropped_attributes_count: int + flags: int + def __init__( + self, + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + trace_state: _Optional[str] = ..., + attributes: _Optional[ + _Iterable[_Union[_common_pb2.KeyValue, _Mapping]] + ] = ..., + dropped_attributes_count: _Optional[int] = ..., + flags: _Optional[int] = ..., + ) -> None: ... 
+ + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + TRACE_STATE_FIELD_NUMBER: _ClassVar[int] + PARENT_SPAN_ID_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + KIND_FIELD_NUMBER: _ClassVar[int] + START_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + END_TIME_UNIX_NANO_FIELD_NUMBER: _ClassVar[int] + ATTRIBUTES_FIELD_NUMBER: _ClassVar[int] + DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: _ClassVar[int] + EVENTS_FIELD_NUMBER: _ClassVar[int] + DROPPED_EVENTS_COUNT_FIELD_NUMBER: _ClassVar[int] + LINKS_FIELD_NUMBER: _ClassVar[int] + DROPPED_LINKS_COUNT_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + trace_id: bytes + span_id: bytes + trace_state: str + parent_span_id: bytes + flags: int + name: str + kind: Span.SpanKind + start_time_unix_nano: int + end_time_unix_nano: int + attributes: _containers.RepeatedCompositeFieldContainer[_common_pb2.KeyValue] + dropped_attributes_count: int + events: _containers.RepeatedCompositeFieldContainer[Span.Event] + dropped_events_count: int + links: _containers.RepeatedCompositeFieldContainer[Span.Link] + dropped_links_count: int + status: Status + def __init__( + self, + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + trace_state: _Optional[str] = ..., + parent_span_id: _Optional[bytes] = ..., + flags: _Optional[int] = ..., + name: _Optional[str] = ..., + kind: _Optional[_Union[Span.SpanKind, str]] = ..., + start_time_unix_nano: _Optional[int] = ..., + end_time_unix_nano: _Optional[int] = ..., + attributes: _Optional[_Iterable[_Union[_common_pb2.KeyValue, _Mapping]]] = ..., + dropped_attributes_count: _Optional[int] = ..., + events: _Optional[_Iterable[_Union[Span.Event, _Mapping]]] = ..., + dropped_events_count: _Optional[int] = ..., + links: _Optional[_Iterable[_Union[Span.Link, _Mapping]]] = ..., + dropped_links_count: _Optional[int] = ..., + status: _Optional[_Union[Status, _Mapping]] = ..., + ) -> None: ... 
+ +class Status(_message.Message): + __slots__ = ("message", "code") + + class StatusCode(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + STATUS_CODE_UNSET: _ClassVar[Status.StatusCode] + STATUS_CODE_OK: _ClassVar[Status.StatusCode] + STATUS_CODE_ERROR: _ClassVar[Status.StatusCode] + + STATUS_CODE_UNSET: Status.StatusCode + STATUS_CODE_OK: Status.StatusCode + STATUS_CODE_ERROR: Status.StatusCode + MESSAGE_FIELD_NUMBER: _ClassVar[int] + CODE_FIELD_NUMBER: _ClassVar[int] + message: str + code: Status.StatusCode + def __init__( + self, + message: _Optional[str] = ..., + code: _Optional[_Union[Status.StatusCode, str]] = ..., + ) -> None: ... diff --git a/kit/proto/opentelemetry/proto/trace/v1/trace_pb2_grpc.py b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2_grpc.py new file mode 100644 index 0000000..e3b9c65 --- /dev/null +++ b/kit/proto/opentelemetry/proto/trace/v1/trace_pb2_grpc.py @@ -0,0 +1,32 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import warnings + +import grpc + +GRPC_GENERATED_VERSION = "1.64.1" +GRPC_VERSION = grpc.__version__ +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" +_version_not_supported = False + +try: + from grpc._utilities import first_version_is_lower + + _version_not_supported = first_version_is_lower( + GRPC_VERSION, GRPC_GENERATED_VERSION + ) +except ImportError: + _version_not_supported = True + +if _version_not_supported: + warnings.warn( + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in opentelemetry/proto/trace/v1/trace_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." + + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." 
+ + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, + ) diff --git a/kit/proto/protobuf_msgs_pb2.py b/kit/proto/protobuf_msgs_pb2.py index a139b22..baf4f98 100644 --- a/kit/proto/protobuf_msgs_pb2.py +++ b/kit/proto/protobuf_msgs_pb2.py @@ -3,52 +3,60 @@ # source: protobuf_msgs.proto # Protobuf Python Version: 5.26.1 """Generated protocol buffer code.""" -from opentelemetry.proto.trace.v1 import trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2 -from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2 -from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database from google.protobuf.internal import builder as _builder + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13protobuf_msgs.proto\x12\x10kubefox.proto.v1\x1a\x1cgoogle/protobuf/struct.proto\x1a&opentelemetry/proto/logs/v1/logs.proto\x1a,opentelemetry/proto/metrics/v1/metrics.proto\x1a(opentelemetry/proto/trace/v1/trace.proto\"a\n\tComponent\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0b\n\x03\x61pp\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0c\n\x04hash\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x11\n\tbroker_id\x18\x06 \x01(\t\"o\n\x0c\x45ventContext\x12\x10\n\x08platform\x18\x01 \x01(\t\x12\x1b\n\x13virtual_environment\x18\x02 \x01(\t\x12\x16\n\x0e\x61pp_deployment\x18\x03 \x01(\t\x12\x18\n\x10release_manifest\x18\x04 \x01(\t\"T\n\x0bSpanContext\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 
\x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\r\n\x05\x66lags\x18\x04 \x01(\x07\"\xe2\x04\n\x05\x45vent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tparent_id\x18\x02 \x01(\t\x12\x32\n\x0bparent_span\x18\x03 \x01(\x0b\x32\x1d.kubefox.proto.v1.SpanContext\x12\x0c\n\x04type\x18\x04 \x01(\t\x12,\n\x08\x63\x61tegory\x18\x05 \x01(\x0e\x32\x1a.kubefox.proto.v1.Category\x12\x13\n\x0b\x63reate_time\x18\x06 \x01(\x03\x12\x0b\n\x03ttl\x18\x07 \x01(\x03\x12/\n\x07\x63ontext\x18\x08 \x01(\x0b\x32\x1e.kubefox.proto.v1.EventContext\x12+\n\x06source\x18\t \x01(\x0b\x32\x1b.kubefox.proto.v1.Component\x12+\n\x06target\x18\n \x01(\x0b\x32\x1b.kubefox.proto.v1.Component\x12\x33\n\x06params\x18\x0b \x03(\x0b\x32#.kubefox.proto.v1.Event.ParamsEntry\x12\x33\n\x06values\x18\x0c \x03(\x0b\x32#.kubefox.proto.v1.Event.ValuesEntry\x12\x14\n\x0c\x63ontent_type\x18\x0e \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\x0c\x1a\x45\n\x0bParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\x1a\x45\n\x0bValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xc2\x01\n\x0cMatchedEvent\x12&\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x17.kubefox.proto.v1.Event\x12\x10\n\x08route_id\x18\x02 \x01(\x03\x12\x34\n\x03\x65nv\x18\x03 \x03(\x0b\x32\'.kubefox.proto.v1.MatchedEvent.EnvEntry\x1a\x42\n\x08\x45nvEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xc6\x01\n\tTelemetry\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12;\n\x0blog_records\x18\r \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x37\n\x07metrics\x18\x0e \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x31\n\x05spans\x18\x0f \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span*?\n\x08\x43\x61tegory\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07MESSAGE\x10\x01\x12\x0b\n\x07REQUEST\x10\x02\x12\x0c\n\x08RESPONSE\x10\x03\x42 
Z\x1egithub.com/xigxog/kubefox/coreb\x06proto3') +from opentelemetry.proto.logs.v1 import ( + logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2, +) +from opentelemetry.proto.metrics.v1 import ( + metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2, +) +from opentelemetry.proto.trace.v1 import ( + trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2, +) + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x13protobuf_msgs.proto\x12\x10kubefox.proto.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\x1a,opentelemetry/proto/metrics/v1/metrics.proto\x1a(opentelemetry/proto/trace/v1/trace.proto"a\n\tComponent\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x0b\n\x03\x61pp\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x0c\n\x04hash\x18\x04 \x01(\t\x12\n\n\x02id\x18\x05 \x01(\t\x12\x11\n\tbroker_id\x18\x06 \x01(\t"o\n\x0c\x45ventContext\x12\x10\n\x08platform\x18\x01 \x01(\t\x12\x1b\n\x13virtual_environment\x18\x02 \x01(\t\x12\x16\n\x0e\x61pp_deployment\x18\x03 \x01(\t\x12\x18\n\x10release_manifest\x18\x04 \x01(\t"T\n\x0bSpanContext\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\r\n\x05\x66lags\x18\x04 \x01(\x07"\xb2\x04\n\x05\x45vent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x11\n\tparent_id\x18\x02 \x01(\t\x12\x32\n\x0bparent_span\x18\x03 \x01(\x0b\x32\x1d.kubefox.proto.v1.SpanContext\x12\x0c\n\x04type\x18\x04 \x01(\t\x12,\n\x08\x63\x61tegory\x18\x05 \x01(\x0e\x32\x1a.kubefox.proto.v1.Category\x12\x13\n\x0b\x63reate_time\x18\x06 \x01(\x03\x12\x0b\n\x03ttl\x18\x07 \x01(\x03\x12/\n\x07\x63ontext\x18\x08 \x01(\x0b\x32\x1e.kubefox.proto.v1.EventContext\x12+\n\x06source\x18\t \x01(\x0b\x32\x1b.kubefox.proto.v1.Component\x12+\n\x06target\x18\n \x01(\x0b\x32\x1b.kubefox.proto.v1.Component\x12\x33\n\x06params\x18\x0b \x03(\x0b\x32#.kubefox.proto.v1.Event.ParamsEntry\x12\x33\n\x06values\x18\x0c 
\x03(\x0b\x32#.kubefox.proto.v1.Event.ValuesEntry\x12\x14\n\x0c\x63ontent_type\x18\x0e \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\x0c\x1a-\n\x0bParamsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a-\n\x0bValuesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xaa\x01\n\x0cMatchedEvent\x12&\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x17.kubefox.proto.v1.Event\x12\x10\n\x08route_id\x18\x02 \x01(\x03\x12\x34\n\x03\x65nv\x18\x03 \x03(\x0b\x32\'.kubefox.proto.v1.MatchedEvent.EnvEntry\x1a*\n\x08\x45nvEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xc6\x01\n\tTelemetry\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12;\n\x0blog_records\x18\r \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x37\n\x07metrics\x18\x0e \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x31\n\x05spans\x18\x0f \x03(\x0b\x32".opentelemetry.proto.trace.v1.Span*?\n\x08\x43\x61tegory\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x0b\n\x07MESSAGE\x10\x01\x12\x0b\n\x07REQUEST\x10\x02\x12\x0c\n\x08RESPONSE\x10\x03\x42 Z\x1egithub.com/xigxog/kubefox/coreb\x06proto3' +) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages( - DESCRIPTOR, 'protobuf_msgs_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "protobuf_msgs_pb2", _globals) if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'Z\036github.com/xigxog/kubefox/core' - _globals['_EVENT_PARAMSENTRY']._loaded_options = None - _globals['_EVENT_PARAMSENTRY']._serialized_options = b'8\001' - _globals['_EVENT_VALUESENTRY']._loaded_options = None - _globals['_EVENT_VALUESENTRY']._serialized_options = b'8\001' - _globals['_MATCHEDEVENT_ENVENTRY']._loaded_options = None - _globals['_MATCHEDEVENT_ENVENTRY']._serialized_options = b'8\001' - _globals['_CATEGORY']._serialized_start = 1508 - 
_globals['_CATEGORY']._serialized_end = 1571 - _globals['_COMPONENT']._serialized_start = 199 - _globals['_COMPONENT']._serialized_end = 296 - _globals['_EVENTCONTEXT']._serialized_start = 298 - _globals['_EVENTCONTEXT']._serialized_end = 409 - _globals['_SPANCONTEXT']._serialized_start = 411 - _globals['_SPANCONTEXT']._serialized_end = 495 - _globals['_EVENT']._serialized_start = 498 - _globals['_EVENT']._serialized_end = 1108 - _globals['_EVENT_PARAMSENTRY']._serialized_start = 968 - _globals['_EVENT_PARAMSENTRY']._serialized_end = 1037 - _globals['_EVENT_VALUESENTRY']._serialized_start = 1039 - _globals['_EVENT_VALUESENTRY']._serialized_end = 1108 - _globals['_MATCHEDEVENT']._serialized_start = 1111 - _globals['_MATCHEDEVENT']._serialized_end = 1305 - _globals['_MATCHEDEVENT_ENVENTRY']._serialized_start = 1239 - _globals['_MATCHEDEVENT_ENVENTRY']._serialized_end = 1305 - _globals['_TELEMETRY']._serialized_start = 1308 - _globals['_TELEMETRY']._serialized_end = 1506 + _globals["DESCRIPTOR"]._loaded_options = None + _globals["DESCRIPTOR"]._serialized_options = b"Z\036github.com/xigxog/kubefox/core" + _globals["_EVENT_PARAMSENTRY"]._loaded_options = None + _globals["_EVENT_PARAMSENTRY"]._serialized_options = b"8\001" + _globals["_EVENT_VALUESENTRY"]._loaded_options = None + _globals["_EVENT_VALUESENTRY"]._serialized_options = b"8\001" + _globals["_MATCHEDEVENT_ENVENTRY"]._loaded_options = None + _globals["_MATCHEDEVENT_ENVENTRY"]._serialized_options = b"8\001" + _globals["_CATEGORY"]._serialized_start = 1406 + _globals["_CATEGORY"]._serialized_end = 1469 + _globals["_COMPONENT"]._serialized_start = 169 + _globals["_COMPONENT"]._serialized_end = 266 + _globals["_EVENTCONTEXT"]._serialized_start = 268 + _globals["_EVENTCONTEXT"]._serialized_end = 379 + _globals["_SPANCONTEXT"]._serialized_start = 381 + _globals["_SPANCONTEXT"]._serialized_end = 465 + _globals["_EVENT"]._serialized_start = 468 + _globals["_EVENT"]._serialized_end = 1030 + 
_globals["_EVENT_PARAMSENTRY"]._serialized_start = 938 + _globals["_EVENT_PARAMSENTRY"]._serialized_end = 983 + _globals["_EVENT_VALUESENTRY"]._serialized_start = 985 + _globals["_EVENT_VALUESENTRY"]._serialized_end = 1030 + _globals["_MATCHEDEVENT"]._serialized_start = 1033 + _globals["_MATCHEDEVENT"]._serialized_end = 1203 + _globals["_MATCHEDEVENT_ENVENTRY"]._serialized_start = 1161 + _globals["_MATCHEDEVENT_ENVENTRY"]._serialized_end = 1203 + _globals["_TELEMETRY"]._serialized_start = 1206 + _globals["_TELEMETRY"]._serialized_end = 1404 # @@protoc_insertion_point(module_scope) diff --git a/kit/proto/protobuf_msgs_pb2.pyi b/kit/proto/protobuf_msgs_pb2.pyi index 965dc93..b9e8290 100644 --- a/kit/proto/protobuf_msgs_pb2.pyi +++ b/kit/proto/protobuf_msgs_pb2.pyi @@ -1,16 +1,19 @@ -from google.protobuf import struct_pb2 as _struct_pb2 +from typing import ClassVar as _ClassVar +from typing import Iterable as _Iterable +from typing import Mapping as _Mapping +from typing import Optional as _Optional +from typing import Union as _Union + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper from opentelemetry.proto.logs.v1 import logs_pb2 as _logs_pb2 from opentelemetry.proto.metrics.v1 import metrics_pb2 as _metrics_pb2 from opentelemetry.proto.trace.v1 import trace_pb2 as _trace_pb2 -from google.protobuf.internal import containers as _containers -from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Mapping, Optional as _Optional, Union as _Union DESCRIPTOR: _descriptor.FileDescriptor - class Category(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): __slots__ = () UNKNOWN: 
_ClassVar[Category] @@ -18,13 +21,11 @@ class Category(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): REQUEST: _ClassVar[Category] RESPONSE: _ClassVar[Category] - UNKNOWN: Category MESSAGE: Category REQUEST: Category RESPONSE: Category - class Component(_message.Message): __slots__ = ("type", "app", "name", "hash", "id", "broker_id") TYPE_FIELD_NUMBER: _ClassVar[int] @@ -39,13 +40,23 @@ class Component(_message.Message): hash: str id: str broker_id: str - def __init__(self, type: _Optional[str] = ..., app: _Optional[str] = ..., name: _Optional[str] = ..., - hash: _Optional[str] = ..., id: _Optional[str] = ..., broker_id: _Optional[str] = ...) -> None: ... - + def __init__( + self, + type: _Optional[str] = ..., + app: _Optional[str] = ..., + name: _Optional[str] = ..., + hash: _Optional[str] = ..., + id: _Optional[str] = ..., + broker_id: _Optional[str] = ..., + ) -> None: ... class EventContext(_message.Message): - __slots__ = ("platform", "virtual_environment", - "app_deployment", "release_manifest") + __slots__ = ( + "platform", + "virtual_environment", + "app_deployment", + "release_manifest", + ) PLATFORM_FIELD_NUMBER: _ClassVar[int] VIRTUAL_ENVIRONMENT_FIELD_NUMBER: _ClassVar[int] APP_DEPLOYMENT_FIELD_NUMBER: _ClassVar[int] @@ -54,9 +65,13 @@ class EventContext(_message.Message): virtual_environment: str app_deployment: str release_manifest: str - def __init__(self, platform: _Optional[str] = ..., virtual_environment: _Optional[str] = ..., - app_deployment: _Optional[str] = ..., release_manifest: _Optional[str] = ...) -> None: ... - + def __init__( + self, + platform: _Optional[str] = ..., + virtual_environment: _Optional[str] = ..., + app_deployment: _Optional[str] = ..., + release_manifest: _Optional[str] = ..., + ) -> None: ... 
class SpanContext(_message.Message): __slots__ = ("trace_id", "span_id", "trace_state", "flags") @@ -68,32 +83,52 @@ class SpanContext(_message.Message): span_id: bytes trace_state: str flags: int - def __init__(self, trace_id: _Optional[bytes] = ..., span_id: _Optional[bytes] = ..., - trace_state: _Optional[str] = ..., flags: _Optional[int] = ...) -> None: ... - + def __init__( + self, + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + trace_state: _Optional[str] = ..., + flags: _Optional[int] = ..., + ) -> None: ... class Event(_message.Message): - __slots__ = ("id", "parent_id", "parent_span", "type", "category", "create_time", - "ttl", "context", "source", "target", "params", "values", "content_type", "content") + __slots__ = ( + "id", + "parent_id", + "parent_span", + "type", + "category", + "create_time", + "ttl", + "context", + "source", + "target", + "params", + "values", + "content_type", + "content", + ) class ParamsEntry(_message.Message): __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str - value: _struct_pb2.Value + value: str def __init__( - self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ...) -> None: ... + self, key: _Optional[str] = ..., value: _Optional[str] = ... + ) -> None: ... class ValuesEntry(_message.Message): __slots__ = ("key", "value") KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str - value: _struct_pb2.Value - + value: str def __init__( - self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ...) -> None: ... + self, key: _Optional[str] = ..., value: _Optional[str] = ... + ) -> None: ... 
+ ID_FIELD_NUMBER: _ClassVar[int] PARENT_ID_FIELD_NUMBER: _ClassVar[int] PARENT_SPAN_FIELD_NUMBER: _ClassVar[int] @@ -118,13 +153,27 @@ class Event(_message.Message): context: EventContext source: Component target: Component - params: _containers.MessageMap[str, _struct_pb2.Value] - values: _containers.MessageMap[str, _struct_pb2.Value] + params: _containers.ScalarMap[str, str] + values: _containers.ScalarMap[str, str] content_type: str content: bytes - def __init__(self, id: _Optional[str] = ..., parent_id: _Optional[str] = ..., parent_span: _Optional[_Union[SpanContext, _Mapping]] = ..., type: _Optional[str] = ..., category: _Optional[_Union[Category, str]] = ..., create_time: _Optional[int] = ..., ttl: _Optional[int] = ..., context: _Optional[_Union[EventContext, _Mapping]] - = ..., source: _Optional[_Union[Component, _Mapping]] = ..., target: _Optional[_Union[Component, _Mapping]] = ..., params: _Optional[_Mapping[str, _struct_pb2.Value]] = ..., values: _Optional[_Mapping[str, _struct_pb2.Value]] = ..., content_type: _Optional[str] = ..., content: _Optional[bytes] = ...) -> None: ... - + def __init__( + self, + id: _Optional[str] = ..., + parent_id: _Optional[str] = ..., + parent_span: _Optional[_Union[SpanContext, _Mapping]] = ..., + type: _Optional[str] = ..., + category: _Optional[_Union[Category, str]] = ..., + create_time: _Optional[int] = ..., + ttl: _Optional[int] = ..., + context: _Optional[_Union[EventContext, _Mapping]] = ..., + source: _Optional[_Union[Component, _Mapping]] = ..., + target: _Optional[_Union[Component, _Mapping]] = ..., + params: _Optional[_Mapping[str, str]] = ..., + values: _Optional[_Mapping[str, str]] = ..., + content_type: _Optional[str] = ..., + content: _Optional[bytes] = ..., + ) -> None: ... 
class MatchedEvent(_message.Message): __slots__ = ("event", "route_id", "env") @@ -134,19 +183,23 @@ class MatchedEvent(_message.Message): KEY_FIELD_NUMBER: _ClassVar[int] VALUE_FIELD_NUMBER: _ClassVar[int] key: str - value: _struct_pb2.Value - + value: str def __init__( - self, key: _Optional[str] = ..., value: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ...) -> None: ... + self, key: _Optional[str] = ..., value: _Optional[str] = ... + ) -> None: ... + EVENT_FIELD_NUMBER: _ClassVar[int] ROUTE_ID_FIELD_NUMBER: _ClassVar[int] ENV_FIELD_NUMBER: _ClassVar[int] event: Event route_id: int - env: _containers.MessageMap[str, _struct_pb2.Value] - def __init__(self, event: _Optional[_Union[Event, _Mapping]] = ..., route_id: _Optional[int] - = ..., env: _Optional[_Mapping[str, _struct_pb2.Value]] = ...) -> None: ... - + env: _containers.ScalarMap[str, str] + def __init__( + self, + event: _Optional[_Union[Event, _Mapping]] = ..., + route_id: _Optional[int] = ..., + env: _Optional[_Mapping[str, str]] = ..., + ) -> None: ... class Telemetry(_message.Message): __slots__ = ("trace_id", "log_records", "metrics", "spans") @@ -158,6 +211,10 @@ class Telemetry(_message.Message): log_records: _containers.RepeatedCompositeFieldContainer[_logs_pb2.LogRecord] metrics: _containers.RepeatedCompositeFieldContainer[_metrics_pb2.Metric] spans: _containers.RepeatedCompositeFieldContainer[_trace_pb2.Span] - - def __init__(self, trace_id: _Optional[bytes] = ..., log_records: _Optional[_Iterable[_Union[_logs_pb2.LogRecord, _Mapping]]] = ..., - metrics: _Optional[_Iterable[_Union[_metrics_pb2.Metric, _Mapping]]] = ..., spans: _Optional[_Iterable[_Union[_trace_pb2.Span, _Mapping]]] = ...) -> None: ... 
+ def __init__( + self, + trace_id: _Optional[bytes] = ..., + log_records: _Optional[_Iterable[_Union[_logs_pb2.LogRecord, _Mapping]]] = ..., + metrics: _Optional[_Iterable[_Union[_metrics_pb2.Metric, _Mapping]]] = ..., + spans: _Optional[_Iterable[_Union[_trace_pb2.Span, _Mapping]]] = ..., + ) -> None: ... diff --git a/kit/proto/protobuf_msgs_pb2_grpc.py b/kit/proto/protobuf_msgs_pb2_grpc.py index 06ee0d6..683e394 100644 --- a/kit/proto/protobuf_msgs_pb2_grpc.py +++ b/kit/proto/protobuf_msgs_pb2_grpc.py @@ -1,30 +1,32 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" -import grpc import warnings +import grpc -GRPC_GENERATED_VERSION = '1.64.1' +GRPC_GENERATED_VERSION = "1.64.1" GRPC_VERSION = grpc.__version__ -EXPECTED_ERROR_RELEASE = '1.65.0' -SCHEDULED_RELEASE_DATE = 'June 25, 2024' +EXPECTED_ERROR_RELEASE = "1.65.0" +SCHEDULED_RELEASE_DATE = "June 25, 2024" _version_not_supported = False try: from grpc._utilities import first_version_is_lower + _version_not_supported = first_version_is_lower( - GRPC_VERSION, GRPC_GENERATED_VERSION) + GRPC_VERSION, GRPC_GENERATED_VERSION + ) except ImportError: _version_not_supported = True if _version_not_supported: warnings.warn( - f'The grpc package installed is at version {GRPC_VERSION},' - + f' but the generated code in protobuf_msgs_pb2_grpc.py depends on' - + f' grpcio>={GRPC_GENERATED_VERSION}.' - + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}' - + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.' - + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},' - + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.', - RuntimeWarning + f"The grpc package installed is at version {GRPC_VERSION}," + + f" but the generated code in protobuf_msgs_pb2_grpc.py depends on" + + f" grpcio>={GRPC_GENERATED_VERSION}." 
+ + f" Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}" + + f" or downgrade your generated code using grpcio-tools<={GRPC_VERSION}." + + f" This warning will become an error in {EXPECTED_ERROR_RELEASE}," + + f" scheduled for release on {SCHEDULED_RELEASE_DATE}.", + RuntimeWarning, ) diff --git a/kit/telemetry/__init__.py b/kit/telemetry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/telemetry/trace.py b/kit/telemetry/trace.py new file mode 100644 index 0000000..1605565 --- /dev/null +++ b/kit/telemetry/trace.py @@ -0,0 +1,228 @@ +from typing import List, Optional + +from opentelemetry import trace +from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter +from opentelemetry.sdk.resources import SERVICE_NAME, Resource +from opentelemetry.sdk.trace import SpanProcessor, TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.trace import Context, Span, SpanContext, get_current_span +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator + +from kit.proto.protobuf_msgs_pb2 import Event, MatchedEvent +from kit.proto.protobuf_msgs_pb2 import SpanContext as ProtoSpanContext +from kit.telemetry import vars as TelemetryVars + + +class AttributePropagatingSpanProcessor(SpanProcessor): + """Class used to propagate attributes from parent spans to child spans if + the attributes exist in the parent span.""" + + def __init__(self, attributes_to_propagate: Optional[List[str]] = None) -> None: + """Initializes the AttributePropagatingSpanProcessor with a number of + attributes to propagate.""" + self.attributes_to_propagate = attributes_to_propagate or [ + TelemetryVars.ATTR_KEY_EVENT_ID, + TelemetryVars.ATTR_KEY_EVENT_PARENT_ID, + TelemetryVars.ATTR_KEY_EVENT_TYPE, + TelemetryVars.ATTR_KEY_EVENT_CATEGORY, + TelemetryVars.ATTR_KEY_EVENT_TTL, + TelemetryVars.ATTR_KEY_ROUTE_ID, + TelemetryVars.ATTR_KEY_EVENT_VIRTUAL_ENV, + 
TelemetryVars.ATTR_KEY_EVENT_APP_DEPLOYMENT, + TelemetryVars.ATTR_KEY_EVENT_REL_MANIFEST, + TelemetryVars.ATTR_KEY_PLATFORM, + TelemetryVars.ATTR_KEY_EVENT_SOURCE_ID, + TelemetryVars.ATTR_KEY_EVENT_SOURCE_HASH, + TelemetryVars.ATTR_KEY_EVENT_SOURCE_NAME, + TelemetryVars.ATTR_KEY_EVENT_SOURCE_TYPE, + TelemetryVars.ATTR_KEY_EVENT_TARGET_ID, + TelemetryVars.ATTR_KEY_EVENT_TARGET_HASH, + TelemetryVars.ATTR_KEY_EVENT_TARGET_NAME, + TelemetryVars.ATTR_KEY_EVENT_TARGET_TYPE, + TelemetryVars.ATTR_KEY_COMPONENT_APP, + TelemetryVars.ATTR_KEY_COMPONENT_HASH, + TelemetryVars.ATTR_KEY_COMPONENT_ID, + TelemetryVars.ATTR_KEY_COMPONENT_NAME, + TelemetryVars.ATTR_KEY_COMPONENT_TYPE, + ] + + def on_start(self, span: Span, parent_context: SpanContext) -> None: + """Propagates attributes from the parent span to the child span. + + Arguments: + span: The child span to which the attributes should be propagated. + parent_context: The context of the parent span. + + Returns: + None + """ + parent_span = get_current_span() + if parent_span is not None and parent_span.is_recording(): + for attribute in self.attributes_to_propagate: + if attribute in parent_span.attributes: + span.set_attribute(attribute, parent_span.attributes[attribute]) + + def on_end(self, span: Span) -> None: + """No-op method that does nothing when the span ends.""" + pass + + def shutdown(self) -> None: + """No-op method that does nothing when the span processor is shut down.""" + pass + + def force_flush(self, timeout_millis: int = 30000) -> None: + """No-op method that does nothing when the span processor is forced to flush.""" + pass + + +def extract_otel_context(protobuf_message: ProtoSpanContext) -> Context: + """Utility function to extract an OpenTelemetry context from a protobuf message. + + Arguments: + protobuf_message: The protobuf message containing the span context. + + Returns: + The OpenTelemetry context. 
+ """ + + carrier = {"traceparent": construct_traceparent(protobuf_message)} + return TraceContextTextMapPropagator().extract(carrier) + + +def construct_traceparent(protobuf_message: ProtoSpanContext) -> str: + """Utility function to construct a traceparent string from a protobuf message.""" + # Extract fields from the protobuf message + trace_id = protobuf_message.trace_id + span_id = protobuf_message.span_id + # flags = protobuf_message.flags + + # Convert trace_id and span_id from bytes to hex strings + trace_id_hex = trace_id.hex() + span_id_hex = span_id.hex() + + if protobuf_message.trace_state == "kf=1": + # Convert flags to a 2-digit hex string + flags_hex = f"{1:02x}" + else: + # Convert flags to a 2-digit hex string + flags_hex = f"{0:02x}" + + # Construct the traceparent string + traceparent = f"00-{trace_id_hex}-{span_id_hex}-{flags_hex}" + + return traceparent + + +def attach_event_attributes( + span: Span, + matched_event: Optional[MatchedEvent] = None, + event: Optional[Event] = None, +) -> None: + """Utility function to attach event attributes to a span. match_event or event + must be provided. If both are provided, the event attributes from the matched_event + will be attached to the span. + + Arguments: + span: The span to which the event attributes should be attached. + matched_event: The matched event containing the event attributes. + event: The event containing the event attributes. This is an optional + alternative to the matched_event. 
def attach_event_attributes(
    span: "Span",
    matched_event: "Optional[MatchedEvent]" = None,
    event: "Optional[Event]" = None,
) -> None:
    """Attach KubeFox event attributes to *span*.

    Either ``matched_event`` or ``event`` must be provided; when neither is
    given the call is a no-op. When both are provided, the explicit
    ``event`` takes precedence over ``matched_event.event`` (the original
    docstring claimed the opposite; this documents the actual behavior).

    Arguments:
        span: the span to which the event attributes should be attached.
        matched_event: matched event carrying the event plus its route id.
        event: an event, as an alternative to ``matched_event``.

    Returns:
        None
    """
    # BUGFIX: the original guard compared ``matched_event`` to None twice
    # (`matched_event is None and matched_event is None`), so a call with
    # only ``event`` set fell through and crashed on ``matched_event.event``.
    if matched_event is None and event is None:
        return

    event = event or matched_event.event

    # Set basic event attributes.
    span.set_attributes(
        attributes={
            TelemetryVars.ATTR_KEY_EVENT_ID: event.id,
            TelemetryVars.ATTR_KEY_EVENT_PARENT_ID: event.parent_id,
            TelemetryVars.ATTR_KEY_EVENT_TYPE: event.type,
            TelemetryVars.ATTR_KEY_EVENT_CATEGORY: event.category,
            TelemetryVars.ATTR_KEY_EVENT_TTL: event.ttl,
            # TODO: We do not have this data
            # TelemetryVars.ATTR_KEY_INSTANCE: ,
        }
    )

    if matched_event is not None:
        span.set_attributes(
            attributes={
                TelemetryVars.ATTR_KEY_ROUTE_ID: matched_event.route_id,
            }
        )

    # NOTE(review): protobuf sub-message fields are never None in Python;
    # these `is not None` guards likely always pass. If "field was set" is
    # the intent, `event.HasField(...)` should be used — TODO confirm.
    # Set context attributes if available.
    if event.context is not None:
        span.set_attributes(
            attributes={
                TelemetryVars.ATTR_KEY_EVENT_VIRTUAL_ENV: event.context.virtual_environment,
                TelemetryVars.ATTR_KEY_EVENT_APP_DEPLOYMENT: event.context.app_deployment,
                TelemetryVars.ATTR_KEY_EVENT_REL_MANIFEST: event.context.release_manifest,
                TelemetryVars.ATTR_KEY_PLATFORM: event.context.platform,
            }
        )

    # Set source attributes if available.
    if event.source is not None:
        span.set_attributes(
            attributes={
                TelemetryVars.ATTR_KEY_EVENT_SOURCE_ID: event.source.id,
                TelemetryVars.ATTR_KEY_EVENT_SOURCE_HASH: event.source.hash,
                TelemetryVars.ATTR_KEY_EVENT_SOURCE_NAME: event.source.name,
                TelemetryVars.ATTR_KEY_EVENT_SOURCE_TYPE: event.source.type,
            }
        )

    # Set target attributes if available.
    if event.target is not None:
        span.set_attributes(
            attributes={
                TelemetryVars.ATTR_KEY_EVENT_TARGET_ID: event.target.id,
                TelemetryVars.ATTR_KEY_EVENT_TARGET_HASH: event.target.hash,
                TelemetryVars.ATTR_KEY_EVENT_TARGET_NAME: event.target.name,
                TelemetryVars.ATTR_KEY_EVENT_TARGET_TYPE: event.target.type,
            }
        )

    # TODO: These should come from our own component definition:
    span.set_attributes(
        attributes={
            TelemetryVars.ATTR_KEY_COMPONENT_APP: event.target.app,
            TelemetryVars.ATTR_KEY_COMPONENT_HASH: event.target.hash,
            TelemetryVars.ATTR_KEY_COMPONENT_ID: event.target.id,
            TelemetryVars.ATTR_KEY_COMPONENT_NAME: event.target.name,
            TelemetryVars.ATTR_KEY_COMPONENT_TYPE: event.target.type,
        }
    )
def setup_trace_provder(
    component_id: str, component_hash: str, name: str, component: str
) -> None:
    """Configure the global tracer provider for this component.

    Installs a resource-tagged ``TracerProvider`` plus two span processors:
    the KubeFox attribute propagator (so child spans inherit event and
    component attributes) and a batching OTLP gRPC exporter.

    NOTE(review): the "provder" spelling is kept because renaming would
    break existing callers; consider adding a correctly spelled alias.

    Arguments:
        component_id: the ID of the component.
        component_hash: the hash of the component.
        name: the name of the component.
        component: the type of the component.

    Returns:
        None
    """
    service_name = "-".join((name, component, component_hash, component_id))
    resource = Resource(attributes={SERVICE_NAME: service_name})
    trace.set_tracer_provider(TracerProvider(resource=resource))

    provider = trace.get_tracer_provider()
    # Attribute propagation is registered first so its on_start runs before
    # the batch processor sees the span.
    provider.add_span_processor(AttributePropagatingSpanProcessor())
    provider.add_span_processor(BatchSpanProcessor(OTLPSpanExporter()))
"kubefox.event.source.id" +ATTR_KEY_EVENT_SOURCE_NAME = "kubefox.event.source.name" +ATTR_KEY_EVENT_SOURCE_TYPE = "kubefox.event.source.type" +ATTR_KEY_EVENT_TARGET_HASH = "kubefox.event.target.hash" +ATTR_KEY_EVENT_TARGET_ID = "kubefox.event.target.id" +ATTR_KEY_EVENT_TARGET_NAME = "kubefox.event.target.name" +ATTR_KEY_EVENT_TARGET_TYPE = "kubefox.event.target.type" +ATTR_KEY_EVENT_TTL = "kubefox.event.ttl" +ATTR_KEY_EVENT_TYPE = "kubefox.event.type" +ATTR_KEY_EVENT_VIRTUAL_ENV = "kubefox.event.context.virtual_environment" +ATTR_KEY_INSTANCE = "kubefox.instance" +ATTR_KEY_PLATFORM = "kubefox.platform" +ATTR_KEY_ROUTE_ID = "kubefox.route.id" + +# OTEL Attribute Keys +ATTR_KEY_SDK_LANG = "telemetry.sdk.language" # Required +ATTR_KEY_SDK_NAME = "telemetry.sdk.name" # Required +ATTR_KEY_SDK_VERSION = "telemetry.sdk.version" # Required +ATTR_KEY_SVC_NAME = "service.name" # Required + +ATTR_KEY_CLOUD_ACCOUNT_ID = "cloud.account.id" +ATTR_KEY_CLOUD_AZ = "cloud.availability_zone" +ATTR_KEY_CLOUD_PLATFORM = "cloud.platform" +ATTR_KEY_CLOUD_PROVIDER = "cloud.provider" +ATTR_KEY_CLOUD_REGION = "cloud.region" +ATTR_KEY_CLOUD_RESOURCE_ID = "cloud.resource_id" +ATTR_KEY_CODE_COLUMN = "code.column" +ATTR_KEY_CODE_FILEPATH = "code.filepath" +ATTR_KEY_CODE_FUNCTION = "code.function" +ATTR_KEY_CODE_LINE_NO = "code.lineno" +ATTR_KEY_CODE_NAMESPACE = "code.namespace" +ATTR_KEY_CODE_STACKTRACE = "code.stacktrace" +ATTR_KEY_CONTAINER_ARGS = "container.command_args" +ATTR_KEY_CONTAINER_COMMAND = "container.command" +ATTR_KEY_CONTAINER_ID = "container.id" +ATTR_KEY_CONTAINER_IMAGE_DIGEST = "container.image.repo_digests" +ATTR_KEY_CONTAINER_IMAGE_ID = "container.image.id" +ATTR_KEY_CONTAINER_IMAGE_NAME = "container.image.name" +ATTR_KEY_CONTAINER_NAME = "container.name" +ATTR_KEY_ERR_TYPE = "error.type" +ATTR_KEY_EXCEPTION_MSG = "exception.message" +ATTR_KEY_EXCEPTION_STACKTRACE = "exception.stacktrace" +ATTR_KEY_EXCEPTION_TYPE = "exception.type" +ATTR_KEY_GRAPHQL_DOCUMENT = 
"graphql.document" +ATTR_KEY_GRAPHQL_OP_NAME = "graphql.operation.name" +ATTR_KEY_GRAPHQL_OP_TYPE = "graphql.operation.type" # query, mutation, subscription +ATTR_KEY_GRPC_STATUS_CODE = "rpc.grpc.status_code" +ATTR_KEY_HTTP_REQ_BODY_SIZE = "http.request.body.size" +ATTR_KEY_HTTP_REQ_METHOD = "http.request.method" +ATTR_KEY_HTTP_RESP_BODY_SIZE = "http.response.body.size" +ATTR_KEY_HTTP_RESP_STATUS_CODE = "http.response.status_code" +ATTR_KEY_HTTP_ROUTE = "http.route" +ATTR_KEY_K8S_CLUSTER_ID = "k8s.cluster.uid" +ATTR_KEY_K8S_CLUSTER_NAME = "k8s.cluster.name" +ATTR_KEY_K8S_CONTAINER_NAME = "k8s.container.name" +ATTR_KEY_K8S_CONTAINER_RESTART = "k8s.container.restart_count" +ATTR_KEY_K8S_NAMESPACE = "k8s.namespace.name" +ATTR_KEY_K8S_NODE_ID = "k8s.node.uid" +ATTR_KEY_K8S_NODE_NAME = "k8s.node.name" +ATTR_KEY_K8S_POD_ID = "k8s.pod.uid" +ATTR_KEY_K8S_POD_NAME = "k8s.pod.name" +ATTR_KEY_MSG_ID = "message.id" +ATTR_KEY_MSG_SIZE = "message.uncompressed_size" +ATTR_KEY_MSG_TYPE = "message.type" # SENT, RECEIVED +ATTR_KEY_NETWORK_PROTOCOL = "network.protocol.name" +ATTR_KEY_OCI_MANIFEST_DIGEST = "oci.manifest.digest" +ATTR_KEY_OTEL_STATUS_CODE = "otel.status_code" # OK, ERROR +ATTR_KEY_OTEL_STATUS_DESCRIPTION = "otel.status_description" +ATTR_KEY_SVC_INSTANCE_ID = "service.instance.id" +ATTR_KEY_SVC_NAMESPACE = "service.namespace" +ATTR_KEY_SVC_VERSION = "service.version" +ATTR_KEY_THREAD_ID = "thread.id" +ATTR_KEY_THREAD_NAME = "thread.name" +ATTR_KEY_URL_FULL = "url.full" +ATTR_KEY_URL_PATH = "url.path" +ATTR_KEY_URL_QUERY = "url.query" +ATTR_KEY_URL_SCHEME = "url.scheme" +ATTR_KEY_USER_AGENT = "user_agent.original" + +# Event names +EVENT_NAME_EXCEPTION = "exception" diff --git a/kit/utils/__init__.py b/kit/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/kit/utils/utils.py b/kit/utils/utils.py new file mode 100644 index 0000000..4d9aca1 --- /dev/null +++ b/kit/utils/utils.py @@ -0,0 +1,81 @@ +import os +import re +import struct + 
# Characters disallowed in a KubeFox resource name (lowercase DNS-label style).
RegexpNameSpecialChar = re.compile(r"[^a-z0-9]")
# Label sanitization: disallowed chars, and non-alphanumeric prefix/suffix runs.
RegexpLabelSpecialChar = re.compile(r"[^a-z0-9A-Z-_\.]")
RegexpLabelPrefix = re.compile(r"^[^a-z0-9A-Z]*")
RegexpLabelSuffix = re.compile(r"[^a-z0-9A-Z-_\.]*[^a-z0-9A-Z]*$")

# Strings (compared case-insensitively) that resolve_flag_bool treats as True.
_TRUTHY_VALUES = {"1", "t", "true", "y", "yes", "on"}


def resolve_flag(curr, env_var, def_val):
    """Return ``curr`` if truthy; otherwise the env var value or ``def_val``."""
    return curr if curr else os.getenv(env_var, def_val)


def resolve_flag_bool(curr, env_var, def_val):
    """Resolve a boolean flag: ``curr`` wins when it differs from ``def_val``,
    otherwise the environment variable (parsed as a boolean) or ``def_val``.

    BUGFIX: the original applied ``bool()`` to the raw env string, so any
    non-empty value — including "false" and "0" — resolved to True. Env
    values are now parsed against _TRUTHY_VALUES.
    """
    if curr != def_val:
        return curr
    raw = os.getenv(env_var)
    if raw is None:
        return def_val
    return raw.strip().lower() in _TRUTHY_VALUES


def resolve_flag_int(curr, env_var, def_val):
    """Resolve an int flag: ``curr`` wins when it differs from ``def_val``,
    otherwise ``int(env value or def_val)``."""
    return curr if curr != def_val else int(os.getenv(env_var, def_val))


def check_required_flag(n, p):
    """Raise ValueError when required flag named ``n`` has falsy value ``p``."""
    if not p:
        raise ValueError(f'The flag "{n}" is required.')


def env_def(name, def_val):
    """Return environment variable ``name``, or ``def_val`` when unset.

    BUGFIX: dropped a stray ``@staticmethod`` decorator — on a module-level
    function it is meaningless and made the function non-callable on
    Python < 3.10.
    """
    return os.getenv(name, def_val)


def uint_to_byte_array(i):
    """Pack ``i`` as an 8-byte unsigned integer.

    NOTE(review): uses native byte order ("Q"); if these bytes ever cross
    machine boundaries a fixed order ("<Q" or "!Q") is needed — TODO confirm.
    """
    return struct.pack("Q", i)


def byte_array_to_uint(b):
    """Inverse of uint_to_byte_array (native byte order)."""
    return struct.unpack("Q", b)[0]


def short_hash(incoming_hash):
    """Return the first 7 characters of ``incoming_hash``.

    Returns "" when the input is shorter than 7 characters (existing
    behavior, preserved for callers).
    """
    return incoming_hash[:7] if len(incoming_hash) >= 7 else ""


def first(*strs):
    """Return the first truthy string, or "" when none is truthy.

    BUGFIX: dropped a stray ``@staticmethod`` decorator (see env_def).
    """
    return next((s for s in strs if s), "")


def clean_name(name):
    """Normalize ``name`` into a DNS-label-style identifier.

    Takes the path basename, lowercases it, replaces runs of characters
    outside [a-z0-9] with "-", trims leading/trailing "-", and truncates
    to 63 characters (the Kubernetes name limit).
    """
    cleaned = os.path.basename(name).lower()
    cleaned = RegexpNameSpecialChar.sub("-", cleaned).strip("-")
    return cleaned[:63]


def is_valid_name(name):
    """True when ``name`` is already in canonical clean_name form."""
    return name == clean_name(name)


def clean_label(value):
    """Sanitize a label value: strip path components, replace disallowed
    characters with "-", and trim non-alphanumeric prefix/suffix runs."""
    cleaned = os.path.basename(str(value))
    cleaned = RegexpLabelSpecialChar.sub("-", cleaned)
    cleaned = RegexpLabelPrefix.sub("", cleaned)
    cleaned = RegexpLabelSuffix.sub("", cleaned)
    return cleaned


def join(sep, *elems):
    """Join the truthy elements with ``sep``, skipping empties and None."""
    return sep.join(filter(None, elems))


def set_bit(n, pos):
    """Return ``n`` with bit ``pos`` set."""
    return n | (1 << pos)


def clear_bit(n, pos):
    """Return ``n`` with bit ``pos`` cleared."""
    return n & ~(1 << pos)


def has_bit(n, pos):
    """True when bit ``pos`` of ``n`` is set."""
    return (n & (1 << pos)) > 0
-23,20 +21,22 @@ classifiers = [options] python_requires = >=3.8 - +packages = find: install_requires = - protobuf~=4.25.3 - opentelemetry-proto~=1.25.0 - dataclasses-json~=0.6.7 - grpcio~=1.64.1 -; tests_require = + opentelemetry-api>=1.27.0 + opentelemetry-exporter-otlp-proto-grpc>=1.27.0 + dataclasses-json>=0.6.7 + grpcio>=1.64.1 +# tests_require = [options.extras_require] dev = black + isort + pre-commit -; tests = +# tests = [options.package_data] kit = - kit + kit*