From 1e067565620b777ea0de390e050960dbe1a47d92 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?=
Date: Tue, 25 Feb 2025 13:39:09 +0100
Subject: [PATCH 1/6] RHOAIENG-18848: chore(tests/containers): add
 openshift-python-wrapper dependency

---
 poetry.lock    | 1422 ++++++++++++++++++++++++++++++++++++++++++++++--
 pyproject.toml |    2 +
 2 files changed, 1383 insertions(+), 41 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index bef1f570e..c8b4a3c76 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -44,6 +44,22 @@ files = [
     {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
 ]
 
+[[package]]
+name = "asttokens"
+version = "3.0.0"
+description = "Annotate AST trees with source code positions"
+optional = false
+python-versions = ">=3.8"
+groups = ["dev"]
+files = [
+    {file = "asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2"},
+    {file = "asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7"},
+]
+
+[package.extras]
+astroid = ["astroid (>=2,<4)"]
+test = ["astroid (>=2,<4)", "pytest", "pytest-cov", "pytest-xdist"]
+
 [[package]]
 name = "attrs"
 version = "25.1.0"
@@ -64,6 +80,57 @@ docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphi
 tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
 tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
 
+[[package]]
+name = "bcrypt"
+version = "4.2.1"
+description = "Modern password hashing for your software and your servers"
+optional = false
+python-versions = ">=3.7"
+groups = ["dev"]
+files = [
+    {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"},
+    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"},
+    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"},
+    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"},
+    {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"},
+    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"},
+    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"},
+    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"},
+    {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"},
+    {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash =
"sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, + {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, + {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, + {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, + {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, + {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, +] + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + [[package]] name = "certifi" version = "2024.12.14" @@ -76,6 +143,86 @@ files = [ {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = 
"cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = 
"cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.4.1" @@ -178,6 +325,36 @@ files = [ {file = "charset_normalizer-3.4.1.tar.gz", hash = 
"sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "cloup" +version = "3.0.6" +description = "Adds features to Click: option groups, constraints, subcommand sections and help themes." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "cloup-3.0.6-py2.py3-none-any.whl", hash = "sha256:ce46566204318e32b3e02d97d5e89bcaa9d9c3d0b8f94d1d060bc8b7a6be9179"}, + {file = "cloup-3.0.6.tar.gz", hash = "sha256:7a43e1b611b9f1e9cb3e1e0e02247154cb530df3d909fa184e377cdee6834b98"}, +] + +[package.dependencies] +click = ">=8.0,<9.0" + [[package]] name = "colorama" version = "0.4.6" @@ -185,12 +362,191 @@ description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev"] -markers = "sys_platform == \"win32\"" +markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "colorlog" +version = "6.9.0" +description = "Add colours to the output of Python's logging module." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "colorlog-6.9.0-py3-none-any.whl", hash = "sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff"}, + {file = "colorlog-6.9.0.tar.gz", hash = "sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "coverage" +version = "7.6.12" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = 
"coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = 
"sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, +] + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "cryptography" +version = "44.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["dev"] +files = [ + {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, + {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, + {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, + {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = 
"sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, + {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, + {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, + {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "decorator" +version = "5.2.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a"}, + {file = "decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360"}, +] + +[[package]] +name = "deepdiff" +version = "8.2.0" +description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "deepdiff-8.2.0-py3-none-any.whl", hash = "sha256:5091f2cdfd372b1b9f6bfd8065ba323ae31118dc4e42594371b38c8bea3fd0a4"}, + {file = "deepdiff-8.2.0.tar.gz", hash = "sha256:6ec78f65031485735545ffbe7a61e716c3c2d12ca6416886d5e9291fc76c46c3"}, +] + +[package.dependencies] +orderly-set = ">=5.3.0,<6" + +[package.extras] +cli = ["click (==8.1.8)", "pyyaml (==6.0.2)"] +optimize = ["orjson"] + [[package]] name = "docker" version = "7.1.0" @@ -214,6 +570,58 @@ docs = ["myst-parser (==0.18.0)", "sphinx (==5.1.1)"] ssh = ["paramiko (>=2.4.3)"] websockets = ["websocket-client (>=1.3.0)"] +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + +[[package]] +name = "executing" +version = "2.2.0" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa"}, + {file = "executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] + +[[package]] +name = "google-auth" +version = "2.38.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "google_auth-2.38.0-py2.py3-none-any.whl", hash = "sha256:e7dae6694313f434a2727bf2906f27ad259bae090d7aa896590d86feec3d9d4a"}, + {file = "google_auth-2.38.0.tar.gz", hash = "sha256:8285113607d3b80a3f1543b75962447ba8a09fe85783432a784fdeef6ac094c4"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + [[package]] name = "idna" version = "3.10" @@ -222,74 +630,542 @@ optional = false python-versions = ">=3.6" groups = ["dev"] files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + 
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipdb" +version = "0.13.13" +description = "IPython-enabled pdb" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] +files = [ + {file = "ipdb-0.13.13-py3-none-any.whl", hash = "sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4"}, + {file = "ipdb-0.13.13.tar.gz", hash = "sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"}, +] + +[package.dependencies] +decorator = {version = "*", markers = "python_version >= \"3.11\""} +ipython = {version = ">=7.31.1", markers = "python_version >= \"3.11\""} + +[[package]] +name = "ipython" +version = "8.32.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "ipython-8.32.0-py3-none-any.whl", hash = "sha256:cae85b0c61eff1fc48b0a8002de5958b6528fa9c8defb1894da63f42613708aa"}, + {file = "ipython-8.32.0.tar.gz", hash = "sha256:be2c91895b0b9ea7ba49d33b23e2040c352b33eb6a519cca7ce6e0c743444251"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt_toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack_data = "*" +traitlets = ">=5.13.0" + +[package.extras] +all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx_registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli ; python_version < \"3.11\"", "typing_extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "jedi" +version = "0.19.2" +description = "An autocompletion tool for Python that can be used for text editors." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[package.dependencies] +parso = ">=0.8.4,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<9.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kubernetes" +version = "31.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +durationpy = ">=0.7" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file 
= "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "netaddr" +version = "1.3.0" +description = "A network address manipulation library for Python" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "netaddr-1.3.0-py3-none-any.whl", hash = "sha256:c2c6a8ebe5554ce33b7d5b3a306b71bbb373e000bbbf2350dd5213cc56e3dbbe"}, + {file = "netaddr-1.3.0.tar.gz", hash = "sha256:5c3c3d9895b551b763779ba7db7a03487dc1f8e3b385af819af341ae9ef6e48a"}, +] + +[package.extras] +nicer-shell = ["ipython"] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "openshift-python-wrapper" +version = "11.0.31" +description = "Wrapper around https://github.com/kubernetes-client/python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "openshift_python_wrapper-11.0.31.tar.gz", hash = "sha256:dc4a06805bfd639a2079fcc6ffa6cc9ef42e74a930680e86e7c540f1ddb6195c"}, +] + +[package.dependencies] +click = ">=8.1.7" +cloup = ">=3.0.5" +colorlog = ">=6.8.2" +deepdiff = ">=8.0.1" +jinja2 = ">=3.1.4" +kubernetes = ">=31.0.0" +packaging = ">=24.1" +pyhelper-utils = ">=0.0.42" +pytest = ">=8.3.3" +pytest-cov = ">=6.0.0" +python-benedict = ">=0.33.2" +python-simple-logger = ">=1.0.40" +requests = ">=2.32.3" +rich = ">=13.9.2" +ruff = ">=0.6.9" +testcontainers = ">=4.9.0" +timeout-sampler = ">=0.0.46" +xmltodict = ">=0.13.0" + +[[package]] +name = "orderly-set" +version = "5.3.0" +description = "Orderly set" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "orderly_set-5.3.0-py3-none-any.whl", hash = "sha256:c2c0bfe604f5d3d9b24e8262a06feb612594f37aa3845650548befd7772945d1"}, + {file = "orderly_set-5.3.0.tar.gz", hash = "sha256:80b3d8fdd3d39004d9aad389eaa0eab02c71f0a0511ba3a6d54a935a6c6a0acc"}, +] + +[[package]] +name = "packaging" +version = "24.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + 
{file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "paramiko" +version = "3.5.1" +description = "SSH2 protocol library" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "paramiko-3.5.1-py3-none-any.whl", hash = "sha256:43b9a0501fc2b5e70680388d9346cf252cfb7d00b0667c39e80eb43a408b8f61"}, + {file = "paramiko-3.5.1.tar.gz", hash = "sha256:b2c665bc45b2b215bd7d7f039901b14b067da00f3a11e6640995fd58f2664822"}, +] + +[package.dependencies] +bcrypt = ">=3.2" +cryptography = ">=3.3" +pynacl = ">=1.5" + +[package.extras] +all = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "invoke (>=2.0)", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +gssapi = ["gssapi (>=1.4.1) ; platform_system != \"Windows\"", "pyasn1 (>=0.1.7)", "pywin32 (>=2.1.8) ; platform_system == \"Windows\""] +invoke = ["invoke (>=2.0)"] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pbr" +version = "6.1.1" +description = "Python Build Reasonableness" +optional = false +python-versions = ">=2.6" +groups = ["dev"] +files = [ + {file = "pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76"}, + {file = "pbr-6.1.1.tar.gz", hash = "sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." 
+optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "podman" +version = "5.4.0.1" +description = "Bindings for Podman RESTful API" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "podman-5.4.0.1-py3-none-any.whl", hash = "sha256:abd32e49a66bf18a680d9a0ac3989a3f4a3cc7293bc2a5060653276d8ee712f4"}, + {file = "podman-5.4.0.1.tar.gz", hash = "sha256:ee537aaa44ba530fad7cd939d886a7632f9f7018064e7831e8cb614c54cb1789"}, +] + +[package.dependencies] +requests = ">=2.24" +urllib3 = "*" + +[package.extras] +docs = ["sphinx"] +progress-bar = ["rich (>=12.5.1)"] +test = ["coverage", "fixtures", "pytest", "requests-mock", "tox"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.50" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "prompt_toolkit-3.0.50-py3-none-any.whl", hash = "sha256:9b6427eb19e479d98acff65196a307c555eb567989e6d88ebbb1b509d9779198"}, + {file = "prompt_toolkit-3.0.50.tar.gz", hash = "sha256:544748f3860a2623ca5cd6d2795e7a14f3d0e1c3c9728359013f79877fc89bab"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +groups = ["dev"] +markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - [[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" +name = "pure-eval" +version = "0.2.3" +description = "Safely evaluate AST nodes without side effects" optional = false -python-versions = ">=3.7" +python-versions = "*" groups = ["dev"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, ] +[package.extras] 
+tests = ["pytest"] + [[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" +name = "pyasn1-modules" +version = "0.4.1" +description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, + {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" [[package]] -name = "podman" -version = "5.4.0.1" -description = "Bindings for Podman RESTful API" +name = "pycparser" +version = "2.22" +description = "C parser in Python" optional = false -python-versions = ">=3.9" +python-versions = ">=3.8" groups = ["dev"] files = [ - {file = "podman-5.4.0.1-py3-none-any.whl", hash = "sha256:abd32e49a66bf18a680d9a0ac3989a3f4a3cc7293bc2a5060653276d8ee712f4"}, - {file = "podman-5.4.0.1.tar.gz", hash = "sha256:ee537aaa44ba530fad7cd939d886a7632f9f7018064e7831e8cb614c54cb1789"}, + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, ] -[package.dependencies] -requests = ">=2.24" -urllib3 = "*" - -[package.extras] -docs = ["sphinx"] -progress-bar = ["rich (>=12.5.1)"] -test = ["coverage", "fixtures", "pytest", "requests-mock", "tox"] - [[package]] name = "pydantic" version = "2.10.6" @@ -436,6 +1312,66 @@ files = [ {file = "pyfakefs-5.7.4.tar.gz", hash = "sha256:4971e65cc80a93a1e6f1e3a4654909c0c493186539084dc9301da3d68c8878fe"}, ] +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyhelper-utils" +version = "1.0.11" +description = "Collective utility functions for python projects" +optional = false +python-versions = "~=3.9" +groups = ["dev"] +files = [ + {file = "pyhelper_utils-1.0.11.tar.gz", hash = "sha256:53d461490821ba73d54946be6b79f2fc4ff1eaa3b4218b0afbde77aa78b44c74"}, +] + +[package.dependencies] +ipdb = ">=0.13.13,<0.14" +python-rrmngmnt = ">=0.1.32,<0.2" +python-simple-logger = ">=2.0.0,<3" +requests = ">=2.31.0,<3" +rich = ">=13.7.1,<14" + +[[package]] +name = "pynacl" +version = "1.5.0" +description = "Python binding to the Networking and Cryptography (NaCl) library" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d"}, + {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b"}, + {file = "PyNaCl-1.5.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:61f642bf2378713e2c2e1de73444a3778e5f0a38be6fee0fe532fe30060282ff"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win32.whl", hash = "sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543"}, + {file = "PyNaCl-1.5.0-cp36-abi3-win_amd64.whl", hash = "sha256:20f42270d27e1b6a29f54032090b972d97f0a1b0948cc52392041ef7831fee93"}, + {file = "PyNaCl-1.5.0.tar.gz", hash = "sha256:8ac7448f09ab85811607bdd21ec2464495ac8b7c66d146bf545b0f08fb9220ba"}, +] + +[package.dependencies] +cffi = ">=1.4.1" + +[package.extras] +docs = ["sphinx (>=1.6.5)", "sphinx-rtd-theme"] +tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] + [[package]] name = "pytest" version = "8.3.4" @@ -457,6 +1393,25 @@ pluggy = ">=1.5,<2" [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "6.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "pytest-subtests" version = "0.14.1" @@ -473,6 +1428,49 @@ files = [ attrs = ">=19.2.0" pytest = ">=7.4" +[[package]] +name = "python-benedict" +version = "0.34.1" +description = "python-benedict is a dict subclass with keylist/keypath/keyattr support, normalized I/O operations (base64, csv, ini, json, pickle, plist, query-string, toml, xls, xml, yaml) and many utilities... for humans, obviously." +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "python_benedict-0.34.1-py3-none-any.whl", hash = "sha256:f09711ca2d8e716ca40440f1e26dfa3ccff15e08f1baa88de594fbba799cf87a"}, + {file = "python_benedict-0.34.1.tar.gz", hash = "sha256:0a81784c826c8983c485bb647ed5e6c8fad139c3921ea1844b21e41da0ff0dc0"}, +] + +[package.dependencies] +python-fsutil = ">=0.9.3,<1.0.0" +python-slugify = ">=7.0.0,<9.0.0" +requests = ">=2.26.0,<3.0.0" + +[package.extras] +all = ["python-benedict[io,parse,s3]"] +html = ["beautifulsoup4 (>=4.12.0,<5.0.0)", "python-benedict[xml]"] +io = ["python-benedict[html,toml,xls,xml,yaml]"] +parse = ["ftfy (>=6.0.0,<7.0.0)", "mailchecker (>=4.1.0,<7.0.0)", "phonenumbers (>=8.12.0,<9.0.0)", "python-dateutil (>=2.8.0,<3.0.0)"] +s3 = ["boto3 (>=1.24.89,<2.0.0)"] +toml = ["toml (>=0.10.2,<1.0.0)"] +xls = ["openpyxl (>=3.0.0,<4.0.0)", "xlrd (>=2.0.0,<3.0.0)"] +xml = ["xmltodict (>=0.12.0,<1.0.0)"] +yaml = ["pyyaml (>=6.0,<7.0)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "python-dotenv" version = "1.0.1" @@ -488,6 +1486,67 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-fsutil" +version = "0.15.0" +description = "high-level file-system operations for lazy devs." 
+optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "python_fsutil-0.15.0-py3-none-any.whl", hash = "sha256:8ae31def522916e35caf67723b8526fe6e5fcc1e160ea2dc23c845567708ca6e"}, + {file = "python_fsutil-0.15.0.tar.gz", hash = "sha256:b51d8ab7ee218314480ea251fff7fef513be4fbccfe72a5af4ff2954f8a4a2c4"}, +] + +[[package]] +name = "python-rrmngmnt" +version = "0.1.32" +description = "Tool to manage remote systems and services" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "python-rrmngmnt-0.1.32.zip", hash = "sha256:0953f9e9e3911d4a310282513261ed7572fbc269f78c8e3d0680f84ba9955526"}, +] + +[package.dependencies] +netaddr = "*" +paramiko = "*" +pbr = "*" +six = "*" + +[[package]] +name = "python-simple-logger" +version = "2.0.7" +description = "A simple logger for python" +optional = false +python-versions = "~=3.9" +groups = ["dev"] +files = [ + {file = "python_simple_logger-2.0.7.tar.gz", hash = "sha256:aae25a76ebd40e8a0bf96bcd001e5ae52190bc7bb467f1c549df3a65ceabaee8"}, +] + +[package.dependencies] +colorlog = ">=6.7.0,<7" + +[[package]] +name = "python-slugify" +version = "8.0.4" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, + {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, +] + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + [[package]] name = "pywin32" version = "308" @@ -517,6 +1576,69 @@ files = [ {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + 
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = 
"PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + [[package]] name = "requests" version = "2.32.3" @@ -539,6 +1661,140 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +groups = ["dev"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +groups = ["dev"] +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruff" +version = "0.9.7" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.9.7-py3-none-linux_armv6l.whl", hash = "sha256:99d50def47305fe6f233eb8dabfd60047578ca87c9dcb235c9723ab1175180f4"}, + {file = "ruff-0.9.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d59105ae9c44152c3d40a9c40d6331a7acd1cdf5ef404fbe31178a77b174ea66"}, + {file = "ruff-0.9.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f313b5800483770bd540cddac7c90fc46f895f427b7820f18fe1822697f1fec9"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:042ae32b41343888f59c0a4148f103208bf6b21c90118d51dc93a68366f4e903"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87862589373b33cc484b10831004e5e5ec47dc10d2b41ba770e837d4f429d721"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a17e1e01bee0926d351a1ee9bc15c445beae888f90069a6192a07a84af544b6b"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7c1f880ac5b2cbebd58b8ebde57069a374865c73f3bf41f05fe7a179c1c8ef22"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e63fc20143c291cab2841dbb8260e96bafbe1ba13fd3d60d28be2c71e312da49"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91ff963baed3e9a6a4eba2a02f4ca8eaa6eba1cc0521aec0987da8d62f53cbef"}, + {file = "ruff-0.9.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88362e3227c82f63eaebf0b2eff5b88990280fb1ecf7105523883ba8c3aaf6fb"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0372c5a90349f00212270421fe91874b866fd3626eb3b397ede06cd385f6f7e0"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:d76b8ab60e99e6424cd9d3d923274a1324aefce04f8ea537136b8398bbae0a62"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0c439bdfc8983e1336577f00e09a4e7a78944fe01e4ea7fe616d00c3ec69a3d0"}, + {file = "ruff-0.9.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:115d1f15e8fdd445a7b4dc9a30abae22de3f6bcabeb503964904471691ef7606"}, + {file = "ruff-0.9.7-py3-none-win32.whl", hash = "sha256:e9ece95b7de5923cbf38893f066ed2872be2f2f477ba94f826c8defdd6ec6b7d"}, + {file = "ruff-0.9.7-py3-none-win_amd64.whl", hash = "sha256:3770fe52b9d691a15f0b87ada29c45324b2ace8f01200fb0c14845e499eb0c2c"}, + {file = "ruff-0.9.7-py3-none-win_arm64.whl", hash = "sha256:b075a700b2533feb7a01130ff656a4ec0d5f340bb540ad98759b8401c32c2037"}, + {file = "ruff-0.9.7.tar.gz", hash = "sha256:643757633417907510157b206e490c3aa11cab0c087c912f60e07fbafa87a4c6"}, +] + +[[package]] +name = "setuptools" +version = "75.8.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["dev"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + 
{file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + [[package]] name = "testcontainers" version = "4.9.1" @@ -593,6 +1849,49 @@ test-module-import = ["httpx"] trino = ["trino"] weaviate = ["weaviate-client (>=4.5.4,<5.0.0)"] +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + +[[package]] +name = "timeout-sampler" +version = "1.0.11" +description = "Timeout utility class to wait for any function output and interact with it in given time" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "timeout_sampler-1.0.11.tar.gz", hash = "sha256:bbc238c61f5714961991cd3d9254285534f8c237f489380bb7becf7f1e3afa07"}, +] + +[package.dependencies] +pytest = ">=8.3.4" +python-simple-logger = ">=2.0.2" + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -623,6 +1922,35 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "wrapt" version = "1.17.2" @@ -712,7 +2040,19 @@ files = [ {file = "wrapt-1.17.2.tar.gz", hash = 
"sha256:41388e9d4d1522446fe79d3213196bd9e3b301a336965b9e27ca2788ebd122f3"}, ] +[[package]] +name = "xmltodict" +version = "0.14.2" +description = "Makes working with XML feel like you are working with JSON" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, +] + [metadata] lock-version = "2.1" python-versions = "~3.12" -content-hash = "29d9a9933892cb8c8d8d8c969a70acb157d49b748c93723afe20c25eab69297d" +content-hash = "65f64208a0c43d3192e9f4750be9618c4cee64507a26377cef5cb84608caed5b" diff --git a/pyproject.toml b/pyproject.toml index a1c32cedb..9271a4c3f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,8 @@ pyfakefs = "^5.7.4" testcontainers = "^4.9.1" docker = "^7.1.0" podman = "^5.2.0" +kubernetes = "^31.0.0" +openshift-python-wrapper = "^11.0.19" pydantic = "^2.10.6" requests = "^2.32.3" From 50db24f97b3c140a84378f098d1eba22e0e2dee7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?= Date: Tue, 21 Jan 2025 18:16:57 +0100 Subject: [PATCH 2/6] RHOAIENG-18848: chore(tests/containers): initial test that runs container on openshift/kubernetes * install local-path provisioner on kubernetes in github actions * more careful printing of pod status in case `containerStatuses == None` * sort out how we want to work with privileged/unprivileged client * only run the new test if we have kubernetes around * add pod waiting and port forwarding utils diff --git c/.github/workflows/build-notebooks-TEMPLATE.yaml i/.github/workflows/build-notebooks-TEMPLATE.yaml index 8a98aa21..13507b78 100644 --- c/.github/workflows/build-notebooks-TEMPLATE.yaml +++ i/.github/workflows/build-notebooks-TEMPLATE.yaml @@ -290,10 +290,10 @@ jobs: - name: Install deps run: poetry install --sync - - name: Run container tests (in PyTest) + - name: Run Testcontainers container tests (in PyTest) run: | set -Eeuxo pipefail - poetry run pytest --capture=fd tests/containers --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" + poetry run pytest --capture=fd tests/containers -m 'not openshift' --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" env: DOCKER_HOST: "unix:///var/run/podman/podman.sock" TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: "/var/run/podman/podman.sock" @@ -439,6 +439,16 @@ jobs: kubectl wait deployments --all --all-namespaces --for=condition=Available --timeout=100s kubectl wait pods --all --all-namespaces --for=condition=Ready --timeout=100s + - name: "Install local-path provisioner" + if: ${{ steps.have-tests.outputs.tests == 'true' }} + run: | + set -Eeuxo pipefail + kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.31/deploy/local-path-storage.yaml + kubectl wait deployments --all --namespace=local-path-storage --for=condition=Available --timeout=100s + # https://kubernetes.io/docs/tasks/administer-cluster/change-default-storage-class/ + kubectl get storageclass + kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}' + - name: "Run image tests" if: ${{ steps.have-tests.outputs.tests == 'true' }} run: python3 ci/cached-builds/make_test.py --target ${{ inputs.target }} @@ -449,6 +459,18 @@ jobs: # endregion + - name: Run OpenShift container tests (in PyTest) + if: ${{ steps.have-tests.outputs.tests == 
'true' }} + run: | + set -Eeuxo pipefail + poetry run pytest --capture=fd tests/containers -m 'openshift' --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" + env: + # TODO(jdanek): this Testcontainers stuff should not be necessary but currently it has to be there + DOCKER_HOST: "unix:///var/run/podman/podman.sock" + TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: "/var/run/podman/podman.sock" + # pulling the Ryuk container from docker.io introduces CI flakiness + TESTCONTAINERS_RYUK_DISABLED: "true" + # region Trivy vulnerability scan - name: Run Trivy vulnerability scanner diff --git c/README.md i/README.md index 961e5907..22703ac0 100644 --- c/README.md +++ i/README.md @@ -105,7 +105,7 @@ sudo dnf install podman systemctl --user start podman.service systemctl --user status podman.service systemctl --user status podman.socket -DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock poetry run pytest tests/containers --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock poetry run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 # Mac OS brew install podman @@ -113,7 +113,7 @@ podman machine init podman machine set --rootful sudo podman-mac-helper install podman machine start -poetry run pytest tests/containers --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +poetry run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 ``` When using lima on macOS, it might be useful to give yourself access to rootful podman socket diff --git c/pyproject.toml i/pyproject.toml index 9271a4c3..6440b123 100644 --- c/pyproject.toml +++ i/pyproject.toml @@ -8,6 +8,7 @@ package-mode = false [tool.poetry.dependencies] python = "~3.12" +requests = "^2.32.3" [tool.poetry.group.dev.dependencies] diff --git c/pytest.ini i/pytest.ini index 2b320d7a..aff25089 100644 --- c/pytest.ini +++ i/pytest.ini @@ -15,3 +15,5 @@ log_cli_level = INFO log_file = logs/pytest-logs.txt log_file_level = DEBUG + +markers = openshift diff --git c/tests/containers/base_image_test.py i/tests/containers/base_image_test.py index 03f3d9ae..b7e00498 100644 --- c/tests/containers/base_image_test.py +++ i/tests/containers/base_image_test.py @@ -11,12 +11,13 @@ import tempfile import textwrap from typing import TYPE_CHECKING, Any, Callable -import pytest import testcontainers.core.container import testcontainers.core.waiting_utils from tests.containers import docker_utils +import pytest + logging.basicConfig(level=logging.DEBUG) LOGGER = logging.getLogger(__name__) @@ -72,7 +73,8 @@ class TestBaseImage: if "not found" in line: unsatisfied_deps.append((dlib, line.strip())) assert output - print("OUTPUT>", json.dumps({"dir": path, "count_scanned": count_scanned, "unsatisfied": unsatisfied_deps})) + print("OUTPUT>", + json.dumps({"dir": path, "count_scanned": count_scanned, "unsatisfied": unsatisfied_deps})) try: container.start() @@ -105,18 +107,7 @@ class TestBaseImage: with subtests.test(f"{dlib=}"): pytest.fail(f"{dlib=} has unsatisfied dependencies {deps=}") - def test_oc_command_runs(self, image: str): - container = testcontainers.core.container.DockerContainer(image=image, user=23456, group_add=[0]) - container.with_command("/bin/sh -c 'sleep 
infinity'") - try: - container.start() - ecode, output = container.exec(["/bin/sh", "-c", "oc version"]) - finally: - docker_utils.NotebookContainer(container).stop(timeout=0) - - logging.debug(output.decode()) - assert ecode == 0 - + # @pytest.mark.environmentss("docker") def test_oc_command_runs_fake_fips(self, image: str, subtests: pytest_subtests.SubTests): """Establishes a best-effort fake FIPS environment and attempts to execute `oc` binary in it. @@ -190,7 +181,8 @@ class TestBaseImage: docker_utils.NotebookContainer(container).stop(timeout=0) -def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> list[str]: +def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> \ + list[str]: """Returns a cli command that will run the given Python function encoded inline. All dependencies (imports, ...) must be part of function body.""" code = textwrap.dedent(inspect.getsource(function)) diff --git c/tests/containers/cancellation_token.py i/tests/containers/cancellation_token.py new file mode 100644 index 00000000..d7d62603 --- /dev/null +++ i/tests/containers/cancellation_token.py @@ -0,0 +1,37 @@ +import os +import threading + + +class CancellationToken: + """Flag to signal a thread it should cancel itself. + This cooperative cancellation pattern is commonly used in c# and go + See https://learn.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken?view=net-9.0 + """ + + def __init__(self): + # consider using the wrapt.synchronized decorator + # https://github.com/GrahamDumpleton/wrapt/blob/develop/blog/07-the-missing-synchronized-decorator.md + self._lock = threading.Lock() + self._canceled = False + # something selectable avoids having to use short timeout in select + self._read_fd, self._write_fd = os.pipe() + + def fileno(self): + """This lets us use the token in select() calls""" + return self._read_fd + + @property + def cancelled(self): + with self._lock: + return self._canceled + + def cancel(self): + with self._lock: + os.write(self._write_fd, b'x') + self._canceled = True + + def __del__(self): + # consider https://docs.python.org/3/library/weakref.html#weakref.finalize + with self._lock: + os.close(self._read_fd) + os.close(self._write_fd) diff --git c/tests/containers/kubernetes_utils.py i/tests/containers/kubernetes_utils.py new file mode 100644 index 00000000..66829829 --- /dev/null +++ i/tests/containers/kubernetes_utils.py @@ -0,0 +1,473 @@ +from __future__ import annotations + +import contextlib +import functools +import logging +import threading +import time +import traceback +import typing +import socket +from socket import socket +from typing import Any, Callable, Generator + +import requests + +import kubernetes +import kubernetes.dynamic.exceptions +import kubernetes.stream.ws_client +import kubernetes.dynamic.exceptions +import kubernetes.stream.ws_client +import kubernetes.client.api.core_v1_api +from kubernetes.dynamic import DynamicClient, ResourceField + +import ocp_resources.pod +import ocp_resources.deployment +import ocp_resources.service +import ocp_resources.persistent_volume_claim +import ocp_resources.project_request +import ocp_resources.namespace +import ocp_resources.project_project_openshift_io +import ocp_resources.deployment +import ocp_resources.resource +import ocp_resources.pod +import ocp_resources.namespace +import ocp_resources.project_project_openshift_io +import ocp_resources.project_request + +from 
tests.containers import socket_proxy + + +class TestFrameConstants: + GLOBAL_POLL_INTERVAL_MEDIUM = 10 + TIMEOUT_2MIN = 2 * 60 + + +logging.basicConfig(level=logging.DEBUG) +LOGGER = logging.getLogger(__name__) + + +# https://github.com/RedHatQE/openshift-python-wrapper/tree/main/examples + +def get_client() -> kubernetes.dynamic.DynamicClient: + try: + # client = kubernetes.dynamic.DynamicClient(client=kubernetes.config.new_client_from_config()) + # probably same as above + client = ocp_resources.resource.get_client() + return client + except kubernetes.config.ConfigException as e: + # probably bad config + logging.error(e) + except kubernetes.dynamic.exceptions.UnauthorizedError as e: + # wrong or expired credentials + logging.error(e) + except kubernetes.client.ApiException as e: + # unexpected, we catch unauthorized above + logging.error(e) + except Exception as e: + # unexpected error, assert here + logging.error(e) + + raise RuntimeError("Failed to instantiate client") + + +def get_username(client: kubernetes.dynamic.DynamicClient) -> str: + # can't just access + # > client.configuration.username + # because we normally auth using tokens, not username and password + + # this is what kubectl does (see kubectl -v8 auth whoami) + self_subject_review_resource: kubernetes.dynamic.Resource = client.resources.get( + api_version="authentication.k8s.io/v1", kind="SelfSubjectReview" + ) + self_subject_review: kubernetes.dynamic.ResourceInstance = client.create(self_subject_review_resource) + username: str = self_subject_review.status.userInfo.username + return username + + +class TestKubernetesUtils: + def test_get_username(self): + client = get_client() + username = get_username(client) + assert username is not None and len(username) > 0 + + +class TestFrame: + def __init__[T](self): + self.stack: list[tuple[T, Callable[[T], None] | None]] = [] + + def defer_resource[T: ocp_resources.resource.Resource](self, resource: T, wait=False, + destructor: Callable[[T], None] | None = None) -> T: + result = resource.deploy(wait=wait) + self.defer(resource, destructor) + return result + + def add[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: + self.defer(resource, destructor) + return resource + + def defer[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: + self.stack.append((resource, destructor)) + + def destroy(self, wait=False): + while self.stack: + resource, destructor = self.stack.pop() + if destructor is not None: + destructor(resource) + else: + resource.clean_up(wait=wait) + + def __enter__(self) -> TestFrame: + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.destroy(wait=True) + + +class ImageDeployment: + def __init__(self, client: kubernetes.dynamic.DynamicClient, image: str): + self.client = client + self.image = image + self.tf = TestFrame() + + def __enter__(self) -> ImageDeployment: + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.tf.destroy() + + def deploy(self, container_name: str) -> None: + LOGGER.debug(f"Deploying {self.image}") + # custom namespace is necessary, because we cannot assign a SCC to pods created in one of the default namespaces: + # default, kube-system, kube-public, openshift-node, openshift-infra, openshift. 
+ # https://docs.openshift.com/container-platform/4.17/authentication/managing-security-context-constraints.html#role-based-access-to-ssc_configuring-internal-oauth + + # TODO(jdanek): sort out how we want to work with privileged/unprivileged client + # take inspiration from odh-tests + ns = create_namespace(privileged_client=True, name=f"test-ns-{container_name}") + self.tf.defer_resource(ns) + + pvc = ocp_resources.persistent_volume_claim.PersistentVolumeClaim( + name=container_name, + namespace=ns.name, + accessmodes=ocp_resources.persistent_volume_claim.PersistentVolumeClaim.AccessMode.RWO, + volume_mode=ocp_resources.persistent_volume_claim.PersistentVolumeClaim.VolumeMode.FILE, + size="1Gi", + ) + self.tf.defer_resource(pvc, wait=True) + deployment = ocp_resources.deployment.Deployment( + client=self.client, + name=container_name, + namespace=ns.name, + selector={"matchLabels": {"app": container_name}}, + replicas=1, + template={ + "metadata": { + "annotations": { + # This will result in the container spec having something like below, + # regardless of what kind of namespace this is being run in. + # Keep in mind that `default` is a privileged namespace and this annotation has no effect there. + # ``` + # spec: + # securityContext: + # seLinuxOptions: + # level: 's0:c34,c4' + # fsGroup: 1001130000 + # seccompProfile: + # type: RuntimeDefault + # ``` + "openshift.io/scc": "restricted-v2" + }, + "labels": { + "app": container_name, + } + }, + "spec": { + "containers": [ + { + "name": container_name, + "image": self.image, + # "command": ["/bin/sh", "-c", "while true ; do date; sleep 5; done;"], + "ports": [ + { + "containerPort": 8888, + "name": "notebook-port", + "protocol": "TCP", + } + ], + # rstudio will not start without its volume mount and it does not log the error for it + # See the testcontainers implementation of this (the tty=True part) + "volumeMounts": [ + { + "mountPath": "/opt/app-root/src", + "name": "my-workbench" + } + ], + }, + ], + "volumes": [ + { + "name": "my-workbench", + "persistentVolumeClaim": { + "claimName": container_name, + } + } + ] + } + } + ) + self.tf.defer_resource(deployment) + LOGGER.debug(f"Waiting for pods to become ready...") + PodUtils.wait_for_pods_ready(self.client, namespace_name=ns.name, label_selector=f"app={container_name}", + expect_pods_count=1) + + core_v1_api = kubernetes.client.api.core_v1_api.CoreV1Api(api_client=self.client.client) + pod_name: kubernetes.client.models.v1_pod_list.V1PodList = core_v1_api.list_namespaced_pod( + namespace=ns.name, + label_selector=f"app={container_name}" + ) + assert len(pod_name.items) == 1 + pod: kubernetes.client.models.v1_pod.V1Pod = pod_name.items[0] + + p = socket_proxy.SocketProxy(exposing_contextmanager(core_v1_api, pod), "localhost", 0) + t = threading.Thread(target=p.listen_and_serve_until_canceled) + t.start() + self.tf.defer(t, lambda thread: thread.join()) + self.tf.defer(p.cancellation_token, lambda token: token.cancel()) + + self.port = p.get_actual_port() + LOGGER.debug(f"Listening on port {self.port}") + resp = requests.get(f"http://localhost:{self.port}") + assert resp.status_code == 200 + LOGGER.debug(f"Done with portforward") + + +class PodUtils: + READINESS_TIMEOUT = TestFrameConstants.TIMEOUT_2MIN + + # consider using timeout_sampler + @staticmethod + def wait_for_pods_ready( + client: DynamicClient, namespace_name: str, label_selector: str, expect_pods_count: int + ) -> None: + """Wait for all pods in namespace to be ready + :param client: + :param namespace_name: name of the 
namespace + :param label_selector: + :param expect_pods_count: + """ + + # it's a dynamic client with the `resource` parameter already filled in + class ResourceType(kubernetes.dynamic.Resource, kubernetes.dynamic.DynamicClient): + pass + + resource: ResourceType = client.resources.get( + kind=ocp_resources.pod.Pod.kind, + api_version=ocp_resources.pod.Pod.api_version, + ) + + def ready() -> bool: + pods = resource.get(namespace=namespace_name, label_selector=label_selector).items + if not pods and expect_pods_count == 0: + logging.debug("All expected Pods %s in Namespace %s are ready", label_selector, namespace_name) + return True + if not pods: + logging.debug("Pods matching %s/%s are not ready", namespace_name, label_selector) + return False + if len(pods) != expect_pods_count: + logging.debug("Expected Pods %s/%s are not ready", namespace_name, label_selector) + return False + pod: ResourceField + for pod in pods: + if not Readiness.is_pod_ready(pod) and not Readiness.is_pod_succeeded(pod): + if not pod.status.containerStatuses: + pod_status = pod.status + else: + pod_status = {cs.name: cs.state for cs in pod.status.containerStatuses} + + logging.debug("Pod is not ready: %s/%s (%s)", + namespace_name, pod.metadata.name, pod_status) + return False + else: + # check all containers in pods are ready + for cs in pod.status.containerStatuses: + if not (cs.ready or cs.state.get("terminated", {}).get("reason", "") == "Completed"): + logging.debug( + f"Container {cs.getName()} of Pod {namespace_name}/{pod.metadata.name} not ready ({cs.state=})" + ) + return False + logging.info("Pods matching %s/%s are ready", namespace_name, label_selector) + return True + + Wait.until( + description=f"readiness of all Pods matching {label_selector} in Namespace {namespace_name}", + poll_interval=TestFrameConstants.GLOBAL_POLL_INTERVAL_MEDIUM, + timeout=PodUtils.READINESS_TIMEOUT, + ready=ready, + ) + + +class Wait: + @staticmethod + def until( + description: str, + poll_interval: float, + timeout: float, + ready: Callable[[], bool], + on_timeout: Callable[[], None] | None = None, + ) -> None: + """For every poll (happening once each {@code pollIntervalMs}) checks if supplier {@code ready} is true. + + If yes, the wait is closed. Otherwise, waits another {@code pollIntervalMs} and tries again. + Once the wait timeout (specified by {@code timeoutMs} is reached and supplier wasn't true until that time, + runs the {@code onTimeout} (f.e. print of logs, showing the actual value that was checked inside {@code ready}), + and finally throws {@link WaitException}. 
+ @param description information about on what we are waiting + @param pollIntervalMs poll interval in milliseconds + @param timeoutMs timeout specified in milliseconds + @param ready {@link BooleanSupplier} containing code, which should be executed each poll, + verifying readiness of the particular thing + @param onTimeout {@link Runnable} executed once timeout is reached and + before the {@link WaitException} is thrown.""" + logging.info("Waiting for: %s", description) + deadline = time.monotonic() + timeout + + exception_message: str | None = None + previous_exception_message: str | None = None + + # in case we are polling every 1s, we want to print exception after x tries, not on the first try + # for minutes poll interval will 2 be enough + exception_appearance_count: int = 2 if (poll_interval // 60) > 0 else max(int(timeout // poll_interval // 4), 2) + exception_count: int = 0 + new_exception_appearance: int = 0 + + stack_trace_error: str | None = None + + while True: + try: + result: bool = ready() + except KeyboardInterrupt: + raise # quick exit if the user gets tired of waiting + except Exception as e: + exception_message = str(e) + + exception_count += 1 + new_exception_appearance += 1 + if ( + exception_count == exception_appearance_count + and exception_message is not None + and exception_message == previous_exception_message + ): + logging.info(f"While waiting for: {description} exception occurred: {exception_message}") + # log the stacktrace + stack_trace_error = traceback.format_exc() + elif ( + exception_message is not None + and exception_message != previous_exception_message + and new_exception_appearance == 2 + ): + previous_exception_message = exception_message + + result = False + + time_left: float = deadline - time.monotonic() + if result: + return + if time_left <= 0: + if exception_count > 1: + logging.error("Exception waiting for: %s, %s", description, exception_message) + + if stack_trace_error is not None: + # printing handled stacktrace + logging.error(stack_trace_error) + if on_timeout is not None: + on_timeout() + wait_exception: WaitException = WaitException(f"Timeout after {timeout} s waiting for {description}") + logging.error(wait_exception) + raise wait_exception + + sleep_time: float = min(poll_interval, time_left) + time.sleep(sleep_time) # noqa: FCN001 + + +class WaitException(Exception): + pass + + +class Readiness: + @staticmethod + def is_pod_ready(pod: ResourceField) -> bool: + Utils.check_not_none(value=pod, message="Pod can't be null.") + + condition = ocp_resources.pod.Pod.Condition.READY + status = ocp_resources.pod.Pod.Condition.Status.TRUE + for cond in pod.get("status", {}).get("conditions", []): + if cond["type"] == condition and cond["status"].casefold() == status.casefold(): + return True + return False + + @staticmethod + def is_pod_succeeded(pod: ResourceField) -> bool: + Utils.check_not_none(value=pod, message="Pod can't be null.") + return pod.status is not None and "Succeeded" == pod.status.phase + + +class Utils: + @staticmethod + def check_not_none(value: Any, message: str) -> None: + if value is None: + raise ValueError(message) + + +@contextlib.contextmanager +def exposing_contextmanager( + core_v1_api: kubernetes.client.CoreV1Api, + pod: kubernetes.client.models.V1Pod +) -> Generator[socket, None, None]: + # If we e.g., specify the wrong port, the pf = portforward() call succeeds, + # but pf.connected will later flip to False + # we need to check that _everything_ works before moving on + pf = None + s = None + while not pf or 
not pf.connected or not s: + pf: kubernetes.stream.ws_client.PortForward = kubernetes.stream.portforward( + api_method=core_v1_api.connect_get_namespaced_pod_portforward, + name=pod.metadata.name, + namespace=pod.metadata.namespace, + ports=",".join(str(p) for p in [8888]), + ) + s: typing.Union[kubernetes.stream.ws_client.PortForward._Port._Socket, socket.socket] | None = pf.socket(8888) + assert s, "Failed to establish connection" + + try: + yield s + finally: + s.close() + pf.close() + + +@functools.wraps(ocp_resources.namespace.Namespace.__init__) +def create_namespace(privileged_client: bool = False, *args, + **kwargs) -> ocp_resources.project_project_openshift_io.Project: + if not privileged_client: + with ocp_resources.project_request.ProjectRequest(*args, **kwargs): + project = ocp_resources.project_project_openshift_io.Project(*args, **kwargs) + project.wait_for_status(status=project.Status.ACTIVE, timeout=TestFrameConstants.TIMEOUT_2MIN) + return project + else: + with ocp_resources.namespace.Namespace(*args, **kwargs) as ns: + ns.wait_for_status(status=ocp_resources.namespace.Namespace.Status.ACTIVE, + timeout=TestFrameConstants.TIMEOUT_2MIN) + return ns + + +__all__ = [ + get_client, + get_username, + exposing_contextmanager, + create_namespace, + PodUtils, + TestFrame, + TestFrameConstants, + ImageDeployment, +] diff --git c/tests/containers/socket_proxy.py i/tests/containers/socket_proxy.py new file mode 100644 index 00000000..77fc3485 --- /dev/null +++ i/tests/containers/socket_proxy.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import contextlib +import logging +import socket +import select +import threading +import subprocess +import typing + +from tests.containers.cancellation_token import CancellationToken + +"""Proxies kubernetes portforwards to a local port. + +This is implemented as a thread running select() loop and managing the sockets. + +There are alternative implementations for this. + +1) Run oc port-forward in a subprocess +* There isn't a nice way where kubectl would report in machine-readable way the + port number, https://github.com/kubernetes/kubectl/issues/1190#issuecomment-1075911615 +2) Use the socket as is, mount a custom adaptor to the requests library +* The code to do this is weird. This is what docker-py does w.r.t. the docker socket. + It defines a custom 'http+docker://' protocol, and an adaptor for it, that uses the docker socket. +3) Implement proxy using asyncio +* There are advantages to asyncio, but since we don't have Python asyncio anywhere else yet, + it is probably best to avoid using asyncio. + +Out of these, the oc port-forward subprocess is a decent alternative solution. 
+""" + +class SubprocessProxy: + # + def __init__(self, namespace: str, name: str, port: int): + self.namespace = namespace + self.name = name + self.port = port + + def start(self): + self.forwarder = subprocess.Popen( + ["kubectl", "port-forward", self.namespace, self.name], + text=True, + ) + self.forwarder.communicate() + + def stop(self): + self.forwarder.terminate() + + +class SocketProxy: + def __init__( + self, + remote_socket_factory: typing.ContextManager[socket.socket], + local_host: str = "localhost", + local_port: int = 0, + buffer_size: int = 4096 + ) -> None: + """ + + :param local_host: probably "localhost" would make most sense here + :param local_port: usually leave as to 0, which will make the OS choose a free port + :param remote_socket_factory: this is a context manager for kubernetes port forwarding + :param buffer_size: do not poke it, leave this at the default value + """ + self.local_host = local_host + self.local_port = local_port + self.buffer_size = buffer_size + self.remote_socket_factory = remote_socket_factory + + self.cancellation_token = CancellationToken() + + self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.server_socket.bind((self.local_host, self.local_port)) + self.server_socket.listen(1) + logging.info(f"Proxy listening on {self.local_host}:{self.local_port}") + + def listen_and_serve_until_canceled(self): + """Accepts the client, creates a new socket to the remote, and proxies the data. + + Handles at most one client at a time. """ + try: + while not self.cancellation_token.cancelled: + client_socket, addr = self.server_socket.accept() + logging.info(f"Accepted connection from {addr[0]}:{addr[1]}") + self._handle_client(client_socket) + except Exception as e: + logging.exception(f"Proxying failed to listen", exc_info=e) + raise + finally: + self.server_socket.close() + + def get_actual_port(self) -> int: + """Returns the port that the proxy is listening on. 
+ When port number 0 was passed in, this will return the actual randomly assigned port.""" + return self.server_socket.getsockname()[1] + + def _handle_client(self, client_socket): + with client_socket as _, self.remote_socket_factory as remote_socket: + while True: + readable, _, _ = select.select([client_socket, remote_socket, self.cancellation_token], [], []) + + if self.cancellation_token.cancelled: + break + + if client_socket in readable: + data = client_socket.recv(self.buffer_size) + if not data: + break + remote_socket.send(data) + + if remote_socket in readable: + data = remote_socket.recv(self.buffer_size) + if not data: + break + client_socket.send(data) + + +if __name__ == "__main__": + """Sample application to show how this can work.""" + + + @contextlib.contextmanager + def remote_socket_factory(): + class MockServer(threading.Thread): + def __init__(self, local_host: str = "localhost", local_port: int = 0): + self.local_host = local_host + self.local_port = local_port + + self.is_socket_bound = threading.Event() + + super().__init__() + + def run(self): + self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.server_socket.bind((self.local_host, self.local_port)) + self.server_socket.listen(1) + print(f"MockServer listening on {self.local_host}:{self.local_port}") + self.is_socket_bound.set() + + client_socket, addr = self.server_socket.accept() + logging.info(f"MockServer accepted connection from {addr[0]}:{addr[1]}") + + client_socket.send(b"Hello World\n") + client_socket.close() + + def get_actual_port(self): + self.is_socket_bound.wait() + return self.server_socket.getsockname()[1] + + server = MockServer() + server.start() + + client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + client_socket.connect(("localhost", server.get_actual_port())) + + yield client_socket + + client_socket.close() + server.join() + + + proxy = SocketProxy(remote_socket_factory(), "localhost", 0) + thread = threading.Thread(target=proxy.listen_and_serve_until_canceled) + thread.start() + + client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + client_socket.connect(("localhost", proxy.get_actual_port())) + + print(client_socket.recv(1024)) # prints Hello World + + thread.join() diff --git c/tests/containers/workbenches/workbench_image_test.py i/tests/containers/workbenches/workbench_image_test.py index cbfb7dae..892f775c 100644 --- c/tests/containers/workbenches/workbench_image_test.py +++ i/tests/containers/workbenches/workbench_image_test.py @@ -21,7 +21,7 @@ import testcontainers.core.waiting_utils import pytest import pytest_subtests -from tests.containers import docker_utils, podman_machine_utils +from tests.containers import docker_utils, podman_machine_utils, kubernetes_utils class TestWorkbenchImage: @@ -108,6 +108,18 @@ class TestWorkbenchImage: finally: docker_utils.NotebookContainer(container).stop(timeout=0) + @pytest.mark.openshift + def test_image_run_on_openshift(self, image: str): + skip_if_not_workbench_image(image) + + client = kubernetes_utils.get_client() + print(client) + + username = kubernetes_utils.get_username(client) + print(username) + + with kubernetes_utils.ImageDeployment(client, image) as image: + image.deploy(container_name="notebook-tests-pod") class WorkbenchContainer(testcontainers.core.container.DockerContainer): @functools.wraps(testcontainers.core.container.DockerContainer.__init__) --- .../workflows/build-notebooks-TEMPLATE.yaml | 
26 +- README.md | 4 +- poetry.lock | 12 +- pyproject.toml | 1 + pytest.ini | 2 + tests/containers/base_image_test.py | 22 +- tests/containers/cancellation_token.py | 37 ++ tests/containers/kubernetes_utils.py | 473 ++++++++++++++++++ tests/containers/socket_proxy.py | 173 +++++++ .../workbenches/workbench_image_test.py | 14 +- 10 files changed, 738 insertions(+), 26 deletions(-) create mode 100644 tests/containers/cancellation_token.py create mode 100644 tests/containers/kubernetes_utils.py create mode 100644 tests/containers/socket_proxy.py diff --git a/.github/workflows/build-notebooks-TEMPLATE.yaml b/.github/workflows/build-notebooks-TEMPLATE.yaml index 59cb6703f..0cc3f9db8 100644 --- a/.github/workflows/build-notebooks-TEMPLATE.yaml +++ b/.github/workflows/build-notebooks-TEMPLATE.yaml @@ -296,10 +296,10 @@ jobs: - name: Install deps run: poetry install --sync - - name: Run container tests (in PyTest) + - name: Run Testcontainers container tests (in PyTest) run: | set -Eeuxo pipefail - poetry run pytest --capture=fd tests/containers --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" + poetry run pytest --capture=fd tests/containers -m 'not openshift' --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" env: DOCKER_HOST: "unix:///var/run/podman/podman.sock" TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: "/var/run/podman/podman.sock" @@ -445,6 +445,16 @@ jobs: kubectl wait deployments --all --all-namespaces --for=condition=Available --timeout=100s kubectl wait pods --all --all-namespaces --for=condition=Ready --timeout=100s + - name: "Install local-path provisioner" + if: ${{ steps.have-tests.outputs.tests == 'true' }} + run: | + set -Eeuxo pipefail + kubectl apply -f https://raw.githubusercontent.com/rancher/local-path-provisioner/v0.0.31/deploy/local-path-storage.yaml + kubectl wait deployments --all --namespace=local-path-storage --for=condition=Available --timeout=100s + # https://kubernetes.io/docs/tasks/administer-cluster/change-default-storage-class/ + kubectl get storageclass + kubectl patch storageclass local-path -p '{"metadata": {"annotations":{"storageclass.kubernetes.io/is-default-class":"true"}}}' + - name: "Run image tests" if: ${{ steps.have-tests.outputs.tests == 'true' }} run: python3 ci/cached-builds/make_test.py --target ${{ inputs.target }} @@ -455,6 +465,18 @@ jobs: # endregion + - name: Run OpenShift container tests (in PyTest) + if: ${{ steps.have-tests.outputs.tests == 'true' }} + run: | + set -Eeuxo pipefail + poetry run pytest --capture=fd tests/containers -m 'openshift' --image="${{ steps.calculated_vars.outputs.OUTPUT_IMAGE }}" + env: + # TODO(jdanek): this Testcontainers stuff should not be necessary but currently it has to be there + DOCKER_HOST: "unix:///var/run/podman/podman.sock" + TESTCONTAINERS_DOCKER_SOCKET_OVERRIDE: "/var/run/podman/podman.sock" + # pulling the Ryuk container from docker.io introduces CI flakiness + TESTCONTAINERS_RYUK_DISABLED: "true" + # region Trivy vulnerability scan - name: Run Trivy vulnerability scanner diff --git a/README.md b/README.md index f11c3e602..6d14dfa33 100644 --- a/README.md +++ b/README.md @@ -105,7 +105,7 @@ sudo dnf install podman systemctl --user start podman.service systemctl --user status podman.service systemctl --user status podman.socket -DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock poetry run pytest tests/containers --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +DOCKER_HOST=unix:///run/user/$UID/podman/podman.sock 
poetry run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 # Mac OS brew install podman @@ -113,7 +113,7 @@ podman machine init podman machine set --rootful=false sudo podman-mac-helper install podman machine start -poetry run pytest tests/containers --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 +poetry run pytest tests/containers -m 'not openshift' --image quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4 ``` When using lima on macOS, it might be useful to give yourself access to rootful podman socket diff --git a/poetry.lock b/poetry.lock index c8b4a3c76..b5b82234d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -137,7 +137,7 @@ version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, @@ -229,7 +229,7 @@ version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, @@ -628,7 +628,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1645,7 +1645,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1910,7 +1910,7 @@ version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.9" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, @@ -2055,4 +2055,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = "~3.12" -content-hash = "65f64208a0c43d3192e9f4750be9618c4cee64507a26377cef5cb84608caed5b" +content-hash = "e251b6ddf779cee9a6f9943dee0e0a180fbd6b25a641cebd03068de9115ffb45" diff --git a/pyproject.toml b/pyproject.toml index 9271a4c3f..6440b1238 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,7 @@ package-mode = false [tool.poetry.dependencies] python = "~3.12" +requests = "^2.32.3" [tool.poetry.group.dev.dependencies] diff --git a/pytest.ini b/pytest.ini index 2b320d7a0..aff250897 100644 --- a/pytest.ini +++ b/pytest.ini @@ -15,3 +15,5 @@ log_cli_level = INFO log_file = logs/pytest-logs.txt log_file_level = DEBUG + +markers = openshift diff --git a/tests/containers/base_image_test.py b/tests/containers/base_image_test.py index 03f3d9ae3..b7e004989 100644 --- a/tests/containers/base_image_test.py +++ b/tests/containers/base_image_test.py @@ -11,12 +11,13 @@ import textwrap from typing import TYPE_CHECKING, Any, Callable -import pytest import testcontainers.core.container import testcontainers.core.waiting_utils from tests.containers import docker_utils +import pytest + logging.basicConfig(level=logging.DEBUG) LOGGER = logging.getLogger(__name__) @@ -72,7 +73,8 @@ def check_elf_file(): if "not found" in line: unsatisfied_deps.append((dlib, line.strip())) assert output - print("OUTPUT>", json.dumps({"dir": path, "count_scanned": count_scanned, "unsatisfied": unsatisfied_deps})) + print("OUTPUT>", + json.dumps({"dir": path, "count_scanned": count_scanned, "unsatisfied": unsatisfied_deps})) try: container.start() @@ -105,18 +107,7 @@ def check_elf_file(): with subtests.test(f"{dlib=}"): pytest.fail(f"{dlib=} has unsatisfied dependencies {deps=}") - def test_oc_command_runs(self, image: str): - container = testcontainers.core.container.DockerContainer(image=image, user=23456, group_add=[0]) - container.with_command("/bin/sh -c 'sleep infinity'") - try: - container.start() - ecode, output = container.exec(["/bin/sh", "-c", "oc version"]) - finally: - docker_utils.NotebookContainer(container).stop(timeout=0) - - logging.debug(output.decode()) - assert ecode == 0 - + # @pytest.mark.environmentss("docker") def test_oc_command_runs_fake_fips(self, image: str, subtests: pytest_subtests.SubTests): """Establishes a best-effort fake FIPS environment and attempts to execute `oc` binary in it. @@ -190,7 +181,8 @@ def test_pip_install_cowsay_runs(self, image: str): docker_utils.NotebookContainer(container).stop(timeout=0) -def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> list[str]: +def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> \ + list[str]: """Returns a cli command that will run the given Python function encoded inline. All dependencies (imports, ...) 
must be part of function body.""" code = textwrap.dedent(inspect.getsource(function)) diff --git a/tests/containers/cancellation_token.py b/tests/containers/cancellation_token.py new file mode 100644 index 000000000..d7d62603f --- /dev/null +++ b/tests/containers/cancellation_token.py @@ -0,0 +1,37 @@ +import os +import threading + + +class CancellationToken: + """Flag to signal a thread it should cancel itself. + This cooperative cancellation pattern is commonly used in c# and go + See https://learn.microsoft.com/en-us/dotnet/api/system.threading.cancellationtoken?view=net-9.0 + """ + + def __init__(self): + # consider using the wrapt.synchronized decorator + # https://github.com/GrahamDumpleton/wrapt/blob/develop/blog/07-the-missing-synchronized-decorator.md + self._lock = threading.Lock() + self._canceled = False + # something selectable avoids having to use short timeout in select + self._read_fd, self._write_fd = os.pipe() + + def fileno(self): + """This lets us use the token in select() calls""" + return self._read_fd + + @property + def cancelled(self): + with self._lock: + return self._canceled + + def cancel(self): + with self._lock: + os.write(self._write_fd, b'x') + self._canceled = True + + def __del__(self): + # consider https://docs.python.org/3/library/weakref.html#weakref.finalize + with self._lock: + os.close(self._read_fd) + os.close(self._write_fd) diff --git a/tests/containers/kubernetes_utils.py b/tests/containers/kubernetes_utils.py new file mode 100644 index 000000000..66829829f --- /dev/null +++ b/tests/containers/kubernetes_utils.py @@ -0,0 +1,473 @@ +from __future__ import annotations + +import contextlib +import functools +import logging +import threading +import time +import traceback +import typing +import socket +from socket import socket +from typing import Any, Callable, Generator + +import requests + +import kubernetes +import kubernetes.dynamic.exceptions +import kubernetes.stream.ws_client +import kubernetes.dynamic.exceptions +import kubernetes.stream.ws_client +import kubernetes.client.api.core_v1_api +from kubernetes.dynamic import DynamicClient, ResourceField + +import ocp_resources.pod +import ocp_resources.deployment +import ocp_resources.service +import ocp_resources.persistent_volume_claim +import ocp_resources.project_request +import ocp_resources.namespace +import ocp_resources.project_project_openshift_io +import ocp_resources.deployment +import ocp_resources.resource +import ocp_resources.pod +import ocp_resources.namespace +import ocp_resources.project_project_openshift_io +import ocp_resources.project_request + +from tests.containers import socket_proxy + + +class TestFrameConstants: + GLOBAL_POLL_INTERVAL_MEDIUM = 10 + TIMEOUT_2MIN = 2 * 60 + + +logging.basicConfig(level=logging.DEBUG) +LOGGER = logging.getLogger(__name__) + + +# https://github.com/RedHatQE/openshift-python-wrapper/tree/main/examples + +def get_client() -> kubernetes.dynamic.DynamicClient: + try: + # client = kubernetes.dynamic.DynamicClient(client=kubernetes.config.new_client_from_config()) + # probably same as above + client = ocp_resources.resource.get_client() + return client + except kubernetes.config.ConfigException as e: + # probably bad config + logging.error(e) + except kubernetes.dynamic.exceptions.UnauthorizedError as e: + # wrong or expired credentials + logging.error(e) + except kubernetes.client.ApiException as e: + # unexpected, we catch unauthorized above + logging.error(e) + except Exception as e: + # unexpected error, assert here + logging.error(e) + + 
raise RuntimeError("Failed to instantiate client") + + +def get_username(client: kubernetes.dynamic.DynamicClient) -> str: + # can't just access + # > client.configuration.username + # because we normally auth using tokens, not username and password + + # this is what kubectl does (see kubectl -v8 auth whoami) + self_subject_review_resource: kubernetes.dynamic.Resource = client.resources.get( + api_version="authentication.k8s.io/v1", kind="SelfSubjectReview" + ) + self_subject_review: kubernetes.dynamic.ResourceInstance = client.create(self_subject_review_resource) + username: str = self_subject_review.status.userInfo.username + return username + + +class TestKubernetesUtils: + def test_get_username(self): + client = get_client() + username = get_username(client) + assert username is not None and len(username) > 0 + + +class TestFrame: + def __init__[T](self): + self.stack: list[tuple[T, Callable[[T], None] | None]] = [] + + def defer_resource[T: ocp_resources.resource.Resource](self, resource: T, wait=False, + destructor: Callable[[T], None] | None = None) -> T: + result = resource.deploy(wait=wait) + self.defer(resource, destructor) + return result + + def add[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: + self.defer(resource, destructor) + return resource + + def defer[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: + self.stack.append((resource, destructor)) + + def destroy(self, wait=False): + while self.stack: + resource, destructor = self.stack.pop() + if destructor is not None: + destructor(resource) + else: + resource.clean_up(wait=wait) + + def __enter__(self) -> TestFrame: + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.destroy(wait=True) + + +class ImageDeployment: + def __init__(self, client: kubernetes.dynamic.DynamicClient, image: str): + self.client = client + self.image = image + self.tf = TestFrame() + + def __enter__(self) -> ImageDeployment: + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.tf.destroy() + + def deploy(self, container_name: str) -> None: + LOGGER.debug(f"Deploying {self.image}") + # custom namespace is necessary, because we cannot assign a SCC to pods created in one of the default namespaces: + # default, kube-system, kube-public, openshift-node, openshift-infra, openshift. + # https://docs.openshift.com/container-platform/4.17/authentication/managing-security-context-constraints.html#role-based-access-to-ssc_configuring-internal-oauth + + # TODO(jdanek): sort out how we want to work with privileged/unprivileged client + # take inspiration from odh-tests + ns = create_namespace(privileged_client=True, name=f"test-ns-{container_name}") + self.tf.defer_resource(ns) + + pvc = ocp_resources.persistent_volume_claim.PersistentVolumeClaim( + name=container_name, + namespace=ns.name, + accessmodes=ocp_resources.persistent_volume_claim.PersistentVolumeClaim.AccessMode.RWO, + volume_mode=ocp_resources.persistent_volume_claim.PersistentVolumeClaim.VolumeMode.FILE, + size="1Gi", + ) + self.tf.defer_resource(pvc, wait=True) + deployment = ocp_resources.deployment.Deployment( + client=self.client, + name=container_name, + namespace=ns.name, + selector={"matchLabels": {"app": container_name}}, + replicas=1, + template={ + "metadata": { + "annotations": { + # This will result in the container spec having something like below, + # regardless of what kind of namespace this is being run in. 
+ # Keep in mind that `default` is a privileged namespace and this annotation has no effect there. + # ``` + # spec: + # securityContext: + # seLinuxOptions: + # level: 's0:c34,c4' + # fsGroup: 1001130000 + # seccompProfile: + # type: RuntimeDefault + # ``` + "openshift.io/scc": "restricted-v2" + }, + "labels": { + "app": container_name, + } + }, + "spec": { + "containers": [ + { + "name": container_name, + "image": self.image, + # "command": ["/bin/sh", "-c", "while true ; do date; sleep 5; done;"], + "ports": [ + { + "containerPort": 8888, + "name": "notebook-port", + "protocol": "TCP", + } + ], + # rstudio will not start without its volume mount and it does not log the error for it + # See the testcontainers implementation of this (the tty=True part) + "volumeMounts": [ + { + "mountPath": "/opt/app-root/src", + "name": "my-workbench" + } + ], + }, + ], + "volumes": [ + { + "name": "my-workbench", + "persistentVolumeClaim": { + "claimName": container_name, + } + } + ] + } + } + ) + self.tf.defer_resource(deployment) + LOGGER.debug(f"Waiting for pods to become ready...") + PodUtils.wait_for_pods_ready(self.client, namespace_name=ns.name, label_selector=f"app={container_name}", + expect_pods_count=1) + + core_v1_api = kubernetes.client.api.core_v1_api.CoreV1Api(api_client=self.client.client) + pod_name: kubernetes.client.models.v1_pod_list.V1PodList = core_v1_api.list_namespaced_pod( + namespace=ns.name, + label_selector=f"app={container_name}" + ) + assert len(pod_name.items) == 1 + pod: kubernetes.client.models.v1_pod.V1Pod = pod_name.items[0] + + p = socket_proxy.SocketProxy(exposing_contextmanager(core_v1_api, pod), "localhost", 0) + t = threading.Thread(target=p.listen_and_serve_until_canceled) + t.start() + self.tf.defer(t, lambda thread: thread.join()) + self.tf.defer(p.cancellation_token, lambda token: token.cancel()) + + self.port = p.get_actual_port() + LOGGER.debug(f"Listening on port {self.port}") + resp = requests.get(f"http://localhost:{self.port}") + assert resp.status_code == 200 + LOGGER.debug(f"Done with portforward") + + +class PodUtils: + READINESS_TIMEOUT = TestFrameConstants.TIMEOUT_2MIN + + # consider using timeout_sampler + @staticmethod + def wait_for_pods_ready( + client: DynamicClient, namespace_name: str, label_selector: str, expect_pods_count: int + ) -> None: + """Wait for all pods in namespace to be ready + :param client: + :param namespace_name: name of the namespace + :param label_selector: + :param expect_pods_count: + """ + + # it's a dynamic client with the `resource` parameter already filled in + class ResourceType(kubernetes.dynamic.Resource, kubernetes.dynamic.DynamicClient): + pass + + resource: ResourceType = client.resources.get( + kind=ocp_resources.pod.Pod.kind, + api_version=ocp_resources.pod.Pod.api_version, + ) + + def ready() -> bool: + pods = resource.get(namespace=namespace_name, label_selector=label_selector).items + if not pods and expect_pods_count == 0: + logging.debug("All expected Pods %s in Namespace %s are ready", label_selector, namespace_name) + return True + if not pods: + logging.debug("Pods matching %s/%s are not ready", namespace_name, label_selector) + return False + if len(pods) != expect_pods_count: + logging.debug("Expected Pods %s/%s are not ready", namespace_name, label_selector) + return False + pod: ResourceField + for pod in pods: + if not Readiness.is_pod_ready(pod) and not Readiness.is_pod_succeeded(pod): + if not pod.status.containerStatuses: + pod_status = pod.status + else: + pod_status = {cs.name: cs.state for 
cs in pod.status.containerStatuses} + + logging.debug("Pod is not ready: %s/%s (%s)", + namespace_name, pod.metadata.name, pod_status) + return False + else: + # check all containers in pods are ready + for cs in pod.status.containerStatuses: + if not (cs.ready or cs.state.get("terminated", {}).get("reason", "") == "Completed"): + logging.debug( + f"Container {cs.getName()} of Pod {namespace_name}/{pod.metadata.name} not ready ({cs.state=})" + ) + return False + logging.info("Pods matching %s/%s are ready", namespace_name, label_selector) + return True + + Wait.until( + description=f"readiness of all Pods matching {label_selector} in Namespace {namespace_name}", + poll_interval=TestFrameConstants.GLOBAL_POLL_INTERVAL_MEDIUM, + timeout=PodUtils.READINESS_TIMEOUT, + ready=ready, + ) + + +class Wait: + @staticmethod + def until( + description: str, + poll_interval: float, + timeout: float, + ready: Callable[[], bool], + on_timeout: Callable[[], None] | None = None, + ) -> None: + """For every poll (happening once each {@code pollIntervalMs}) checks if supplier {@code ready} is true. + + If yes, the wait is closed. Otherwise, waits another {@code pollIntervalMs} and tries again. + Once the wait timeout (specified by {@code timeoutMs} is reached and supplier wasn't true until that time, + runs the {@code onTimeout} (f.e. print of logs, showing the actual value that was checked inside {@code ready}), + and finally throws {@link WaitException}. + @param description information about on what we are waiting + @param pollIntervalMs poll interval in milliseconds + @param timeoutMs timeout specified in milliseconds + @param ready {@link BooleanSupplier} containing code, which should be executed each poll, + verifying readiness of the particular thing + @param onTimeout {@link Runnable} executed once timeout is reached and + before the {@link WaitException} is thrown.""" + logging.info("Waiting for: %s", description) + deadline = time.monotonic() + timeout + + exception_message: str | None = None + previous_exception_message: str | None = None + + # in case we are polling every 1s, we want to print exception after x tries, not on the first try + # for minutes poll interval will 2 be enough + exception_appearance_count: int = 2 if (poll_interval // 60) > 0 else max(int(timeout // poll_interval // 4), 2) + exception_count: int = 0 + new_exception_appearance: int = 0 + + stack_trace_error: str | None = None + + while True: + try: + result: bool = ready() + except KeyboardInterrupt: + raise # quick exit if the user gets tired of waiting + except Exception as e: + exception_message = str(e) + + exception_count += 1 + new_exception_appearance += 1 + if ( + exception_count == exception_appearance_count + and exception_message is not None + and exception_message == previous_exception_message + ): + logging.info(f"While waiting for: {description} exception occurred: {exception_message}") + # log the stacktrace + stack_trace_error = traceback.format_exc() + elif ( + exception_message is not None + and exception_message != previous_exception_message + and new_exception_appearance == 2 + ): + previous_exception_message = exception_message + + result = False + + time_left: float = deadline - time.monotonic() + if result: + return + if time_left <= 0: + if exception_count > 1: + logging.error("Exception waiting for: %s, %s", description, exception_message) + + if stack_trace_error is not None: + # printing handled stacktrace + logging.error(stack_trace_error) + if on_timeout is not None: + on_timeout() + 
wait_exception: WaitException = WaitException(f"Timeout after {timeout} s waiting for {description}") + logging.error(wait_exception) + raise wait_exception + + sleep_time: float = min(poll_interval, time_left) + time.sleep(sleep_time) # noqa: FCN001 + + +class WaitException(Exception): + pass + + +class Readiness: + @staticmethod + def is_pod_ready(pod: ResourceField) -> bool: + Utils.check_not_none(value=pod, message="Pod can't be null.") + + condition = ocp_resources.pod.Pod.Condition.READY + status = ocp_resources.pod.Pod.Condition.Status.TRUE + for cond in pod.get("status", {}).get("conditions", []): + if cond["type"] == condition and cond["status"].casefold() == status.casefold(): + return True + return False + + @staticmethod + def is_pod_succeeded(pod: ResourceField) -> bool: + Utils.check_not_none(value=pod, message="Pod can't be null.") + return pod.status is not None and "Succeeded" == pod.status.phase + + +class Utils: + @staticmethod + def check_not_none(value: Any, message: str) -> None: + if value is None: + raise ValueError(message) + + +@contextlib.contextmanager +def exposing_contextmanager( + core_v1_api: kubernetes.client.CoreV1Api, + pod: kubernetes.client.models.V1Pod +) -> Generator[socket, None, None]: + # If we e.g., specify the wrong port, the pf = portforward() call succeeds, + # but pf.connected will later flip to False + # we need to check that _everything_ works before moving on + pf = None + s = None + while not pf or not pf.connected or not s: + pf: kubernetes.stream.ws_client.PortForward = kubernetes.stream.portforward( + api_method=core_v1_api.connect_get_namespaced_pod_portforward, + name=pod.metadata.name, + namespace=pod.metadata.namespace, + ports=",".join(str(p) for p in [8888]), + ) + s: typing.Union[kubernetes.stream.ws_client.PortForward._Port._Socket, socket.socket] | None = pf.socket(8888) + assert s, "Failed to establish connection" + + try: + yield s + finally: + s.close() + pf.close() + + +@functools.wraps(ocp_resources.namespace.Namespace.__init__) +def create_namespace(privileged_client: bool = False, *args, + **kwargs) -> ocp_resources.project_project_openshift_io.Project: + if not privileged_client: + with ocp_resources.project_request.ProjectRequest(*args, **kwargs): + project = ocp_resources.project_project_openshift_io.Project(*args, **kwargs) + project.wait_for_status(status=project.Status.ACTIVE, timeout=TestFrameConstants.TIMEOUT_2MIN) + return project + else: + with ocp_resources.namespace.Namespace(*args, **kwargs) as ns: + ns.wait_for_status(status=ocp_resources.namespace.Namespace.Status.ACTIVE, + timeout=TestFrameConstants.TIMEOUT_2MIN) + return ns + + +__all__ = [ + get_client, + get_username, + exposing_contextmanager, + create_namespace, + PodUtils, + TestFrame, + TestFrameConstants, + ImageDeployment, +] diff --git a/tests/containers/socket_proxy.py b/tests/containers/socket_proxy.py new file mode 100644 index 000000000..77fc34857 --- /dev/null +++ b/tests/containers/socket_proxy.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import contextlib +import logging +import socket +import select +import threading +import subprocess +import typing + +from tests.containers.cancellation_token import CancellationToken + +"""Proxies kubernetes portforwards to a local port. + +This is implemented as a thread running select() loop and managing the sockets. + +There are alternative implementations for this. 
+
+1) Run oc port-forward in a subprocess
+* There isn't a nice way where kubectl would report in machine-readable way the
+  port number, https://github.com/kubernetes/kubectl/issues/1190#issuecomment-1075911615
+2) Use the socket as is, mount a custom adaptor to the requests library
+* The code to do this is weird. This is what docker-py does w.r.t. the docker socket.
+  It defines a custom 'http+docker://' protocol, and an adaptor for it, that uses the docker socket.
+3) Implement proxy using asyncio
+* There are advantages to asyncio, but since we don't have Python asyncio anywhere else yet,
+  it is probably best to avoid using asyncio.
+
+Out of these, the oc port-forward subprocess is a decent alternative solution.
+"""
+
+class SubprocessProxy:
+    # Unused sketch of alternative (1) from the module docstring above.
+    def __init__(self, namespace: str, name: str, port: int):
+        self.namespace = namespace
+        self.name = name
+        self.port = port
+
+    def start(self):
+        # Do not wait on the process here; `kubectl port-forward` keeps running
+        # until it is terminated in stop().
+        self.forwarder = subprocess.Popen(
+            ["kubectl", "port-forward", "--namespace", self.namespace, self.name, str(self.port)],
+            text=True,
+        )
+
+    def stop(self):
+        self.forwarder.terminate()
+
+
+class SocketProxy:
+    def __init__(
+        self,
+        remote_socket_factory: typing.ContextManager[socket.socket],
+        local_host: str = "localhost",
+        local_port: int = 0,
+        buffer_size: int = 4096
+    ) -> None:
+        """
+
+        :param local_host: probably "localhost" would make most sense here
+        :param local_port: usually leave at 0, which will make the OS choose a free port
+        :param remote_socket_factory: this is a context manager for kubernetes port forwarding
+        :param buffer_size: do not poke it, leave this at the default value
+        """
+        self.local_host = local_host
+        self.local_port = local_port
+        self.buffer_size = buffer_size
+        self.remote_socket_factory = remote_socket_factory
+
+        self.cancellation_token = CancellationToken()
+
+        self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        self.server_socket.bind((self.local_host, self.local_port))
+        self.server_socket.listen(1)
+        logging.info(f"Proxy listening on {self.local_host}:{self.local_port}")
+
+    def listen_and_serve_until_canceled(self):
+        """Accepts the client, creates a new socket to the remote, and proxies the data.
+
+        Handles at most one client at a time."""
+        try:
+            while not self.cancellation_token.cancelled:
+                client_socket, addr = self.server_socket.accept()
+                logging.info(f"Accepted connection from {addr[0]}:{addr[1]}")
+                self._handle_client(client_socket)
+        except Exception as e:
+            logging.exception("Proxying failed to listen", exc_info=e)
+            raise
+        finally:
+            self.server_socket.close()
+
+    def get_actual_port(self) -> int:
+        """Returns the port that the proxy is listening on.
+ When port number 0 was passed in, this will return the actual randomly assigned port.""" + return self.server_socket.getsockname()[1] + + def _handle_client(self, client_socket): + with client_socket as _, self.remote_socket_factory as remote_socket: + while True: + readable, _, _ = select.select([client_socket, remote_socket, self.cancellation_token], [], []) + + if self.cancellation_token.cancelled: + break + + if client_socket in readable: + data = client_socket.recv(self.buffer_size) + if not data: + break + remote_socket.send(data) + + if remote_socket in readable: + data = remote_socket.recv(self.buffer_size) + if not data: + break + client_socket.send(data) + + +if __name__ == "__main__": + """Sample application to show how this can work.""" + + + @contextlib.contextmanager + def remote_socket_factory(): + class MockServer(threading.Thread): + def __init__(self, local_host: str = "localhost", local_port: int = 0): + self.local_host = local_host + self.local_port = local_port + + self.is_socket_bound = threading.Event() + + super().__init__() + + def run(self): + self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.server_socket.bind((self.local_host, self.local_port)) + self.server_socket.listen(1) + print(f"MockServer listening on {self.local_host}:{self.local_port}") + self.is_socket_bound.set() + + client_socket, addr = self.server_socket.accept() + logging.info(f"MockServer accepted connection from {addr[0]}:{addr[1]}") + + client_socket.send(b"Hello World\n") + client_socket.close() + + def get_actual_port(self): + self.is_socket_bound.wait() + return self.server_socket.getsockname()[1] + + server = MockServer() + server.start() + + client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + client_socket.connect(("localhost", server.get_actual_port())) + + yield client_socket + + client_socket.close() + server.join() + + + proxy = SocketProxy(remote_socket_factory(), "localhost", 0) + thread = threading.Thread(target=proxy.listen_and_serve_until_canceled) + thread.start() + + client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + client_socket.connect(("localhost", proxy.get_actual_port())) + + print(client_socket.recv(1024)) # prints Hello World + + thread.join() diff --git a/tests/containers/workbenches/workbench_image_test.py b/tests/containers/workbenches/workbench_image_test.py index 7db15e717..c14312650 100644 --- a/tests/containers/workbenches/workbench_image_test.py +++ b/tests/containers/workbenches/workbench_image_test.py @@ -21,7 +21,7 @@ import pytest import pytest_subtests -from tests.containers import docker_utils, podman_machine_utils +from tests.containers import docker_utils, podman_machine_utils, kubernetes_utils class TestWorkbenchImage: @@ -106,6 +106,18 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st finally: docker_utils.NotebookContainer(container).stop(timeout=0) + @pytest.mark.openshift + def test_image_run_on_openshift(self, image: str): + skip_if_not_workbench_image(image) + + client = kubernetes_utils.get_client() + print(client) + + username = kubernetes_utils.get_username(client) + print(username) + + with kubernetes_utils.ImageDeployment(client, image) as image: + image.deploy(container_name="notebook-tests-pod") class WorkbenchContainer(testcontainers.core.container.DockerContainer): @functools.wraps(testcontainers.core.container.DockerContainer.__init__) From 
0ce8eb5d495422b346b62ce6c67a682157103201 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?= Date: Mon, 24 Feb 2025 15:52:26 +0100 Subject: [PATCH 3/6] fixup, remove the add function in TestFrame --- tests/containers/kubernetes_utils.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/tests/containers/kubernetes_utils.py b/tests/containers/kubernetes_utils.py index 66829829f..4f437a312 100644 --- a/tests/containers/kubernetes_utils.py +++ b/tests/containers/kubernetes_utils.py @@ -102,19 +102,15 @@ def defer_resource[T: ocp_resources.resource.Resource](self, resource: T, wait=F self.defer(resource, destructor) return result - def add[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: - self.defer(resource, destructor) - return resource - - def defer[T](self, resource: T, destructor: Callable[[T], None] = None) -> T: - self.stack.append((resource, destructor)) + def defer[T](self, obj: T, destructor: Callable[[T], None] = None) -> T: + self.stack.append((obj, destructor)) def destroy(self, wait=False): while self.stack: resource, destructor = self.stack.pop() if destructor is not None: destructor(resource) - else: + elif isinstance(resource, ocp_resources.resource.Resource): resource.clean_up(wait=wait) def __enter__(self) -> TestFrame: From 7094188df65b2d48aff5201dda7deca9de3b2432 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?= Date: Tue, 25 Feb 2025 13:40:53 +0100 Subject: [PATCH 4/6] fixup, adjust to changes after rebase --- .../workbenches/workbench_image_test.py | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tests/containers/workbenches/workbench_image_test.py b/tests/containers/workbenches/workbench_image_test.py index c14312650..1f2d54199 100644 --- a/tests/containers/workbenches/workbench_image_test.py +++ b/tests/containers/workbenches/workbench_image_test.py @@ -83,7 +83,7 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st # NOTE: this is only reachable from the host machine, so remote podman won't work container.get_wrapped_container().reload() ipv6_address = (container.get_wrapped_container().attrs - ["NetworkSettings"]["Networks"][network.name]["GlobalIPv6Address"]) + ["NetworkSettings"]["Networks"][network.name]["GlobalIPv6Address"]) if platform.system().lower() == 'darwin': # the container host is a podman machine, we need to expose port on podman machine first host = "localhost" @@ -91,7 +91,8 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st socket_path = os.path.realpath(docker_utils.get_socket_path(client.client)) logging.debug(f"{socket_path=}") process = podman_machine_utils.open_ssh_tunnel( - machine_predicate=lambda m: os.path.realpath(m.ConnectionInfo.PodmanSocket.Path) == socket_path, + machine_predicate=lambda m: os.path.realpath( + m.ConnectionInfo.PodmanSocket.Path) == socket_path, local_port=port, remote_port=container.port, remote_interface=f"[{ipv6_address}]") test_frame.append(process, lambda p: p.kill()) @@ -107,18 +108,17 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st docker_utils.NotebookContainer(container).stop(timeout=0) @pytest.mark.openshift - def test_image_run_on_openshift(self, image: str): - skip_if_not_workbench_image(image) - + def test_image_run_on_openshift(self, workbench_image: str): client = kubernetes_utils.get_client() print(client) username = kubernetes_utils.get_username(client) print(username) - with 
kubernetes_utils.ImageDeployment(client, image) as image: + with kubernetes_utils.ImageDeployment(client, workbench_image) as image: image.deploy(container_name="notebook-tests-pod") + class WorkbenchContainer(testcontainers.core.container.DockerContainer): @functools.wraps(testcontainers.core.container.DockerContainer.__init__) def __init__( @@ -190,7 +190,6 @@ def start(self, wait_for_readiness: bool = True) -> WorkbenchContainer: return self - def grab_and_check_logs(subtests: pytest_subtests.SubTests, container: WorkbenchContainer) -> None: # Here is a list of blocked keywords we don't want to see in the log messages during the container/workbench # startup (e.g., log messages from Jupyter IDE, code-server IDE or RStudio IDE). @@ -226,10 +225,11 @@ def grab_and_check_logs(subtests: pytest_subtests.SubTests, container: Workbench failed_lines: list[str] = [] for line in full_logs.splitlines(): if any(keyword in line for keyword in blocked_keywords): - if any(allowed in line for allowed in allowed_messages): - logging.debug(f"Waived message: '{line}'") - else: - logging.error(f"Unexpected keyword in the following message: '{line}'") - failed_lines.append(line) + if any(allowed in line for allowed in allowed_messages): + logging.debug(f"Waived message: '{line}'") + else: + logging.error(f"Unexpected keyword in the following message: '{line}'") + failed_lines.append(line) if len(failed_lines) > 0: - pytest.fail(f"Log message(s) ({len(failed_lines)}) that violate our checks occurred during the workbench startup:\n{"\n".join(failed_lines)}") + pytest.fail( + f"Log message(s) ({len(failed_lines)}) that violate our checks occurred during the workbench startup:\n{"\n".join(failed_lines)}") From 34309e35ca07c93da8d87e09cdc2863ae326cdfd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?= Date: Tue, 25 Feb 2025 13:44:58 +0100 Subject: [PATCH 5/6] fixup, restore test_oc_command_runs --- tests/containers/base_image_test.py | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) diff --git a/tests/containers/base_image_test.py b/tests/containers/base_image_test.py index b7e004989..c84c3d1b7 100644 --- a/tests/containers/base_image_test.py +++ b/tests/containers/base_image_test.py @@ -107,6 +107,18 @@ def check_elf_file(): with subtests.test(f"{dlib=}"): pytest.fail(f"{dlib=} has unsatisfied dependencies {deps=}") + def test_oc_command_runs(self, image: str): + container = testcontainers.core.container.DockerContainer(image=image, user=23456, group_add=[0]) + container.with_command("/bin/sh -c 'sleep infinity'") + try: + container.start() + ecode, output = container.exec(["/bin/sh", "-c", "oc version"]) + finally: + docker_utils.NotebookContainer(container).stop(timeout=0) + + logging.debug(output.decode()) + assert ecode == 0 + # @pytest.mark.environmentss("docker") def test_oc_command_runs_fake_fips(self, image: str, subtests: pytest_subtests.SubTests): """Establishes a best-effort fake FIPS environment and attempts to execute `oc` binary in it. 
@@ -131,7 +143,8 @@ def test_oc_command_runs_fake_fips(self, image: str, subtests: pytest_subtests.S # if /proc/sys/crypto/fips_enabled exists, only replace this file, # otherwise (Ubuntu case), assume entire /proc/sys/crypto does not exist if platform.system().lower() == "darwin" or pathlib.Path("/proc/sys/crypto/fips_enabled").exists(): - container.with_volume_mapping(str(tmp_crypto / 'crypto' / 'fips_enabled'), "/proc/sys/crypto/fips_enabled", mode="ro,z") + container.with_volume_mapping(str(tmp_crypto / 'crypto' / 'fips_enabled'), + "/proc/sys/crypto/fips_enabled", mode="ro,z") else: container.with_volume_mapping(str(tmp_crypto), "/proc/sys", mode="ro,z") @@ -181,8 +194,7 @@ def test_pip_install_cowsay_runs(self, image: str): docker_utils.NotebookContainer(container).stop(timeout=0) -def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> \ - list[str]: +def encode_python_function_execution_command_interpreter(python: str, function: Callable[..., Any], *args: list[Any]) -> list[str]: """Returns a cli command that will run the given Python function encoded inline. All dependencies (imports, ...) must be part of function body.""" code = textwrap.dedent(inspect.getsource(function)) From 6f05d4039a137b152b63b5af16da2f407304ba53 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jiri=20Dan=C4=9Bk?= Date: Tue, 25 Feb 2025 15:19:12 +0100 Subject: [PATCH 6/6] fixup, undo intellij reformat changes --- tests/containers/workbenches/workbench_image_test.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/containers/workbenches/workbench_image_test.py b/tests/containers/workbenches/workbench_image_test.py index 1f2d54199..e1674e86f 100644 --- a/tests/containers/workbenches/workbench_image_test.py +++ b/tests/containers/workbenches/workbench_image_test.py @@ -82,8 +82,7 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st # rootful containers have an IP assigned, so we can connect to that # NOTE: this is only reachable from the host machine, so remote podman won't work container.get_wrapped_container().reload() - ipv6_address = (container.get_wrapped_container().attrs - ["NetworkSettings"]["Networks"][network.name]["GlobalIPv6Address"]) + ipv6_address = container.get_wrapped_container().attrs["NetworkSettings"]["Networks"][network.name]["GlobalIPv6Address"] if platform.system().lower() == 'darwin': # the container host is a podman machine, we need to expose port on podman machine first host = "localhost" @@ -91,8 +90,7 @@ def test_ipv6_only(self, subtests: pytest_subtests.SubTests, workbench_image: st socket_path = os.path.realpath(docker_utils.get_socket_path(client.client)) logging.debug(f"{socket_path=}") process = podman_machine_utils.open_ssh_tunnel( - machine_predicate=lambda m: os.path.realpath( - m.ConnectionInfo.PodmanSocket.Path) == socket_path, + machine_predicate=lambda m: os.path.realpath(m.ConnectionInfo.PodmanSocket.Path) == socket_path, local_port=port, remote_port=container.port, remote_interface=f"[{ipv6_address}]") test_frame.append(process, lambda p: p.kill()) @@ -231,5 +229,4 @@ def grab_and_check_logs(subtests: pytest_subtests.SubTests, container: Workbench logging.error(f"Unexpected keyword in the following message: '{line}'") failed_lines.append(line) if len(failed_lines) > 0: - pytest.fail( - f"Log message(s) ({len(failed_lines)}) that violate our checks occurred during the workbench startup:\n{"\n".join(failed_lines)}") + pytest.fail(f"Log message(s) 
({len(failed_lines)}) that violate our checks occurred during the workbench startup:\n{"\n".join(failed_lines)}")
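
For completeness, below is a minimal usage sketch of how the helpers added in this series fit together, following the same flow that test_image_run_on_openshift exercises. It is illustrative only, not part of the patches: it assumes a reachable OpenShift cluster with a valid kubeconfig, that the repository root is on PYTHONPATH, and it reuses the workbench image digest already shown in README.md as a stand-in for any workbench image.

from tests.containers import kubernetes_utils

# Any workbench image reference can be used; this digest is the one shown in README.md.
IMAGE = "quay.io/opendatahub/workbench-images@sha256:e98d19df346e7abb1fa3053f6d41f0d1fa9bab39e49b4cb90b510ca33452c2e4"

client = kubernetes_utils.get_client()
# Same SelfSubjectReview request that `kubectl auth whoami` performs.
print(kubernetes_utils.get_username(client))

# ImageDeployment creates a namespace, PVC and Deployment, waits for the pod to become ready,
# port-forwards port 8888 through SocketProxy, and asserts the notebook answers HTTP 200;
# __exit__ then tears everything down via TestFrame.
with kubernetes_utils.ImageDeployment(client, IMAGE) as deployment:
    deployment.deploy(container_name="notebook-tests-pod")
    print(f"notebook was reachable on localhost:{deployment.port}")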