diff --git a/bzl/mypy/mypy.ini b/bzl/mypy/mypy.ini index a3d8d22c1..c0dcd5726 100644 --- a/bzl/mypy/mypy.ini +++ b/bzl/mypy/mypy.ini @@ -67,6 +67,9 @@ ignore_missing_imports = True [mypy-packaging.*] ignore_missing_imports = True +[mypy-pydantic_settings.*] +ignore_missing_imports = True + [mypy-prometheus_client.*] ignore_missing_imports = True diff --git a/docs/docs_requirements.txt b/docs/docs_requirements.txt index f5593a8a3..83430855e 100644 --- a/docs/docs_requirements.txt +++ b/docs/docs_requirements.txt @@ -20,9 +20,8 @@ # pip-compile ./docs_requirements.txt -o locked_requirements.txt --allow-unsafe -v --generate-hashes # Building documentation -# Pin pydantic to match src/requirements.txt -pydantic==1.10.26 -autodoc_pydantic==1.9.1 +pydantic==2.12.5 +autodoc_pydantic==2.2.0 commonmark==0.9.1 nvidia-sphinx-theme==0.0.8 sphinx==7.4.7 diff --git a/docs/locked_requirements.txt b/docs/locked_requirements.txt index 799b97e9d..0c958a4b4 100644 --- a/docs/locked_requirements.txt +++ b/docs/locked_requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes --output-file=locked_requirements.txt ./docs_requirements.txt +# pip-compile --allow-unsafe --generate-hashes --output-file=locked_requirements.txt docs_requirements.txt # accessible-pygments==0.0.5 \ --hash=sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872 \ @@ -12,10 +12,13 @@ alabaster==0.7.16 \ --hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \ --hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92 # via sphinx -autodoc-pydantic==1.9.1 \ - --hash=sha256:0443987f1cc2516c8186e85d05a1816a314a19e1433b69a0a4b154f4acca3f9b \ - --hash=sha256:7b7c68ce3720f099ec85b7b8b9bd91414b8873704aa60f75489c2bcfe2d57bb5 - # via -r docs_requirements.txt +annotated-types==0.7.0 \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 + # via pydantic +autodoc-pydantic==2.2.0 \ + --hash=sha256:8c6a36fbf6ed2700ea9c6d21ea76ad541b621fbdf16b5a80ee04673548af4d95 + # via -r docs/docs_requirements.txt babel==2.17.0 \ --hash=sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d \ --hash=sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2 @@ -325,7 +328,7 @@ charset-normalizer==3.4.1 \ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via -r docs_requirements.txt + # via -r docs/docs_requirements.txt cssselect2==0.8.0 \ --hash=sha256:46fc70ebc41ced7a32cd42d58b1884d72ade23d21e5a4eaaf022401c13f0e76e \ --hash=sha256:7674ffb954a3b46162392aee2a3a0aedb2e14ecf99fcc28644900f4e6e3e9d3a @@ -477,7 +480,7 @@ markupsafe==3.0.2 \ # via jinja2 nvidia-sphinx-theme==0.0.8 \ --hash=sha256:18f117aa154a3a156251a75647279c541464f3e75f7df2ae283e720cc7d0bc2c - # via -r docs_requirements.txt + # via -r docs/docs_requirements.txt packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -554,47 +557,140 @@ pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pydantic==1.10.26 \ 
-    --hash=sha256:0141f4bafe5eda539d98c9755128a9ea933654c6ca4306b5059fc87a01a38573 \
-    --hash=sha256:0d8f6087bf697dec3bf7ffcd7fe8362674f16519f3151789f33cbe8f1d19fc15 \
-    --hash=sha256:0e4451951a9a93bf9a90576f3e25240b47ee49ab5236adccb8eff6ac943adf0f \
-    --hash=sha256:116233e53889bcc536f617e38c1b8337d7fa9c280f0fd7a4045947515a785637 \
-    --hash=sha256:15b13b9f8ba8867095769e1156e0d7fbafa1f65b898dd40fd1c02e34430973cb \
-    --hash=sha256:1a4e3062b71ab1d5df339ba12c48f9ed5817c5de6cb92a961dd5c64bb32e7b96 \
-    --hash=sha256:1ae7913bb40a96c87e3d3f6fe4e918ef53bf181583de4e71824360a9b11aef1c \
-    --hash=sha256:2c1b0b914be31671000ca25cf7ea17fcaaa68cfeadf6924529c5c5aa24b7ab1f \
-    --hash=sha256:36d9e46b588aaeb1dcd2409fa4c467fe0b331f3cc9f227b03a7a00643704e962 \
-    --hash=sha256:4482b299874dabb88a6c3759e3d85c6557c407c3b586891f7d808d8a38b66b9c \
-    --hash=sha256:465ad8edb29b15c10b779b16431fe8e77c380098badf6db367b7a1d3e572cf53 \
-    --hash=sha256:468d5b9cacfcaadc76ed0a4645354ab6f263ec01a63fb6d05630ea1df6ae453f \
-    --hash=sha256:502b9d30d18a2dfaf81b7302f6ba0e5853474b1c96212449eb4db912cb604b7d \
-    --hash=sha256:6b40730cc81d53d515dc0b8bb5c9b43fadb9bed46de4a3c03bd95e8571616dba \
-    --hash=sha256:71cde228bc0600cf8619f0ee62db050d1880dcc477eba0e90b23011b4ee0f314 \
-    --hash=sha256:80e6be6272839c8a7641d26ad569ab77772809dd78f91d0068dc0fc97f071945 \
-    --hash=sha256:8154c13f58d4de5d3a856bb6c909c7370f41fb876a5952a503af6b975265f4ba \
-    --hash=sha256:81ce3c8616d12a7be31b4aadfd3434f78f6b44b75adbfaec2fe1ad4f7f999b8c \
-    --hash=sha256:8be08b5cfe88e58198722861c7aab737c978423c3a27300911767931e5311d0d \
-    --hash=sha256:8c6aa39b494c5af092e690127c283d84f363ac36017106a9e66cb33a22ac412e \
-    --hash=sha256:9858ed44c6bea5f29ffe95308db9e62060791c877766c67dd5f55d072c8612b5 \
-    --hash=sha256:a943ce8e00ad708ed06a1d9df5b4fd28f5635a003b82a4908ece6f24c0b18464 \
-    --hash=sha256:ac1089f723e2106ebde434377d31239e00870a7563245072968e5af5cc4d33df \
-    --hash=sha256:ad7025ca324ae263d4313998e25078dcaec5f9ed0392c06dedb57e053cc8086b \
-    --hash=sha256:bc5c91a3b3106caf07ac6735ec6efad8ba37b860b9eb569923386debe65039ad \
-    --hash=sha256:c3bbb9c0eecdf599e4db9b372fa9cc55be12e80a0d9c6d307950a39050cb0e37 \
-    --hash=sha256:c3cfdd361addb6eb64ccd26ac356ad6514cee06a61ab26b27e16b5ed53108f77 \
-    --hash=sha256:c43ad70dc3ce7787543d563792426a16fd7895e14be4b194b5665e36459dd917 \
-    --hash=sha256:cc2e3fe7bc4993626ef6b6fa855defafa1d6f8996aa1caef2deb83c5ac4d043a \
-    --hash=sha256:ce3293b86ca9f4125df02ff0a70be91bc7946522467cbd98e7f1493f340616ba \
-    --hash=sha256:d95a76cf503f0f72ed7812a91de948440b2bf564269975738a4751e4fadeb572 \
-    --hash=sha256:dcb5a7318fb43189fde6af6f21ac7149c4bcbcfffc54bc87b5becddc46084847 \
-    --hash=sha256:dd40a99c358419910c85e6f5d22f9c56684c25b5e7abc40879b3b4a52f34ae90 \
-    --hash=sha256:dde599e0388e04778480d57f49355c9cc7916de818bf674de5d5429f2feebfb6 \
-    --hash=sha256:eb664305ffca8a9766a8629303bb596607d77eae35bb5f32ff9245984881b638 \
-    --hash=sha256:f7ae36fa0ecef8d39884120f212e16c06bb096a38f523421278e2f39c1784546 \
-    --hash=sha256:f8af0507bf6118b054a9765fb2e402f18a8b70c964f420d95b525eb711122d62
+pydantic==2.12.5 \
+    --hash=sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49 \
+    --hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d
     # via
-    #   -r docs_requirements.txt
+    #   -r docs/docs_requirements.txt
     #   autodoc-pydantic
+    #   pydantic-settings
+pydantic-core==2.41.5 \
+    --hash=sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90 \
+    
--hash=sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740 \ + --hash=sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504 \ + --hash=sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84 \ + --hash=sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33 \ + --hash=sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c \ + --hash=sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0 \ + --hash=sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e \ + --hash=sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0 \ + --hash=sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a \ + --hash=sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34 \ + --hash=sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2 \ + --hash=sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3 \ + --hash=sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815 \ + --hash=sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14 \ + --hash=sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba \ + --hash=sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375 \ + --hash=sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf \ + --hash=sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963 \ + --hash=sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1 \ + --hash=sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808 \ + --hash=sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553 \ + --hash=sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1 \ + --hash=sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2 \ + --hash=sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5 \ + --hash=sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470 \ + --hash=sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2 \ + --hash=sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b \ + --hash=sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660 \ + --hash=sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c \ + --hash=sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093 \ + --hash=sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5 \ + --hash=sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594 \ + --hash=sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008 \ + --hash=sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a \ + --hash=sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a \ + --hash=sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd \ + --hash=sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284 \ + --hash=sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586 \ + --hash=sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869 \ + --hash=sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294 \ + --hash=sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f \ + --hash=sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66 \ + 
--hash=sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51 \ + --hash=sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc \ + --hash=sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97 \ + --hash=sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a \ + --hash=sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d \ + --hash=sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9 \ + --hash=sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c \ + --hash=sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07 \ + --hash=sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36 \ + --hash=sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e \ + --hash=sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05 \ + --hash=sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e \ + --hash=sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941 \ + --hash=sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3 \ + --hash=sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612 \ + --hash=sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3 \ + --hash=sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b \ + --hash=sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe \ + --hash=sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146 \ + --hash=sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11 \ + --hash=sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60 \ + --hash=sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd \ + --hash=sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b \ + --hash=sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c \ + --hash=sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a \ + --hash=sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460 \ + --hash=sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1 \ + --hash=sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf \ + --hash=sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf \ + --hash=sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858 \ + --hash=sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2 \ + --hash=sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9 \ + --hash=sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2 \ + --hash=sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3 \ + --hash=sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6 \ + --hash=sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770 \ + --hash=sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d \ + --hash=sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc \ + --hash=sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23 \ + --hash=sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26 \ + --hash=sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa \ + --hash=sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8 \ + 
--hash=sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d \ + --hash=sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3 \ + --hash=sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d \ + --hash=sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034 \ + --hash=sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9 \ + --hash=sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1 \ + --hash=sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56 \ + --hash=sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b \ + --hash=sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c \ + --hash=sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a \ + --hash=sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e \ + --hash=sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9 \ + --hash=sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5 \ + --hash=sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a \ + --hash=sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556 \ + --hash=sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e \ + --hash=sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49 \ + --hash=sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2 \ + --hash=sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9 \ + --hash=sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b \ + --hash=sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc \ + --hash=sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb \ + --hash=sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0 \ + --hash=sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8 \ + --hash=sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82 \ + --hash=sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69 \ + --hash=sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b \ + --hash=sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c \ + --hash=sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75 \ + --hash=sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5 \ + --hash=sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f \ + --hash=sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad \ + --hash=sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b \ + --hash=sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7 \ + --hash=sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425 \ + --hash=sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52 + # via pydantic +pydantic-settings==2.13.1 \ + --hash=sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025 \ + --hash=sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237 + # via autodoc-pydantic pydata-sphinx-theme==0.16.1 \ --hash=sha256:225331e8ac4b32682c18fcac5a57a6f717c4e632cea5dd0e247b55155faeccde \ --hash=sha256:a08b7f0b7f70387219dc659bff0893a7554d5eb39b59d3b8ef37b8401b7642d7 @@ -621,6 +717,10 @@ pyphen==0.17.2 \ --hash=sha256:3a07fb017cb2341e1d9ff31b8634efb1ae4dc4b130468c7c39dd3d32e7c3affd \ 
--hash=sha256:f60647a9c9b30ec6c59910097af82bc5dd2d36576b918e44148d8b07ef3b4aa3
     # via weasyprint
+python-dotenv==1.2.2 \
+    --hash=sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a \
+    --hash=sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3
+    # via pydantic-settings
 pyyaml==6.0.3 \
     --hash=sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c \
     --hash=sha256:0150219816b6a1fa26fb4699fb7daa9caf09eb1999f3b70fb6e786805e80375a \
@@ -712,7 +812,7 @@ sphinx==7.4.7 \
     --hash=sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe \
     --hash=sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239
     # via
-    #   -r docs_requirements.txt
+    #   -r docs/docs_requirements.txt
     #   autodoc-pydantic
     #   nvidia-sphinx-theme
     #   pydata-sphinx-theme
@@ -732,28 +832,28 @@ sphinx==7.4.7 \
 sphinx-argparse==0.5.2 \
     --hash=sha256:d771b906c36d26dee669dbdbb5605c558d9440247a5608b810f7fa6e26ab1fd3 \
     --hash=sha256:e5352f8fa894b6fb6fda0498ba28a9f8d435971ef4bbc1a6c9c6414e7644f032
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-copybutton==0.5.2 \
     --hash=sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd \
     --hash=sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-design==0.6.1 \
     --hash=sha256:b11f37db1a802a183d61b159d9a202314d4d2fe29c163437001324fe2f19549c \
     --hash=sha256:b44eea3719386d04d765c1a8257caca2b3e6f8421d7b3a5e742c0fd45f84e632
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-markdown-builder==0.6.8 \
     --hash=sha256:6141b566bf18dd1cd515a0a90efd91c6c4d10fc638554fab2fd19cba66543dd7 \
     --hash=sha256:f04ab42d52449363228b9104569c56b778534f9c41a168af8cfc721a1e0e3edc
     # via
-    #   -r docs_requirements.txt
+    #   -r docs/docs_requirements.txt
     #   sphinx-multiversion
 sphinx-multiversion @ https://github.com/RyaliNvidia/sphinx-multiversion/archive/refs/tags/v0.2.5.zip \
     --hash=sha256:2616bde204930ed6995d9acd86f35770c1d8b02b37383e0653a706b1306f2406
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-new-tab-link==0.8.0 \
     --hash=sha256:6c757d99f559224a04142c3971c8baa6ac90aca905f15b129d57eeca0ece9582 \
     --hash=sha256:c74b873d6c8a1ec089015dc414a75f6908e87f66ce4ab8d9f2c7268f13afc593
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-prompt==1.8.0 \
     --hash=sha256:369ecc633f0711886f9b3a078c83264245be1adf46abeeb9b88b5519e4b51007 \
     --hash=sha256:47482f86fcec29662fdfd23e7c04ef03582714195d01f5d565403320084372ed
@@ -761,15 +861,15 @@
 sphinx-reredirects==0.1.6 \
     --hash=sha256:c491cba545f67be9697508727818d8626626366245ae64456fe29f37e9bbea64 \
     --hash=sha256:efd50c766fbc5bf40cd5148e10c00f2c00d143027de5c5e48beece93cc40eeea
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-simplepdf==1.6.0 \
     --hash=sha256:466a2b7e2000997ebf4dae62d88cd37b27d38c436ca23e81caf939e1d0e611f1 \
     --hash=sha256:bc8412c6b029886ae2e9241612dfc59c4cd35fa8cf2e7eb987c14126d422a939
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinx-substitution-extensions==2024.2.25 \
     --hash=sha256:1e22951dbe5052b5b7866897196317da3fb2640776b608828ffe6c675e607b95 \
     --hash=sha256:9d489d462343ba14dabc7f290c4d305946a54541d978a21d1d9b904f1ff7afe4
-    # via -r docs_requirements.txt
+    # via -r docs/docs_requirements.txt
 sphinxcontrib-applehelp==2.0.0 \
    
--hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \ --hash=sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5 @@ -793,7 +893,7 @@ sphinxcontrib-jsmath==1.0.1 \ sphinxcontrib-mermaid==1.0.0 \ --hash=sha256:2e8ab67d3e1e2816663f9347d026a8dee4a858acdd4ad32dd1c808893db88146 \ --hash=sha256:60b72710ea02087f212028feb09711225fbc2e343a10d34822fe787510e1caa3 - # via -r docs_requirements.txt + # via -r docs/docs_requirements.txt sphinxcontrib-qthelp==2.0.0 \ --hash=sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab \ --hash=sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb @@ -805,7 +905,7 @@ sphinxcontrib-serializinghtml==2.0.0 \ sphinxcontrib-spelling==7.7.0 \ --hash=sha256:56561c3f6a155b0946914e4de988729859315729dc181b5e4dc8a68fe78de35a \ --hash=sha256:95a0defef8ffec6526f9e83b20cc24b08c9179298729d87976891840e3aa3064 - # via -r docs_requirements.txt + # via -r docs/docs_requirements.txt tabulate==0.9.0 \ --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \ --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f @@ -854,13 +954,21 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via sphinx -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 +typing-extensions==4.15.0 \ + --hash=sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466 \ + --hash=sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548 # via # beautifulsoup4 # pydantic + # pydantic-core # pydata-sphinx-theme + # typing-inspection +typing-inspection==0.4.2 \ + --hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7 \ + --hash=sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464 + # via + # pydantic + # pydantic-settings urllib3==2.3.0 \ --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d diff --git a/src/cli/login.py b/src/cli/login.py index 14f6e8442..6736b4c7e 100644 --- a/src/cli/login.py +++ b/src/cli/login.py @@ -86,7 +86,7 @@ class UrlValidator(pydantic.BaseModel): url: pydantic.AnyHttpUrl try: _ = UrlValidator(url=url) - except pydantic.error_wrappers.ValidationError as error: + except pydantic.ValidationError as error: raise osmo_errors.OSMOUserError(f'Bad url {url}: {error}') print(f'Logging in to {url}') diff --git a/src/cli/version.py b/src/cli/version.py index f89465abf..44e0250bf 100644 --- a/src/cli/version.py +++ b/src/cli/version.py @@ -50,7 +50,7 @@ def _client_version(service_client: client.ServiceClient, args: argparse.Namespa pass client_version = version.VERSION if args.format_type == 'json': - output = {'client': client_version.dict()} + output = {'client': client_version.model_dump()} if result: output['service'] = result print(json.dumps(output, indent=common.JSON_INDENT_SIZE)) diff --git a/src/cli/workflow.py b/src/cli/workflow.py index eec06bf51..47bcf4345 100644 --- a/src/cli/workflow.py +++ b/src/cli/workflow.py @@ -54,7 +54,7 @@ RESIZE_PREFIX = b'\x00RESIZE:' -class TemplateData(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TemplateData(pydantic.BaseModel, extra='forbid'): """Pydantic model representing parsed 
template data from workflow files.""" file: str set_variables: List[str] diff --git a/src/lib/data/dataset/common.py b/src/lib/data/dataset/common.py index 72ad07d5f..065bef0e4 100644 --- a/src/lib/data/dataset/common.py +++ b/src/lib/data/dataset/common.py @@ -196,7 +196,7 @@ class RemoteToRemoteMapping(NamedTuple): @pydantic.dataclasses.dataclass( config=pydantic.ConfigDict( - extra=pydantic.Extra.forbid, + extra='forbid', frozen=True, ), ) @@ -211,7 +211,7 @@ class UploadStartResult: @pydantic.dataclasses.dataclass( config=pydantic.ConfigDict( - extra=pydantic.Extra.forbid, + extra='forbid', frozen=True, ), ) @@ -225,7 +225,7 @@ class UploadResult: @pydantic.dataclasses.dataclass( config=pydantic.ConfigDict( - extra=pydantic.Extra.forbid, + extra='forbid', frozen=True, ), ) @@ -235,14 +235,14 @@ class UpdateStartResult: """ upload_response: UploadResponse current_manifest_path: str - local_update_paths: List[LocalToRemoteMapping] | None - backend_update_paths: List[RemoteToRemoteMapping] | None - remove_regex: str | None + local_update_paths: List[LocalToRemoteMapping] | None = None + backend_update_paths: List[RemoteToRemoteMapping] | None = None + remove_regex: str | None = None @pydantic.dataclasses.dataclass( config=pydantic.ConfigDict( - extra=pydantic.Extra.forbid, + extra='forbid', frozen=True, ), ) diff --git a/src/lib/data/dataset/manager.py b/src/lib/data/dataset/manager.py index 67040f921..81cb87cb8 100644 --- a/src/lib/data/dataset/manager.py +++ b/src/lib/data/dataset/manager.py @@ -43,11 +43,12 @@ class Manager(pydantic.BaseModel): Manager for a dataset. """ - class Config: - arbitrary_types_allowed = True - extra = pydantic.Extra.forbid - frozen = True - keep_untouched = (functools.cached_property,) + model_config = pydantic.ConfigDict( + arbitrary_types_allowed=True, + extra='forbid', + frozen=True, + ignored_types=(functools.cached_property,), + ) ######################### # Required fields # diff --git a/src/lib/data/storage/backends/backends.py b/src/lib/data/storage/backends/backends.py index 9579aa2a5..f48c6b3ae 100644 --- a/src/lib/data/storage/backends/backends.py +++ b/src/lib/data/storage/backends/backends.py @@ -229,9 +229,8 @@ class SwiftBackend(Boto3Backend): Swift Backend """ - scheme: str = pydantic.Field( + scheme: Literal['swift'] = pydantic.Field( default=common.StorageBackendType.SWIFT.value, - const=True, description='The scheme of the Swift backend.', ) @@ -242,7 +241,6 @@ class SwiftBackend(Boto3Backend): supports_batch_delete: Literal[True] = pydantic.Field( default=True, - const=True, description='Whether the backend supports batch delete.', ) @@ -390,21 +388,18 @@ class S3Backend(Boto3Backend): AWS S3 Backend """ - scheme: str = pydantic.Field( + scheme: Literal['s3'] = pydantic.Field( default=common.StorageBackendType.S3.value, - const=True, description='The scheme of the S3 backend.', ) supports_batch_delete: Literal[True] = pydantic.Field( default=True, - const=True, description='Whether the backend supports batch delete.', ) supports_environment_auth: Literal[True] = pydantic.Field( default=True, - const=True, description='Whether the backend supports environment authentication.', ) @@ -585,9 +580,8 @@ class GSBackend(Boto3Backend): Google Cloud Platform GS Backend """ - scheme: str = pydantic.Field( + scheme: Literal['gs'] = pydantic.Field( default=common.StorageBackendType.GS.value, - const=True, description='The scheme of the GS backend.', ) @@ -595,7 +589,6 @@ class GSBackend(Boto3Backend): # 
https://issuetracker.google.com/issues/162653700 supports_batch_delete: Literal[False] = pydantic.Field( default=False, - const=True, description='Whether the backend supports batch delete.', ) @@ -708,15 +701,13 @@ class TOSBackend(Boto3Backend): https://docs.byteplus.com/en/docs/tos/docs-compatibility-with-amazon-s3#appendix-tos-compatible-s3-apis """ - scheme: str = pydantic.Field( + scheme: Literal['tos'] = pydantic.Field( default=common.StorageBackendType.TOS.value, - const=True, description='The scheme of the TOS backend.', ) supports_batch_delete: Literal[True] = pydantic.Field( default=True, - const=True, description='Whether the backend supports batch delete.', ) @@ -819,9 +810,8 @@ class AzureBlobStorageBackend(common.StorageBackend): Azure Blob Storage Backend """ - scheme: str = pydantic.Field( + scheme: Literal['azure'] = pydantic.Field( default=common.StorageBackendType.AZURE.value, - const=True, description='The scheme of the Azure Blob Storage backend.', ) @@ -832,7 +822,6 @@ class AzureBlobStorageBackend(common.StorageBackend): supports_environment_auth: Literal[True] = pydantic.Field( default=True, - const=True, description='Whether the backend supports environment authentication.', ) diff --git a/src/lib/data/storage/backends/common.py b/src/lib/data/storage/backends/common.py index 06eda5e46..17585edd7 100644 --- a/src/lib/data/storage/backends/common.py +++ b/src/lib/data/storage/backends/common.py @@ -99,13 +99,15 @@ class StoragePath: class StorageBackend( abc.ABC, pydantic.BaseModel, - extra=pydantic.Extra.forbid, - arbitrary_types_allowed=True, - keep_untouched=(functools.cached_property,), # Don't serialize cached properties ): """ Represents information about a storage backend. """ + model_config = pydantic.ConfigDict( + extra='forbid', + arbitrary_types_allowed=True, + ignored_types=(functools.cached_property,), # Don't serialize cached properties + ) scheme: str uri: str diff --git a/src/lib/data/storage/client.py b/src/lib/data/storage/client.py index 696da245f..aa568a02d 100644 --- a/src/lib/data/storage/client.py +++ b/src/lib/data/storage/client.py @@ -68,10 +68,11 @@ class Client(pydantic.BaseModel): A storage client that can be used to perform data operations against a remote storage. """ - class Config: - extra = pydantic.Extra.forbid - frozen = True - keep_untouched = (functools.cached_property,) + model_config = pydantic.ConfigDict( + extra='forbid', + frozen=True, + ignored_types=(functools.cached_property,), + ) ######################### # Factory methods # @@ -192,7 +193,7 @@ def create( storage_uri: str = pydantic.Field( ..., - regex=constants.STORAGE_BACKEND_REGEX, + pattern=constants.STORAGE_BACKEND_REGEX, description='The URI of the remote storage this instance of storage Client will ' 'operate against. Must point to a valid container.', ) @@ -231,29 +232,25 @@ def create( description='Headers to apply to all requests of this client.', ) - @pydantic.root_validator(skip_on_failure=True) - @classmethod - def validate_data_credential_endpoint(cls, values): + @pydantic.model_validator(mode='after') + def validate_data_credential_endpoint(self): """ Validates that the data credential endpoint matches the storage backend profile. 
""" - data_credential_input = values.get('data_credential_input') - if data_credential_input is not None: - storage_uri = values.get('storage_uri') - + if self.data_credential_input is not None: # Construct backends to validate profiles match data_cred_backend = backends.construct_storage_backend( - uri=data_credential_input.endpoint, + uri=self.data_credential_input.endpoint, ) storage_backend = backends.construct_storage_backend( - uri=storage_uri, + uri=self.storage_uri, ) if data_cred_backend.profile != storage_backend.profile: raise osmo_errors.OSMOCredentialError( 'Credential endpoint must match the storage backend profile') - return values + return self @functools.cached_property def data_credential(self) -> credentials.DataCredential: @@ -1195,9 +1192,7 @@ class SingleObjectClient(pydantic.BaseModel): interacting with a single object. """ - class Config: - extra = pydantic.Extra.forbid - frozen = True + model_config = pydantic.ConfigDict(extra='forbid', frozen=True) @overload @classmethod diff --git a/src/lib/data/storage/constants/constants.py b/src/lib/data/storage/constants/constants.py index fab5f4b4c..150e834fe 100644 --- a/src/lib/data/storage/constants/constants.py +++ b/src/lib/data/storage/constants/constants.py @@ -38,7 +38,7 @@ TOS_REGEX = fr'^tos://{URI_COMPONENT}(/{URI_COMPONENT})+/*$' AZURE_REGEX = fr'^azure://{URI_COMPONENT}(/{URI_COMPONENT})+/*$' STORAGE_BACKEND_REGEX = fr'({SWIFT_REGEX}|{S3_REGEX}|{GS_REGEX}|{TOS_REGEX}|{AZURE_REGEX})' -StorageBackendPattern = Annotated[str, pydantic.Field(regex=STORAGE_BACKEND_REGEX)] +StorageBackendPattern = Annotated[str, pydantic.Field(pattern=STORAGE_BACKEND_REGEX)] # Regex rules for storage profiles @@ -50,7 +50,7 @@ STORAGE_PROFILE_REGEX = fr'({SWIFT_PROFILE_REGEX}|{S3_PROFILE_REGEX}|' \ fr'{GS_PROFILE_REGEX}|{TOS_PROFILE_REGEX}|' \ fr'{AZURE_PROFILE_REGEX})' -StorageProfilePattern = Annotated[str, pydantic.Field(regex=STORAGE_PROFILE_REGEX)] +StorageProfilePattern = Annotated[str, pydantic.Field(pattern=STORAGE_PROFILE_REGEX)] STORAGE_CREDENTIAL_REGEX = fr'({STORAGE_PROFILE_REGEX}|{STORAGE_BACKEND_REGEX})' -StorageCredentialPattern = Annotated[str, pydantic.Field(regex=STORAGE_CREDENTIAL_REGEX)] +StorageCredentialPattern = Annotated[str, pydantic.Field(pattern=STORAGE_CREDENTIAL_REGEX)] diff --git a/src/lib/data/storage/core/BUILD b/src/lib/data/storage/core/BUILD index a2fe5b22d..6a13ea1e9 100644 --- a/src/lib/data/storage/core/BUILD +++ b/src/lib/data/storage/core/BUILD @@ -28,6 +28,7 @@ osmo_py_library( "//src/lib/utils:logging", "//src/lib/utils:osmo_errors", requirement("pydantic"), + requirement("pydantic-settings"), requirement("tqdm"), ], ) diff --git a/src/lib/data/storage/core/executor.py b/src/lib/data/storage/core/executor.py index 59bcd34c2..698c3ed9e 100644 --- a/src/lib/data/storage/core/executor.py +++ b/src/lib/data/storage/core/executor.py @@ -47,6 +47,7 @@ ) import pydantic +import pydantic_settings from . import progress, provider from ....utils import common, logging as logging_utils, osmo_errors @@ -67,32 +68,30 @@ # Executor Schemas (External) # ################################### -class ExecutorParameters(pydantic.BaseSettings): +class ExecutorParameters(pydantic_settings.BaseSettings): """ A class for storing parameters regarding multi-process/thread operations. Allows for environment variable overrides of the parameters. """ - class Config: - """ - Pydantic configuration for the ExecutorParameters class. 
- """ - env_prefix = 'OSMO_EXECUTOR_' - - @classmethod - def customise_sources( - cls, - init_settings, - env_settings, - file_secret_settings, - ): - # Treat explicit None as "unset" so env vars can apply - def init_without_none(settings): - data = init_settings(settings) - return {k: v for k, v in data.items() if v is not None} - - return (init_without_none, env_settings, file_secret_settings) + model_config = pydantic_settings.SettingsConfigDict(env_prefix='OSMO_EXECUTOR_') + + @classmethod + def settings_customise_sources( + cls, + settings_cls, # pylint: disable=unused-argument + init_settings, + env_settings, + dotenv_settings, # pylint: disable=unused-argument + file_secret_settings, + ): + # Treat explicit None as "unset" so env vars can apply. + # Override init_settings to filter out None values. + init_settings.init_kwargs = { + k: v for k, v in init_settings.init_kwargs.items() if v is not None + } + return (init_settings, env_settings, file_secret_settings) num_processes: int | None = pydantic.Field( default=None, @@ -130,10 +129,7 @@ def init_without_none(settings): description='The size of the log queue for the executor. Only used for multi-process jobs.', ) - @pydantic.validator( - 'num_threads_inflight_multiplier', - 'chunk_queue_size_multiplier', - ) + @pydantic.field_validator('num_threads_inflight_multiplier', 'chunk_queue_size_multiplier') @classmethod def _validate_multiplier_max(cls, v: int) -> int: if v > MAX_MULTIPLIER: diff --git a/src/lib/data/storage/credentials/credentials.py b/src/lib/data/storage/credentials/credentials.py index c7a5d5da3..ed0493525 100644 --- a/src/lib/data/storage/credentials/credentials.py +++ b/src/lib/data/storage/credentials/credentials.py @@ -32,7 +32,7 @@ from ....utils import client_configs, osmo_errors -class DataCredentialBase(pydantic.BaseModel, abc.ABC, extra=pydantic.Extra.forbid): +class DataCredentialBase(pydantic.BaseModel, abc.ABC, extra='forbid'): """ Base class for data credentials (i.e. credentials with endpoint and region). """ @@ -49,7 +49,7 @@ class DataCredentialBase(pydantic.BaseModel, abc.ABC, extra=pydantic.Extra.forbi description='HTTP endpoint URL override the storage URI (e.g., http://minio:9000)', ) - @pydantic.validator('endpoint') + @pydantic.field_validator('endpoint') @classmethod def validate_endpoint(cls, value: str) -> constants.StorageCredentialPattern: """ @@ -60,7 +60,7 @@ def validate_endpoint(cls, value: str) -> constants.StorageCredentialPattern: return value.rstrip('/') -class StaticDataCredential(DataCredentialBase, abc.ABC, extra=pydantic.Extra.forbid): +class StaticDataCredential(DataCredentialBase, abc.ABC, extra='forbid'): """ Static data credentials (i.e. credentials with access_key_id and access_key) for a data backend. """ @@ -90,7 +90,7 @@ def to_decrypted_dict(self) -> dict[str, str]: return output -class DefaultDataCredential(DataCredentialBase, extra=pydantic.Extra.forbid): +class DefaultDataCredential(DataCredentialBase, extra='forbid'): """ Data credential that delegates resolution to the underlying SDK. diff --git a/src/lib/data/storage/downloading.py b/src/lib/data/storage/downloading.py index d7c189799..12344b5f2 100644 --- a/src/lib/data/storage/downloading.py +++ b/src/lib/data/storage/downloading.py @@ -140,23 +140,24 @@ class DownloadParams: description='Whether to enable the progress tracker. 
Defaults to False.', ) - @pydantic.root_validator + @pydantic.model_validator(mode='wrap') @classmethod - def validate_download_sources(cls, values): + def validate_download_sources(cls, values, handler): """ Validate that exactly one of download_paths, download_worker_inputs, or download_worker_inputs_generator is provided. """ + instance = handler(values) if sum([ - values.get('download_paths') is not None, - values.get('download_worker_inputs') is not None, - values.get('download_worker_inputs_generator') is not None, + instance.download_paths is not None, + instance.download_worker_inputs is not None, + instance.download_worker_inputs_generator is not None, ]) != 1: raise ValueError( 'Exactly one of download_paths, download_worker_inputs, or ' 'download_worker_inputs_generator must be provided.') - return values + return instance @pydantic.dataclasses.dataclass(frozen=True, kw_only=True) diff --git a/src/lib/data/storage/uploading.py b/src/lib/data/storage/uploading.py index fe24e470e..a90777494 100644 --- a/src/lib/data/storage/uploading.py +++ b/src/lib/data/storage/uploading.py @@ -222,23 +222,24 @@ class UploadParams: 'after each file is uploaded.', ) - @pydantic.root_validator + @pydantic.model_validator(mode='wrap') @classmethod - def validate_upload_sources(cls, values): + def validate_upload_sources(cls, values, handler): """ Validate that exactly one of upload_paths, upload_worker_inputs, or upload_worker_inputs_generator is provided. """ + instance = handler(values) if sum([ - values.get('upload_paths') is not None, - values.get('upload_worker_inputs') is not None, - values.get('upload_worker_inputs_generator') is not None, + instance.upload_paths is not None, + instance.upload_worker_inputs is not None, + instance.upload_worker_inputs_generator is not None, ]) != 1: raise ValueError( 'Exactly one of upload_paths, upload_worker_inputs, or ' 'upload_worker_inputs_generator must be provided.') - return values + return instance @pydantic.dataclasses.dataclass(frozen=True, kw_only=True) diff --git a/src/lib/tests/docs_scripts/data_express/lister.py b/src/lib/tests/docs_scripts/data_express/lister.py index 2d269c91c..52ae0aa23 100644 --- a/src/lib/tests/docs_scripts/data_express/lister.py +++ b/src/lib/tests/docs_scripts/data_express/lister.py @@ -56,7 +56,7 @@ def yield_chunk(file_obj: s3.DownloadQueueObject | s3.UploadEntry, file_size: in and file_chunk: return True - file_chunk.append(file_obj.dict()) + file_chunk.append(file_obj.model_dump()) current_chunk_size += data_utils.convert_to_gib(str(file_size)) if len(file_chunk) >= chunk_amount: return True @@ -97,7 +97,7 @@ def process_benchmark(body: Dict, kombu_message: message.Message, benchmark_loca # Write download result to json file with open(benchmark_path, 'w', encoding='utf-8') as f: - json.dump(benchmark_result.dict(), f, indent=4) + json.dump(benchmark_result.model_dump(), f, indent=4) kombu_message.ack() diff --git a/src/lib/tests/docs_scripts/data_express/worker.py b/src/lib/tests/docs_scripts/data_express/worker.py index b36ad2d18..22900d9ef 100644 --- a/src/lib/tests/docs_scripts/data_express/worker.py +++ b/src/lib/tests/docs_scripts/data_express/worker.py @@ -87,7 +87,7 @@ def get_benchmark_result(folder_path: str) -> Dict: size=data_utils.convert_to_gib(upload_result.size), size_unit='GiB', failed_messages=upload_result.failed_messages, - benchmark_result=benchmark_result).dict(), + benchmark_result=benchmark_result).model_dump(), data_utils.QueueType.BENCHMARK) else: base_storage_backend = 
storage_backends.construct_storage_backend(output_location) @@ -105,7 +105,7 @@ def get_benchmark_result(folder_path: str) -> Dict: size=data_utils.convert_to_gib(download_result.size), size_unit='GiB', failed_messages=download_result.failed_messages, - benchmark_result=benchmark_result).dict(), + benchmark_result=benchmark_result).model_dump(), data_utils.QueueType.BENCHMARK) kombu_message.ack() diff --git a/src/lib/utils/client.py b/src/lib/utils/client.py index 752b7a83f..b66d562d7 100644 --- a/src/lib/utils/client.py +++ b/src/lib/utils/client.py @@ -248,7 +248,7 @@ def _save_login_info(self, login_storage: login.LoginStorage, welcome: bool = Fa login_dir = client_configs.get_client_config_dir() login_file = login_dir + '/login.yaml' with open(os.path.expanduser(login_file), 'w', encoding='utf-8') as file: - login_dict = login_storage.dict() + login_dict = login_storage.model_dump() login_dict['name'] = login_storage.name yaml.dump(login_dict, file) diff --git a/src/lib/utils/common.py b/src/lib/utils/common.py index 79e8a8801..e4497a92f 100644 --- a/src/lib/utils/common.py +++ b/src/lib/utils/common.py @@ -94,8 +94,10 @@ UUID_REGEX = r'[a-f0-9]{32}' GROUP_UUID_REGEX = r'osmo-[a-f0-9]{32}' OLD_UUID_REGEX = r'[a-z2-7]{26}' -UuidPattern = Annotated[str, - pydantic.Field(regex=f'^{UUID_REGEX}|{OLD_UUID_REGEX}|{GROUP_UUID_REGEX}$')] +UuidPattern = Annotated[ + str, + pydantic.Field( + pattern=f'^{UUID_REGEX}|{OLD_UUID_REGEX}|{GROUP_UUID_REGEX}$')] WFID_REGEX = r'[a-zA-Z]([a-zA-Z0-9_-]*[a-zA-Z0-9])?-\d+$' RESOURCE_REGEX = r'(?P(\d+(?:\.\d+)?))(?P([a-zA-Z]*))' @@ -167,9 +169,23 @@ def pydantic_encoder(obj): ''' Allows pydantic objects to be used for json.dumps ''' if isinstance(obj, pydantic.BaseModel): - return obj.dict() + return obj.model_dump() elif isinstance(obj, enum.Enum): return obj.value + elif isinstance(obj, datetime.datetime): + return obj.isoformat() + elif isinstance(obj, datetime.date): + return obj.isoformat() + elif isinstance(obj, datetime.time): + return obj.isoformat() + elif isinstance(obj, datetime.timedelta): + return obj.total_seconds() + elif isinstance(obj, uuid.UUID): + return str(obj) + elif isinstance(obj, (set, frozenset)): + return list(obj) + elif isinstance(obj, bytes): + return obj.decode('utf-8', errors='replace') raise TypeError(f'Object of type {obj.__class__.__name__} is not JSON serializable') @@ -592,9 +608,38 @@ def _convert_str_to_time(duration: str) -> Tuple[int, str]: return int(duration[:-1]), duration[-1] +_ISO8601_DURATION_RE = re.compile( + r'^P(?:(\d+)D)?(?:T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)?$' +) + + +def _parse_iso8601_duration(duration: str) -> datetime.timedelta | None: + """ Parses an ISO 8601 duration string (e.g. PT10S, PT1H30M) into a timedelta. """ + match = _ISO8601_DURATION_RE.match(duration) + if not match: + return None + if not any(match.group(i) for i in range(1, 5)): + return None + days = int(match.group(1)) if match.group(1) else 0 + hours = int(match.group(2)) if match.group(2) else 0 + minutes = int(match.group(3)) if match.group(3) else 0 + seconds = float(match.group(4)) if match.group(4) else 0 + return datetime.timedelta(days=days, hours=hours, minutes=minutes, seconds=seconds) + + def to_timedelta(duration: str) -> datetime.timedelta: """ Converts time duration str to datetime.timedelta instance. """ - error_message = f'Cannot recognize duration: {duration}. Only support xd, xh, xm, xs, xms, xus' + error_message = ( + f'Cannot recognize duration: {duration}. ' + 'Only support ISO 8601 (e.g. 
PT10S) or xd, xh, xm, xs, xms, xus'
+    )
+
+    if duration.startswith('P'):
+        result = _parse_iso8601_duration(duration)
+        if result is not None:
+            return result
+        raise ValueError(error_message)
+
     try:
         value, unit = _convert_str_to_time(duration)
     except ValueError as error:
diff --git a/src/lib/utils/credentials.py b/src/lib/utils/credentials.py
index 672378837..3bcbc100c 100644
--- a/src/lib/utils/credentials.py
+++ b/src/lib/utils/credentials.py
@@ -29,7 +29,7 @@
 CREDNAMEREGEX = r'^[a-zA-Z]([a-zA-Z0-9_-]*[a-zA-Z0-9])?$'
 
 
-class RegistryCredential(pydantic.BaseModel, extra=pydantic.Extra.forbid):
+class RegistryCredential(pydantic.BaseModel, extra='forbid'):
     """ Authentication information for a Docker registry. """
     registry: str = pydantic.Field('', description='The Docker registry URL')
     username: str = pydantic.Field('', description='The username for the Docker registry')
diff --git a/src/lib/utils/logging.py b/src/lib/utils/logging.py
index 123df744d..b161315db 100644
--- a/src/lib/utils/logging.py
+++ b/src/lib/utils/logging.py
@@ -131,23 +131,23 @@ def __exit__(self, ex_type, ex_value, ex_traceback):
 class LoggingConfig(pydantic.BaseModel):
     """Manages the logging configuration"""
     log_level: LoggingLevel = pydantic.Field(
-        command_line='log_level',
         default=LoggingLevel.INFO,
-        description='The level of logging errors messages to record.')
+        description='The level of logging error messages to record.',
+        json_schema_extra={'command_line': 'log_level'})
     log_dir: Optional[str] = pydantic.Field(
-        command_line='log_dir',
         default=None,
-        description='The directory to write logs to.')
+        description='The directory to write logs to.',
+        json_schema_extra={'command_line': 'log_dir'})
     log_name: str = pydantic.Field(
-        command_line='log_name',
         default='',
-        description='The name of the log file.')
+        description='The name of the log file.',
+        json_schema_extra={'command_line': 'log_name'})
     k8s_log_level: LoggingLevel = pydantic.Field(
-        command_line='k8s_log_level',
         default=LoggingLevel.WARNING,
-        description='The level of k8s logging errors messages to record.')
+        description='The level of k8s logging error messages to record.',
+        json_schema_extra={'command_line': 'k8s_log_level'})
 
-    @pydantic.validator('log_level', 'k8s_log_level', pre=True)
+    @pydantic.field_validator('log_level', 'k8s_log_level', mode='before')
     @classmethod
     def _parse_logging_levels(cls, v) -> LoggingLevel:
         return LoggingLevel.parse(v)
diff --git a/src/lib/utils/login.py b/src/lib/utils/login.py
index 8cfca5f59..5551f469c 100644
--- a/src/lib/utils/login.py
+++ b/src/lib/utils/login.py
@@ -54,39 +54,38 @@ def fetch_login_info(url: str):
 class LoginConfig(pydantic.BaseModel):
     """ Manages configuration specific to the login """
     username: str | None = pydantic.Field(
-        command_line='username',
-        description='The username to sign in with.')
+        default=None,
+        description='The username to sign in with.',
+        json_schema_extra={'command_line': 'username'})
     password: str | None = pydantic.Field(
-        command_line='password',
         default=None,
-        env='OSMO_LOGIN_PASSWORD',
-        description='The password to sign in with.')
+        description='The password to sign in with.',
+        json_schema_extra={'command_line': 'password', 'env': 'OSMO_LOGIN_PASSWORD'})
     password_file: str | None = pydantic.Field(
-        command_line='password_file',
         default=None,
-        description='The password stored in a file to sign in with.')
+        description='The password stored in a file to sign in with.',
+        json_schema_extra={'command_line': 'password_file'})
     token: str | None = 
pydantic.Field( - command_line='token', default=None, - env='OSMO_LOGIN_TOKEN', - description='The access token to sign in with.') + description='The access token to sign in with.', + json_schema_extra={'command_line': 'token', 'env': 'OSMO_LOGIN_TOKEN'}) token_file: str | None = pydantic.Field( - command_line='token_file', default=None, - description='The file containing the access token to sign in with.') + description='The file containing the access token to sign in with.', + json_schema_extra={'command_line': 'token_file'}) token_endpoint: str | None = pydantic.Field( - command_line='token_endpoint', default = None, - description='The url to get a token from device auth, client auth, or refresh token.') + description='The url to get a token from device auth, client auth, or refresh token.', + json_schema_extra={'command_line': 'token_endpoint'}) client_id: str | None = pydantic.Field( - command_line='client_id', default=None, - description='The client id for the OSMO application.') + description='The client id for the OSMO application.', + json_schema_extra={'command_line': 'client_id'}) login_method: Literal['password', 'token'] | None = pydantic.Field( - command_line='login_method', default='password', description='The method to use to login, either "password" or "token". ' - 'Defaults to "password".') + 'Defaults to "password".', + json_schema_extra={'command_line': 'login_method'}) def token_or_default(self, login_url: str) -> str: if self.token_endpoint is not None: @@ -138,14 +137,16 @@ class LoginStorage(pydantic.BaseModel): url: str osmo_token: bool = False - @pydantic.validator('url') + @pydantic.field_validator('url') @classmethod def replace_url_without_slash(cls, login_url: str): return login_url.rstrip('/') + @pydantic.model_validator(mode='before') @classmethod - @pydantic.root_validator def validate_one_login_type(cls, values): + if not isinstance(values, dict): + return values fields = ('token_login', 'dev_login') login_fields = [field for field in fields if values.get(field) is not None] if len(login_fields) != 1: diff --git a/src/lib/utils/role.py b/src/lib/utils/role.py index a6d88b158..575ba7798 100644 --- a/src/lib/utils/role.py +++ b/src/lib/utils/role.py @@ -79,10 +79,12 @@ class RolePolicy(pydantic.BaseModel): # If empty or not specified, the policy applies to all resources ("*") resources: List[str] = pydantic.Field(default_factory=list) - @pydantic.validator('actions', pre=True) + @pydantic.field_validator('actions', mode='before') @classmethod def validate_actions(cls, value) -> List[str]: """Parse and validate actions from various input formats.""" + if isinstance(value, str): + value = [value] return [validate_semantic_action(action) for action in value] def to_dict(self) -> Dict[str, Any]: diff --git a/src/lib/utils/version.py b/src/lib/utils/version.py index 59be88646..f2db912cb 100644 --- a/src/lib/utils/version.py +++ b/src/lib/utils/version.py @@ -32,6 +32,8 @@ class Version(pydantic.BaseModel): """ A class to maintain version information. """ + model_config = pydantic.ConfigDict(coerce_numbers_to_str=True) + major: str minor: str = '0' revision: str = '0' @@ -93,7 +95,7 @@ def write_version(version: Version) -> None: """ Replaces the version into version file. 
""" release_file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'version.yaml') data = '' - for key, value in version.dict().items(): + for key, value in version.model_dump().items(): data += F'{key.lower()}: {value}\n' with open(release_file_path, 'w+', encoding='UTF-8') as file: file.write(data) diff --git a/src/locked_requirements.txt b/src/locked_requirements.txt index d38a3e4bb..d71651569 100644 --- a/src/locked_requirements.txt +++ b/src/locked_requirements.txt @@ -24,6 +24,10 @@ annotated-doc==0.0.4 \ --hash=sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320 \ --hash=sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4 # via fastapi +annotated-types==0.7.0 \ + --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ + --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 + # via pydantic anyio==4.12.1 \ --hash=sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703 \ --hash=sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c @@ -792,47 +796,140 @@ pycparser==3.0 \ --hash=sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29 \ --hash=sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992 # via cffi -pydantic==1.10.26 \ - --hash=sha256:0141f4bafe5eda539d98c9755128a9ea933654c6ca4306b5059fc87a01a38573 \ - --hash=sha256:0d8f6087bf697dec3bf7ffcd7fe8362674f16519f3151789f33cbe8f1d19fc15 \ - --hash=sha256:0e4451951a9a93bf9a90576f3e25240b47ee49ab5236adccb8eff6ac943adf0f \ - --hash=sha256:116233e53889bcc536f617e38c1b8337d7fa9c280f0fd7a4045947515a785637 \ - --hash=sha256:15b13b9f8ba8867095769e1156e0d7fbafa1f65b898dd40fd1c02e34430973cb \ - --hash=sha256:1a4e3062b71ab1d5df339ba12c48f9ed5817c5de6cb92a961dd5c64bb32e7b96 \ - --hash=sha256:1ae7913bb40a96c87e3d3f6fe4e918ef53bf181583de4e71824360a9b11aef1c \ - --hash=sha256:2c1b0b914be31671000ca25cf7ea17fcaaa68cfeadf6924529c5c5aa24b7ab1f \ - --hash=sha256:36d9e46b588aaeb1dcd2409fa4c467fe0b331f3cc9f227b03a7a00643704e962 \ - --hash=sha256:4482b299874dabb88a6c3759e3d85c6557c407c3b586891f7d808d8a38b66b9c \ - --hash=sha256:465ad8edb29b15c10b779b16431fe8e77c380098badf6db367b7a1d3e572cf53 \ - --hash=sha256:468d5b9cacfcaadc76ed0a4645354ab6f263ec01a63fb6d05630ea1df6ae453f \ - --hash=sha256:502b9d30d18a2dfaf81b7302f6ba0e5853474b1c96212449eb4db912cb604b7d \ - --hash=sha256:6b40730cc81d53d515dc0b8bb5c9b43fadb9bed46de4a3c03bd95e8571616dba \ - --hash=sha256:71cde228bc0600cf8619f0ee62db050d1880dcc477eba0e90b23011b4ee0f314 \ - --hash=sha256:80e6be6272839c8a7641d26ad569ab77772809dd78f91d0068dc0fc97f071945 \ - --hash=sha256:8154c13f58d4de5d3a856bb6c909c7370f41fb876a5952a503af6b975265f4ba \ - --hash=sha256:81ce3c8616d12a7be31b4aadfd3434f78f6b44b75adbfaec2fe1ad4f7f999b8c \ - --hash=sha256:8be08b5cfe88e58198722861c7aab737c978423c3a27300911767931e5311d0d \ - --hash=sha256:8c6aa39b494c5af092e690127c283d84f363ac36017106a9e66cb33a22ac412e \ - --hash=sha256:9858ed44c6bea5f29ffe95308db9e62060791c877766c67dd5f55d072c8612b5 \ - --hash=sha256:a943ce8e00ad708ed06a1d9df5b4fd28f5635a003b82a4908ece6f24c0b18464 \ - --hash=sha256:ac1089f723e2106ebde434377d31239e00870a7563245072968e5af5cc4d33df \ - --hash=sha256:ad7025ca324ae263d4313998e25078dcaec5f9ed0392c06dedb57e053cc8086b \ - --hash=sha256:bc5c91a3b3106caf07ac6735ec6efad8ba37b860b9eb569923386debe65039ad \ - --hash=sha256:c3bbb9c0eecdf599e4db9b372fa9cc55be12e80a0d9c6d307950a39050cb0e37 \ - 
--hash=sha256:c3cfdd361addb6eb64ccd26ac356ad6514cee06a61ab26b27e16b5ed53108f77 \ - --hash=sha256:c43ad70dc3ce7787543d563792426a16fd7895e14be4b194b5665e36459dd917 \ - --hash=sha256:cc2e3fe7bc4993626ef6b6fa855defafa1d6f8996aa1caef2deb83c5ac4d043a \ - --hash=sha256:ce3293b86ca9f4125df02ff0a70be91bc7946522467cbd98e7f1493f340616ba \ - --hash=sha256:d95a76cf503f0f72ed7812a91de948440b2bf564269975738a4751e4fadeb572 \ - --hash=sha256:dcb5a7318fb43189fde6af6f21ac7149c4bcbcfffc54bc87b5becddc46084847 \ - --hash=sha256:dd40a99c358419910c85e6f5d22f9c56684c25b5e7abc40879b3b4a52f34ae90 \ - --hash=sha256:dde599e0388e04778480d57f49355c9cc7916de818bf674de5d5429f2feebfb6 \ - --hash=sha256:eb664305ffca8a9766a8629303bb596607d77eae35bb5f32ff9245984881b638 \ - --hash=sha256:f7ae36fa0ecef8d39884120f212e16c06bb096a38f523421278e2f39c1784546 \ - --hash=sha256:f8af0507bf6118b054a9765fb2e402f18a8b70c964f420d95b525eb711122d62 +pydantic==2.12.5 \ + --hash=sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49 \ + --hash=sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d # via # -r requirements.txt # fastapi + # pydantic-settings +pydantic-core==2.41.5 \ + --hash=sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90 \ + --hash=sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740 \ + --hash=sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504 \ + --hash=sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84 \ + --hash=sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33 \ + --hash=sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c \ + --hash=sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0 \ + --hash=sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e \ + --hash=sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0 \ + --hash=sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a \ + --hash=sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34 \ + --hash=sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2 \ + --hash=sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3 \ + --hash=sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815 \ + --hash=sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14 \ + --hash=sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba \ + --hash=sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375 \ + --hash=sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf \ + --hash=sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963 \ + --hash=sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1 \ + --hash=sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808 \ + --hash=sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553 \ + --hash=sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1 \ + --hash=sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2 \ + --hash=sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5 \ + --hash=sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470 \ + --hash=sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2 \ + --hash=sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b \ + 
--hash=sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660 \ + --hash=sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c \ + --hash=sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093 \ + --hash=sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5 \ + --hash=sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594 \ + --hash=sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008 \ + --hash=sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a \ + --hash=sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a \ + --hash=sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd \ + --hash=sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284 \ + --hash=sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586 \ + --hash=sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869 \ + --hash=sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294 \ + --hash=sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f \ + --hash=sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66 \ + --hash=sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51 \ + --hash=sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc \ + --hash=sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97 \ + --hash=sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a \ + --hash=sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d \ + --hash=sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9 \ + --hash=sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c \ + --hash=sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07 \ + --hash=sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36 \ + --hash=sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e \ + --hash=sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05 \ + --hash=sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e \ + --hash=sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941 \ + --hash=sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3 \ + --hash=sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612 \ + --hash=sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3 \ + --hash=sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b \ + --hash=sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe \ + --hash=sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146 \ + --hash=sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11 \ + --hash=sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60 \ + --hash=sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd \ + --hash=sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b \ + --hash=sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c \ + --hash=sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a \ + --hash=sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460 \ + --hash=sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1 \ + 
--hash=sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf \ + --hash=sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf \ + --hash=sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858 \ + --hash=sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2 \ + --hash=sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9 \ + --hash=sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2 \ + --hash=sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3 \ + --hash=sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6 \ + --hash=sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770 \ + --hash=sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d \ + --hash=sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc \ + --hash=sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23 \ + --hash=sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26 \ + --hash=sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa \ + --hash=sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8 \ + --hash=sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d \ + --hash=sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3 \ + --hash=sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d \ + --hash=sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034 \ + --hash=sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9 \ + --hash=sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1 \ + --hash=sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56 \ + --hash=sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b \ + --hash=sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c \ + --hash=sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a \ + --hash=sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e \ + --hash=sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9 \ + --hash=sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5 \ + --hash=sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a \ + --hash=sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556 \ + --hash=sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e \ + --hash=sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49 \ + --hash=sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2 \ + --hash=sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9 \ + --hash=sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b \ + --hash=sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc \ + --hash=sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb \ + --hash=sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0 \ + --hash=sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8 \ + --hash=sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82 \ + --hash=sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69 \ + --hash=sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b \ + 
--hash=sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c \ + --hash=sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75 \ + --hash=sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5 \ + --hash=sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f \ + --hash=sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad \ + --hash=sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b \ + --hash=sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7 \ + --hash=sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425 \ + --hash=sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52 + # via pydantic +pydantic-settings==2.9.1 \ + --hash=sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef \ + --hash=sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268 + # via -r requirements.txt pyinstaller==6.19.0 \ --hash=sha256:1ec54ef967996ca61dacba676227e2b23219878ccce5ee9d6f3aada7b8ed8abf \ --hash=sha256:3c5c251054fe4cfaa04c34a363dcfbf811545438cb7198304cd444756bc2edd2 \ @@ -866,7 +963,9 @@ python-dateutil==2.9.0.post0 \ python-dotenv==1.2.2 \ --hash=sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a \ --hash=sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3 - # via uvicorn + # via + # pydantic-settings + # uvicorn pytz==2023.3 \ --hash=sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588 \ --hash=sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb @@ -1024,8 +1123,16 @@ typing-extensions==4.15.0 \ # opentelemetry-sdk # opentelemetry-semantic-conventions # pydantic + # pydantic-core # starlette + # typing-inspection # uvicorn +typing-inspection==0.4.2 \ + --hash=sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7 \ + --hash=sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464 + # via + # pydantic + # pydantic-settings urllib3==2.6.3 \ --hash=sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed \ --hash=sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4 diff --git a/src/operator/backend_listener.py b/src/operator/backend_listener.py index aa777e779..03548e1e6 100644 --- a/src/operator/backend_listener.py +++ b/src/operator/backend_listener.py @@ -36,7 +36,7 @@ import kubernetes # type: ignore import opentelemetry.metrics as otelmetrics -import pydantic # type: ignore +import pydantic import urllib3 # type: ignore import websockets import websockets.exceptions @@ -91,7 +91,7 @@ def get_container_exit_code(container_name: str, exit_code: int) -> int: return exit_code -class PodErrorInfo(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PodErrorInfo(pydantic.BaseModel, extra='forbid'): """ Lightweight class for storing information about pod failure""" error_message: str = '' exit_codes: Dict[str, int] = {} @@ -113,10 +113,10 @@ def get_exit_code(self) -> int | None: return None -class PodWaitingStatus(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PodWaitingStatus(pydantic.BaseModel, extra='forbid'): """ Lightweight class for storing information about pod status. 
""" waiting_on_error: bool - waiting_reason: str | None + waiting_reason: str | None = None error_info: PodErrorInfo = pydantic.Field(default_factory=PodErrorInfo) @@ -1022,7 +1022,7 @@ def send_pod_conditions(event_send_queue: helpers.EnqueueCallback, retry_id: Retry ID conditions_messages: List of condition messages to send """ - pod_conditions_key = (task_uuid, tuple(c.json() for c in conditions_messages)) + pod_conditions_key = (task_uuid, tuple(c.model_dump_json() for c in conditions_messages)) if not check_ttl_cache(pod_conditions_cache, pod_conditions_key): pod_conditions_message = backend_messages.MessageBody( @@ -1617,10 +1617,10 @@ async def websocket_connect(progress_writer: progress.ProgressWriter, k8s_namespace=config.namespace, version=str(version.VERSION), node_condition_prefix=config.node_condition_prefix)) - await websocket.send(init_message.json()) + await websocket.send(init_message.model_dump_json()) for message in unack_messages.list_messages(): - await websocket.send(message.json()) + await websocket.send(message.model_dump_json()) progress_writer.report_progress() async def _send_message(): @@ -1629,7 +1629,7 @@ async def _send_message(): message = await asyncio.wait_for( message_queue.get(), timeout=TIMEOUT_SEC) await unack_messages.add_message(message) - await websocket.send(message.json()) + await websocket.send(message.model_dump_json()) message_queue.task_done() send_backend_message_transmission_count( event_type=connection_type.value) diff --git a/src/operator/backend_worker.py b/src/operator/backend_worker.py index 2c5cd619c..d42e3e070 100644 --- a/src/operator/backend_worker.py +++ b/src/operator/backend_worker.py @@ -86,7 +86,7 @@ async def send_async_message(self, message: backend_messages.MessageBody): async def forward_messages(self, websocket): while True: message = await self.message_queue.get() - await websocket.send(message.json()) + await websocket.send(message.model_dump_json()) self.message_queue.task_done() async def clear_queue(self): diff --git a/src/operator/helpers.py b/src/operator/helpers.py index 978fdc7c4..a91e2e5b7 100644 --- a/src/operator/helpers.py +++ b/src/operator/helpers.py @@ -36,7 +36,7 @@ async def send_log_through_websocket(log_type: backend_messages.LoggingType, tex await websocket.send(backend_messages.MessageBody( type=backend_messages.MessageType.LOGGING, body=backend_messages.LoggingBody(type=log_type, - text=text)).json()) + text=text)).model_dump_json()) def send_log_through_queue(log_type: backend_messages.LoggingType, text: str, diff --git a/src/operator/utils/node_validation_test/connection_validator.py b/src/operator/utils/node_validation_test/connection_validator.py index f3196c871..2d17ca192 100644 --- a/src/operator/utils/node_validation_test/connection_validator.py +++ b/src/operator/utils/node_validation_test/connection_validator.py @@ -49,28 +49,27 @@ class ConnectionTestConfig(test_base.NodeTestConfig): """Configuration for connection validation tests.""" condition_name: str = pydantic.Field( - command_line='condition_name', default='ServiceConnectionTestFailure', - description='Condition name for service connection failure') + description='Condition name for service connection failure', + json_schema_extra={'command_line': 'condition_name'}) test_url: Optional[str] = pydantic.Field( - command_line='test_url', default=None, - description='Single URL to test connection to') + description='Single URL to test connection to', + json_schema_extra={'command_line': 'test_url'}) test_timeout: int = pydantic.Field( - 
command_line='test_timeout', default=30, - description='Default timeout in seconds for connection tests') + description='Default timeout in seconds for connection tests', + json_schema_extra={'command_line': 'test_timeout'}) url_configs_filepath: Optional[str] = pydantic.Field( - command_line='url_configs_filepath', default=os.path.join(os.path.dirname(__file__), 'connection_validator.yaml'), - description='Path to a YAML file containing url_configs list' - ) + description='Path to a YAML file containing url_configs list', + json_schema_extra={'command_line': 'url_configs_filepath'}) url_configs: Optional[List[URLTestConfig]] = pydantic.Field( default=None, description='List of URLTestConfig items loaded from YAML' ) - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') @classmethod def load_url_configs_from_file(cls, values): """ diff --git a/src/operator/utils/node_validation_test/lfs_validator.py b/src/operator/utils/node_validation_test/lfs_validator.py index 63390ee69..c54da9672 100644 --- a/src/operator/utils/node_validation_test/lfs_validator.py +++ b/src/operator/utils/node_validation_test/lfs_validator.py @@ -19,7 +19,7 @@ import logging import pydantic import time -from typing import Dict, List, Any +from typing import Any, Dict, List, Sequence import sys from kubernetes import client as k8s_client @@ -31,70 +31,69 @@ class LFSTestConfig(test_base.NodeTestConfig): """Configuration for LFS validation tests.""" condition_name: str = pydantic.Field( - command_line='condition_name', default='LFSMountFailure', - description='Condition name for LFS mount failure') + description='Condition name for LFS mount failure', + json_schema_extra={'command_line': 'condition_name'}) # Mount configs volume_type: str = pydantic.Field( - command_line='volume_type', default='pvc', - description='Type of volume (pvc or csi)') + description='Type of volume (pvc or csi)', + json_schema_extra={'command_line': 'volume_type'}) volume_names: List[str] = pydantic.Field( - command_line='volume_names', - description='LFS volume names') + description='LFS volume names', + json_schema_extra={'command_line': 'volume_names'}) mount_paths: List[str] = pydantic.Field( - command_line='mount_paths', - description='Mount paths of the LFS volumemount') + description='Mount paths of the LFS volumemount', + json_schema_extra={'command_line': 'mount_paths'}) # PVC configs claim_names: List[str] = pydantic.Field( - command_line='claim_names', default=[], - description='Claim names of the LFS volume') + description='Claim names of the LFS volume', + json_schema_extra={'command_line': 'claim_names'}) sub_paths: List[str] = pydantic.Field( - command_line='sub_paths', default=[], - description='Sub paths of the LFS volumemount') + description='Sub paths of the LFS volumemount', + json_schema_extra={'command_line': 'sub_paths'}) # CSI configs lustre_drivers: List[str] = pydantic.Field( - command_line='lustre_drivers', default=[], - description='Lustre driver names for CSI volumes') + description='Lustre driver names for CSI volumes', + json_schema_extra={'command_line': 'lustre_drivers'}) lustre_shares: List[str] = pydantic.Field( - command_line='lustre_shares', default=[], - description='Lustre share paths for CSI volumes') + description='Lustre share paths for CSI volumes', + json_schema_extra={'command_line': 'lustre_shares'}) lustre_servers: List[str] = pydantic.Field( - command_line='lustre_servers', default=[], - description='Lustre server addresses for CSI volumes. 
Use ";" to separate multiple servers') + description='Lustre server addresses for CSI volumes. Use ";" to separate multiple servers', + json_schema_extra={'command_line': 'lustre_servers'}) lustre_mount_options: List[str] = pydantic.Field( - command_line='lustre_mount_options', default=[], - description='Lustre mount options for CSI volumes') + description='Lustre mount options for CSI volumes', + json_schema_extra={'command_line': 'lustre_mount_options'}) # Test Pod Configs pod_namespace: str = pydantic.Field( - command_line='pod_namespace', - env='OSMO_POD_NAMESPACE', - description='Namespace of the pod to create') + description='Namespace of the pod to create', + json_schema_extra={'command_line': 'pod_namespace', 'env': 'OSMO_POD_NAMESPACE'}) pod_image: str = pydantic.Field( - command_line='pod_image', default='alpine:latest', - description='Image for the test pod') + description='Image for the test pod', + json_schema_extra={'command_line': 'pod_image'}) image_pull_secret: str = pydantic.Field( - command_line='image_pull_secret', default='nvcr-secret', - description='Secret name for pulling the container image') + description='Secret name for pulling the container image', + json_schema_extra={'command_line': 'image_pull_secret'}) pod_succeeded_timeout: int = pydantic.Field( - command_line='pod_succeeded_timeout', default=120, - description='Timeout in seconds for the pod to be succeeded') + description='Timeout in seconds for the pod to be succeeded', + json_schema_extra={'command_line': 'pod_succeeded_timeout'}) - @pydantic.root_validator() + @pydantic.model_validator(mode='before') @classmethod - def validate_mount_configs(cls, values): - def _check_length(required_fields): + def validate_mount_configs(cls, values: dict[str, Any]) -> dict[str, Any]: + def _check_length(required_fields: Sequence[str]) -> dict[str, Any]: length = len(values.get(required_fields[0], {})) if all(len(values.get(field, {})) == length for field in required_fields): return values diff --git a/src/operator/utils/node_validation_test/resource_validator.py b/src/operator/utils/node_validation_test/resource_validator.py index 95d9c13bf..fff8fb2fd 100644 --- a/src/operator/utils/node_validation_test/resource_validator.py +++ b/src/operator/utils/node_validation_test/resource_validator.py @@ -30,72 +30,72 @@ class ResourceTestConfig(test_base.NodeTestConfig): """Configuration for resource validation tests.""" # label keys gpu_type_label: str = pydantic.Field( - command_line='gpu_type_label', default='nvidia.com/gpu', - description='GPU resource type') + description='GPU resource type', + json_schema_extra={'command_line': 'gpu_type_label'}) nic_type_label: str = pydantic.Field( - command_line='nic_type_label', default='nvidia.com/mlnxnics', - description='NIC resource type') + description='NIC resource type', + json_schema_extra={'command_line': 'nic_type_label'}) # resource counts gpu_count: int = pydantic.Field( - command_line='gpu_count', default=8, - description='Minimum number of GPUs required') + description='Minimum number of GPUs required', + json_schema_extra={'command_line': 'gpu_count'}) nic_count: int = pydantic.Field( - command_line='nic_count', default=8, - description='Minimum number of NICs required') + description='Minimum number of NICs required', + json_schema_extra={'command_line': 'nic_count'}) min_memory: str = pydantic.Field( - command_line='min_memory', default='1850Gi', - description='Minimum required memory') + description='Minimum required memory', + json_schema_extra={'command_line': 
'min_memory'}) min_storage: str = pydantic.Field( - command_line='min_storage', default='10Gi', - description='Minimum required storage') + description='Minimum required storage', + json_schema_extra={'command_line': 'min_storage'}) gpu_mode_label: str = pydantic.Field( - command_line='gpu_mode_label', default='nvidia.com/gpu.mode', - description='GPU mode label') + description='GPU mode label', + json_schema_extra={'command_line': 'gpu_mode_label'}) gpu_mode: str = pydantic.Field( - command_line='gpu_mode', default='compute', - description='Expected GPU mode') + description='Expected GPU mode', + json_schema_extra={'command_line': 'gpu_mode'}) gpu_product_label: str = pydantic.Field( - command_line='gpu_product_label', default='nvidia.com/gpu.product', - description='GPU product label') + description='GPU product label', + json_schema_extra={'command_line': 'gpu_product_label'}) gpu_product: str = pydantic.Field( - command_line='gpu_product', default='NVIDIA-H100-80GB-HBM3', - description='Expected GPU product') + description='Expected GPU product', + json_schema_extra={'command_line': 'gpu_product'}) # Add condition names gpu_less_than_total_condition: str = pydantic.Field( - command_line='gpu_less_than_total_condition', default='GpuLessThanTotal', - description='Condition name for insufficient GPU count') + description='Condition name for insufficient GPU count', + json_schema_extra={'command_line': 'gpu_less_than_total_condition'}) nics_less_than_total_condition: str = pydantic.Field( - command_line='nics_less_than_total_condition', default='NicsLessThanTotal', - description='Condition name for insufficient NIC count') + description='Condition name for insufficient NIC count', + json_schema_extra={'command_line': 'nics_less_than_total_condition'}) memory_less_than_total_condition: str = pydantic.Field( - command_line='memory_less_than_total_condition', default='MemoryLessThanTotal', - description='Condition name for insufficient memory') + description='Condition name for insufficient memory', + json_schema_extra={'command_line': 'memory_less_than_total_condition'}) storage_less_than_total_condition: str = pydantic.Field( - command_line='storage_less_than_total_condition', default='StorageLessThanTotal', - description='Condition name for insufficient storage') + description='Condition name for insufficient storage', + json_schema_extra={'command_line': 'storage_less_than_total_condition'}) gpu_incorrect_mode_condition: str = pydantic.Field( - command_line='gpu_incorrect_mode_condition', default='GpuIncorrectMode', - description='Condition name for incorrect GPU mode') + description='Condition name for incorrect GPU mode', + json_schema_extra={'command_line': 'gpu_incorrect_mode_condition'}) gpu_incorrect_product_condition: str = pydantic.Field( - command_line='gpu_incorrect_product_condition', default='GpuIncorrectProduct', - description='Condition name for incorrect GPU product') + description='Condition name for incorrect GPU product', + json_schema_extra={'command_line': 'gpu_incorrect_product_condition'}) class ResourceValidator(test_base.NodeTestBase): diff --git a/src/operator/utils/node_validation_test/test_base.py b/src/operator/utils/node_validation_test/test_base.py index 16d559efc..c7ff0e95a 100644 --- a/src/operator/utils/node_validation_test/test_base.py +++ b/src/operator/utils/node_validation_test/test_base.py @@ -17,7 +17,7 @@ """ -from datetime import datetime +from datetime import datetime, timezone import logging import signal import time @@ -52,38 +52,38 @@ def 
register_graceful_shutdown() -> None: class NodeTestConfig(static_config.StaticConfig, logging_utils.LoggingConfig): """Configuration for node validation tests.""" exit_after_validation: bool = pydantic.Field( - command_line='exit_after_validation', default=False, - description='Flag to exit after validation') + description='Flag to exit after validation', + json_schema_extra={'command_line': 'exit_after_validation'}) # Node/Pod infomation node_name: str = pydantic.Field( - command_line='node_name', - env='OSMO_NODE_NAME', - description='Name of the node to validate') + description='Name of the node to validate', + json_schema_extra={'command_line': 'node_name', 'env': 'OSMO_NODE_NAME'}) node_condition_prefix: str = pydantic.Field( - command_line='node_condition_prefix', - env='OSMO_NODE_CONDITION_PREFIX', default=DEFAULT_NODE_CONDITION_PREFIX, - description='Prefix for node conditions') + description='Prefix for node conditions', + json_schema_extra={ + 'command_line': 'node_condition_prefix', + 'env': 'OSMO_NODE_CONDITION_PREFIX'}) # Stability max_retries: int = pydantic.Field( - command_line='max_retries', default=3, - description='Maximum number of retries for the LFS mount test') + description='Maximum number of retries for the LFS mount test', + json_schema_extra={'command_line': 'max_retries'}) base_wait_seconds: int = pydantic.Field( - command_line='base_wait_seconds', default=10, - description='Base wait time in seconds between retries') + description='Base wait time in seconds between retries', + json_schema_extra={'command_line': 'base_wait_seconds'}) - @pydantic.validator('node_condition_prefix') + @pydantic.field_validator('node_condition_prefix') @classmethod - def validate_node_condition_prefix(cls, v: str) -> str: + def validate_node_condition_prefix(cls, value: str) -> str: """Validate that node_condition_prefix ends with 'osmo.nvidia.com/'. Args: - v: The value to validate + value: The value to validate Returns: The validated value @@ -91,10 +91,10 @@ def validate_node_condition_prefix(cls, v: str) -> str: Raises: ValueError: If the prefix doesn't end with DEFAULT_NODE_CONDITION_PREFIX """ - if not v.endswith(DEFAULT_NODE_CONDITION_PREFIX): + if not value.endswith(DEFAULT_NODE_CONDITION_PREFIX): raise ValueError( f"node_condition_prefix must end with '{DEFAULT_NODE_CONDITION_PREFIX}'") - return v + return value class NodeCondition(pydantic.BaseModel): @@ -106,27 +106,27 @@ class NodeCondition(pydantic.BaseModel): last_heartbeat_time: Optional[str] = pydantic.Field(None, alias='lastHeartbeatTime') last_transition_time: Optional[str] = pydantic.Field(None, alias='lastTransitionTime') - class Config: - allow_population_by_field_name = True - populate_by_name = True + model_config = pydantic.ConfigDict(populate_by_name=True) - @pydantic.validator('last_heartbeat_time', 'last_transition_time') + @pydantic.field_validator('last_heartbeat_time', 'last_transition_time') @classmethod - def validate_rfc3339_timestamp(cls, v): + def validate_rfc3339_timestamp(cls, value: str | None) -> str | None: """Validate RFC3339 timestamp format if value is provided. 
Args: - v: Current value of the field + value: Current value of the field Returns: Validated RFC3339 formatted timestamp string or None """ - if v is None: + if value is None: return None try: - # Try to parse the input as datetime - dt = datetime.fromisoformat(v.replace('Z', '+00:00')) - return dt.strftime('%Y-%m-%dT%H:%M:%SZ''') + dt = datetime.fromisoformat(value.replace('Z', '+00:00')) + if dt.tzinfo is None: + raise ValueError('Timestamp must include a timezone offset') + dt = dt.astimezone(tz=timezone.utc) + return dt.strftime('%Y-%m-%dT%H:%M:%SZ') except ValueError as error: raise osmo_errors.OSMOUserError( f'Timestamp must be in RFC3339 format like \'2024-03-21T15:30:00Z\', Error {error}') @@ -213,7 +213,7 @@ def update_node(self, if labels is not None: patch['metadata'] = {'labels': labels} if taints is not None: - patch['spec'] = {'taints': [t.dict() for t in taints]} + patch['spec'] = {'taints': [t.model_dump() for t in taints]} # Update metadata and spec if needed if patch: diff --git a/src/operator/utils/objects.py b/src/operator/utils/objects.py index 2237a916a..fd4a9442e 100644 --- a/src/operator/utils/objects.py +++ b/src/operator/utils/objects.py @@ -18,7 +18,7 @@ from typing import List, Literal -import pydantic # type: ignore +import pydantic from src.lib.utils import logging, login from src.operator.utils.node_validation_test import test_base @@ -31,212 +31,217 @@ class BackendBaseConfig(logging.LoggingConfig, login.LoginConfig, static_config.StaticConfig): """Base configuration class for backend services with common service connector fields""" service_url: str = pydantic.Field( - command_line='host', default='http://127.0.0.1:8000', - description='The osmo service url to connect to.') + description='The osmo service url to connect to.', + json_schema_extra={'command_line': 'host'}) backend: str = pydantic.Field( - command_line='backend', default='osmo-backend', - env='BACKEND', - description='The backend to connect to.') + description='The backend to connect to.', + json_schema_extra={'command_line': 'backend', 'env': 'BACKEND'}) namespace: str = pydantic.Field( - command_line='namespace', - description='The namespace for this backend.') + description='The namespace for this backend.', + json_schema_extra={'command_line': 'namespace'}) method: Literal['dev'] | None = pydantic.Field( - command_line='method', default=None, - description='Login method') + description='Login method', + json_schema_extra={'command_line': 'method'}) class BackendListenerConfig(BackendBaseConfig, metrics.MetricsCreatorConfig): """Configuration for the backend listener service that monitors Kubernetes resources""" include_namespace_usage: List[str] = pydantic.Field( - command_line='include_namespace_usage', default=[], - description='The namespaces of pods to include in node usage.') + description='The namespaces of pods to include in node usage.', + json_schema_extra={'command_line': 'include_namespace_usage'}) progress_folder_path: str = pydantic.Field( - command_line='progress_folder_path', - env='OSMO_PROGRESS_FOLDER_PATH', default='/var/run/osmo', - description='The folder path to write progress timestamps to (For liveness/startup probes)') + description='The folder path to write progress timestamps to (For liveness/startup probes)', + json_schema_extra={ + 'command_line': 'progress_folder_path', + 'env': 'OSMO_PROGRESS_FOLDER_PATH' + }) node_progress_file: str = pydantic.Field( - command_line='node_progress_file', - env='OSMO_NODE_PROGRESS_FILE', default='last_progress_node', 
description='The file to write node watch progress timestamps to (For liveness/startup ' + - 'probes)') + 'probes)', + json_schema_extra={'command_line': 'node_progress_file', 'env': 'OSMO_NODE_PROGRESS_FILE'}) pod_progress_file: str = pydantic.Field( - command_line='pod_progress_file', - env='OSMO_POD_PROGRESS_FILE', default='last_progress_pod', description='The file to write pod watch progress timestamps to (For liveness/startup ' + - 'probes)') + 'probes)', + json_schema_extra={'command_line': 'pod_progress_file', 'env': 'OSMO_POD_PROGRESS_FILE'}) event_progress_file: str = pydantic.Field( - command_line='event_progress_file', - env='OSMO_EVENT_PROGRESS_FILE', default='last_progress_event', description='The file to write event watch progress timestamps to (For liveness/startup ' + - 'probes)') + 'probes)', + json_schema_extra={ + 'command_line': 'event_progress_file', + 'env': 'OSMO_EVENT_PROGRESS_FILE' + }) control_progress_file: str = pydantic.Field( - command_line='control_progress_file', - env='OSMO_CONTROL_PROGRESS_FILE', default='last_progress_control', description='The file to write control progress timestamps to ' + - '(For liveness/startup probes)') + '(For liveness/startup probes)', + json_schema_extra={ + 'command_line': 'control_progress_file', + 'env': 'OSMO_CONTROL_PROGRESS_FILE' + }) websocket_progress_file: str = pydantic.Field( - command_line='websocket_progress_file', - env='OSMO_WEBSOCKET_PROGRESS_FILE', default='last_progress_websocket', description='The file to write websocket progress timestamps to (For liveness/startup ' + - 'probes)') + 'probes)', + json_schema_extra={ + 'command_line': 'websocket_progress_file', + 'env': 'OSMO_WEBSOCKET_PROGRESS_FILE' + }) pod_event_cache_size: int = pydantic.Field( - command_line='pod_event_cache_size', - env='POD_EVENT_CACHE_SIZE', default=1024, - description='The size of the cache for tracking pod status updates.') + description='The size of the cache for tracking pod status updates.', + json_schema_extra={'command_line': 'pod_event_cache_size', 'env': 'POD_EVENT_CACHE_SIZE'}) pod_event_cache_ttl: int = pydantic.Field( - command_line='pod_event_cache_ttl', - env='POD_EVENT_CACHE_TTL', default=15, description='The duration a cache entry for a pod status update stays in the cache ' '(in minutes). If set to 0, TTL is disabled, and pod status will be ' - 'cached perpetually.') + 'cached perpetually.', + json_schema_extra={'command_line': 'pod_event_cache_ttl', 'env': 'POD_EVENT_CACHE_TTL'}) node_event_cache_size: int = pydantic.Field( - command_line='node_event_cache_size', - env='NODE_EVENT_CACHE_SIZE', default=1024, - description='The size of the cache for tracking node updates.') + description='The size of the cache for tracking node updates.', + json_schema_extra={'command_line': 'node_event_cache_size', 'env': 'NODE_EVENT_CACHE_SIZE'}) node_event_cache_ttl: int = pydantic.Field( - command_line='node_event_cache_ttl', - env='NODE_EVENT_CACHE_TTL', default=15, description='The duration a cache entry for a node updates stays in the cache ' '(in minutes). 
If set to 0, TTL is disabled, and node status will be ' - 'cached perpetually.') + 'cached perpetually.', + json_schema_extra={'command_line': 'node_event_cache_ttl', 'env': 'NODE_EVENT_CACHE_TTL'}) backend_event_cache_size: int = pydantic.Field( - command_line='backend_event_cache_size', - env='BACKEND_EVENT_CACHE_SIZE', default=1024, - description='The size of the cache for deduplicating backend updates.') + description='The size of the cache for deduplicating backend updates.', + json_schema_extra={ + 'command_line': 'backend_event_cache_size', + 'env': 'BACKEND_EVENT_CACHE_SIZE' + }) max_unacked_messages: int = pydantic.Field( - command_line='max_unacked_messages', - env='MAX_UNACKED_MESSAGES', default=100, description='Threshold for number of unacknowledged messages to determine whether to ' - 'throttle sending messages. This should be smaller than "agent_queue_size"') + 'throttle sending messages. This should be smaller than "agent_queue_size"', + json_schema_extra={'command_line': 'max_unacked_messages', 'env': 'MAX_UNACKED_MESSAGES'}) node_condition_prefix: str = pydantic.Field( - command_line='node_condition_prefix', default=test_base.DEFAULT_NODE_CONDITION_PREFIX, - description='Prefix for node conditions') + description='Prefix for node conditions', + json_schema_extra={'command_line': 'node_condition_prefix'}) enable_node_label_update: bool = pydantic.Field( - command_line='enable_node_label_update', - env='ENABLE_NODE_LABEL_UPDATE', default=False, description='Enable updating the node_condition_prefix/verified node label based on ' - 'node availability determined by node conditions.') + 'node availability determined by node conditions.', + json_schema_extra={ + 'command_line': 'enable_node_label_update', + 'env': 'ENABLE_NODE_LABEL_UPDATE' + }) list_pods_page_size: int = pydantic.Field( - command_line='list_pods_page_size', - env='LIST_PODS_PAGE_SIZE', default=1000, - description='The number of pods to list in a single page when listing pods.') + description='The number of pods to list in a single page when listing pods.', + json_schema_extra={'command_line': 'list_pods_page_size', 'env': 'LIST_PODS_PAGE_SIZE'}) refresh_resource_state_interval: int = pydantic.Field( - command_line='refresh_resource_state_interval', - env='REFRESH_RESOURCE_STATE_INTERVAL', default=300, description='The number of seconds since last successful event fetch before triggering a ' - 'refresh of the resource state.') + 'refresh of the resource state.', + json_schema_extra={ + 'command_line': 'refresh_resource_state_interval', + 'env': 'REFRESH_RESOURCE_STATE_INTERVAL' + }) api_qps: int = pydantic.Field( - command_line='api_qps', - env='OSMO_API_QPS', default=20, description='Kubernetes API client QPS (queries per second) setting. Controls the ' - 'sustained rate of API requests. Default is 20 (Kubernetes default is 5).') + 'sustained rate of API requests. Default is 20 (Kubernetes default is 5).', + json_schema_extra={'command_line': 'api_qps', 'env': 'OSMO_API_QPS'}) api_burst: int = pydantic.Field( - command_line='api_burst', - env='OSMO_API_BURST', default=30, description='Kubernetes API client burst setting. Allows temporary bursts above the QPS ' - 'limit. Default is 30 (Kubernetes default is 10).') + 'limit. 
Default is 30 (Kubernetes default is 10).', + json_schema_extra={'command_line': 'api_burst', 'env': 'OSMO_API_BURST'}) class BackendWorkerConfig(BackendBaseConfig, metrics.MetricsCreatorConfig): """Configuration for the backend worker service that executes jobs""" test_runner_namespace: str = pydantic.Field( - command_line='test_runner_namespace', default='osmo-test', - description='The namespace for the test runner.') + description='The namespace for the test runner.', + json_schema_extra={'command_line': 'test_runner_namespace'}) test_runner_cronjob_spec_file: str = pydantic.Field( - command_line='test_runner_cronjob_spec_file', - env='TEST_RUNNER_CRONJOB_SPEC_FILE', default='test_runner_cronjob_spec/spec.yaml', - description='Path to the test runner cronjob specification YAML file') + description='Path to the test runner cronjob specification YAML file', + json_schema_extra={ + 'command_line': 'test_runner_cronjob_spec_file', + 'env': 'TEST_RUNNER_CRONJOB_SPEC_FILE' + }) progress_folder_path: str = pydantic.Field( - command_line='progress_folder_path', - env='OSMO_PROGRESS_FOLDER_PATH', default='/var/run/osmo', - description='The folder path to write progress timestamps to (For liveness/startup probes)') + description='The folder path to write progress timestamps to (For liveness/startup probes)', + json_schema_extra={ + 'command_line': 'progress_folder_path', + 'env': 'OSMO_PROGRESS_FOLDER_PATH' + }) worker_heartbeat_progress_file: str = pydantic.Field( - command_line='worker_heartbeat_progress_file', - env='OSMO_WORKER_HEARTBEAT_PROGRESS_FILE', default='last_progress_worker_heartbeat', description='The file to write worker heartbeat progress timestamps to (For ' + - 'liveness/startup probes)') + 'liveness/startup probes)', + json_schema_extra={ + 'command_line': 'worker_heartbeat_progress_file', + 'env': 'OSMO_WORKER_HEARTBEAT_PROGRESS_FILE' + }) worker_job_progress_file: str = pydantic.Field( - command_line='worker_job_progress_file', - env='OSMO_WORKER_JOB_PROGRESS_FILE', default='last_progress_worker_job', description='The file to write worker job progress timestamps to (For liveness/startup ' + - 'probes)') + 'probes)', + json_schema_extra={ + 'command_line': 'worker_job_progress_file', + 'env': 'OSMO_WORKER_JOB_PROGRESS_FILE' + }) progress_iter_frequency: str = pydantic.Field( - command_line='progress_iter_frequency', - env='OSMO_PROGRESS_ITER_FREQUENCY', default='15s', description='How often to write to progress file when processing tasks in a loop (' - 'e.g. write to progress every 100 tasks processed, like uploaded to DB)') + 'e.g. 
write to progress every 100 tasks processed, like uploaded to DB)', + json_schema_extra={ + 'command_line': 'progress_iter_frequency', + 'env': 'OSMO_PROGRESS_ITER_FREQUENCY' + }) node_condition_prefix: str = pydantic.Field( - command_line='node_condition_prefix', default=test_base.DEFAULT_NODE_CONDITION_PREFIX, - description='Prefix for node conditions') + description='Prefix for node conditions', + json_schema_extra={'command_line': 'node_condition_prefix'}) class TestRunnerConfig(BackendBaseConfig): """Configuration for resource tests.""" test_name: str = pydantic.Field( - command_line='backend_test_name', - env='BACKEND_TEST_NAME', - required=True, - description='Name of the test to run') + description='Name of the test to run', + json_schema_extra={'command_line': 'backend_test_name', 'env': 'BACKEND_TEST_NAME'}) namespace: str = pydantic.Field( - command_line='namespace', - env='NAMESPACE', - required=True, - description='Kubernetes namespace to run test in') + description='Kubernetes namespace to run test in', + json_schema_extra={'command_line': 'namespace', 'env': 'NAMESPACE'}) node_condition_prefix: str = pydantic.Field( - env='NODE_CONDITION_PREFIX', - command_line='node_condition_prefix', default=test_base.DEFAULT_NODE_CONDITION_PREFIX, - description='Prefix for node conditions') + description='Prefix for node conditions', + json_schema_extra={'env': 'NODE_CONDITION_PREFIX', 'command_line': 'node_condition_prefix'}) prefix: str = pydantic.Field( - command_line='prefix', - env='PREFIX', default='osmo', - required=True, - description='Prefix for daemonset names') + description='Prefix for daemonset names', + json_schema_extra={'command_line': 'prefix', 'env': 'PREFIX'}) read_from_osmo: bool = pydantic.Field( - command_line='read_from_osmo', default=True, - description='Whether to read test config from OSMO service') + description='Whether to read test config from OSMO service', + json_schema_extra={'command_line': 'read_from_osmo'}) read_from_file: str | None = pydantic.Field( - command_line='read_from_file', - default='/tmp/test_config.json', - description='Whether to read test config from file') + default=None, + description='Path to read test config from file (required when read_from_osmo is False)', + json_schema_extra={'command_line': 'read_from_file'}) service_account: str | None = pydantic.Field( - command_line='service_account', - env='SERVICE_ACCOUNT', default='test-runner', - description='Service account name to use for the daemonset pods') + description='Service account name to use for the daemonset pods', + json_schema_extra={'command_line': 'service_account', 'env': 'SERVICE_ACCOUNT'}) - @pydantic.root_validator + @pydantic.model_validator(mode='before') @classmethod def validate_config_source(cls, values): read_from_osmo = values.get('read_from_osmo', True) diff --git a/src/requirements.txt b/src/requirements.txt index 98387b7e9..8dd074242 100644 --- a/src/requirements.txt +++ b/src/requirements.txt @@ -19,7 +19,8 @@ # pip install pip-tools # pip-compile ./requirements.txt -o locked_requirements.txt --allow-unsafe -v --generate-hashes -pydantic==1.10.26 +pydantic==2.12.5 +pydantic-settings==2.9.1 # For job queue kombu==5.2.4 diff --git a/src/service/agent/agent_service.py b/src/service/agent/agent_service.py index 199efdbce..3892b7c4e 100644 --- a/src/service/agent/agent_service.py +++ b/src/service/agent/agent_service.py @@ -37,10 +37,9 @@ class BackendServiceConfig(connectors.RedisConfig, connectors.PostgresConfig, src.lib.utils.logging.LoggingConfig, 
static_config.StaticConfig): """Config settings for the backend service""" progress_period: int = pydantic.Field( - command_line='progress_period', - env='OSMO_PROGRESS_PERIOD', default=30, - description='The amount of time to wait between updating progress') + description='The amount of time to wait between updating progress', + json_schema_extra={'command_line': 'progress_period', 'env': 'OSMO_PROGRESS_PERIOD'}) app = fastapi.FastAPI(docs_url=None, redoc_url=None, openapi_url=None) diff --git a/src/service/agent/helpers.py b/src/service/agent/helpers.py index cc9d51e40..f7fd0e838 100644 --- a/src/service/agent/helpers.py +++ b/src/service/agent/helpers.py @@ -109,7 +109,7 @@ def create_backend(postgres: connectors.PostgresConnector, insert_cmd, (name, message.k8s_uid, message.k8s_namespace, '', '', - connectors.BackendSchedulerSettings().json(), + connectors.BackendSchedulerSettings().model_dump_json(), common.current_time(), common.current_time(), '', router_address, message.version)) if k8s_info[0].k8s_uid != message.k8s_uid: @@ -411,7 +411,7 @@ def keep_pod_conditions(message: backend_messages.ConditionMessage) -> bool: """ if message.type == 'ContainersReady': return False - if message.type in ['Initialized', 'Ready'] and message.status is False: + if message.type in ['Initialized', 'Ready'] and message.status == 'False': return False return True @@ -460,7 +460,7 @@ def send_pod_conditions(postgres: connectors.PostgresConnector, text=condition_log) redis_client.xadd(common.get_workflow_events_redis_name(message.workflow_uuid), - json.loads(log_body.json()), + json.loads(log_body.model_dump_json()), maxlen=max_event_log_lines) # Update the latest timestamp @@ -507,7 +507,7 @@ def send_pod_event(postgres: connectors.PostgresConnector, retry_id=retry_id, text=event_log) redis_client.xadd(common.get_workflow_events_redis_name(workflow_uuid), - json.loads(log_body.json()), + json.loads(log_body.model_dump_json()), maxlen=max_event_log_lines) redis_client.set(timestamp_key, message.timestamp.timestamp()) redis_client.expire(timestamp_key, connectors.MAX_LOG_TTL, nx=True) @@ -588,7 +588,7 @@ async def get_messages(): ack_body = backend_messages.AckBody(uuid=message.uuid) ack_message = backend_messages.MessageBody( type=backend_messages.MessageType.ACK, - body=ack_body.dict() + body=ack_body.model_dump() ) message_options = { message.type.value: message.body @@ -656,7 +656,7 @@ async def get_messages(): ) from db_err finally: if ack_message: - await websocket.send_text(ack_message.json()) + await websocket.send_text(ack_message.model_dump_json()) except fastapi.WebSocketDisconnect as err: # The websocket is closed by client logging.info( @@ -680,7 +680,7 @@ async def backend_listener_control_impl(websocket: fastapi.WebSocket, name: str) try: # Get backend info from database and send node conditions backend_info = connectors.Backend.fetch_from_db(postgres, name) - node_conditions = backend_info.node_conditions.dict() + node_conditions = backend_info.node_conditions.model_dump() # Send node conditions to backend listener message = backend_messages.MessageBody( @@ -689,7 +689,7 @@ async def backend_listener_control_impl(websocket: fastapi.WebSocket, name: str) rules=node_conditions.get('rules', {}) ) ) - await websocket.send_text(message.json()) + await websocket.send_text(message.model_dump_json()) logging.info('Sent node conditions to backend %s', name) async with redis.asyncio.from_url(config.redis_url) as redis_client: @@ -709,7 +709,7 @@ async def backend_listener_control_impl(websocket: 
fastapi.WebSocket, name: str) body=backend_messages.NodeConditionsBody( rules=json_fields.get('rules', {}) )) - await websocket.send_text(message.json()) + await websocket.send_text(message.model_dump_json()) except (ConnectionError, asyncio.exceptions.TimeoutError) as conn_error: # Handle connection/timeout errors diff --git a/src/service/agent/objects.py b/src/service/agent/objects.py index b95c6383e..ce8d65153 100644 --- a/src/service/agent/objects.py +++ b/src/service/agent/objects.py @@ -56,7 +56,7 @@ class BackendDeleteType(enum.Enum): UNIQUE_JOB_TTL = 5 * 24 * 60 * 60 -class ListBackendsResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListBackendsResponse(pydantic.BaseModel, extra='forbid'): """ Object storing info for all backends. """ backends: List[connectors.Backend] @@ -255,7 +255,7 @@ async def handle_job(self, job_spec: Dict): job=job, start_time=time.time()) - compressed = zlib.compress(job.json().encode('utf-8')) + compressed = zlib.compress(job.model_dump_json().encode('utf-8')) await self.websocket.send_bytes(compressed) @@ -311,7 +311,7 @@ async def handle_message(self, message_json: Dict, backend_name:str): self._current_job.log_redis.xadd( f'{self._current_job.workflow.workflow_id}-' +\ f'{message_option.pod_log.task}-{message_option.pod_log.retry_id}-error-logs', - json.loads(logs.json()), + json.loads(logs.model_dump_json()), maxlen=workflow_config.max_log_lines) self._current_job.log_redis.expire( f'{self._current_job.workflow.workflow_id}-' +\ diff --git a/src/service/core/app/objects.py b/src/service/core/app/objects.py index 1a26640c2..55466012a 100644 --- a/src/service/core/app/objects.py +++ b/src/service/core/app/objects.py @@ -24,7 +24,7 @@ from src.lib.utils import common -AppNamePattern = Annotated[str, pydantic.Field(regex=common.APP_NAME_VALIDATION_REGEX)] +AppNamePattern = Annotated[str, pydantic.Field(pattern=common.APP_NAME_VALIDATION_REGEX)] class ListEntry(pydantic.BaseModel): @@ -33,10 +33,10 @@ class ListEntry(pydantic.BaseModel): description: str created_date: datetime.datetime owner: str - latest_version: str + latest_version: int -class ListResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListResponse(pydantic.BaseModel, extra='forbid'): apps: List[ListEntry] more_entries: bool @@ -48,7 +48,7 @@ class GetVersionEntry(pydantic.BaseModel): status: str -class GetAppResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class GetAppResponse(pydantic.BaseModel, extra='forbid'): uuid: str name: str description: str @@ -57,7 +57,7 @@ class GetAppResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): versions: List[GetVersionEntry] -class EditResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class EditResponse(pydantic.BaseModel, extra='forbid'): uuid: str version: int name: str diff --git a/src/service/core/config/config_service.py b/src/service/core/config/config_service.py index ef0f4b594..931a876b1 100644 --- a/src/service/core/config/config_service.py +++ b/src/service/core/config/config_service.py @@ -36,6 +36,7 @@ tags=['Config API'] ) + class ConfigNameType(enum.Enum): """ Represents the config type for checking name. """ POD_TEMPLATE = 'Pod template' @@ -51,9 +52,10 @@ def _check_config_name(name: str, name_type: ConfigNameType): if not re.fullmatch(common.CONFIG_NAME_REGEX, name): raise osmo_errors.OSMOUserError( f'{name_type.value} name "{name}" is not valid! Name can only ' - 'be alphanumeric and contain dash or underscore.' + 'be alphanumeric and contain dash or underscore.' 
     )

+
 @router.get(
     '/api/configs/service',
     response_model=connectors.ServiceConfig,
@@ -201,18 +203,16 @@ def clean_configs() -> Dict:
                                                            by_alias=True,
                                                            exclude_unset=True)
         try:
-            connectors.ExtraArgBaseModel.set_extra(connectors.ExtraType.IGNORE)
-            configs = connectors.ServiceConfig(**service_configs_dict)
-            connectors.ExtraArgBaseModel.set_extra(connectors.ExtraType.ALLOW)
+            configs = connectors.ServiceConfig.from_db(service_configs_dict)
             updated_configs = configs.serialize(postgres)
             for key, value in updated_configs.items():
                 postgres.set_config(key, value)
         except pydantic.ValidationError as err:
-            raise osmo_errors.OSMOUsageError(f'{err}')
-        return postgres.get_service_configs().dict(by_alias=True,
-                                                   exclude_unset=True)
+            raise osmo_errors.OSMOUsageError(f'{err}') from err
+        return postgres.get_service_configs().model_dump(by_alias=True,
+                                                         exclude_unset=True)

-    app.add_api_route('/api/configs/service/clean', clean_configs, # type: ignore
+    app.add_api_route('/api/configs/service/clean', clean_configs,  # type: ignore
                       description='Clean service configurations',
                       response_model=Dict, methods=['POST'], tags=['Config API'])
@@ -262,7 +262,7 @@ def delete_backend(
                                                       [name], 1)
     if alive_workflows:
         raise osmo_errors.OSMOBackendError(
-            f'Backend {name} is not finished running workflows. Alive workflows: ' +\
+            f'Backend {name} is not finished running workflows. Alive workflows: '
            f'{", ".join(wf.workflow_id for wf in alive_workflows)}')
     connectors.delete_redis_backend(name, workflow_objects.WorkflowServiceContext.get().config)
     helpers.delete_backend(name, request, username)
@@ -273,7 +273,7 @@
     response_model=connectors.VerbosePoolConfig | connectors.EditablePoolConfig,
 )
 def list_pools(verbose: bool = False, backend: str | None = None) -> \
-    connectors.VerbosePoolConfig | connectors.EditablePoolConfig:
+        connectors.VerbosePoolConfig | connectors.EditablePoolConfig:
     """ List all Pools """
     postgres = connectors.PostgresConnector.get_instance()
     pool_type = connectors.PoolType.VERBOSE if verbose else connectors.PoolType.EDITABLE
@@ -304,8 +304,8 @@ def _check_platform_changes(old_pool: connectors.Pool, new_pool: connectors.Pool
     # Check platforms that exist in both old and new configs
     for platform_name in old_platforms & new_platforms:
         if not helpers.pod_labels_and_tolerations_equal(
-            old_pool.platforms[platform_name].parsed_pod_template,
-            new_pool.platforms[platform_name].parsed_pod_template):
+                old_pool.platforms[platform_name].parsed_pod_template,
+                new_pool.platforms[platform_name].parsed_pod_template):
             return True

     return False
@@ -332,8 +332,8 @@ def _check_pool_changes(old_pool: connectors.Pool | None, new_pool: connectors.P

     # Check if pod template changed
     if not helpers.pod_labels_and_tolerations_equal(
-        old_pool.parsed_pod_template,
-        new_pool.parsed_pod_template):
+            old_pool.parsed_pod_template,
+            new_pool.parsed_pod_template):
         return True

     # Check if platforms changed
@@ -400,7 +400,7 @@ def read_pool(
     """
     postgres = connectors.PostgresConnector.get_instance()
     pool_info = connectors.Pool.fetch_from_db(postgres, name)
-    return pool_info if verbose else connectors.PoolEditable(**pool_info.dict())
+    return pool_info if verbose else connectors.PoolEditable(**pool_info.model_dump())


 @router.put('/api/configs/pool/{name}')
@@ -462,7 +462,7 @@ def patch_pool(
         raise osmo_errors.OSMOUserError(f'Pool {name} not found') from e

     # Apply the strategic merge patch to create the updated pool configuration
-    current_pool_dict = current_pool.dict()
+    current_pool_dict = current_pool.model_dump()
     updated_pool_dict = common.strategic_merge_patch(
         current_pool_dict, request.configs_dict
     )
@@ -628,6 +628,7 @@ def rename_platform_in_pool(name: str, platform_name: str,
         tags=request.tags,
     )

+
 @router.get(
     '/api/configs/pod_template',
     response_model=Dict[str, Any],
@@ -665,7 +666,7 @@ def put_pod_templates(request: objects.PutPodTemplatesRequest,
         pod_template = connectors.PodTemplate(pod_template=pod_template_dict)
         pod_template.insert_into_db(postgres, name)
         if old_pod_template and \
-            not helpers.pod_labels_and_tolerations_equal(old_pod_template, pod_template_dict):
+                not helpers.pod_labels_and_tolerations_equal(old_pod_template, pod_template_dict):
             pool_list = connectors.PodTemplate.get_pools(postgres, name)
             for pool in pool_list:
                 helpers.update_backend_node_pool_platform(pool=pool['name'], platform=None)
@@ -696,7 +697,7 @@ def put_pod_template(name: str,
     pod_template = connectors.PodTemplate(pod_template=request.configs)
     pod_template.insert_into_db(postgres, name)
     if old_pod_template and \
-        not helpers.pod_labels_and_tolerations_equal(old_pod_template, request.configs):
+            not helpers.pod_labels_and_tolerations_equal(old_pod_template, request.configs):
         pool_list = connectors.PodTemplate.get_pools(postgres, name)
         for pool in pool_list:
             helpers.update_backend_node_pool_platform(pool=pool['name'], platform=None)
@@ -1034,7 +1035,7 @@ def patch_backend_test(
         raise osmo_errors.OSMOUserError(f'Backend test {name} not found') from e

     # Apply the strategic merge patch
-    current_test_dict = current_test.dict()
+    current_test_dict = current_test.model_dump()
     updated_test_dict = common.strategic_merge_patch(
         current_test_dict, request.configs_dict
     )
@@ -1139,7 +1140,7 @@ def rollback_config(
     if request.config_type == connectors.ConfigHistoryType.SERVICE:
         helpers.put_configs(
             objects.PutConfigsRequest(
-                configs=connectors.ServiceConfig(**history_entry['data']),
+                configs=connectors.ServiceConfig.from_db(history_entry['data']),
                 description=description,
                 tags=request.tags
             ),
@@ -1151,7 +1152,7 @@
     elif request.config_type == connectors.ConfigHistoryType.WORKFLOW:
         helpers.put_configs(
             objects.PutConfigsRequest(
-                configs=connectors.WorkflowConfig(**history_entry['data']),
+                configs=connectors.WorkflowConfig.from_db(history_entry['data']),
                 description=description,
                 tags=request.tags
             ),
@@ -1163,7 +1164,7 @@
     elif request.config_type == connectors.ConfigHistoryType.DATASET:
         helpers.put_configs(
             objects.PutConfigsRequest(
-                configs=connectors.DatasetConfig(**history_entry['data']),
+                configs=connectors.DatasetConfig.from_db(history_entry['data']),
                 description=description,
                 tags=request.tags
             ),
@@ -1289,7 +1290,7 @@
     elif request.config_type == connectors.ConfigHistoryType.ROLE:
         # Delete all existing roles
         existing_roles = connectors.Role.list_from_db(postgres)
-        next_roles= [role['name'] for role in history_entry['data']]
+        next_roles = [role['name'] for role in history_entry['data']]
         roles_to_remove = [
             role.name for role in existing_roles if role.name not in next_roles
         ]
@@ -1308,6 +1309,7 @@
     else:
         raise osmo_errors.OSMOUserError(f'Unsupported config type: {request.config_type.value}')

+
 @router.delete('/api/configs/history/{config_type}/revision/{revision}')
 def delete_config_history_revision(
     config_type: str,
@@ -1469,8 +1471,8 @@ def diff_secret_strs(first_data: Any, second_data: Any, second_revision: int) ->
         else:
             return second_data
     elif isinstance(first_data, pydantic.BaseModel) and \
-        isinstance(second_data, pydantic.BaseModel) and \
-        isinstance(first_data, type(second_data)):
+            isinstance(second_data, pydantic.BaseModel) and \
+            isinstance(first_data, type(second_data)):
         result = {}
         for key in second_data.__dict__:
             if key in first_data.__dict__:
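The ServiceConfig.from_db() calls above replace v1's global ExtraArgBaseModel.set_extra() toggling: Pydantic v2 fixes a model's `extra` policy at class-definition time, so per-call leniency has to live in an alternate constructor. The diff never shows the body of from_db, so the following is only a sketch of a plausible shape, with the key-filtering logic an assumption:

import pydantic

class ServiceConfig(pydantic.BaseModel):
    # The live model rejects unknown keys coming from API callers.
    model_config = pydantic.ConfigDict(extra='forbid')

    latest_version: str = ''

    @classmethod
    def from_db(cls, data: dict) -> 'ServiceConfig':
        # Hypothetical body: rows loaded from the DB may carry stale keys
        # from older releases, so drop anything the current schema does
        # not define instead of failing validation on them.
        known = {key: value for key, value in data.items() if key in cls.model_fields}
        return cls(**known)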
diff --git a/src/service/core/config/helpers.py b/src/service/core/config/helpers.py
index 0e0787316..6f90c6d06 100644
--- a/src/service/core/config/helpers.py
+++ b/src/service/core/config/helpers.py
@@ -20,7 +20,7 @@
 from datetime import datetime
 import json
 import logging
-from typing import Any, Dict, List
+from typing import Any, Dict, List, Type

 import pydantic
 import yaml
@@ -166,16 +166,20 @@ def patch_configs(
             updated_configs_fields[key] = value

     try:
-        if postgres.get_method() != 'dev':
-            connectors.ExtraArgBaseModel.set_extra(connectors.ExtraType.FORBID)
-
-        configs: connectors.DynamicConfig
+        config_class: Type[connectors.DynamicConfig]
         if config_type == connectors.ConfigType.SERVICE:
-            configs = connectors.ServiceConfig(**updated_configs_fields)
+            config_class = connectors.ServiceConfig
         elif config_type == connectors.ConfigType.WORKFLOW:
-            configs = connectors.WorkflowConfig(**updated_configs_fields)
+            config_class = connectors.WorkflowConfig
         elif config_type == connectors.ConfigType.DATASET:
-            configs = connectors.DatasetConfig(**updated_configs_fields)
+            config_class = connectors.DatasetConfig
+        else:
+            raise osmo_errors.OSMOServerError(f'Config type: {config_type.value} unknown')
+
+        configs = config_class(**updated_configs_fields)
+
+        if config_type == connectors.ConfigType.DATASET and isinstance(
+                configs, connectors.DatasetConfig):
             try:
                 for _, bucket_config in configs.buckets.items():
                     connectors.BucketMode(bucket_config.mode.lower())
@@ -185,11 +189,6 @@
                         f'Bucket mode {bucket_config.mode} is not valid. Valid modes are '
                         f'{", ".join([member.value for member in connectors.BucketMode])}')
-        else:
-            raise osmo_errors.OSMOServerError(f'Config type: {config_type.value} unknown')
-
-        if postgres.get_method() != 'dev':
-            connectors.ExtraArgBaseModel.set_extra(connectors.ExtraType.IGNORE)

         updated_configs = configs.serialize(postgres)
         for key, value in updated_configs.items():
             postgres.set_config(key, value, config_type)
@@ -208,7 +207,8 @@ def patch_configs(
         tags=request.tags,
     )

-    new_configs_dict = postgres.get_configs(config_type).dict(by_alias=True, exclude_unset=True)
+    new_configs_dict = postgres.get_configs(config_type).model_dump(
+        by_alias=True, exclude_unset=True)
     return {key: value for key, value in new_configs_dict.items() if key in request.configs_dict}

 def backend_action_request_helper(payload: Dict[str, Any], name: str):
@@ -281,7 +281,7 @@ def create_backend_config_history_entry(
     backends = connectors.Backend.list_from_db(postgres)

     backends_list = [
-        backend.dict(by_alias=True, exclude_unset=True)
+        backend.model_dump(by_alias=True, exclude_unset=True)
         for backend in backends
     ]
@@ -313,11 +313,11 @@ def update_backend(
     postgres = connectors.PostgresConnector.get_instance()
     try:
         old_backend = connectors.Backend.fetch_from_db(postgres, name)
-    except pydantic.error_wrappers.ValidationError as e:
+    except pydantic.ValidationError as e:
         logging.warning('Failed to get previous backend %s: %s', name, e)
         old_backend = None
     _update_backend_helper(postgres, configs_objects.BackendConfigWithName(
-        **request.configs.dict(), name=name))
+        **request.configs.model_dump(), name=name))
     create_backend_config_history_entry(
         postgres, name, username,
         request.description or f"Updated backend \'{name}\'", request.tags
@@ -404,7 +404,7 @@ def delete_backend(
     postgres.execute_commit_command(delete_resource_cmd, (name,))

     backends = [
-        backend.dict(by_alias=True, exclude_unset=True)
+        backend.model_dump(by_alias=True, exclude_unset=True)
         for backend in connectors.Backend.list_from_db(postgres)
     ]
@@ -428,7 +428,7 @@ def create_pool_config_history_entry(
     Add a history entry for a pool config.
     """
     postgres = connectors.PostgresConnector.get_instance()
-    pools = connectors.fetch_editable_pool_config(postgres).dict(
+    pools = connectors.fetch_editable_pool_config(postgres).model_dump(
         by_alias=True, exclude_unset=True
     )
     postgres.create_config_history_entry(
@@ -451,7 +451,7 @@ def create_dataset_config_history_entry(
     Add a history entry for a dataset config.
     """
     postgres = connectors.PostgresConnector.get_instance()
-    dataset_configs = postgres.get_dataset_configs().dict(
+    dataset_configs = postgres.get_dataset_configs().model_dump(
         by_alias=True, exclude_unset=True
     )
     postgres.create_config_history_entry(
@@ -726,7 +726,7 @@ def update_backend_tests_cronjobs(backend_name: str, current_tests: List[str],
     for test_name in current_tests:
         try:
             test_config = connectors.BackendTests.fetch_from_db(postgres, test_name)
-            test_configs[test_name] = test_config.dict(by_alias=True, exclude_unset=True)
+            test_configs[test_name] = test_config.model_dump(by_alias=True, exclude_unset=True)
         except osmo_errors.OSMOError as error:
             logging.error('Failed to fetch test config for test %s: %s', test_name, error)
             continue
diff --git a/src/service/core/config/objects.py b/src/service/core/config/objects.py
index 989036c7c..089e7e615 100644
--- a/src/service/core/config/objects.py
+++ b/src/service/core/config/objects.py
@@ -127,7 +127,7 @@
 }


-class ListBackendsResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid):
+class ListBackendsResponse(pydantic.BaseModel, extra='forbid'):
     """ Object storing info for all backends. """
     backends: List[connectors.Backend]
@@ -195,15 +195,15 @@ class BackendConfig(pydantic.BaseModel):

     def plaintext_dict(self, *args, **kwargs):
         """Convert the BackendConfig to a dictionary."""
-        dict_data = super().dict(*args, **kwargs)
+        dict_data = super().model_dump(*args, **kwargs)

         dict_data['scheduler_settings'] = (
             None
             if not self.scheduler_settings
-            else str(self.scheduler_settings.json())
+            else self.scheduler_settings.model_dump_json()
         )
         dict_data['node_conditions'] = (
-            None if not self.node_conditions else str(self.node_conditions.json())
+            None if not self.node_conditions else self.node_conditions.model_dump_json()
         )
         return dict_data
@@ -360,27 +360,31 @@ class ConfigHistoryQueryParams(pydantic.BaseModel):
         default=False,
         description='Whether to omit data from the response'
     )

-    @pydantic.validator('config_types')
+    @pydantic.field_validator('config_types')
     @classmethod
-    def validate_config_types(cls, v):
-        if v is not None:
+    def validate_config_types(
+        cls, config_types: List[config_history.ConfigHistoryType] | None
+    ) -> List[config_history.ConfigHistoryType] | None:
+        if config_types is not None:
             valid_types = [t.value.lower() for t in config_history.ConfigHistoryType]
-            invalid_types = [t for t in v if t.value.lower() not in valid_types]
+            invalid_types = [t for t in config_types if t.value.lower() not in valid_types]
             if invalid_types:
                 raise ValueError(
                     f'Invalid config types: {invalid_types}. Valid types are: {valid_types}'
                 )
-        return v
+        return config_types

-    @pydantic.validator('at_timestamp')
+    @pydantic.field_validator('at_timestamp')
     @classmethod
-    def validate_at_timestamp(cls, v, values):
-        if v is not None:
-            if 'created_before' in values and values['created_before'] is not None:
+    def validate_at_timestamp(
+        cls, at_timestamp: datetime.datetime | None, info: pydantic.ValidationInfo
+    ) -> datetime.datetime | None:
+        if at_timestamp is not None:
+            if 'created_before' in info.data and info.data['created_before'] is not None:
                 raise ValueError('Cannot specify both at_timestamp and created_before')
-            if 'created_after' in values and values['created_after'] is not None:
+            if 'created_after' in info.data and info.data['created_after'] is not None:
                 raise ValueError('Cannot specify both at_timestamp and created_after')
-        return v
+        return at_timestamp


 class ConfigHistory(pydantic.BaseModel):
@@ -392,8 +396,8 @@ class ConfigHistory(pydantic.BaseModel):
     username: str
     created_at: datetime.datetime
     description: str
-    tags: List[str] | None
-    data: Any
+    tags: List[str] | None = None
+    data: Any = None


 class GetConfigsHistoryResponse(pydantic.BaseModel):
@@ -421,16 +425,18 @@ class UpdateConfigTagsRequest(pydantic.BaseModel):
         description='Tags to remove from the config'
     )

-    @pydantic.validator('set_tags', 'delete_tags')
+    @pydantic.field_validator('set_tags', 'delete_tags')
     @classmethod
-    def validate_tags(cls, v):
-        if v is not None and not v:
+    def validate_tags(cls, tags: List[str] | None) -> List[str] | None:
+        if tags is not None and not tags:
             raise ValueError('Tags list cannot be empty')
-        return v
+        return tags

-    @pydantic.root_validator
+    @pydantic.model_validator(mode='before')
     @classmethod
-    def validate_at_least_one_tag_operation(cls, values):
+    def validate_at_least_one_tag_operation(cls, values: Any) -> Any:
+        if not isinstance(values, dict):
+            return values
         if not values.get('set_tags') and not values.get('delete_tags'):
             raise ValueError('At least one of set_tags or delete_tags must be provided')
         return values
@@ -447,5 +453,5 @@ class ConfigDiffRequest(pydantic.BaseModel):

 class ConfigDiffResponse(pydantic.BaseModel):
     """Response body for config diff endpoint."""
-    first_data: Any
-    second_data: Any
+    first_data: Any = None
+    second_data: Any = None
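The validator rewrites in objects.py all follow the same mechanical recipe: @pydantic.validator becomes @pydantic.field_validator, and cross-field access moves from the old `values` argument to ValidationInfo.data. A minimal, runnable illustration on a toy model:

import pydantic


class Window(pydantic.BaseModel):
    start: int
    end: int

    @pydantic.field_validator('end')
    @classmethod
    def end_after_start(cls, end: int, info: pydantic.ValidationInfo) -> int:
        # info.data only holds fields already validated (declaration
        # order), the same caveat the v1 `values` dict had.
        if 'start' in info.data and end <= info.data['start']:
            raise ValueError('end must be greater than start')
        return end


Window(start=1, end=5)    # ok
# Window(start=5, end=1)  # raises pydantic.ValidationError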
diff --git a/src/service/core/config/tests/test_config_history.py b/src/service/core/config/tests/test_config_history.py
index 81cc820db..c4df321a6 100644
--- a/src/service/core/config/tests/test_config_history.py
+++ b/src/service/core/config/tests/test_config_history.py
@@ -83,12 +83,6 @@ def test_service_config_history(self):
             'cli_config': {
                 'latest_version': 'test-cli',
                 'min_supported_version': '1.0.0',
-                'credential': {
-                    'endpoint': 's3://test-bucket',
-                    'access_key_id': 'test-key',
-                    'access_key': 'test-secret',
-                    'region': 'us-east-1',
-                },
             }
         }
         first_tags = ['service-update', 'cli-config']
@@ -106,12 +100,6 @@ def test_service_config_history(self):
             'cli_config': {
                 'latest_version': 'updated-cli',
                 'min_supported_version': '2.0.0',
-                'credential': {
-                    'endpoint': 's3://new-bucket',
-                    'access_key_id': 'new-key',
-                    'access_key': 'new-secret',
-                    'region': 'us-west-1',
-                },
             }
         }
         second_tags = ['service-update', 'cli-update']
@@ -173,9 +161,6 @@ def test_service_config_history(self):
         config = history['configs'][-1]['data']
         self.assertEqual(config['cli_config']['latest_version'], 'test-cli')
         self.assertEqual(config['cli_config']['min_supported_version'], '1.0.0')
-        self.assertEqual(config['cli_config']['credential']['endpoint'], 's3://test-bucket')
-        self.assertEqual(config['cli_config']['credential']['access_key_id'], 'test-key')
-        self.assertEqual(config['cli_config']['credential']['region'], 'us-east-1')

     def test_workflow_config_history(self):
         """Test history entries for workflow config operations."""
@@ -1290,7 +1275,7 @@ def test_get_config_diff(self):
                 second_revision=initial_history_entry['revision'],
             ),
         )
-        self.assertEqual(response.first_data, initial_history_entry['data'])
+        self.assertEqual(response.first_data.model_dump(mode='json'), initial_history_entry['data'])
         self.assertEqual(response.second_data, initial_history_entry['data'])

         # Test 2: Add the new bucket
@@ -1302,7 +1287,7 @@ def test_get_config_diff(self):
             ),
         )
         self.assertEqual(
-            response.first_data,
+            response.first_data.model_dump(mode='json'),
             initial_history_entry['data']
         )
         self.assertEqual(
diff --git a/src/service/core/config/tests/test_config_history_helpers.py b/src/service/core/config/tests/test_config_history_helpers.py
index e7727b7cb..1892ecc30 100644
--- a/src/service/core/config/tests/test_config_history_helpers.py
+++ b/src/service/core/config/tests/test_config_history_helpers.py
@@ -256,7 +256,7 @@ def test_invalid_config_types(self):
         with self.assertRaises(ValueError) as context:
             objects.ConfigHistoryQueryParams(config_types=['invalid_type'])
         self.assertIn('config_types', str(context.exception))
-        self.assertIn('not a valid enumeration member', str(context.exception))
+        self.assertIn('Input should be', str(context.exception))

     def test_at_timestamp_with_created_before(self):
         """Test validation of at_timestamp with created_before."""
diff --git a/src/service/core/data/data_service.py b/src/service/core/data/data_service.py
index 3c17d83c1..b210c563f 100755
--- a/src/service/core/data/data_service.py
+++ b/src/service/core/data/data_service.py
@@ -198,7 +198,7 @@ def get_dataset_info(postgres: connectors.PostgresConnector,
         created_date=row.created_date.replace(microsecond=0),
         last_used=row.last_used.replace(microsecond=0),
         size=row.size if row.size else 0,
-        checksum=row.checksum if row.checksum else 0,
+        checksum=row.checksum if row.checksum else '',
         location=storage.construct_storage_backend(row.location)\
             .parse_uri_to_link(bucket_config.region),
         uri=row.location,
@@ -1178,7 +1178,7 @@ def query_dataset(
             created_date=row.created_date.replace(microsecond=0),
             last_used=row.last_used.replace(microsecond=0),
             size=row.size if row.size else 0,
-            checksum=row.checksum if row.checksum else 0,
+            checksum=row.checksum if row.checksum else '',
             location=storage.construct_storage_backend(row.location)\
                 .parse_uri_to_link(bucket_config.region),
             uri=row.location,
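Two of the changes above are consequences of v2's stricter coercion rather than simple API renames: the test now matches v2's 'Input should be ...' error wording, and checksum falls back to '' because an int is no longer silently coerced into a str field. Demonstrable with plain pydantic v2, no project code:

import pydantic


class Row(pydantic.BaseModel):
    checksum: str


try:
    Row(checksum=0)  # v1 would have coerced this to '0'
except pydantic.ValidationError as err:
    print(err.errors()[0]['msg'])  # 'Input should be a valid string'

print(repr(Row(checksum='').checksum))  # the new explicit empty-string fallback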
DATASET = "DATASET" class DatasetStatus(enum.Enum): @@ -44,27 +44,27 @@ class DatasetStatus(enum.Enum): The status of a dataset / dataset version. """ # The dataset has been "allocated" but needs to be uploaded - PENDING = 'PENDING' + PENDING = "PENDING" # The dataset has been uploaded and is ready to use - READY = 'READY' + READY = "READY" # The dataset has been marked for delete but needs to be deleted - PENDING_DELETE = 'PENDING_DELETE' + PENDING_DELETE = "PENDING_DELETE" # The dataset version has been deleted. When the all versions are DELETED, the dataset will be # removed from the table - DELETED = 'DELETED' + DELETED = "DELETED" @staticmethod def is_active(name: str) -> bool: return name not in [DatasetStatus.PENDING_DELETE.value, DatasetStatus.DELETED.value] -class DatasetStructure(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DatasetStructure(pydantic.BaseModel, extra="forbid"): """ Object storing execution cluster node resource information. """ name: DatasetPattern tag: DatasetTagPattern -class BucketInfoEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class BucketInfoEntry(pydantic.BaseModel, extra="forbid"): """ Object storing Upload Response. """ path: str description: str @@ -72,21 +72,21 @@ class BucketInfoEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): default_cred: bool -class BucketInfoResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class BucketInfoResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Upload Response. """ - default: Optional[str] + default: Optional[str] = None buckets: Dict[str, BucketInfoEntry] -class DataUploadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataUploadResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Upload Response. """ version_id: str - region: str = '' - storage_path: str = '' - manifest_path: str = '' + region: str = "" + storage_path: str = "" + manifest_path: str = "" -class DataDownloadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataDownloadResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Download Response. """ dataset_names: List[str] dataset_versions: List[str] @@ -95,14 +95,14 @@ class DataDownloadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): is_collection: bool -class DataDeleteResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataDeleteResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Download Response. """ versions: List[str] = [] delete_locations: List[str] = [] cleaned_size: int = 0 -class DataInfoDatasetEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataInfoDatasetEntry(pydantic.BaseModel, extra="forbid"): """ Object storing Info Element. """ name: str version: str @@ -119,92 +119,92 @@ class DataInfoDatasetEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): collections: List[str] -class DataInfoCollectionEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataInfoCollectionEntry(pydantic.BaseModel, extra="forbid"): """ Object storing Info Element. """ name: str version: str location: str uri: str - hash_location: str | None + hash_location: str | None = None size: int -class DataInfoResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataInfoResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Data Info Elements. 
""" name: str id: str bucket: str - created_by: str | None - created_date: datetime.datetime | None - hash_location: str | None - hash_location_size: int | None + created_by: str | None = None + created_date: datetime.datetime | None = None + hash_location: str | None = None + hash_location_size: int | None = None labels: Dict type: DatasetType versions: List[DataInfoDatasetEntry | DataInfoCollectionEntry] -class DataQueryResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataQueryResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Dataset and Dataset Version Info. """ type: DatasetQueryType datasets: List[DataInfoResponse | DataInfoDatasetEntry] -class DataListEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataListEntry(pydantic.BaseModel, extra="forbid"): """ Object storing Data List Element. """ name: str id: str bucket: str create_time: datetime.datetime - last_created: datetime.datetime | None - hash_location: str | None - hash_location_size: int | None - version_id: str | None + last_created: datetime.datetime | None = None + hash_location: str | None = None + hash_location_size: int | None = None + version_id: str | None = None type: DatasetType -class DataListResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataListResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Data List Elements. """ datasets: List[DataListEntry] -class DataTagResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataTagResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Tag Response. """ version_id: str tags: List[str] -class DataCopyResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataCopyResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Inspect Response. """ inital_datasets: List[DatasetStructure] created_datasets: List[DatasetStructure] -class DataMetadataResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataMetadataResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Label/Metadata Response. """ metadata: Dict -class DataAttributeResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataAttributeResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Tag/Label/Metadata Response. """ - tag_response: DataTagResponse | None - label_response: DataMetadataResponse | None - metadata_response: DataMetadataResponse | None + tag_response: DataTagResponse | None = None + label_response: DataMetadataResponse | None = None + metadata_response: DataMetadataResponse | None = None -class DataLocationResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataLocationResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Label/Metadata Response. """ path: str region: str -class DataUpdateEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataUpdateEntry(pydantic.BaseModel, extra="forbid"): """ Object storing Info Element. """ dataset_name: str version: str -class DataUpdateResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataUpdateResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Info Element. """ versions: List[DataUpdateEntry] -class DataShareResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataShareResponse(pydantic.BaseModel, extra="forbid"): """ Object storing Shared Failure Datasets. 
""" duplicates: List[str] success: List[str] diff --git a/src/service/core/profile/objects.py b/src/service/core/profile/objects.py index c72023867..8231b641e 100644 --- a/src/service/core/profile/objects.py +++ b/src/service/core/profile/objects.py @@ -24,13 +24,13 @@ from src.utils import connectors -class TokenIdentity(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TokenIdentity(pydantic.BaseModel, extra='forbid'): """ Identity when the request is authenticated with an access token. """ name: str expires_at: datetime.datetime | None = None # YYYY-MM-DD when token is found in DB -class ProfileResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ProfileResponse(pydantic.BaseModel, extra='forbid'): """ Profile and identity info. When token header is set, roles/pools are the token's; otherwise they are the user's. JSON is self-explanatory for CLI. diff --git a/src/service/core/profile/profile_service.py b/src/service/core/profile/profile_service.py index e5464e70c..c3ab03f86 100644 --- a/src/service/core/profile/profile_service.py +++ b/src/service/core/profile/profile_service.py @@ -68,10 +68,7 @@ def set_notification_settings( preferences: connectors.UserProfile, set_default_backend: bool = False, user_header: Optional[str] = fastapi.Header(alias=login.OSMO_USER_HEADER, default=None)): - fields = {} - for key, value in preferences.dict().items(): - if value is not None: - fields[key] = value + fields = preferences.model_dump(exclude_none=True) if set_default_backend: fields['backend'] = None user_name = connectors.parse_username(user_header) diff --git a/src/service/core/service.py b/src/service/core/service.py index 5d57eb4ac..d9d61986f 100644 --- a/src/service/core/service.py +++ b/src/service/core/service.py @@ -28,7 +28,7 @@ import fastapi.middleware.cors import fastapi.responses import uvicorn # type: ignore -from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor # type: ignore +from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor # type: ignore from src.lib.utils import common, login, osmo_errors, version import src.lib.utils.logging @@ -50,7 +50,7 @@ app = fastapi.FastAPI(docs_url='/api/docs', redoc_url=None, openapi_url='/api/openapi.json') -misc_router = fastapi.APIRouter(tags = ['Misc API']) +misc_router = fastapi.APIRouter(tags=['Misc API']) curr_cli_config = connectors.CliConfig() @@ -71,7 +71,7 @@ async def check_client_version(request: fastapi.Request, call_next): if cli_info.latest_version else version.VERSION if cli_info.client_install_url: install_command = f'Please run the following command:\n' \ - f'curl -fsSL {cli_info.client_install_url} | bash' + f'curl -fsSL {cli_info.client_install_url} | bash' else: install_command = \ 'Please update by running the install command in the documentation.' @@ -81,10 +81,10 @@ async def check_client_version(request: fastapi.Request, call_next): client_version < version.Version.from_string(cli_info.min_supported_version): return fastapi.responses.JSONResponse( status_code=400, - content={'message': 'Your client is out of date. Client version is ' + \ - f'{client_version_str} but the newest client version is ' - f'{newest_client_version}.\n{install_command}', - 'error_code': osmo_errors.OSMOError.error_code}, + content={'message': 'Your client is out of date. 
Client version is ' + + f'{client_version_str} but the newest client version is ' + f'{newest_client_version}.\n{install_command}', + 'error_code': osmo_errors.OSMOError.error_code}, ) suggest_version_update = True @@ -207,6 +207,7 @@ def get_workflow_plugins_configs() -> connectors.PluginsConfig: app.include_router(misc_router) + @app.exception_handler(osmo_errors.OSMOUsageError) @app.exception_handler(osmo_errors.OSMOResourceError) @app.exception_handler(osmo_errors.OSMOCredentialError) @@ -291,22 +292,22 @@ def set_default_backend_images(postgres: connectors.PostgresConnector): # If backend_images are already set, do not override them if curr_workflow_configs.backend_images.init and \ - curr_workflow_configs.backend_images.client: + curr_workflow_configs.backend_images.client: return if postgres.config.osmo_image_location and \ - postgres.config.osmo_image_tag: + postgres.config.osmo_image_tag: # Override default backend_images with deployment values backend_images = connectors.OsmoImageConfig( init=f'{postgres.config.osmo_image_location}/' - f'init-container:{postgres.config.osmo_image_tag}', + f'init-container:{postgres.config.osmo_image_tag}', client=f'{postgres.config.osmo_image_location}/' - f'client:{postgres.config.osmo_image_tag}', + f'client:{postgres.config.osmo_image_tag}', ) config_service.patch_workflow_configs( request=config_objects.PatchConfigRequest( configs_dict={ - 'backend_images': backend_images.dict() + 'backend_images': backend_images.model_dump() } ), username='System', @@ -344,7 +345,7 @@ def set_client_install_url(postgres: connectors.PostgresConnector, config: objects.WorkflowServiceConfig): curr_service_configs = postgres.get_service_configs() if curr_service_configs.cli_config.client_install_url != config.client_install_url: - updated_cli_config = curr_service_configs.cli_config.dict() + updated_cli_config = curr_service_configs.cli_config.model_dump() updated_cli_config['client_install_url'] = config.client_install_url config_service.patch_service_configs( request=config_objects.PatchConfigRequest( @@ -454,7 +455,7 @@ def configure_app(target_app: fastapi.FastAPI, config: objects.WorkflowServiceCo ) if login_info != service_configs_dict.service_auth.login_info: configs_dict['service_auth'] = { - 'login_info': login_info.dict() + 'login_info': login_info.model_dump() } if configs_dict: diff --git a/src/service/core/tests/test_service.py b/src/service/core/tests/test_service.py index 8ad9ba47c..8bb2b2313 100644 --- a/src/service/core/tests/test_service.py +++ b/src/service/core/tests/test_service.py @@ -84,13 +84,13 @@ def test_get_default_pool(self): def test_get_client_version_with_config_override(self): # Arrange - test_version = version.VERSION.copy() + test_version = version.VERSION.model_copy() test_version.revision = str(int(test_version.revision) + 1) helpers.patch_configs( config_objects.PatchConfigRequest( configs_dict=connectors.postgres.ServiceConfig( cli_config=connectors.postgres.CliConfig(latest_version=str(test_version)), - ).dict(), + ).model_dump(), ), config_type=connectors.ConfigType.SERVICE, username='test@nvidia.com', @@ -101,7 +101,7 @@ def test_get_client_version_with_config_override(self): # Assert self.assertEqual(response.status_code, 200) - self.assertEqual(response.json(), test_version) + self.assertEqual(response.json(), test_version.model_dump()) def test_get_client_version_without_config_override(self): # Arrange / Act @@ -109,7 +109,7 @@ def test_get_client_version_without_config_override(self): # Assert 
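The set_notification_settings() change above folds a four-line filtering loop into one serializer argument; the two spellings are equivalent, which a standalone model can assert (UserProfile here is a toy stand-in for the connectors class):

from typing import Optional

import pydantic


class UserProfile(pydantic.BaseModel):
    email: Optional[str] = None
    backend: Optional[str] = None
    notify: bool = True


prefs = UserProfile(email='user@example.com')

manual = {key: value for key, value in prefs.model_dump().items() if value is not None}
assert manual == prefs.model_dump(exclude_none=True)
print(prefs.model_dump(exclude_none=True))  # {'email': 'user@example.com', 'notify': True}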
diff --git a/src/service/core/tests/test_service.py b/src/service/core/tests/test_service.py
index 8ad9ba47c..8bb2b2313 100644
--- a/src/service/core/tests/test_service.py
+++ b/src/service/core/tests/test_service.py
@@ -84,13 +84,13 @@ def test_get_default_pool(self):

     def test_get_client_version_with_config_override(self):
         # Arrange
-        test_version = version.VERSION.copy()
+        test_version = version.VERSION.model_copy()
         test_version.revision = str(int(test_version.revision) + 1)
         helpers.patch_configs(
             config_objects.PatchConfigRequest(
                 configs_dict=connectors.postgres.ServiceConfig(
                     cli_config=connectors.postgres.CliConfig(latest_version=str(test_version)),
-                ).dict(),
+                ).model_dump(),
             ),
             config_type=connectors.ConfigType.SERVICE,
             username='test@nvidia.com',
@@ -101,7 +101,7 @@ def test_get_client_version_with_config_override(self):

         # Assert
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json(), test_version)
+        self.assertEqual(response.json(), test_version.model_dump())

     def test_get_client_version_without_config_override(self):
         # Arrange / Act
@@ -109,7 +109,7 @@ def test_get_client_version_without_config_override(self):

         # Assert
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json(), version.VERSION)
+        self.assertEqual(response.json(), version.VERSION.model_dump())

     def test_get_client_version_plaintext(self):
         # Arrange / Act
@@ -128,11 +128,11 @@ def test_get_version_matches_version_yaml(self):

         # Assert
         self.assertEqual(response.status_code, 200)
-        self.assertEqual(response.json(), version.VERSION)
+        self.assertEqual(response.json(), version.VERSION.model_dump())

     def test_outdated_client_receives_no_update_prompt(self):
         # Arrange
-        test_version = version.VERSION.copy()
+        test_version = version.VERSION.model_copy()
         test_version.major = str(int(test_version.major) - 1)

         # Act
@@ -145,7 +145,7 @@ def test_outdated_client_receives_no_update_prompt(self):
     def test_outdated_client_receives_update_prompt_if_min_supported_version_is_set(self):
         # Arrange
         self.patch_cli_config(min_supported_version=str(version.VERSION))
-        test_version = version.VERSION.copy()
+        test_version = version.VERSION.model_copy()
         test_version.major = str(int(test_version.major) - 1)

         # Act
@@ -201,7 +201,7 @@ def test_get_available_workflow_tags_from_workflow_configs(self):
         helpers.patch_configs(
             config_objects.PatchConfigRequest(
                 configs_dict=connectors.postgres.WorkflowConfig(
-                    workflow_info=connectors.postgres.WorkflowInfo(tags=tags)).dict(),
+                    workflow_info=connectors.postgres.WorkflowInfo(tags=tags)).model_dump(),
             ),
             config_type=connectors.ConfigType.WORKFLOW,
             username='test@nvidia.com',
@@ -461,7 +461,7 @@ def patch_cli_config(self,
                     latest_version=latest_version,
                     min_supported_version=min_supported_version,
                 ),
-            ).dict(),
+            ).model_dump(),
             ),
             config_type=connectors.ConfigType.SERVICE,
             username='test@nvidia.com',
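The test edits above follow from version.VERSION being a Pydantic model: response.json() yields a plain dict, so assertions compare against model_dump(), and mutated copies are taken with model_copy() (the v2 rename of .copy()). A sketch under the assumption that version.Version stores its components as strings:

import pydantic


class Version(pydantic.BaseModel):
    major: str
    minor: str
    revision: str


VERSION = Version(major='1', minor='2', revision='3')

test_version = VERSION.model_copy()
test_version.revision = str(int(test_version.revision) + 1)

assert VERSION.model_dump() == {'major': '1', 'minor': '2', 'revision': '3'}
assert test_version.revision == '4' and VERSION.revision == '3'  # the copy is independent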
""" host: str = pydantic.Field( - command_line='host', default='http://0.0.0.0:8000', - description='The url to bind to when serving the workflow service.') + description='The url to bind to when serving the workflow service.', + json_schema_extra={'command_line': 'host'}) device_endpoint: str | None = pydantic.Field( - command_line='device_endpoint', default=None, - description='The url to bind to when authenticating with the device endpoint.') + description='The url to bind to when authenticating with the device endpoint.', + json_schema_extra={'command_line': 'device_endpoint'}) device_client_id: str | None = pydantic.Field( - command_line='device_client_id', default=None, - description='The client id to use when authenticating with the device endpoint.') + description='The client id to use when authenticating with the device endpoint.', + json_schema_extra={'command_line': 'device_client_id'}) browser_endpoint: str | None = pydantic.Field( - command_line='browser_endpoint', default=None, - description='The url to bind to when authenticating with the browser endpoint.') + description='The url to bind to when authenticating with the browser endpoint.', + json_schema_extra={'command_line': 'browser_endpoint'}) browser_client_id: str | None = pydantic.Field( - command_line='browser_client_id', default=None, - description='The client id to use when authenticating with the browser endpoint.') + description='The client id to use when authenticating with the browser endpoint.', + json_schema_extra={'command_line': 'browser_client_id'}) token_endpoint: str | None = pydantic.Field( - command_line='token_endpoint', default=None, - description='The url to bind to when authenticating with the token endpoint.') + description='The url to bind to when authenticating with the token endpoint.', + json_schema_extra={'command_line': 'token_endpoint'}) logout_endpoint: str | None = pydantic.Field( - command_line='logout_endpoint', default=None, - description='The url to bind to when authenticating with the logout endpoint.') + description='The url to bind to when authenticating with the logout endpoint.', + json_schema_extra={'command_line': 'logout_endpoint'}) client_install_url: str | None = pydantic.Field( - command_line='client_install_url', default=None, - description='The URL for the client install script shown in version update messages.') + description='The URL for the client install script shown in version update messages.', + json_schema_extra={'command_line': 'client_install_url'}) progress_file: str = pydantic.Field( - command_line='progress_file', - env='OSMO_PROGRESS_FILE', default='/var/run/osmo/last_progress', - description='The file to write progress timestamps to (For liveness/startup probes)') + description='The file to write progress timestamps to (For liveness/startup probes)', + json_schema_extra={'command_line': 'progress_file', 'env': 'OSMO_PROGRESS_FILE'}) progress_iter_frequency: str = pydantic.Field( - command_line='progress_iter_frequency', - env='OSMO_PROGRESS_ITER_FREQUENCY', default='15s', description='How often to write to progress file when processing tasks in a loop (' 'e.g. write to progress every 1 minute processed, like uploaded to DB). 
' 'Format needs to be where unit can be either s (seconds) and ' - 'm (minutes).') + 'm (minutes).', + json_schema_extra={ + 'command_line': 'progress_iter_frequency', + 'env': 'OSMO_PROGRESS_ITER_FREQUENCY' + }) default_admin_username: str | None = pydantic.Field( - command_line='default_admin_username', - env='OSMO_DEFAULT_ADMIN_USERNAME', default=None, description='The username for the default admin user to create on startup. ' - 'If set, default_admin_password must also be set.') + 'If set, default_admin_password must also be set.', + json_schema_extra={ + 'command_line': 'default_admin_username', + 'env': 'OSMO_DEFAULT_ADMIN_USERNAME' + }) default_admin_password: str | None = pydantic.Field( - command_line='default_admin_password', - env='OSMO_DEFAULT_ADMIN_PASSWORD', default=None, description='The password (access token value) for the default admin user. ' - 'Must be set if default_admin_username is set.') + 'Must be set if default_admin_username is set.', + json_schema_extra={ + 'command_line': 'default_admin_password', + 'env': 'OSMO_DEFAULT_ADMIN_PASSWORD' + }) - @pydantic.root_validator() + @pydantic.model_validator(mode='before') @classmethod def validate_default_admin(cls, values): """ Validate that if default_admin_username is set, default_admin_password must also be set """ + if not isinstance(values, dict): + return values username = values.get('default_admin_username') password = values.get('default_admin_password') if username and not password: @@ -115,11 +122,9 @@ class WorkflowServiceContext(pydantic.BaseModel): """ Shared context that needs to be access from all api methods. """ config: WorkflowServiceConfig database: connectors.PostgresConnector - _instance: Optional['WorkflowServiceContext'] = None + _instance: ClassVar[Optional['WorkflowServiceContext']] = None - class Config: - arbitrary_types_allowed = True - extra = 'forbid' + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True, extra='forbid') @classmethod def set(cls, instance: 'WorkflowServiceContext'): @@ -132,8 +137,9 @@ def get(cls) -> 'WorkflowServiceContext': 'Using WorkflowServiceContext before initialization.') return cls._instance -class ResourceUsage(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourceUsage(pydantic.BaseModel): """ Object storing resource usage information. """ + model_config = pydantic.ConfigDict(extra='forbid', coerce_numbers_to_str=True) quota_used: str quota_free: str quota_limit: str @@ -142,64 +148,70 @@ class ResourceUsage(pydantic.BaseModel, extra=pydantic.Extra.forbid): total_free: str -class PoolResourceUsage(connectors.PoolMinimal, extra=pydantic.Extra.forbid): +class PoolResourceUsage(connectors.PoolMinimal, extra='forbid'): """ Object storing pool information. """ resource_usage: ResourceUsage -class PoolNodeSetResourceUsage(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PoolNodeSetResourceUsage(pydantic.BaseModel, extra='forbid'): """ Object storing pool node set information. """ pools: List[PoolResourceUsage] -class PoolResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PoolResponse(pydantic.BaseModel, extra='forbid'): """ Object storing pool information. """ node_sets: List[PoolNodeSetResourceUsage] resource_sum: ResourceUsage -class SubmitResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class SubmitResponse(pydantic.BaseModel, extra='forbid'): """ Object storing workflow name, logs, and spec after submission. 
""" # The name of the newly created workflow name: str - overview: Optional[str] - logs: Optional[str] - spec: Optional[str] - dashboard_url: Optional[str] + overview: Optional[str] = None + logs: Optional[str] = None + spec: Optional[str] = None + dashboard_url: Optional[str] = None + @pydantic.model_validator(mode='before') @classmethod - @pydantic.root_validator def logs_or_spec(cls, values): - if (values['logs'] is not None, values['spec'] is not None).count(True) != 1: + # In Pydantic v2, mode='before' receives raw input, so optional + # fields may be absent from the dict — use .get() with defaults. + logs = values.get('logs') if isinstance(values, dict) else getattr(values, 'logs', None) + spec = values.get('spec') if isinstance(values, dict) else getattr(values, 'spec', None) + if (logs is not None, spec is not None).count(True) != 1: raise ValueError('Exactly one of "logs" or "spec" must be set') return values -class CancelResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class CancelResponse(pydantic.BaseModel, extra='forbid'): """ Object storing workflow name. """ name: str -class ListEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListEntry(pydantic.BaseModel): """ Entry for list API results. """ + model_config = pydantic.ConfigDict(extra='forbid', ser_json_timedelta='float') + user: str name: str workflow_uuid: str submit_time: datetime.datetime - start_time: datetime.datetime | None - end_time: datetime.datetime | None + start_time: datetime.datetime | None = None + end_time: datetime.datetime | None = None queued_time: datetime.timedelta - duration: datetime.timedelta | None + duration: datetime.timedelta | None = None status: workflow.WorkflowStatus overview: str logs: str - error_logs: str | None - grafana_url: str | None - dashboard_url: str | None - pool: str | None - app_owner: str | None - app_name: str | None - app_version: int | None + error_logs: str | None = None + grafana_url: str | None = None + dashboard_url: str | None = None + pool: str | None = None + app_owner: str | None = None + app_name: str | None = None + app_version: int | None = None priority: str @classmethod @@ -211,7 +223,7 @@ def from_db_row(cls, row: Any, base_url: str, overview = f'{base_url}/workflows/{row["workflow_id"]}' if config.method == 'dev': overview = f'{base_url}/api/workflow/{row["workflow_id"]}' - return ListEntry.construct( + return ListEntry.model_construct( user=row['submitted_by'], name=row['workflow_id'], workflow_uuid=row['workflow_uuid'], submit_time=row['submit_time'], @@ -235,7 +247,7 @@ def from_db_row(cls, row: Any, base_url: str, priority=row['priority']) -class ListResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListResponse(pydantic.BaseModel, extra='forbid'): workflows: List[ListEntry] more_entries: bool @@ -243,53 +255,53 @@ class ListResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): def from_db_rows(cls, rows: Any, base_url: str, more_entries: bool) -> 'ListResponse': backend_lookup: Dict = {} workflows = [ListEntry.from_db_row(row, base_url, backend_lookup) for row in rows] - return ListResponse.construct(workflows=workflows, more_entries=more_entries) + return ListResponse.model_construct(workflows=workflows, more_entries=more_entries) -class ListTaskSummaryEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListTaskSummaryEntry(pydantic.BaseModel, extra='forbid'): """ Entry for task list API results. 
""" user: str - pool: str | None - storage: int # Gi + pool: str | None = None + storage: float # GiB cpu: int - memory: int # Gi + memory: float # GiB gpu: int priority: str @classmethod def from_db_row(cls, row: Any) -> 'ListTaskSummaryEntry': """ Create ListEntry from the DB query result. """ - return ListTaskSummaryEntry.construct( + return ListTaskSummaryEntry.model_construct( user=row['submitted_by'], pool=row['pool'], storage=row['disk_count'], - cpu=row['cpu_count'], + cpu=round(row['cpu_count']), memory=row['memory_count'], - gpu=row['gpu_count'], + gpu=round(row['gpu_count']), priority=row['priority'], ) -class ListTaskAggregatedEntry(ListTaskSummaryEntry, extra=pydantic.Extra.forbid): +class ListTaskAggregatedEntry(ListTaskSummaryEntry, extra='forbid'): """ Entry for task list API results, aggregated by workflow. """ workflow_id: str @classmethod def from_db_row(cls, row: Any) -> 'ListTaskAggregatedEntry': - return ListTaskAggregatedEntry.construct( + return ListTaskAggregatedEntry.model_construct( workflow_id=row['workflow_id'], - **ListTaskSummaryEntry.from_db_row(row).dict() + **ListTaskSummaryEntry.from_db_row(row).model_dump() ) -class ListTaskSummaryResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListTaskSummaryResponse(pydantic.BaseModel, extra='forbid'): summaries: List[ListTaskSummaryEntry] @classmethod def from_db_rows(cls, rows: Any) -> 'ListTaskSummaryResponse': summaries = [ListTaskSummaryEntry.from_db_row(row) for row in rows] - return ListTaskSummaryResponse.construct(summaries=summaries) + return ListTaskSummaryResponse.model_construct(summaries=summaries) -class ListTaskAggregatedResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListTaskAggregatedResponse(pydantic.BaseModel, extra='forbid'): summaries: List[ListTaskAggregatedEntry] @classmethod @@ -297,17 +309,17 @@ def from_db_rows(cls, rows: Any) -> 'ListTaskAggregatedResponse': summaries = [ListTaskAggregatedEntry.from_db_row(row) for row in rows] return ListTaskAggregatedResponse(summaries=summaries) -class TaskEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskEntry(pydantic.BaseModel, extra='forbid'): """ Entry for task GET API result. """ workflow_id: str task_name: str - node: str | None - start_time: datetime.datetime | None - end_time: datetime.datetime | None + node: str | None = None + start_time: datetime.datetime | None = None + end_time: datetime.datetime | None = None status: task.TaskGroupStatus - storage: int # Gi + storage: float # GiB cpu: int - memory: int # Gi + memory: float # GiB gpu: int @@ -322,33 +334,35 @@ def from_db_row(cls, row: Dict) -> 'TaskEntry': end_time=row['end_time'], status=task.TaskGroupStatus(row['status']), storage=row['disk_count'], - cpu=row['cpu_count'], + cpu=round(row['cpu_count']), memory=row['memory_count'], - gpu=row['gpu_count'], + gpu=round(row['gpu_count']), ) -class ListTaskEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListTaskEntry(pydantic.BaseModel): """ Entry for task list API results. 
""" + model_config = pydantic.ConfigDict(extra='forbid', ser_json_timedelta='float') + user: str workflow_id: str workflow_uuid: str task_name: str retry_id: int - pool: str | None - node: str | None - start_time: datetime.datetime | None - end_time: datetime.datetime | None - duration: datetime.timedelta | None + pool: str | None = None + node: str | None = None + start_time: datetime.datetime | None = None + end_time: datetime.datetime | None = None + duration: datetime.timedelta | None = None status: task.TaskGroupStatus overview: str logs: str - error_logs: str | None - grafana_url: str | None - dashboard_url: str | None - storage: int # Gi + error_logs: str | None = None + grafana_url: str | None = None + dashboard_url: str | None = None + storage: float # GiB cpu: int - memory: int # Gi + memory: float # GiB gpu: int priority: str @@ -361,7 +375,7 @@ def from_db_row(cls, row: Any, base_url: str, overview = f'{base_url}/workflows/{row["workflow_id"]}' if config.method == 'dev': overview = f'{base_url}/api/workflow/{row["workflow_id"]}' - return ListTaskEntry.construct( + return ListTaskEntry.model_construct( user=row['submitted_by'], workflow_id=row['workflow_id'], workflow_uuid=row['workflow_uuid'], @@ -384,21 +398,21 @@ def from_db_row(cls, row: Any, base_url: str, dashboard_url=generate_dashboard_url(row['workflow_uuid'], row['backend'], backend_lookup), storage=row['disk_count'], - cpu=row['cpu_count'], + cpu=round(row['cpu_count']), memory=row['memory_count'], - gpu=row['gpu_count'], + gpu=round(row['gpu_count']), priority=row['priority'], ) -class ListTaskResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ListTaskResponse(pydantic.BaseModel, extra='forbid'): tasks: List[ListTaskEntry] @classmethod def from_db_rows(cls, rows: Any, base_url: str) -> 'ListTaskResponse': backend_lookup: Dict = {} tasks = [ListTaskEntry.from_db_row(row, base_url, backend_lookup) for row in rows] - return ListTaskResponse.construct(tasks=tasks) + return ListTaskResponse.model_construct(tasks=tasks) class TaskQueryResponse(pydantic.BaseModel): @@ -406,7 +420,7 @@ class TaskQueryResponse(pydantic.BaseModel): name: str retry_id: int status: task.TaskGroupStatus - failure_message: str | None + failure_message: str | None = None exit_code: int | None = None logs: str error_logs: str | None = None @@ -426,7 +440,7 @@ class TaskQueryResponse(pydantic.BaseModel): node_name: str | None = None lead: bool = False -class GroupQueryResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class GroupQueryResponse(pydantic.BaseModel, extra='forbid'): """ Represents the queryed task information. """ name: str status: task.TaskGroupStatus @@ -435,43 +449,45 @@ class GroupQueryResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): processing_start_time: datetime.datetime | None = None scheduling_start_time: datetime.datetime | None = None initializing_start_time: datetime.datetime | None = None - remaining_upstream_groups: Set[str] | None - downstream_groups: Set[str] | None + remaining_upstream_groups: Set[str] | None = None + downstream_groups: Set[str] | None = None failure_message: str | None = None tasks: List[TaskQueryResponse] = [] -class WorkflowQueryResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class WorkflowQueryResponse(pydantic.BaseModel): """ Represents the queryed workflow information. 
""" + model_config = pydantic.ConfigDict(extra='forbid', ser_json_timedelta='float') + name: str uuid: str submitted_by: str - cancelled_by: str | None + cancelled_by: str | None = None spec: str template_spec: str logs: str events: str overview: str - parent_name: str | None - parent_job_id: int | None - dashboard_url: str | None - grafana_url: str | None + parent_name: str | None = None + parent_job_id: int | None = None + dashboard_url: str | None = None + grafana_url: str | None = None tags: List[str] = [] submit_time: datetime.datetime - start_time: datetime.datetime | None - end_time: datetime.datetime | None - exec_timeout: datetime.timedelta | None - queue_timeout: datetime.timedelta | None - duration: datetime.timedelta | None + start_time: datetime.datetime | None = None + end_time: datetime.datetime | None = None + exec_timeout: datetime.timedelta | None = None + queue_timeout: datetime.timedelta | None = None + duration: datetime.timedelta | None = None queued_time: datetime.timedelta status: workflow.WorkflowStatus outputs: str = '' groups: List[GroupQueryResponse] - pool: str | None - backend: str | None - app_owner: str | None - app_name: str | None - app_version: int | None + pool: str | None = None + backend: str | None = None + app_owner: str | None = None + app_name: str | None = None + app_version: int | None = None plugins: task_common.WorkflowPlugins priority: str @@ -537,12 +553,12 @@ def fetch_from_db(cls, database: connectors.PostgresConnector, priority=workflow_obj.priority) -class ResourcesResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourcesResponse(pydantic.BaseModel, extra='forbid'): """ Object storing execution cluster node resource information. """ resources: List[workflow.ResourcesEntry] -class PoolResourcesEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PoolResourcesEntry(pydantic.BaseModel, extra='forbid'): """ Entry for resources API results. """ pool: str platform: str @@ -552,12 +568,12 @@ class PoolResourcesEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): backend: str -class PoolResourcesResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PoolResourcesResponse(pydantic.BaseModel, extra='forbid'): """ Object storing execution cluster node resource information. """ pools: List[PoolResourcesEntry] -class DataUploadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataUploadResponse(pydantic.BaseModel, extra='forbid'): """ Object storing Upload Response. """ version_id: str container: str @@ -565,7 +581,7 @@ class DataUploadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): path: str -class DataDownloadResponse(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DataDownloadResponse(pydantic.BaseModel, extra='forbid'): """ Object storing Download Response. """ location: str container: str @@ -593,7 +609,7 @@ def valid_cred(self, workflow_config: connectors.WorkflowConfig): class UserRegistryCredential( credentials.RegistryCredential, - extra=pydantic.Extra.forbid, + extra='forbid', ): """ Authentication information for a Docker registry. """ auth: str = pydantic.Field( @@ -622,7 +638,7 @@ def valid_cred(self, workflow_config: connectors.WorkflowConfig): class UserDataCredential( data_credentials.DataCredentialBase, - extra=pydantic.Extra.forbid, + extra='forbid', ): """ Authentication information for a data service. 
""" @@ -678,7 +694,7 @@ def valid_cred(self, workflow_config: connectors.WorkflowConfig): class UserCredential( pydantic.BaseModel, - extra=pydantic.Extra.forbid, + extra='forbid', ): """ Generic authentication information. """ credential: Dict[str, str] = pydantic.Field( @@ -724,20 +740,23 @@ def valid_cred(self, workflow_config: connectors.WorkflowConfig): class CredentialOptions(pydantic.BaseModel): """ Credential options """ registry_credential: Optional[UserRegistryCredential] = pydantic.Field( - description='Authentication information for a Docker registry') + default=None, description='Authentication information for a Docker registry') data_credential: Optional[UserDataCredential] = pydantic.Field( - description='Authentication information for a data service') + default=None, description='Authentication information for a data service') generic_credential: Optional[UserCredential] = pydantic.Field( - description='Generic authentication information') + default=None, description='Generic authentication information') - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') + @classmethod def validate_credential(cls, values): # pylint: disable=no-self-argument """ A valid credential can only be one of the three types """ + if not isinstance(values, dict): + return values num_fields_set = sum(1 for value in values.values() if value is not None) if num_fields_set != 1: raise osmo_errors.OSMOUserError( - f'Exactly one of the following must be set {cls.__fields__.keys()}') + f'Exactly one of the following must be set {cls.model_fields.keys()}') return values def get_credential(self) -> CredentialProtocol: @@ -749,7 +768,7 @@ def get_credential(self) -> CredentialProtocol: return self.generic_credential else: raise osmo_errors.OSMOUserError( - f'Exactly one of the following must be set: {self.__fields__.keys()}') + f'Exactly one of the following must be set: {type(self).model_fields.keys()}') class CredentialGetResponse(pydantic.BaseModel): @@ -769,7 +788,7 @@ class WorkflowSubmitInfo(pydantic.BaseModel): context: WorkflowServiceContext base32_id: str = '' name: str = '' - parent_workflow_id: str | None + parent_workflow_id: str | None = None app_uuid: str | None = None app_version: int | None = None user: str @@ -1018,7 +1037,7 @@ def validate_workflow_spec( downstream_groups=downstream_groups) workflow_obj.insert_to_db() uploaded_workflow_dict = {'version': 2, - 'workflow': rendered_spec.dict(exclude_defaults=True)} + 'workflow': rendered_spec.model_dump(exclude_defaults=True)} self.send_workflow_spec_to_queue(workflow_obj.workflow_id, uploaded_workflow_dict, original_templated_spec) diff --git a/src/service/core/workflow/tests/BUILD b/src/service/core/workflow/tests/BUILD index 43ad5b313..f62baf249 100644 --- a/src/service/core/workflow/tests/BUILD +++ b/src/service/core/workflow/tests/BUILD @@ -47,3 +47,12 @@ py_test( ], ) +py_test( + name = "test_task_entry_resources", + srcs = ["test_task_entry_resources.py"], + deps = [ + "//src/service/core/workflow", + "//src/utils/job", + ], +) + diff --git a/src/service/core/workflow/tests/test_calculate_pool_quotas.py b/src/service/core/workflow/tests/test_calculate_pool_quotas.py index e5d87cbfd..4e58cac47 100644 --- a/src/service/core/workflow/tests/test_calculate_pool_quotas.py +++ b/src/service/core/workflow/tests/test_calculate_pool_quotas.py @@ -45,7 +45,7 @@ def make_resource( gpu_usage: int = 0, gpu_non_workflow_usage: int = 0, ) -> workflow.ResourcesEntry: - return workflow.ResourcesEntry.construct( + return 
workflow.ResourcesEntry.model_construct( hostname=hostname, backend=backend, allocatable_fields={'gpu': str(gpu_allocatable)}, diff --git a/src/service/core/workflow/tests/test_task_entry_resources.py b/src/service/core/workflow/tests/test_task_entry_resources.py new file mode 100644 index 000000000..59e347a0e --- /dev/null +++ b/src/service/core/workflow/tests/test_task_entry_resources.py @@ -0,0 +1,161 @@ +""" +SPDX-FileCopyrightText: Copyright (c) 2026 NVIDIA CORPORATION & AFFILIATES. All rights reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +SPDX-License-Identifier: Apache-2.0 +""" +import json +import unittest + +from src.service.core.workflow import objects +from src.utils.job import task + + +def make_summary_row( + disk_count: float = 0.0, + cpu_count: float = 0.0, + memory_count: float = 0.0, + gpu_count: float = 0.0, + pool: str = 'test-pool', + priority: str = 'NORMAL', +) -> dict: + return { + 'submitted_by': 'test-user', + 'pool': pool, + 'disk_count': disk_count, + 'cpu_count': cpu_count, + 'memory_count': memory_count, + 'gpu_count': gpu_count, + 'priority': priority, + } + + +def make_task_row( + disk_count: float = 0.0, + cpu_count: float = 0.0, + memory_count: float = 0.0, + gpu_count: float = 0.0, +) -> dict: + return { + 'workflow_id': 'test-workflow-1', + 'name': 'task-0', + 'node_name': 'node-1', + 'start_time': None, + 'end_time': None, + 'status': task.TaskGroupStatus.WAITING.name, + 'disk_count': disk_count, + 'cpu_count': cpu_count, + 'memory_count': memory_count, + 'gpu_count': gpu_count, + } + + +class TestListTaskSummaryEntryResources(unittest.TestCase): + + def test_whole_number_resources(self): + row = make_summary_row(disk_count=10.0, cpu_count=4.0, memory_count=8.0, gpu_count=2.0) + entry = objects.ListTaskSummaryEntry.from_db_row(row) + self.assertEqual(entry.storage, 10.0) + self.assertEqual(entry.cpu, 4) + self.assertEqual(entry.memory, 8.0) + self.assertEqual(entry.gpu, 2) + self.assertIsInstance(entry.storage, float) + self.assertIsInstance(entry.cpu, int) + self.assertIsInstance(entry.memory, float) + self.assertIsInstance(entry.gpu, int) + + def test_fractional_storage_and_memory(self): + """500Mi ≈ 0.488 GiB, 1500Mi ≈ 1.465 GiB""" + row = make_summary_row( + disk_count=500 / 1024, + memory_count=1500 / 1024, + cpu_count=2.0, + gpu_count=1.0, + ) + entry = objects.ListTaskSummaryEntry.from_db_row(row) + self.assertAlmostEqual(entry.storage, 500 / 1024) + self.assertAlmostEqual(entry.memory, 1500 / 1024) + self.assertGreater(entry.storage, 0) + self.assertGreater(entry.memory, 0) + + def test_sub_gib_not_rounded_to_zero(self): + """100Mi ≈ 0.098 GiB — must not become 0.""" + row = make_summary_row(disk_count=100 / 1024, memory_count=100 / 1024) + entry = objects.ListTaskSummaryEntry.from_db_row(row) + self.assertGreater(entry.storage, 0) + self.assertGreater(entry.memory, 0) + + def test_json_serialization_preserves_floats(self): + row = make_summary_row(disk_count=0.5, memory_count=1.5, cpu_count=4.0, gpu_count=8.0) + entry = 
objects.ListTaskSummaryEntry.from_db_row(row) + data = json.loads(entry.model_dump_json()) + self.assertEqual(data['storage'], 0.5) + self.assertEqual(data['memory'], 1.5) + self.assertEqual(data['cpu'], 4) + self.assertEqual(data['gpu'], 8) + + def test_aggregated_entry_inherits_float_fields(self): + row = make_summary_row(disk_count=0.25, memory_count=0.75, cpu_count=1.0, gpu_count=1.0) + row['workflow_id'] = 'test-workflow-1' + entry = objects.ListTaskAggregatedEntry.from_db_row(row) + self.assertAlmostEqual(entry.storage, 0.25) + self.assertAlmostEqual(entry.memory, 0.75) + self.assertEqual(entry.cpu, 1) + self.assertEqual(entry.gpu, 1) + self.assertEqual(entry.workflow_id, 'test-workflow-1') + + +class TestTaskEntryResources(unittest.TestCase): + + def test_whole_number_resources(self): + row = make_task_row(disk_count=10.0, cpu_count=4.0, memory_count=8.0, gpu_count=2.0) + entry = objects.TaskEntry.from_db_row(row) + self.assertEqual(entry.storage, 10.0) + self.assertEqual(entry.cpu, 4) + self.assertEqual(entry.memory, 8.0) + self.assertEqual(entry.gpu, 2) + self.assertIsInstance(entry.storage, float) + self.assertIsInstance(entry.cpu, int) + self.assertIsInstance(entry.memory, float) + self.assertIsInstance(entry.gpu, int) + + def test_fractional_storage_and_memory(self): + row = make_task_row( + disk_count=500 / 1024, + memory_count=1500 / 1024, + cpu_count=2.0, + gpu_count=1.0, + ) + entry = objects.TaskEntry.from_db_row(row) + self.assertAlmostEqual(entry.storage, 500 / 1024) + self.assertAlmostEqual(entry.memory, 1500 / 1024) + + def test_sub_gib_not_rounded_to_zero(self): + row = make_task_row(disk_count=100 / 1024, memory_count=100 / 1024) + entry = objects.TaskEntry.from_db_row(row) + self.assertGreater(entry.storage, 0) + self.assertGreater(entry.memory, 0) + + def test_json_serialization_preserves_floats(self): + row = make_task_row(disk_count=0.5, memory_count=1.5, cpu_count=4.0, gpu_count=8.0) + entry = objects.TaskEntry.from_db_row(row) + data = json.loads(entry.model_dump_json()) + self.assertEqual(data['storage'], 0.5) + self.assertEqual(data['memory'], 1.5) + self.assertEqual(data['cpu'], 4) + self.assertEqual(data['gpu'], 8) + + +if __name__ == '__main__': + unittest.main() diff --git a/src/service/core/workflow/tests/test_workflow_service.py b/src/service/core/workflow/tests/test_workflow_service.py index 82d049895..b887ab8b0 100644 --- a/src/service/core/workflow/tests/test_workflow_service.py +++ b/src/service/core/workflow/tests/test_workflow_service.py @@ -113,7 +113,7 @@ def create_pool( platforms={ platform_name: connectors.Platform(), }, - ).dict(), + ).model_dump(), }, }, ) @@ -178,7 +178,7 @@ def test_submit_workflow_success(self): # Act response = self.client.post( f'/api/pool/{pool_name}/workflow', - json=workflow_template.dict(), + json=workflow_template.model_dump(), ) # Assert @@ -243,7 +243,7 @@ def submit_workflow(request_id: int): response = self.client.post( f'/api/pool/{pool_name}/workflow', - json=workflow_template.dict(), + json=workflow_template.model_dump(), ) return (request_id, response.status_code, response.json()) except Exception as e: # pylint: disable=broad-exception-caught diff --git a/src/service/core/workflow/workflow_service.py b/src/service/core/workflow/workflow_service.py index ee7a615a3..42f958de5 100644 --- a/src/service/core/workflow/workflow_service.py +++ b/src/service/core/workflow/workflow_service.py @@ -271,7 +271,7 @@ def calculate_pool_quotas( ) node_set_response.pools.append(objects.PoolResourceUsage( - 
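The `.dict()` to `.model_dump()` renames in the tests above are mechanical, but note that `model_dump()` still returns a plain dict that can be splatted into another model's constructor, which the service code just below relies on. A small sketch, assuming hypothetical models:

```python
import pydantic


class PoolConfig(pydantic.BaseModel):
    """Hypothetical subset of the real pool config."""
    name: str
    backend: str


class PoolResourceUsage(PoolConfig):
    resource_usage: dict[str, int] = {}


cfg = PoolConfig(name='test-pool', backend='k8s')
# model_dump() replaces v1's dict(); the result is splatted into the
# wider model exactly as the workflow service does below.
usage = PoolResourceUsage(**cfg.model_dump(), resource_usage={'gpu': 2})
print(usage.model_dump())
```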
**pool_config.dict(), + **pool_config.model_dump(), resource_usage=resource_usage )) @@ -1118,7 +1118,7 @@ def action_request_helper(action_type: ActionType, payload: Dict[str, Any], name router_info = objects.RouterResponse(router_address=router_address, key=key, cookie=cookie) router_infos[task_obj.name] = router_info action_attributes: Dict[str, Any] = { - 'action': action_type.value, **router_info.dict(), **payload} + 'action': action_type.value, **router_info.model_dump(), **payload} # Create redis object redis_client.set(key, json.dumps(action_attributes)) diff --git a/src/service/delayed_job_monitor/delayed_job_monitor.py b/src/service/delayed_job_monitor/delayed_job_monitor.py index 4fffc28e9..8ac0932ee 100644 --- a/src/service/delayed_job_monitor/delayed_job_monitor.py +++ b/src/service/delayed_job_monitor/delayed_job_monitor.py @@ -42,20 +42,21 @@ class DelayedJobMonitorConfig(connectors.RedisConfig, connectors.PostgresConfig, """Configuration for DelayedJobMonitor.""" # The amount of time the monitor waits before polling again (in seconds) poll_interval: int = pydantic.Field( - command_line='poll_interval', - env='OSMO_POLL_INTERVAL', default=5, - description='How long to wait (In seconds) between checking the delayed job queue') + description='How long to wait (In seconds) between checking the delayed job queue', + json_schema_extra={'command_line': 'poll_interval', 'env': 'OSMO_POLL_INTERVAL'}) progress_file: str = pydantic.Field( - command_line='progress_file', - env='OSMO_PROGRESS_FILE', default='/var/run/osmo/last_progress', - description='The file to write progress timestamps to (For liveness/startup probes)') + description='The file to write progress timestamps to (For liveness/startup probes)', + json_schema_extra={'command_line': 'progress_file', 'env': 'OSMO_PROGRESS_FILE'}) enable_metrics: bool = pydantic.Field( - command_line='enable_metrics', - env='OSMO_ENABLE_METRICS', - action='store_true', - description='Enable metrics collection') + default=False, + description='Enable metrics collection', + json_schema_extra={ + 'command_line': 'enable_metrics', + 'env': 'OSMO_ENABLE_METRICS', + 'action': 'store_true' + }) class DelayedJobMonitor: diff --git a/src/service/logger/ctrl_websocket.py b/src/service/logger/ctrl_websocket.py index d80e317aa..c60d162b5 100644 --- a/src/service/logger/ctrl_websocket.py +++ b/src/service/logger/ctrl_websocket.py @@ -36,18 +36,19 @@ class MetricsOptions(pydantic.BaseModel): """ Credential options """ group_metrics: Optional[task.TaskGroupMetrics] = pydantic.Field( - description='Metrics for group') + default=None, description='Metrics for group') task_io_metrics: Optional[task_io.TaskIOMetrics] = pydantic.Field( - description='Metrics for task io') + default=None, description='Metrics for task io') - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') + @classmethod def validate(cls, values): # pylint: disable=no-self-argument """ A valid metric can only be one of the two types """ num_fields_set = sum(1 for value in values.values() if value is not None) if num_fields_set != 1: raise osmo_errors.OSMOUserError( - f'Exactly one of the following must be set {cls.__fields__.keys()}') + f'Exactly one of the following must be set {cls.model_fields.keys()}') return values @@ -58,7 +59,8 @@ def update_metrics( ): """ Updates the metrics with the given workflow and group_name in the database. 
""" database = connectors.PostgresConnector.get_instance() - metrics = getattr(metrics_options, metrics_options.__fields_set__.pop()) + field_name = next(iter(metrics_options.model_fields_set)) + metrics = getattr(metrics_options, field_name) if isinstance(metrics, task.TaskGroupMetrics): task.TaskGroup.patch_metrics_in_db( database=database, @@ -194,15 +196,16 @@ async def get_logs(websocket): time=loaded_json['time'], text=loaded_json['text'], io_type=loaded_json['iotype']) - # Use logs.json() instead of logs.dict() to convert enum and datetime to - # strings + # Use logs.model_dump_json() instead of + # logs.model_dump() to convert enum and + # datetime to strings await redis_client.xadd(f'{workflow_obj.workflow_id}-logs', - json.loads(logs.json()), + json.loads(logs.model_dump_json()), maxlen=workflow_config.max_log_lines) await redis_client.xadd( common.get_redis_task_log_name( workflow_obj.workflow_id, task_name, retry_id), - json.loads(logs.json()), + json.loads(logs.model_dump_json()), maxlen=workflow_config.max_task_log_lines) # Set expiration on first log message if first_run: diff --git a/src/service/logger/logger.py b/src/service/logger/logger.py index e10b7e794..b0a6c8433 100644 --- a/src/service/logger/logger.py +++ b/src/service/logger/logger.py @@ -33,19 +33,17 @@ class LoggerServiceConfig(connectors.RedisConfig, connectors.PostgresConfig, src.lib.utils.logging.LoggingConfig, static_config.StaticConfig): """Config settings for the logger service""" host: str = pydantic.Field( - command_line='host', default='http://0.0.0.0:8000', - description='The url to bind to when serving the workflow service.') + description='The url to bind to when serving the workflow service.', + json_schema_extra={'command_line': 'host'}) progress_file: str = pydantic.Field( - command_line='progress_file', - env='OSMO_PROGRESS_FILE', default='/var/run/osmo/last_progress', - description='The file to write node watch progress timestamps to (For liveness/startup)') + description='The file to write node watch progress timestamps to (For liveness/startup)', + json_schema_extra={'command_line': 'progress_file', 'env': 'OSMO_PROGRESS_FILE'}) progress_period: int = pydantic.Field( - command_line='progress_period', - env='OSMO_PROGRESS_PERIOD', default=30, - description='The amount of time to wait between updating progress') + description='The amount of time to wait between updating progress', + json_schema_extra={'command_line': 'progress_period', 'env': 'OSMO_PROGRESS_PERIOD'}) app = fastapi.FastAPI(docs_url=None, redoc_url=None, openapi_url=None) diff --git a/src/service/router/router.py b/src/service/router/router.py index 9fbb1e0d4..24b65b3f3 100644 --- a/src/service/router/router.py +++ b/src/service/router/router.py @@ -38,50 +38,50 @@ class RouterServiceConfig(src.lib.utils.logging.LoggingConfig, static_config.Sta connectors.PostgresConfig): """Config settings for the logger service""" host: str = pydantic.Field( - command_line='host', default='http://0.0.0.0:8000', - description='The url to bind to when serving the router service.') + description='The url to bind to when serving the router service.', + json_schema_extra={'command_line': 'host'}) hostname: str = pydantic.Field( - command_line='hostname', default='localhost', - description='The DNS hostname of the router service.') + description='The DNS hostname of the router service.', + json_schema_extra={'command_line': 'hostname'}) timeout: int = pydantic.Field( - command_line='timeout', default=60, - description='Timeout for router 
connections.') + description='Timeout for router connections.', + json_schema_extra={'command_line': 'timeout'}) webserver_initial_timeout: int = pydantic.Field( - command_line='webserver_initial_timeout', - default=60 * 60, # 1 hour in seconds - description='Initial timeout for webserver connections.') + default=60 * 60, + # 1 hour in seconds + description='Initial timeout for webserver connections.', + json_schema_extra={'command_line': 'webserver_initial_timeout'}) webserver_nonactive_timeout: int = pydantic.Field( - command_line='webserver_nonactive_timeout', - default=30 * 60, # 30 minutes in seconds - description='Timeout for non-activewebserver connections.') + default=30 * 60, + # 30 minutes in seconds + description='Timeout for non-active webserver connections.', + json_schema_extra={'command_line': 'webserver_nonactive_timeout'}) sticky_cookies: List[str] = pydantic.Field( - command_line='sticky_cookies', default=['AWSALB', 'AWSALBCORS'], - description='List of sticky cookies to send to the webserver.') + description='List of sticky cookies to send to the webserver.', + json_schema_extra={'command_line': 'sticky_cookies'}) class RouterConnection(pydantic.BaseModel): """Model representing a router connection with websocket and synchronization events.""" + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True) + wait_connect: Optional[asyncio.Event] = None wait_close: Optional[asyncio.Event] = None websocket: Optional[fastapi.WebSocket] = None - class Config: - arbitrary_types_allowed = True - class WebserverConnection(pydantic.BaseModel): """Model representing a webserver connection with websocket and synchronization events.""" + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True) + wait_close: asyncio.Event last_active_time: datetime.datetime websocket: fastapi.WebSocket - class Config: - arbitrary_types_allowed = True - class ConnectionPayload(pydantic.BaseModel): key: str @@ -242,7 +242,7 @@ async def webserver_http_request(request: fastapi.Request, ctrl_key: str): sticky_cookies = RouterServiceConfig.load().sticky_cookies cookie_str = ', '.join(f'{k}={v}' for k, v in request.cookies.items() if k in sticky_cookies) await ctrl_ws.send_json( - ConnectionPayload(key=conn_key, cookie=cookie_str).dict(exclude_none=True)) + ConnectionPayload(key=conn_key, cookie=cookie_str).model_dump(exclude_none=True)) try: await asyncio.wait_for(connect.wait(), RouterServiceConfig.load().timeout) ws = connections[conn_key].websocket @@ -337,7 +337,7 @@ async def webserver_ws_request(ws: fastapi.WebSocket, ctrl_key: str): cookies.append(cookie.strip()) cookie_str = ', '.join(cookies) await ctrl_ws.send_json( - ConnectionPayload(key=conn_key, cookie=cookie_str, type='ws', payload=payload).dict()) + ConnectionPayload(key=conn_key, cookie=cookie_str, type='ws', payload=payload).model_dump()) close = None try: diff --git a/src/service/worker/worker.py b/src/service/worker/worker.py index f8197d58d..9af37fcd0 100644 --- a/src/service/worker/worker.py +++ b/src/service/worker/worker.py @@ -46,18 +46,19 @@ class WorkerConfig(connectors.RedisConfig, connectors.PostgresConfig, src.lib.utils.logging.LoggingConfig, static_config.StaticConfig, metrics.MetricsCreatorConfig): progress_file: str = pydantic.Field( - command_line='progress_file', - env='OSMO_PROGRESS_FILE', default='/var/run/osmo/last_progress', - description='The file to write progress timestamps to (For liveness/startup probes)') + description='The file to write progress timestamps to (For liveness/startup probes)', +
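The `class Config` to `model_config = pydantic.ConfigDict(...)` move shown for RouterConnection and WebserverConnection above is the standard v2 replacement; `arbitrary_types_allowed=True` is still needed for non-Pydantic types such as `asyncio.Event`. A minimal sketch:

```python
import asyncio

import pydantic


class Connection(pydantic.BaseModel):
    """Hypothetical connection model mirroring the pattern above."""
    # v2 replaces the nested `class Config` with a ConfigDict assignment.
    model_config = pydantic.ConfigDict(arbitrary_types_allowed=True)

    wait_close: asyncio.Event | None = None


conn = Connection(wait_close=asyncio.Event())
print(type(conn.wait_close).__name__)  # Event
```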
json_schema_extra={'command_line': 'progress_file', 'env': 'OSMO_PROGRESS_FILE'}) progress_iter_frequency: str = pydantic.Field( - command_line='progress_iter_frequency', - env='OSMO_PROGRESS_ITER_FREQUENCY', default='15s', description='How often to write to progress file when processing tasks in a loop (' 'e.g. write to progress every 1 minute processed, like uploaded to DB). ' 'Format needs to be where unit can be either s (seconds) and ' - 'm (minutes).') + 'm (minutes).', + json_schema_extra={ + 'command_line': 'progress_iter_frequency', + 'env': 'OSMO_PROGRESS_ITER_FREQUENCY' + }) class Worker(kombu.mixins.ConsumerMixin): diff --git a/src/tests/common/envoy/ssl_proxy.py b/src/tests/common/envoy/ssl_proxy.py index a510c748c..f96c36192 100644 --- a/src/tests/common/envoy/ssl_proxy.py +++ b/src/tests/common/envoy/ssl_proxy.py @@ -123,7 +123,7 @@ def _create_ssl_proxy_backends( for eligible_backend in eligible_backends: ssl_proxy_backends.append(SslProxyBackend( - **eligible_backend.dict(), + **eligible_backend.model_dump(), assigned_ports=[next(assigned_ports) for _ in eligible_backend.ports] )) diff --git a/src/ui/openapi.json b/src/ui/openapi.json index 3d60a0da9..2795d95e0 100644 --- a/src/ui/openapi.json +++ b/src/ui/openapi.json @@ -19,7 +19,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ServiceConfig" + "$ref": "#/components/schemas/ServiceConfig-Output" } } } @@ -35,24 +35,31 @@ "operationId": "put_service_configs_api_configs_service_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutServiceRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -61,6 +68,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Put Service Configs Api Configs Service Put" } } @@ -87,24 +95,31 @@ "operationId": "patch_service_configs_api_configs_service_patch", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PatchConfigRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -113,6 +128,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Patch Service Configs Api Configs Service Patch" } } @@ -145,7 +161,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/WorkflowConfig" + "$ref": "#/components/schemas/WorkflowConfig-Output" } } } @@ -161,24 +177,31 @@ "operationId": "put_workflow_configs_api_configs_workflow_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutWorkflowRequest" } } - }, - "required": true + } }, 
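Pydantic v2 no longer accepts arbitrary keyword arguments on `Field()`, so the service configs above (delayed job monitor, logger, router, worker) move their argparse-oriented `command_line`/`env`/`action` kwargs into `json_schema_extra`, where a config loader can still find them. A sketch with a hypothetical, simplified config class:

```python
import pydantic


class WorkerConfigSketch(pydantic.BaseModel):
    """Hypothetical config; the real classes mix in Redis/Postgres configs."""
    progress_file: str = pydantic.Field(
        default='/var/run/osmo/last_progress',
        description='The file to write progress timestamps to',
        json_schema_extra={'command_line': 'progress_file',
                           'env': 'OSMO_PROGRESS_FILE'})


# The extra keys remain reachable on the FieldInfo, so a CLI/env loader
# can recover them without v1's loose Field kwargs.
field = WorkerConfigSketch.model_fields['progress_file']
print(field.json_schema_extra)
```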
"responses": { "200": { @@ -187,6 +210,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Put Workflow Configs Api Configs Workflow Put" } } @@ -213,24 +237,31 @@ "operationId": "patch_workflow_configs_api_configs_workflow_patch", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PatchConfigRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -239,6 +270,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Patch Workflow Configs Api Configs Workflow Patch" } } @@ -271,7 +303,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DatasetConfig" + "$ref": "#/components/schemas/DatasetConfig-Output" } } } @@ -287,24 +319,31 @@ "operationId": "put_dataset_configs_api_configs_dataset_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutDatasetRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -313,6 +352,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Put Dataset Configs Api Configs Dataset Put" } } @@ -339,24 +379,31 @@ "operationId": "patch_dataset_configs_api_configs_dataset_patch", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PatchConfigRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -365,6 +412,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Patch Dataset Configs Api Configs Dataset Patch" } } @@ -384,49 +432,60 @@ } }, "/api/configs/dataset/{name}": { - "delete": { + "patch": { "tags": [ "Config API" ], - "summary": "Delete Dataset", - "description": "Delete dataset configuration for a specific bucket", - "operationId": "delete_dataset_api_configs_dataset__name__delete", + "summary": "Patch Dataset", + "description": "Patch dataset configuration for a specific bucket", + "operationId": "patch_dataset_api_configs_dataset__name__patch", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ConfigsRequest" + "$ref": 
"#/components/schemas/PatchDatasetRequest" } } - }, - "required": true + } }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": {} + "schema": { + "type": "object", + "additionalProperties": true, + "title": "Response Patch Dataset Api Configs Dataset Name Patch" + } } } }, @@ -442,52 +501,56 @@ } } }, - "patch": { + "delete": { "tags": [ "Config API" ], - "summary": "Patch Dataset", - "description": "Patch dataset configuration for a specific bucket", - "operationId": "patch_dataset_api_configs_dataset__name__patch", + "summary": "Delete Dataset", + "description": "Delete dataset configuration for a specific bucket", + "operationId": "delete_dataset_api_configs_dataset__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PatchDatasetRequest" + "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "type": "object", - "title": "Response Patch Dataset Api Configs Dataset Name Patch" - } + "schema": {} } } }, @@ -527,32 +590,56 @@ } }, "/api/configs/backend/{name}": { - "get": { + "post": { "tags": [ "Config API" ], - "summary": "Get Backend", - "description": "Get info for a specific backend.", - "operationId": "get_backend_api_configs_backend__name__get", + "summary": "Update Backend", + "description": "Override the config for a specific backend.", + "operationId": "update_backend_api_configs_backend__name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } + }, + { + "name": "x-osmo-user", + "in": "header", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "X-Osmo-User" + } } ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/PostBackendRequest" + } + } + } + }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/Backend" - } + "schema": {} } } }, @@ -568,49 +655,32 @@ } } }, - "post": { + "get": { "tags": [ "Config API" ], - "summary": "Update Backend", - "description": "Override the config for a specific backend.", - "operationId": "update_backend_api_configs_backend__name__post", + "summary": "Get Backend", + "description": "Get info for a specific backend.", + "operationId": "get_backend_api_configs_backend__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" - }, - { - "required": false, - "schema": { - "type": "string", - "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/PostBackendRequest" - } - } - }, - "required": true - }, 
"responses": { "200": { "description": "Successful Response", "content": { "application/json": { - "schema": {} + "schema": { + "$ref": "#/components/schemas/Backend" + } } } }, @@ -635,33 +705,40 @@ "operationId": "delete_backend_api_configs_backend__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/DeleteBackendRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -695,23 +772,30 @@ "operationId": "list_pools_api_configs_pool_get", "parameters": [ { + "name": "verbose", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Verbose", - "default": false - }, - "name": "verbose", - "in": "query" + "default": false, + "title": "Verbose" + } }, { + "name": "backend", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Backend" - }, - "name": "backend", - "in": "query" + } } ], "responses": { @@ -754,24 +838,31 @@ "operationId": "put_pools_api_configs_pool_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutPoolsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -805,23 +896,23 @@ "operationId": "read_pool_api_configs_pool__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "verbose", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Verbose", - "default": false - }, - "name": "verbose", - "in": "query" + "default": false, + "title": "Verbose" + } } ], "responses": { @@ -832,7 +923,7 @@ "schema": { "anyOf": [ { - "$ref": "#/components/schemas/Pool" + "$ref": "#/components/schemas/Pool-Output" }, { "$ref": "#/components/schemas/PoolEditable" @@ -864,33 +955,40 @@ "operationId": "put_pool_api_configs_pool__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutPoolRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -913,42 +1011,49 @@ } } }, - "delete": { + "patch": { "tags": [ "Config API" ], - "summary": "Delete Pool", - "description": "Delete Pool configurations", - "operationId": "delete_pool_api_configs_pool__name__delete", + "summary": "Patch Pool", + "description": "Patch Pool 
configurations", + "operationId": "patch_pool_api_configs_pool__name__patch", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ConfigsRequest" + "$ref": "#/components/schemas/PatchPoolRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -971,42 +1076,49 @@ } } }, - "patch": { + "delete": { "tags": [ "Config API" ], - "summary": "Patch Pool", - "description": "Patch Pool configurations", - "operationId": "patch_pool_api_configs_pool__name__patch", + "summary": "Delete Pool", + "description": "Delete Pool configurations", + "operationId": "delete_pool_api_configs_pool__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PatchPoolRequest" + "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1040,33 +1152,40 @@ "operationId": "rename_pool_api_configs_pool__name__rename_put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RenamePoolRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1100,23 +1219,23 @@ "operationId": "list_platforms_in_pool_api_configs_pool__name__platform_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "verbose", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Verbose", - "default": false - }, - "name": "verbose", - "in": "query" + "default": false, + "title": "Verbose" + } } ], "responses": { @@ -1125,6 +1244,7 @@ "content": { "application/json": { "schema": { + "type": "object", "additionalProperties": { "anyOf": [ { @@ -1134,11 +1254,10 @@ "$ref": "#/components/schemas/PlatformEditable" }, { - "$ref": "#/components/schemas/Platform" + "$ref": "#/components/schemas/Platform-Output" } ] }, - "type": "object", "title": "Response List Platforms In Pool Api Configs Pool Name Platform Get" } } @@ -1167,32 +1286,32 @@ "operationId": "read_platform_in_pool_api_configs_pool__name__platform__platform_name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": 
"Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "platform_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Platform Name" - }, - "name": "platform_name", - "in": "path" + } }, { + "name": "verbose", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Verbose", - "default": false - }, - "name": "verbose", - "in": "query" + "default": false, + "title": "Verbose" + } } ], "responses": { @@ -1209,7 +1328,7 @@ "$ref": "#/components/schemas/PlatformEditable" }, { - "$ref": "#/components/schemas/Platform" + "$ref": "#/components/schemas/Platform-Output" } ], "title": "Response Read Platform In Pool Api Configs Pool Name Platform Platform Name Get" @@ -1238,42 +1357,49 @@ "operationId": "put_platform_in_pool_api_configs_pool__name__platform__platform_name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "platform_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Platform Name" - }, - "name": "platform_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutPoolPlatformRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1307,42 +1433,49 @@ "operationId": "rename_platform_in_pool_api_configs_pool__name__platform__platform_name__rename_put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "platform_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Platform Name" - }, - "name": "platform_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RenamePoolPlatformRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1381,6 +1514,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response List Pod Templates Api Configs Pod Template Get" } } @@ -1397,24 +1531,31 @@ "operationId": "put_pod_templates_api_configs_pod_template_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutPodTemplatesRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1448,13 +1589,13 @@ "operationId": "read_pod_template_api_configs_pod_template__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -1464,6 +1605,7 @@ 
"application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Read Pod Template Api Configs Pod Template Name Get" } } @@ -1490,33 +1632,40 @@ "operationId": "put_pod_template_api_configs_pod_template__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutPodTemplateRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1548,33 +1697,40 @@ "operationId": "delete_pod_template_api_configs_pod_template__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1612,10 +1768,11 @@ "content": { "application/json": { "schema": { + "type": "object", "additionalProperties": { - "type": "object" + "type": "object", + "additionalProperties": true }, - "type": "object", "title": "Response List Group Templates Api Configs Group Template Get" } } @@ -1632,24 +1789,31 @@ "operationId": "put_group_templates_api_configs_group_template_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutGroupTemplatesRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1683,13 +1847,13 @@ "operationId": "read_group_template_api_configs_group_template__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -1699,6 +1863,7 @@ "application/json": { "schema": { "type": "object", + "additionalProperties": true, "title": "Response Read Group Template Api Configs Group Template Name Get" } } @@ -1725,33 +1890,40 @@ "operationId": "put_group_template_api_configs_group_template__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutGroupTemplateRequest" } } - }, - "required": true + } }, "responses": { 
"200": { @@ -1783,33 +1955,40 @@ "operationId": "delete_group_template_api_configs_group_template__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1847,13 +2026,13 @@ "content": { "application/json": { "schema": { + "type": "object", "additionalProperties": { + "type": "array", "items": { "$ref": "#/components/schemas/ResourceAssertion" - }, - "type": "array" + } }, - "type": "object", "title": "Response List Resource Validations Api Configs Resource Validation Get" } } @@ -1870,24 +2049,31 @@ "operationId": "put_resource_validations_api_configs_resource_validation_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutResourceValidationsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -1921,13 +2107,13 @@ "operationId": "read_resource_validation_api_configs_resource_validation__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -1936,10 +2122,10 @@ "content": { "application/json": { "schema": { + "type": "array", "items": { "$ref": "#/components/schemas/ResourceAssertion" }, - "type": "array", "title": "Response Read Resource Validation Api Configs Resource Validation Name Get" } } @@ -1966,33 +2152,40 @@ "operationId": "put_resource_validation_api_configs_resource_validation__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutResourceValidationRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2024,33 +2217,40 @@ "operationId": "delete_resource_validation_api_configs_resource_validation__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": 
true + } }, "responses": { "200": { @@ -2088,10 +2288,10 @@ "content": { "application/json": { "schema": { + "type": "array", "items": { - "$ref": "#/components/schemas/Role" + "$ref": "#/components/schemas/Role-Output" }, - "type": "array", "title": "Response List Roles Api Configs Role Get" } } @@ -2108,24 +2308,31 @@ "operationId": "put_roles_api_configs_role_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutRolesRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2159,13 +2366,13 @@ "operationId": "read_role_api_configs_role__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -2174,7 +2381,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Role" + "$ref": "#/components/schemas/Role-Output" } } } @@ -2200,33 +2407,40 @@ "operationId": "put_role_api_configs_role__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutRoleRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2258,33 +2472,40 @@ "operationId": "delete_role_api_configs_role__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2322,10 +2543,10 @@ "content": { "application/json": { "schema": { + "type": "object", "additionalProperties": { "$ref": "#/components/schemas/BackendTests" }, - "type": "object", "title": "Response List Backend Tests Api Configs Backend Test Get" } } @@ -2342,24 +2563,31 @@ "operationId": "put_backend_tests_api_configs_backend_test_put", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutBackendTestsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2393,13 +2621,13 @@ "operationId": "read_backend_test_api_configs_backend_test__name__get", "parameters": [ { + "name": "name", + "in": "path", 
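Every optional `x-osmo-user` header in this spec changes from a plain `{"type": "string"}` to `anyOf: [string, null]`: with Pydantic v2, FastAPI represents `Optional[str]` parameters as explicitly nullable instead of dropping the None case. A runnable sketch with a hypothetical route:

```python
import json

import fastapi

app = fastapi.FastAPI()


@app.get('/api/example')
def example(x_osmo_user: str | None = fastapi.Header(default=None)):
    return {'user': x_osmo_user}


# The generated parameter schema now reads
# {"anyOf": [{"type": "string"}, {"type": "null"}], "title": "X-Osmo-User"}.
params = app.openapi()['paths']['/api/example']['get']['parameters']
print(json.dumps(params, indent=2))
```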
"required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -2434,33 +2662,40 @@ "operationId": "put_backend_test_api_configs_backend_test__name__put", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/PutBackendTestRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2483,42 +2718,49 @@ } } }, - "delete": { + "patch": { "tags": [ "Config API" ], - "summary": "Delete Backend Test", - "description": "Delete test configuration", - "operationId": "delete_backend_test_api_configs_backend_test__name__delete", + "summary": "Patch Backend Test", + "description": "Patch backend test configuration", + "operationId": "patch_backend_test_api_configs_backend_test__name__patch", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ConfigsRequest" + "$ref": "#/components/schemas/PatchBackendTestRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2541,42 +2783,49 @@ } } }, - "patch": { + "delete": { "tags": [ "Config API" ], - "summary": "Patch Backend Test", - "description": "Patch backend test configuration", - "operationId": "patch_backend_test_api_configs_backend_test__name__patch", + "summary": "Delete Backend Test", + "description": "Delete test configuration", + "operationId": "delete_backend_test_api_configs_backend_test__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PatchBackendTestRequest" + "$ref": "#/components/schemas/ConfigsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2610,146 +2859,204 @@ "operationId": "get_configs_history_api_configs_history_get", "parameters": [ { - "description": "Number of records to skip", + "name": "offset", + "in": "query", "required": false, "schema": { - "type": "integer", - "minimum": 0.0, - "title": "Offset", - "description": "Number of records to skip", - "default": 0 - }, - "name": "offset", - "in": "query" + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "description": "Number of records to skip", + "default": 0, + "title": "Offset" + }, + "description": "Number of records to 
skip" }, { - "description": "Maximum number of records to return", + "name": "limit", + "in": "query", "required": false, "schema": { - "type": "integer", - "maximum": 1000.0, - "exclusiveMinimum": 0.0, - "title": "Limit", + "anyOf": [ + { + "type": "integer", + "maximum": 1000, + "exclusiveMinimum": 0 + }, + { + "type": "null" + } + ], "description": "Maximum number of records to return", - "default": 20 + "default": 20, + "title": "Limit" }, - "name": "limit", - "in": "query" + "description": "Maximum number of records to return" }, { - "description": "Sort order by creation time", + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "description": "Sort order by creation time", "default": "ASC" }, - "name": "order", - "in": "query" + "description": "Sort order by creation time" }, { - "description": "Filter by config types", + "name": "config_types", + "in": "query", "required": false, "schema": { - "items": { - "$ref": "#/components/schemas/src__lib__utils__config_history__ConfigHistoryType" - }, - "type": "array", - "description": "Filter by config types" + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/src__lib__utils__config_history__ConfigHistoryType" + } + }, + { + "type": "null" + } + ], + "description": "Filter by config types", + "title": "Config Types" }, - "name": "config_types", - "in": "query", - "explode": true + "description": "Filter by config types" }, { - "description": "Filter by config name", + "name": "name", + "in": "query", "required": false, "schema": { - "type": "string", - "title": "Name", - "description": "Filter by config name" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Filter by config name", + "title": "Name" }, - "name": "name", - "in": "query" + "description": "Filter by config name" }, { - "description": "Filter by revision", + "name": "revision", + "in": "query", "required": false, "schema": { - "type": "integer", - "exclusiveMinimum": 0.0, - "title": "Revision", - "description": "Filter by revision" + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": 0 + }, + { + "type": "null" + } + ], + "description": "Filter by revision", + "title": "Revision" }, - "name": "revision", - "in": "query" + "description": "Filter by revision" }, { - "description": "Filter by tags", + "name": "tags", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", - "title": "Tags", - "description": "Filter by tags" + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "description": "Filter by tags", + "title": "Tags" }, - "name": "tags", - "in": "query", - "explode": true + "description": "Filter by tags" }, { - "description": "Filter by creation time before", + "name": "created_before", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", - "title": "Created Before", - "description": "Filter by creation time before" + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "description": "Filter by creation time before", + "title": "Created Before" }, - "name": "created_before", - "in": "query" + "description": "Filter by creation time before" }, { - "description": "Filter by creation time after", + "name": "created_after", + "in": "query", "required": false, "schema": { - "type": 
"string", - "format": "date-time", - "title": "Created After", - "description": "Filter by creation time after" + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "description": "Filter by creation time after", + "title": "Created After" }, - "name": "created_after", - "in": "query" + "description": "Filter by creation time after" }, { - "description": "Get config state at specific timestamp", + "name": "at_timestamp", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", - "title": "At Timestamp", - "description": "Get config state at specific timestamp" + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "description": "Get config state at specific timestamp", + "title": "At Timestamp" }, - "name": "at_timestamp", - "in": "query" + "description": "Get config state at specific timestamp" }, { - "description": "Whether to omit data from the response", + "name": "omit_data", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Omit Data", "description": "Whether to omit data from the response", - "default": false + "default": false, + "title": "Omit Data" }, - "name": "omit_data", - "in": "query" + "description": "Whether to omit data from the response" } ], "responses": { @@ -2786,24 +3093,31 @@ "operationId": "rollback_config_api_configs_history_rollback_post", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/RollbackConfigRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2837,32 +3151,39 @@ "operationId": "delete_config_history_revision_api_configs_history__config_type__revision__revision__delete", "parameters": [ { + "name": "config_type", + "in": "path", "required": true, "schema": { "type": "string", "title": "Config Type" - }, - "name": "config_type", - "in": "path" + } }, { + "name": "revision", + "in": "path", "required": true, "schema": { "type": "integer", - "exclusiveMinimum": 0.0, + "exclusiveMinimum": 0, "title": "Revision" - }, - "name": "revision", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -2897,34 +3218,34 @@ "operationId": "update_config_history_tags_api_configs_history__config_type__revision__revision__tags_post", "parameters": [ { + "name": "config_type", + "in": "path", "required": true, "schema": { "type": "string", "title": "Config Type" - }, - "name": "config_type", - "in": "path" + } }, { + "name": "revision", + "in": "path", "required": true, "schema": { "type": "integer", - "exclusiveMinimum": 0.0, + "exclusiveMinimum": 0, "title": "Revision" - }, - "name": "revision", - "in": "path" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UpdateConfigTagsRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -2958,36 +3279,36 @@ "operationId": "get_config_diff_api_configs_diff_get", "parameters": [ { + "name": "config_type", + "in": "query", 
"required": true, "schema": { - "$ref": "#/components/schemas/src__utils__connectors__postgres__ConfigHistoryType" - }, - "name": "config_type", - "in": "query" + "$ref": "#/components/schemas/src__lib__utils__config_history__ConfigHistoryType" + } }, { - "description": "First revision to compare", + "name": "first_revision", + "in": "query", "required": true, "schema": { "type": "integer", - "exclusiveMinimum": 0.0, - "title": "First Revision", - "description": "First revision to compare" + "exclusiveMinimum": 0, + "description": "First revision to compare", + "title": "First Revision" }, - "name": "first_revision", - "in": "query" + "description": "First revision to compare" }, { - "description": "Second revision to compare", + "name": "second_revision", + "in": "query", "required": true, "schema": { "type": "integer", - "exclusiveMinimum": 0.0, - "title": "Second Revision", - "description": "Second revision to compare" + "exclusiveMinimum": 0, + "description": "Second revision to compare", + "title": "Second Revision" }, - "name": "second_revision", - "in": "query" + "description": "Second revision to compare" } ], "responses": { @@ -3024,50 +3345,50 @@ "operationId": "get_new_jwt_token_api_auth_jwt_refresh_token_get", "parameters": [ { + "name": "refresh_token", + "in": "query", "required": true, "schema": { "type": "string", "title": "Refresh Token" - }, - "name": "refresh_token", - "in": "query" + } }, { + "name": "workflow_id", + "in": "query", "required": true, "schema": { "type": "string", "title": "Workflow Id" - }, - "name": "workflow_id", - "in": "query" + } }, { + "name": "group_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Group Name" - }, - "name": "group_name", - "in": "query" + } }, { + "name": "task_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "query" + } }, { + "name": "retry_id", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Retry Id", - "default": 0 - }, - "name": "retry_id", - "in": "query" + "default": 0, + "title": "Retry Id" + } } ], "responses": { @@ -3102,52 +3423,52 @@ "operationId": "post_new_jwt_token_api_auth_jwt_refresh_token_post", "parameters": [ { + "name": "workflow_id", + "in": "query", "required": true, "schema": { "type": "string", "title": "Workflow Id" - }, - "name": "workflow_id", - "in": "query" + } }, { + "name": "group_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Group Name" - }, - "name": "group_name", - "in": "query" + } }, { + "name": "task_name", + "in": "query", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "query" + } }, { + "name": "retry_id", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Retry Id", - "default": 0 - }, - "name": "retry_id", - "in": "query" + "default": 0, + "title": "Retry Id" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/TokenRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -3183,13 +3504,13 @@ "operationId": "get_jwt_token_from_access_token_api_auth_jwt_access_token_get", "parameters": [ { + "name": "access_token", + "in": "query", "required": true, "schema": { "type": "string", "title": "Access Token" - }, - "name": "access_token", - "in": "query" + } } ], "responses": { @@ -3223,14 +3544,14 @@ "description": "API to create a new jwt token from 
an access token.", "operationId": "post_jwt_token_from_access_token_api_auth_jwt_access_token_post", "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/TokenRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -3266,54 +3587,67 @@ "operationId": "create_access_token_api_auth_access_token__token_name__post", "parameters": [ { + "name": "token_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Token Name" - }, - "name": "token_name", - "in": "path" + } }, { + "name": "expires_at", + "in": "query", "required": true, "schema": { "type": "string", "title": "Expires At" - }, - "name": "expires_at", - "in": "query" + } }, { + "name": "description", + "in": "query", "required": false, "schema": { "type": "string", - "title": "Description", - "default": "" - }, - "name": "description", - "in": "query" + "default": "", + "title": "Description" + } }, { - "required": false, - "schema": { - "items": { - "type": "string" - }, - "type": "array", - "title": "Roles" - }, "name": "roles", "in": "query", - "explode": true - }, - { "required": false, "schema": { - "type": "string", - "title": "X-Osmo-User" - }, + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "title": "Roles" + } + }, + { "name": "x-osmo-user", - "in": "header" + "in": "header", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "X-Osmo-User" + } } ], "responses": { @@ -3349,22 +3683,29 @@ "operationId": "delete_access_token_api_auth_access_token__token_name__delete", "parameters": [ { + "name": "token_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Token Name" - }, - "name": "token_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -3399,22 +3740,29 @@ "operationId": "list_access_token_roles_api_auth_access_token__token_name__roles_get", "parameters": [ { + "name": "token_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Token Name" - }, - "name": "token_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -3451,13 +3799,20 @@ "operationId": "list_access_tokens_api_auth_access_token_get", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -3466,10 +3821,10 @@ "content": { "application/json": { "schema": { + "type": "array", "items": { "$ref": "#/components/schemas/AccessTokenWithRoles" }, - "type": "array", "title": "Response List Access Tokens Api Auth Access Token Get" } } @@ -3498,63 +3853,76 @@ "operationId": "admin_create_access_token_api_auth_user__user_id__access_token__token_name__post", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": 
"path" + } }, { + "name": "token_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Token Name" - }, - "name": "token_name", - "in": "path" + } }, { + "name": "expires_at", + "in": "query", "required": true, "schema": { "type": "string", "title": "Expires At" - }, - "name": "expires_at", - "in": "query" + } }, { + "name": "description", + "in": "query", "required": false, "schema": { "type": "string", - "title": "Description", - "default": "" - }, - "name": "description", - "in": "query" + "default": "", + "title": "Description" + } }, { + "name": "roles", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Roles" - }, - "name": "roles", - "in": "query", - "explode": true + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -3590,22 +3958,22 @@ "operationId": "admin_delete_access_token_api_auth_user__user_id__access_token__token_name__delete", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } }, { + "name": "token_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Token Name" - }, - "name": "token_name", - "in": "path" + } } ], "responses": { @@ -3640,13 +4008,13 @@ "operationId": "admin_list_access_tokens_api_auth_user__user_id__access_token_get", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } } ], "responses": { @@ -3655,10 +4023,10 @@ "content": { "application/json": { "schema": { + "type": "array", "items": { "$ref": "#/components/schemas/AccessTokenWithRoles" }, - "type": "array", "title": "Response Admin List Access Tokens Api Auth User User Id Access Token Get" } } @@ -3687,46 +4055,59 @@ "operationId": "list_users_api_auth_user_get", "parameters": [ { + "name": "start_index", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Start Index", - "default": 1 - }, - "name": "start_index", - "in": "query" + "default": 1, + "title": "Start Index" + } }, { + "name": "count", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Count", - "default": 100 - }, - "name": "count", - "in": "query" + "default": 100, + "title": "Count" + } }, { + "name": "id_prefix", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Id Prefix" - }, - "name": "id_prefix", - "in": "query" + } }, { + "name": "roles", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Roles" - }, - "name": "roles", - "in": "query", - "explode": true + } } ], "responses": { @@ -3761,24 +4142,31 @@ "operationId": "create_user_api_auth_user_post", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - 
}, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateUserRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -3814,13 +4202,13 @@ "operationId": "get_user_api_auth_user__user_id__get", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } } ], "responses": { @@ -3855,13 +4243,13 @@ "operationId": "delete_user_api_auth_user__user_id__delete", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } } ], "responses": { @@ -3896,13 +4284,13 @@ "operationId": "list_user_roles_api_auth_user__user_id__roles_get", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } } ], "responses": { @@ -3937,33 +4325,40 @@ "operationId": "assign_role_to_user_api_auth_user__user_id__roles_post", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/AssignRoleRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -3999,22 +4394,22 @@ "operationId": "remove_role_from_user_api_auth_user__user_id__roles__role_name__delete", "parameters": [ { + "name": "user_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "User Id" - }, - "name": "user_id", - "in": "path" + } }, { + "name": "role_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Role Name" - }, - "name": "role_name", - "in": "path" + } } ], "responses": { @@ -4049,13 +4444,13 @@ "operationId": "list_users_with_role_api_auth_roles__role_name__users_get", "parameters": [ { + "name": "role_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Role Name" - }, - "name": "role_name", - "in": "path" + } } ], "responses": { @@ -4090,33 +4485,40 @@ "operationId": "bulk_assign_role_api_auth_roles__role_name__users_post", "parameters": [ { + "name": "role_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Role Name" - }, - "name": "role_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/BulkAssignRequest" } } - }, - "required": true + } }, "responses": { "200": { @@ -4151,78 +4553,94 @@ "operationId": "list_apps_api_app_get", "parameters": [ { + "name": "name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Name" - }, - "name": "name", - "in": "query" + } }, { + "name": "users", 
+ "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Users" - }, - "name": "users", - "in": "query", - "explode": true + } }, { + "name": "all_users", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Users", - "default": false - }, - "name": "all_users", - "in": "query" + "default": false, + "title": "All Users" + } }, { + "name": "offset", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Offset", - "default": 0 - }, - "name": "offset", - "in": "query" + "default": 0, + "title": "Offset" + } }, { + "name": "limit", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Limit", - "default": 20 - }, - "name": "limit", - "in": "query" + "default": 20, + "title": "Limit" + } }, { + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -4258,45 +4676,49 @@ "operationId": "get_app_api_app_user__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "version", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Version" - }, - "name": "version", - "in": "query" + } }, { + "name": "limit", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Limit", - "default": 20 - }, - "name": "limit", - "in": "query" + "default": 20, + "title": "Limit" + } }, { + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } } ], "responses": { @@ -4330,34 +4752,43 @@ "operationId": "create_app_api_app_user__name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "description", + "in": "query", "required": true, "schema": { "type": "string", "title": "Description" - }, - "name": "description", - "in": "query" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { @@ -4365,8 +4796,7 @@ "title": "App Content" } } - }, - "required": true + } }, "responses": { "200": { @@ -4389,65 +4819,58 @@ } } }, - "delete": { + "patch": { "tags": [ "Workflow App API" ], - "summary": "Delete App", - "operationId": "delete_app_api_app_user__name__delete", + "summary": "Update App", + "operationId": 
"update_app_api_app_user__name__patch", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - "name": "name", - "in": "path" - }, - { - "required": false, - "schema": { - "type": "integer", - "title": "Version" - }, - "name": "version", - "in": "query" - }, - { - "required": false, - "schema": { - "type": "boolean", - "title": "All Versions", - "default": false - }, - "name": "all_versions", - "in": "query" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "type": "string", + "title": "App Content" + } + } + } + }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "additionalProperties": { - "items": { - "type": "integer" - }, - "type": "array" - }, - "type": "object", - "title": "Response Delete App Api App User Name Delete" + "$ref": "#/components/schemas/EditResponse" } } } @@ -4464,50 +4887,80 @@ } } }, - "patch": { + "delete": { "tags": [ "Workflow App API" ], - "summary": "Update App", - "operationId": "update_app_api_app_user__name__patch", + "summary": "Delete App", + "operationId": "delete_app_api_app_user__name__delete", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "version", + "in": "query", "required": false, "schema": { - "type": "string", - "title": "X-Osmo-User" - }, + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "title": "Version" + } + }, + { + "name": "all_versions", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "default": false, + "title": "All Versions" + } + }, + { "name": "x-osmo-user", - "in": "header" + "in": "header", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "X-Osmo-User" + } } ], - "requestBody": { - "content": { - "application/json": { - "schema": { - "type": "string", - "title": "App Content" - } - } - }, - "required": true - }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/EditResponse" + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "integer" + } + }, + "title": "Response Delete App Api App User Name Delete" } } } @@ -4534,22 +4987,30 @@ "operationId": "get_app_content_api_app_user__name__spec_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "version", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Version" - }, - "name": "version", - "in": "query" + } } ], "responses": { @@ -4578,25 +5039,34 @@ "operationId": "rename_app_api_app_user__name__rename_post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^(?:[a-zA-Z0-9_-]+)$", "title": "Name" - }, - 
"name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { @@ -4604,8 +5074,7 @@ "title": "New Name" } } - }, - "required": true + } }, "responses": { "200": { @@ -4642,41 +5111,55 @@ "operationId": "cancel_workflow_api_workflow__name__cancel_post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "message", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Message" - }, - "name": "message", - "in": "query" + } }, { + "name": "force", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Force", - "default": false - }, - "name": "force", - "in": "query" + "default": false, + "title": "Force" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -4712,167 +5195,230 @@ "operationId": "list_workflow_api_workflow_get", "parameters": [ { + "name": "users", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Users" - }, - "name": "users", - "in": "query", - "explode": true + } }, { + "name": "name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Name" - }, - "name": "name", - "in": "query" + } }, { - "required": false, - "schema": { - "items": { - "$ref": "#/components/schemas/WorkflowStatus" - }, - "type": "array" - }, "name": "statuses", "in": "query", - "explode": true + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkflowStatus" + } + }, + { + "type": "null" + } + ], + "title": "Statuses" + } }, { + "name": "offset", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Offset", - "default": 0 - }, - "name": "offset", - "in": "query" + "default": 0, + "title": "Offset" + } }, { + "name": "limit", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Limit", - "default": 20 - }, - "name": "limit", - "in": "query" + "default": 20, + "title": "Limit" + } }, { + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } }, { + "name": "all_users", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Users", - "default": false - }, - "name": "all_users", - "in": "query" + "default": false, + "title": "All Users" + } }, { + "name": "pools", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], 
"title": "Pools" - }, - "name": "pools", - "in": "query", - "explode": true + } }, { + "name": "all_pools", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Pools", - "default": false - }, - "name": "all_pools", - "in": "query" + "default": false, + "title": "All Pools" + } }, { + "name": "submitted_before", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Submitted Before" - }, - "name": "submitted_before", - "in": "query" + } }, { + "name": "submitted_after", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Submitted After" - }, - "name": "submitted_after", - "in": "query" + } }, { + "name": "tags", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Tags" - }, - "name": "tags", - "in": "query", - "explode": true + } }, { + "name": "app", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App" - }, - "name": "app", - "in": "query" + } }, { - "required": false, - "schema": { - "items": { - "$ref": "#/components/schemas/WorkflowPriority" - }, - "type": "array" - }, "name": "priority", "in": "query", - "explode": true + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkflowPriority" + } + }, + { + "type": "null" + } + ], + "title": "Priority" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -4909,22 +5455,22 @@ "operationId": "get_workflow_task_api_workflow__name__task__task_name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "path" + } } ], "responses": { @@ -4960,178 +5506,234 @@ "operationId": "list_task_api_task_get", "parameters": [ { + "name": "workflow_id", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Workflow Id" - }, - "name": "workflow_id", - "in": "query" + } }, { - "required": false, - "schema": { - "items": { - "$ref": "#/components/schemas/TaskGroupStatus" - }, - "type": "array" - }, "name": "statuses", "in": "query", - "explode": true + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/TaskGroupStatus" + } + }, + { + "type": "null" + } + ], + "title": "Statuses" + } }, { + "name": "users", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Users" - }, - "name": "users", - "in": "query", - "explode": true + } 
}, { + "name": "all_users", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Users", - "default": false - }, - "name": "all_users", - "in": "query" + "default": false, + "title": "All Users" + } }, { + "name": "pools", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Pools" - }, - "name": "pools", - "in": "query", - "explode": true + } }, { + "name": "all_pools", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Pools", - "default": false - }, - "name": "all_pools", - "in": "query" + "default": false, + "title": "All Pools" + } }, { + "name": "nodes", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Nodes" - }, - "name": "nodes", - "in": "query", - "explode": true + } }, { + "name": "started_after", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Started After" - }, - "name": "started_after", - "in": "query" + } }, { + "name": "started_before", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Started Before" - }, - "name": "started_before", - "in": "query" + } }, { + "name": "offset", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Offset", - "default": 0 - }, - "name": "offset", - "in": "query" + "default": 0, + "title": "Offset" + } }, { + "name": "limit", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Limit", - "default": 20 - }, - "name": "limit", - "in": "query" + "default": 20, + "title": "Limit" + } }, { + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } }, { + "name": "summary", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Summary", - "default": false - }, - "name": "summary", - "in": "query" + "default": false, + "title": "Summary" + } }, { + "name": "aggregate_by_workflow", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Aggregate By Workflow", - "default": false - }, - "name": "aggregate_by_workflow", - "in": "query" + "default": false, + "title": "Aggregate By Workflow" + } }, { - "required": false, - "schema": { - "items": { - "$ref": "#/components/schemas/WorkflowPriority" - }, - "type": "array" - }, "name": "priority", "in": "query", - "explode": true + "required": false, + "schema": { + "anyOf": [ + { + "type": "array", + "items": { + "$ref": "#/components/schemas/WorkflowPriority" + } + }, + { + "type": "null" + } + ], + "title": "Priority" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -5179,33 +5781,33 @@ "operationId": 
"get_workflow_api_workflow__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "skip_groups", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Skip Groups", - "default": false - }, - "name": "skip_groups", - "in": "query" + "default": false, + "title": "Skip Groups" + } }, { + "name": "verbose", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Verbose", - "default": false - }, - "name": "verbose", - "in": "query" + "default": false, + "title": "Verbose" + } } ], "responses": { @@ -5242,49 +5844,77 @@ "operationId": "get_workflow_logs_api_workflow__name__logs_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "last_n_lines", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Last N Lines" - }, - "name": "last_n_lines", - "in": "query" + } }, { + "name": "task_name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Task Name" - }, - "name": "task_name", - "in": "query" + } }, { + "name": "retry_id", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Retry Id" - }, - "name": "retry_id", - "in": "query" + } }, { + "name": "query", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Query" - }, - "name": "query", - "in": "query" + } } ], "responses": { @@ -5321,31 +5951,45 @@ "operationId": "get_workflow_pod_conditions_api_workflow__name__events_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Task Name" - }, - "name": "task_name", - "in": "query" + } }, { + "name": "retry_id", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Retry Id" - }, - "name": "retry_id", - "in": "query" + } } ], "responses": { @@ -5382,49 +6026,77 @@ "operationId": "get_workflow_error_logs_api_workflow__name__error_logs_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "last_n_lines", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Last N Lines" - }, - "name": "last_n_lines", - "in": "query" + } }, { + "name": "task_name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Task Name" - }, - "name": "task_name", - "in": "query" + } }, { + "name": "retry_id", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": 
"Retry Id" - }, - "name": "retry_id", - "in": "query" + } }, { + "name": "query", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Query" - }, - "name": "query", - "in": "query" + } } ], "responses": { @@ -5461,23 +6133,23 @@ "operationId": "get_workflow_spec_api_workflow__name__spec_get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "use_template", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Use Template", - "default": false - }, - "name": "use_template", - "in": "query" + "default": false, + "title": "Use Template" + } } ], "responses": { @@ -5514,39 +6186,51 @@ "operationId": "tag_workflow_api_workflow__name__tag_post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "add", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Add" - }, - "name": "add", - "in": "query", - "explode": true + } }, { + "name": "remove", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Remove" - }, - "name": "remove", - "in": "query", - "explode": true + } } ], "responses": { @@ -5581,31 +6265,31 @@ "operationId": "exec_into_group_api_workflow__name__exec_group__group_name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "group_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Group Name" - }, - "name": "group_name", - "in": "path" + } }, { + "name": "entry_command", + "in": "query", "required": true, "schema": { "type": "string", "title": "Entry Command" - }, - "name": "entry_command", - "in": "query" + } } ], "responses": { @@ -5614,10 +6298,10 @@ "content": { "application/json": { "schema": { + "type": "object", "additionalProperties": { "$ref": "#/components/schemas/RouterResponse" }, - "type": "object", "title": "Response Exec Into Group Api Workflow Name Exec Group Group Name Post" } } @@ -5646,31 +6330,31 @@ "operationId": "exec_into_task_api_workflow__name__exec_task__task_name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "path" + } }, { + "name": "entry_command", + "in": "query", "required": true, "schema": { "type": "string", "title": "Entry Command" - }, - "name": "entry_command", - "in": "query" + } } ], "responses": { @@ -5707,45 +6391,51 @@ "operationId": "port_forward_task_api_workflow__name__portforward__task_name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "path", "required": true, "schema": { 
"type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "path" + } }, { + "name": "task_ports", + "in": "query", "required": false, "schema": { - "items": { - "type": "integer" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "integer" + } + }, + { + "type": "null" + } + ], "title": "Task Ports" - }, - "name": "task_ports", - "in": "query", - "explode": true + } }, { + "name": "use_udp", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Use Udp", - "default": false - }, - "name": "use_udp", - "in": "query" + "default": false, + "title": "Use Udp" + } } ], "responses": { @@ -5756,10 +6446,10 @@ "schema": { "anyOf": [ { + "type": "array", "items": { "$ref": "#/components/schemas/RouterResponse" - }, - "type": "array" + } }, { "$ref": "#/components/schemas/RouterResponse" @@ -5793,31 +6483,31 @@ "operationId": "port_forward_webserver_api_workflow__name__webserver__task_name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "path" + } }, { + "name": "task_port", + "in": "query", "required": true, "schema": { "type": "integer", "title": "Task Port" - }, - "name": "task_port", - "in": "query" + } } ], "responses": { @@ -5854,22 +6544,22 @@ "operationId": "rsync_task_api_workflow__name__rsync_task__task_name__post", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "task_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Task Name" - }, - "name": "task_name", - "in": "path" + } } ], "responses": { @@ -5906,13 +6596,20 @@ "operationId": "get_user_credential_api_credentials_get", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -5949,33 +6646,40 @@ "operationId": "set_user_credential_api_credentials__cred_name__post", "parameters": [ { + "name": "cred_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Cred Name" - }, - "name": "cred_name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CredentialOptions" } } - }, - "required": true + } }, "responses": { "200": { @@ -6007,22 +6711,29 @@ "operationId": "delete_users_credential_api_credentials__cred_name__delete", "parameters": [ { + "name": "cred_name", + "in": "path", "required": true, "schema": { "type": "string", - "title": "Cred Name" - }, - "name": "cred_name", - "in": "path" + "title": "Cred Name" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ 
-6059,59 +6770,78 @@ "operationId": "get_resources_api_resources_get", "parameters": [ { + "name": "pools", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Pools" - }, - "name": "pools", - "in": "query", - "explode": true + } }, { + "name": "platforms", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Platforms" - }, - "name": "platforms", - "in": "query", - "explode": true + } }, { + "name": "all_pools", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Pools", - "default": false - }, - "name": "all_pools", - "in": "query" + "default": false, + "title": "All Pools" + } }, { + "name": "concise", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Concise", - "default": false - }, - "name": "concise", - "in": "query" + "default": false, + "title": "Concise" + } }, { + "name": "x-osmo-allowed-pools", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Allowed-Pools" - }, - "name": "x-osmo-allowed-pools", - "in": "header" + } } ], "responses": { @@ -6156,13 +6886,13 @@ "operationId": "get_one_resource_api_resources__name__get", "parameters": [ { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Name" - }, - "name": "name", - "in": "path" + } } ], "responses": { @@ -6199,27 +6929,33 @@ "operationId": "get_pools_api_pool_get", "parameters": [ { + "name": "all_pools", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Pools", - "default": true - }, - "name": "all_pools", - "in": "query" + "default": true, + "title": "All Pools" + } }, { + "name": "pools", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Pools" - }, - "name": "pools", - "in": "query", - "explode": true + } } ], "responses": { @@ -6255,27 +6991,33 @@ "operationId": "get_pool_quotas_api_pool_quota_get", "parameters": [ { + "name": "all_pools", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Pools", - "default": true - }, - "name": "all_pools", - "in": "query" + "default": true, + "title": "All Pools" + } }, { + "name": "pools", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "Pools" - }, - "name": "pools", - "in": "query", - "explode": true + } } ], "responses": { @@ -6312,112 +7054,151 @@ "operationId": "submit_workflow_api_pool__pool_name__workflow_post", "parameters": [ { + "name": "pool_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Pool Name" - }, - "name": "pool_name", - "in": "path" + } }, { + "name": "workflow_id", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Workflow Id" - }, - "name": "workflow_id", - "in": "query" + } }, { + "name": 
"app_uuid", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App Uuid" - }, - "name": "app_uuid", - "in": "query" + } }, { + "name": "app_version", + "in": "query", "required": false, "schema": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "App Version" - }, - "name": "app_version", - "in": "query" + } }, { + "name": "dry_run", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Dry Run", - "default": false - }, - "name": "dry_run", - "in": "query" + "default": false, + "title": "Dry Run" + } }, { + "name": "validation_only", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Validation Only", - "default": false - }, - "name": "validation_only", - "in": "query" + "default": false, + "title": "Validation Only" + } }, { + "name": "priority", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/WorkflowPriority" - } - ], + "$ref": "#/components/schemas/WorkflowPriority", "default": "NORMAL" - }, - "name": "priority", - "in": "query" + } }, { + "name": "env_vars", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Env Vars", - "default": [] + "default": [], + "title": "Env Vars" }, - "name": "env_vars", - "in": "query", "explode": true }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } }, { + "name": "x-osmo-roles", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Roles" - }, - "name": "x-osmo-roles", - "in": "header" + } } ], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TemplateSpec" + "anyOf": [ + { + "$ref": "#/components/schemas/TemplateSpec" + }, + { + "type": "null" + } + ], + "title": "Template Spec" } } } @@ -6456,40 +7237,54 @@ "operationId": "restart_workflow_api_pool__pool_name__workflow__workflow_id__restart_post", "parameters": [ { + "name": "pool_name", + "in": "path", "required": true, "schema": { "type": "string", "title": "Pool Name" - }, - "name": "pool_name", - "in": "path" + } }, { + "name": "workflow_id", + "in": "path", "required": true, "schema": { "type": "string", "title": "Workflow Id" - }, - "name": "workflow_id", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } }, { + "name": "x-osmo-roles", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Roles" - }, - "name": "x-osmo-roles", - "in": "header" + } } ], "responses": { @@ -6526,23 +7321,30 @@ "operationId": "get_bucket_info_api_bucket_get", "parameters": [ { + "name": "default_only", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Default Only", - "default": false - }, - "name": "default_only", - "in": "query" + "default": false, + "title": "Default Only" + } }, { + "name": "x-osmo-user", + "in": 
"header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -6579,51 +7381,61 @@ "operationId": "delete_dataset_api_bucket__bucket__dataset__name__delete", "parameters": [ { + "name": "bucket", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Bucket" - }, - "name": "bucket", - "in": "path" + } }, { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "tag", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string", + "pattern": "^([a-zA-Z0-9_-]*)$" + }, + { + "type": "null" + } + ], "title": "Tag" - }, - "name": "tag", - "in": "query" + } }, { + "name": "all_flag", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Flag", - "default": false - }, - "name": "all_flag", - "in": "query" + "default": false, + "title": "All Flag" + } }, { + "name": "finish", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "Finish", - "default": false - }, - "name": "finish", - "in": "query" + "default": false, + "title": "Finish" + } } ], "responses": { @@ -6660,95 +7472,113 @@ "operationId": "change_name_tag_label_metadata_api_bucket__bucket__dataset__name__attribute_post", "parameters": [ { + "name": "bucket", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Bucket" - }, - "name": "bucket", - "in": "path" + } }, { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "tag", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string", + "pattern": "^([a-zA-Z0-9_-]*)$" + }, + { + "type": "null" + } + ], "title": "Tag" - }, - "name": "tag", - "in": "query" + } }, { + "name": "new_name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$" + }, + { + "type": "null" + } + ], "title": "New Name" - }, - "name": "new_name", - "in": "query" + } }, { + "name": "set_tag", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Set Tag", - "default": [] + "default": [], + "title": "Set Tag" }, - "name": "set_tag", - "in": "query", "explode": true }, { + "name": "delete_tag", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Delete Tag", - "default": [] + "default": [], + "title": "Delete Tag" }, - "name": "delete_tag", - "in": "query", "explode": true }, { + "name": "delete_label", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Delete Label", - "default": [] + "default": [], + "title": "Delete Label" }, - "name": "delete_label", - "in": "query", "explode": true }, { + "name": "delete_metadata", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Delete Metadata", - "default": [] + "default": [], + "title": "Delete Metadata" }, - "name": 
"delete_metadata", - "in": "query", "explode": true } ], @@ -6795,64 +7625,70 @@ "operationId": "get_info_api_bucket__bucket__dataset__name__info_get", "parameters": [ { + "name": "bucket", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Bucket" - }, - "name": "bucket", - "in": "path" + } }, { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "tag", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string", + "pattern": "^([a-zA-Z0-9_-]*)$" + }, + { + "type": "null" + } + ], "title": "Tag" - }, - "name": "tag", - "in": "query" + } }, { + "name": "all_flag", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Flag", - "default": false - }, - "name": "all_flag", - "in": "query" + "default": false, + "title": "All Flag" + } }, { + "name": "count", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Count", - "default": 100 - }, - "name": "count", - "in": "query" + "default": 100, + "title": "Count" + } }, { + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } } ], "responses": { @@ -6889,110 +7725,149 @@ "operationId": "list_dataset_from_bucket_api_bucket_list_dataset_get", "parameters": [ { + "name": "name", + "in": "query", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$" + }, + { + "type": "null" + } + ], "title": "Name" - }, - "name": "name", - "in": "query" + } }, { + "name": "user", + "in": "query", "required": false, "schema": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], "title": "User" - }, - "name": "user", - "in": "query", - "explode": true + } }, { + "name": "buckets", + "in": "query", "required": false, "schema": { + "type": "array", "items": { "type": "string" }, - "type": "array", - "title": "Buckets", - "default": [] + "default": [], + "title": "Buckets" }, - "name": "buckets", - "in": "query", "explode": true }, { + "name": "dataset_type", + "in": "query", "required": false, "schema": { - "$ref": "#/components/schemas/DatasetType" - }, - "name": "dataset_type", - "in": "query" + "anyOf": [ + { + "$ref": "#/components/schemas/DatasetType" + }, + { + "type": "null" + } + ], + "title": "Dataset Type" + } }, { + "name": "latest_before", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Latest Before" - }, - "name": "latest_before", - "in": "query" + } }, { + "name": "latest_after", + "in": "query", "required": false, "schema": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Latest After" - }, - "name": "latest_after", - "in": "query" + } }, { + "name": "all_users", + "in": "query", "required": false, "schema": { "type": "boolean", - "title": "All Users", - "default": false - }, - "name": "all_users", - "in": "query" + "default": false, + "title": "All Users" + } }, 
{ + "name": "order", + "in": "query", "required": false, "schema": { - "allOf": [ - { - "$ref": "#/components/schemas/ListOrder" - } - ], + "$ref": "#/components/schemas/ListOrder", "default": "ASC" - }, - "name": "order", - "in": "query" + } }, { + "name": "count", + "in": "query", "required": false, "schema": { "type": "integer", - "title": "Count", - "default": 20 - }, - "name": "count", - "in": "query" + "default": 20, + "title": "Count" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "responses": { @@ -7029,42 +7904,51 @@ "operationId": "create_collection_api_bucket__bucket__dataset__name__collect_post", "parameters": [ { + "name": "bucket", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Bucket" - }, - "name": "bucket", - "in": "path" + } }, { + "name": "name", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Name" - }, - "name": "name", - "in": "path" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/Body_create_collection_api_bucket__bucket__dataset__name__collect_post" } } - }, - "required": true + } }, "responses": { "200": { @@ -7098,23 +7982,24 @@ "operationId": "query_dataset_api_bucket__bucket__query_get", "parameters": [ { + "name": "bucket", + "in": "path", "required": true, "schema": { "type": "string", + "pattern": "^[a-zA-Z0-9_-]+$", "title": "Bucket" - }, - "name": "bucket", - "in": "path" + } }, { + "name": "command", + "in": "query", "required": false, "schema": { "type": "string", - "title": "Command", - "default": "" - }, - "name": "command", - "in": "query" + "default": "", + "title": "Command" + } } ], "responses": { @@ -7150,40 +8035,68 @@ "operationId": "get_notification_settings_api_profile_settings_get", "parameters": [ { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } }, { + "name": "x-osmo-roles", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Roles" - }, - "name": "x-osmo-roles", - "in": "header" + } }, { + "name": "x-osmo-token-name", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Token-Name" - }, - "name": "x-osmo-token-name", - "in": "header" + } }, { + "name": "x-osmo-allowed-pools", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-Allowed-Pools" - }, - "name": "x-osmo-allowed-pools", - "in": "header" + } } ], "responses": { @@ -7217,34 +8130,41 @@ "operationId": "set_notification_settings_api_profile_settings_post", "parameters": [ { + "name": "set_default_backend", + "in": "query", "required": false, "schema": { 
"type": "boolean", - "title": "Set Default Backend", - "default": false - }, - "name": "set_default_backend", - "in": "query" + "default": false, + "title": "Set Default Backend" + } }, { + "name": "x-osmo-user", + "in": "header", "required": false, "schema": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "X-Osmo-User" - }, - "name": "x-osmo-user", - "in": "header" + } } ], "requestBody": { + "required": true, "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UserProfile" } } - }, - "required": true + } }, "responses": { "200": { @@ -7403,7 +8323,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/PluginsConfig" + "$ref": "#/components/schemas/PluginsConfig-Output" } } } @@ -7582,12 +8502,7 @@ ] }, "login_info": { - "allOf": [ - { - "$ref": "#/components/schemas/LoginInfo" - } - ], - "title": "Login Info", + "$ref": "#/components/schemas/LoginInfo", "default": {} }, "max_token_duration": { @@ -7691,36 +8606,92 @@ "BackendConfig": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "k8s_uid": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "K8S Uid" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" }, "grafana_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Grafana Url" }, "tests": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tests" }, "scheduler_settings": { - "$ref": "#/components/schemas/BackendSchedulerSettings" + "anyOf": [ + { + "$ref": "#/components/schemas/BackendSchedulerSettings" + }, + { + "type": "null" + } + ] }, "node_conditions": { - "$ref": "#/components/schemas/BackendNodeConditions" + "anyOf": [ + { + "$ref": "#/components/schemas/BackendNodeConditions" + }, + { + "type": "null" + } + ] }, "router_address": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Router Address" } }, @@ -7731,10 +8702,17 @@ "BackendNodeConditions": { "properties": { "rules": { - "additionalProperties": { - "type": "string" - }, - "type": "object", + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Rules" }, "prefix": { @@ -7748,6 +8726,7 @@ "description": "Settings for backend node conditions. 
" }, "BackendResourceType": { + "type": "string", "enum": [ "RESERVED", "SHARED", @@ -7759,11 +8738,7 @@ "BackendSchedulerSettings": { "properties": { "scheduler_type": { - "allOf": [ - { - "$ref": "#/components/schemas/BackendSchedulerType" - } - ], + "$ref": "#/components/schemas/BackendSchedulerType", "default": "kai" }, "scheduler_name": { @@ -7782,6 +8757,7 @@ "description": "Settings that control the how pods are scheduled in a backend" }, "BackendSchedulerType": { + "type": "string", "enum": [ "kai" ], @@ -7826,6 +8802,7 @@ "title": "Common Pod Template" }, "parsed_pod_template": { + "additionalProperties": true, "type": "object", "title": "Parsed Pod Template", "default": {} @@ -7845,11 +8822,13 @@ "Body_change_name_tag_label_metadata_api_bucket__bucket__dataset__name__attribute_post": { "properties": { "set_label": { + "additionalProperties": true, "type": "object", "title": "Set Label", "default": {} }, "set_metadata": { + "additionalProperties": true, "type": "object", "title": "Set Metadata", "default": {} @@ -7897,9 +8876,17 @@ "default": "read-write" }, "default_credential": { - "$ref": "#/components/schemas/StaticDataCredential" + "anyOf": [ + { + "$ref": "#/components/schemas/StaticDataCredential" + }, + { + "type": "null" + } + ] } }, + "additionalProperties": false, "type": "object", "required": [ "dataset_path" @@ -7940,7 +8927,14 @@ "BucketInfoResponse": { "properties": { "default": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Default" }, "buckets": { @@ -8032,18 +9026,40 @@ "CliConfig": { "properties": { "latest_version": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Latest Version" }, "min_supported_version": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Min Supported Version" }, "client_install_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Client Install Url" } }, + "additionalProperties": false, "type": "object", "title": "CliConfig", "description": "Config for storing information regarding CLI storage. 
" @@ -8088,10 +9104,17 @@ "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "data": { @@ -8113,14 +9136,28 @@ "ConfigsRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" } }, @@ -8135,10 +9172,17 @@ "title": "Id" }, "roles": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Roles" } }, @@ -8168,6 +9212,7 @@ "default": [] } }, + "additionalProperties": false, "type": "object", "title": "CredentialConfig", "description": "Stores registries/data which do not do validation " @@ -8177,7 +9222,14 @@ "credentials": { "items": { "additionalProperties": { - "type": "string" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] }, "type": "object" }, @@ -8195,30 +9247,36 @@ "CredentialOptions": { "properties": { "registry_credential": { - "allOf": [ + "anyOf": [ { "$ref": "#/components/schemas/UserRegistryCredential" + }, + { + "type": "null" } ], - "title": "Registry Credential", "description": "Authentication information for a Docker registry" }, "data_credential": { - "allOf": [ + "anyOf": [ { "$ref": "#/components/schemas/UserDataCredential" + }, + { + "type": "null" } ], - "title": "Data Credential", "description": "Authentication information for a data service" }, "generic_credential": { - "allOf": [ + "anyOf": [ { "$ref": "#/components/schemas/UserCredential" + }, + { + "type": "null" } ], - "title": "Generic Credential", "description": "Generic authentication information" } }, @@ -8229,13 +9287,34 @@ "DataAttributeResponse": { "properties": { "tag_response": { - "$ref": "#/components/schemas/DataTagResponse" + "anyOf": [ + { + "$ref": "#/components/schemas/DataTagResponse" + }, + { + "type": "null" + } + ] }, "label_response": { - "$ref": "#/components/schemas/DataMetadataResponse" + "anyOf": [ + { + "$ref": "#/components/schemas/DataMetadataResponse" + }, + { + "type": "null" + } + ] }, "metadata_response": { - "$ref": "#/components/schemas/DataMetadataResponse" + "anyOf": [ + { + "$ref": "#/components/schemas/DataMetadataResponse" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, @@ -8246,7 +9325,14 @@ "DataConfig": { "properties": { "credential": { - "$ref": "#/components/schemas/StaticDataCredential" + "anyOf": [ + { + "$ref": "#/components/schemas/StaticDataCredential" + }, + { + "type": "null" + } + ] }, "base_url": { "type": "string", @@ -8264,14 +9350,11 @@ "default": 10 }, "download_type": { - "allOf": [ - { - "$ref": "#/components/schemas/DownloadType" - } - ], + "$ref": "#/components/schemas/DownloadType", "default": "download" } }, + "additionalProperties": false, "type": "object", "title": "DataConfig", "description": "Config for storing information about data. 
" @@ -8324,7 +9407,14 @@ "title": "Uri" }, "hash_location": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Hash Location" }, "size": { @@ -8388,6 +9478,7 @@ "title": "Uri" }, "metadata": { + "additionalProperties": true, "type": "object", "title": "Metadata" }, @@ -8441,23 +9532,52 @@ "title": "Bucket" }, "created_by": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Created By" }, "created_date": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Created Date" }, "hash_location": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Hash Location" }, "hash_location_size": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Hash Location Size" }, "labels": { + "additionalProperties": true, "type": "object", "title": "Labels" }, @@ -8512,20 +9632,48 @@ "title": "Create Time" }, "last_created": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Last Created" }, "hash_location": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Hash Location" }, "hash_location_size": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Hash Location Size" }, "version_id": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Version Id" }, "type": { @@ -8565,6 +9713,7 @@ "DataMetadataResponse": { "properties": { "metadata": { + "additionalProperties": true, "type": "object", "title": "Metadata" } @@ -8629,7 +9778,28 @@ "title": "DataTagResponse", "description": "Object storing Tag Response. " }, - "DatasetConfig": { + "DatasetConfig-Input": { + "properties": { + "buckets": { + "additionalProperties": { + "$ref": "#/components/schemas/BucketConfig" + }, + "type": "object", + "title": "Buckets", + "default": {} + }, + "default_bucket": { + "type": "string", + "title": "Default Bucket", + "default": "" + } + }, + "additionalProperties": false, + "type": "object", + "title": "DatasetConfig", + "description": "Stores any dataset configs External Admins control " + }, + "DatasetConfig-Output": { "properties": { "buckets": { "additionalProperties": { @@ -8645,19 +9815,21 @@ "default": "" } }, + "additionalProperties": false, "type": "object", "title": "DatasetConfig", "description": "Stores any dataset configs External Admins control " }, "DatasetQueryType": { + "type": "string", "enum": [ "VERSION", "DATASET" ], - "title": "DatasetQueryType", - "description": "An enumeration." + "title": "DatasetQueryType" }, "DatasetStatus": { + "type": "string", "enum": [ "PENDING", "READY", @@ -8690,24 +9862,38 @@ "description": "Object storing execution cluster node resource information. " }, "DatasetType": { + "type": "string", "enum": [ "COLLECTION", "DATASET" ], - "title": "DatasetType", - "description": "An enumeration." 
+ "title": "DatasetType" }, "DeleteBackendRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "force": { @@ -8877,48 +10063,104 @@ "$ref": "#/components/schemas/TaskGroupStatus" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "processing_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Processing Start Time" }, "scheduling_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Scheduling Start Time" }, "initializing_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Initializing Start Time" }, "remaining_upstream_groups": { - "items": { - "type": "string" - }, - "type": "array", - "uniqueItems": true, + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array", + "uniqueItems": true + }, + { + "type": "null" + } + ], "title": "Remaining Upstream Groups" }, "downstream_groups": { - "items": { - "type": "string" - }, - "type": "array", - "uniqueItems": true, + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array", + "uniqueItems": true + }, + { + "type": "null" + } + ], "title": "Downstream Groups" }, "failure_message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Failure Message" }, "tasks": { @@ -8955,15 +10197,36 @@ "JwtTokenResponse": { "properties": { "token": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Token" }, "expires_at": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Expires At" }, "error": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Error" } }, @@ -8990,6 +10253,7 @@ "description": "Object storing info for all backends. 
" }, "ListOrder": { + "type": "string", "enum": [ "ASC", "DESC" @@ -9004,11 +10268,18 @@ "title": "User" }, "pool": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" }, "storage": { - "type": "integer", + "type": "number", "title": "Storage" }, "cpu": { @@ -9016,7 +10287,7 @@ "title": "Cpu" }, "memory": { - "type": "integer", + "type": "number", "title": "Memory" }, "gpu": { @@ -9086,26 +10357,60 @@ "title": "Retry Id" }, "pool": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" }, "node": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Node" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "duration": { - "type": "number", - "format": "time-delta", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Duration" }, "status": { @@ -9120,19 +10425,40 @@ "title": "Logs" }, "error_logs": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Error Logs" }, "grafana_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Grafana Url" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" }, "storage": { - "type": "integer", + "type": "number", "title": "Storage" }, "cpu": { @@ -9140,7 +10466,7 @@ "title": "Cpu" }, "memory": { - "type": "integer", + "type": "number", "title": "Memory" }, "gpu": { @@ -9196,11 +10522,18 @@ "title": "User" }, "pool": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" }, "storage": { - "type": "integer", + "type": "number", "title": "Storage" }, "cpu": { @@ -9208,7 +10541,7 @@ "title": "Cpu" }, "memory": { - "type": "integer", + "type": "number", "title": "Memory" }, "gpu": { @@ -9253,9 +10586,17 @@ "LogConfig": { "properties": { "credential": { - "$ref": "#/components/schemas/StaticDataCredential" + "anyOf": [ + { + "$ref": "#/components/schemas/StaticDataCredential" + }, + { + "type": "null" + } + ] } }, + "additionalProperties": false, "type": "object", "title": "LogConfig", "description": "Config for storing information about data. 
" @@ -9263,27 +10604,69 @@ "LoginInfo": { "properties": { "device_endpoint": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Device Endpoint" }, "device_client_id": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Device Client Id" }, "browser_endpoint": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Browser Endpoint" }, "browser_client_id": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Browser Client Id" }, "token_endpoint": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Token Endpoint" }, "logout_endpoint": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Logout Endpoint" } }, @@ -9316,12 +10699,7 @@ "writeOnly": true }, "smtp_settings": { - "allOf": [ - { - "$ref": "#/components/schemas/SMTPConfig" - } - ], - "title": "Smtp Settings", + "$ref": "#/components/schemas/SMTPConfig", "default": { "host": "", "sender": "", @@ -9333,6 +10711,7 @@ "title": "NotificationConfig" }, "OperatorType": { + "type": "string", "enum": [ "GT", "GE", @@ -9340,8 +10719,7 @@ "LE", "EQ" ], - "title": "OperatorType", - "description": "An enumeration." + "title": "OperatorType" }, "OsmoImageConfig": { "properties": { @@ -9356,12 +10734,7 @@ "default": "" }, "credential": { - "allOf": [ - { - "$ref": "#/components/schemas/RegistryCredential" - } - ], - "title": "Credential", + "$ref": "#/components/schemas/RegistryCredential", "default": { "registry": "", "username": "", @@ -9369,6 +10742,7 @@ } } }, + "additionalProperties": false, "type": "object", "title": "OsmoImageConfig", "description": "Dynamic Config for storing the image URLs for service images and the credentials needed\nto pull them." 
@@ -9376,17 +10750,32 @@ "PatchBackendTestRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs_dict": { + "additionalProperties": true, "type": "object", "title": "Configs Dict" } @@ -9401,17 +10790,32 @@ "PatchConfigRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs_dict": { + "additionalProperties": true, "type": "object", "title": "Configs Dict" } @@ -9426,54 +10830,174 @@ "PatchDatasetRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description" + }, + "tags": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tags" + }, + "configs_dict": { + "additionalProperties": true, + "type": "object", + "title": "Configs Dict" + } + }, + "type": "object", + "required": [ + "configs_dict" + ], + "title": "PatchDatasetRequest", + "description": "Request body for patching a dataset bucket configuration with history tracking metadata." + }, + "PatchPoolRequest": { + "properties": { + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tags" + }, + "configs_dict": { + "additionalProperties": true, + "type": "object", + "title": "Configs Dict" + } + }, + "type": "object", + "required": [ + "configs_dict" + ], + "title": "PatchPoolRequest", + "description": "Request body for patching a pool with history tracking metadata." 
+ }, + "Platform-Input": { + "properties": { + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "host_network_allowed": { + "type": "boolean", + "title": "Host Network Allowed", + "default": false + }, + "privileged_allowed": { + "type": "boolean", + "title": "Privileged Allowed", + "default": false + }, + "allowed_mounts": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Allowed Mounts", + "default": [] + }, + "default_mounts": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Default Mounts", + "default": [] + }, + "tolerations": { + "items": { + "$ref": "#/components/schemas/Toleration" + }, + "type": "array", + "title": "Tolerations", + "default": [] + }, + "labels": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Labels", + "default": {} + }, + "default_variables": { + "additionalProperties": true, + "type": "object", + "title": "Default Variables", + "default": {} + }, + "resource_validations": { "items": { "type": "string" }, "type": "array", - "title": "Tags" + "title": "Resource Validations", + "default": [] }, - "configs_dict": { - "type": "object", - "title": "Configs Dict" - } - }, - "type": "object", - "required": [ - "configs_dict" - ], - "title": "PatchDatasetRequest", - "description": "Request body for patching a dataset bucket configuration with history tracking metadata." - }, - "PatchPoolRequest": { - "properties": { - "description": { - "type": "string", - "title": "Description" + "parsed_resource_validations": { + "items": { + "$ref": "#/components/schemas/ResourceAssertion" + }, + "type": "array", + "title": "Parsed Resource Validations", + "default": [] }, - "tags": { + "override_pod_template": { "items": { "type": "string" }, "type": "array", - "title": "Tags" + "title": "Override Pod Template", + "default": [] }, - "configs_dict": { + "parsed_pod_template": { + "additionalProperties": true, "type": "object", - "title": "Configs Dict" + "title": "Parsed Pod Template", + "default": {} } }, "type": "object", - "required": [ - "configs_dict" - ], - "title": "PatchPoolRequest", - "description": "Request body for patching a pool with history tracking metadata." 
+ "title": "Platform", + "description": "Single Platform Entry " }, - "Platform": { + "Platform-Output": { "properties": { "description": { "type": "string", @@ -9523,6 +11047,7 @@ "default": {} }, "default_variables": { + "additionalProperties": true, "type": "object", "title": "Default Variables", "default": {} @@ -9552,6 +11077,7 @@ "default": [] }, "parsed_pod_template": { + "additionalProperties": true, "type": "object", "title": "Parsed Pod Template", "default": {} @@ -9587,6 +11113,7 @@ "default": [] }, "default_variables": { + "additionalProperties": true, "type": "object", "title": "Default Variables", "default": {} @@ -9650,15 +11177,32 @@ "title": "PlatformMinimal", "description": "Single Platform Entry " }, - "PluginsConfig": { + "PluginsConfig-Input": { "properties": { "rsync": { - "allOf": [ - { - "$ref": "#/components/schemas/RsyncConfig" - } - ], - "title": "Rsync", + "$ref": "#/components/schemas/RsyncConfig", + "default": { + "enabled": false, + "enable_telemetry": false, + "read_bandwidth_limit": 2621440, + "write_bandwidth_limit": 2621440, + "allowed_paths": {}, + "daemon_debounce_delay": 30.0, + "daemon_poll_interval": 120.0, + "daemon_reconcile_interval": 60.0, + "client_upload_rate_limit": 2097152 + } + } + }, + "additionalProperties": false, + "type": "object", + "title": "PluginsConfig", + "description": "Stores any plugins configs " + }, + "PluginsConfig-Output": { + "properties": { + "rsync": { + "$ref": "#/components/schemas/RsyncConfig", "default": { "enabled": false, "enable_telemetry": false, @@ -9672,6 +11216,7 @@ } } }, + "additionalProperties": false, "type": "object", "title": "PluginsConfig", "description": "Stores any plugins configs " @@ -9685,7 +11230,7 @@ "title": "PolicyEffect", "description": "Effect of a policy statement: Allow or Deny. Deny takes precedence over Allow." 
}, - "Pool": { + "Pool-Input": { "properties": { "name": { "type": "string", @@ -9698,10 +11243,24 @@ "default": "" }, "status": { - "$ref": "#/components/schemas/PoolStatus" + "anyOf": [ + { + "$ref": "#/components/schemas/PoolStatus" + }, + { + "type": "null" + } + ] }, "download_type": { - "$ref": "#/components/schemas/DownloadType" + "anyOf": [ + { + "$ref": "#/components/schemas/DownloadType" + }, + { + "type": "null" + } + ] }, "enable_maintenance": { "type": "boolean", @@ -9713,7 +11272,14 @@ "title": "Backend" }, "default_platform": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Default Platform" }, "default_exec_timeout": { @@ -9745,12 +11311,180 @@ "default": {} }, "resources": { - "allOf": [ + "$ref": "#/components/schemas/PoolResources", + "default": {} + }, + "topology_keys": { + "items": { + "$ref": "#/components/schemas/TopologyKey" + }, + "type": "array", + "title": "Topology Keys", + "default": [] + }, + "common_default_variables": { + "additionalProperties": true, + "type": "object", + "title": "Common Default Variables", + "default": {} + }, + "common_resource_validations": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Common Resource Validations", + "default": [] + }, + "parsed_resource_validations": { + "items": { + "$ref": "#/components/schemas/ResourceAssertion" + }, + "type": "array", + "title": "Parsed Resource Validations", + "default": [] + }, + "common_pod_template": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Common Pod Template", + "default": [] + }, + "parsed_pod_template": { + "additionalProperties": true, + "type": "object", + "title": "Parsed Pod Template", + "default": {} + }, + "common_group_templates": { + "items": { + "type": "string" + }, + "type": "array", + "title": "Common Group Templates", + "default": [] + }, + "parsed_group_templates": { + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": "array", + "title": "Parsed Group Templates", + "default": [] + }, + "platforms": { + "additionalProperties": { + "$ref": "#/components/schemas/Platform-Input" + }, + "type": "object", + "title": "Platforms", + "default": {} + }, + "last_heartbeat": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "title": "Last Heartbeat" + } + }, + "type": "object", + "required": [ + "backend" + ], + "title": "Pool", + "description": "Single Pool Entry " + }, + "Pool-Output": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "default": "" + }, + "description": { + "type": "string", + "title": "Description", + "default": "" + }, + "status": { + "anyOf": [ + { + "$ref": "#/components/schemas/PoolStatus" + }, + { + "type": "null" + } + ] + }, + "download_type": { + "anyOf": [ + { + "$ref": "#/components/schemas/DownloadType" + }, + { + "type": "null" + } + ] + }, + "enable_maintenance": { + "type": "boolean", + "title": "Enable Maintenance", + "default": false + }, + "backend": { + "type": "string", + "title": "Backend" + }, + "default_platform": { + "anyOf": [ { - "$ref": "#/components/schemas/PoolResources" + "type": "string" + }, + { + "type": "null" } ], - "title": "Resources", + "title": "Default Platform" + }, + "default_exec_timeout": { + "type": "string", + "title": "Default Exec Timeout", + "default": "" + }, + "default_queue_timeout": { + "type": "string", + "title": "Default Queue Timeout", + "default": "" + }, + "max_exec_timeout": { + "type": "string", + "title": 
"Max Exec Timeout", + "default": "" + }, + "max_queue_timeout": { + "type": "string", + "title": "Max Queue Timeout", + "default": "" + }, + "default_exit_actions": { + "additionalProperties": { + "type": "string" + }, + "type": "object", + "title": "Default Exit Actions", + "default": {} + }, + "resources": { + "$ref": "#/components/schemas/PoolResources", "default": {} }, "topology_keys": { @@ -9762,6 +11496,7 @@ "default": [] }, "common_default_variables": { + "additionalProperties": true, "type": "object", "title": "Common Default Variables", "default": {} @@ -9791,6 +11526,7 @@ "default": [] }, "parsed_pod_template": { + "additionalProperties": true, "type": "object", "title": "Parsed Pod Template", "default": {} @@ -9805,6 +11541,7 @@ }, "parsed_group_templates": { "items": { + "additionalProperties": true, "type": "object" }, "type": "array", @@ -9813,15 +11550,22 @@ }, "platforms": { "additionalProperties": { - "$ref": "#/components/schemas/Platform" + "$ref": "#/components/schemas/Platform-Output" }, "type": "object", "title": "Platforms", "default": {} }, "last_heartbeat": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Last Heartbeat" } }, @@ -9845,10 +11589,24 @@ "default": "" }, "status": { - "$ref": "#/components/schemas/PoolStatus" + "anyOf": [ + { + "$ref": "#/components/schemas/PoolStatus" + }, + { + "type": "null" + } + ] }, "download_type": { - "$ref": "#/components/schemas/DownloadType" + "anyOf": [ + { + "$ref": "#/components/schemas/DownloadType" + }, + { + "type": "null" + } + ] }, "enable_maintenance": { "type": "boolean", @@ -9860,7 +11618,14 @@ "title": "Backend" }, "default_platform": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Default Platform" }, "default_exec_timeout": { @@ -9892,12 +11657,7 @@ "default": {} }, "resources": { - "allOf": [ - { - "$ref": "#/components/schemas/PoolResources" - } - ], - "title": "Resources", + "$ref": "#/components/schemas/PoolResources", "default": {} }, "topology_keys": { @@ -9909,6 +11669,7 @@ "default": [] }, "common_default_variables": { + "additionalProperties": true, "type": "object", "title": "Common Default Variables", "default": {} @@ -9950,8 +11711,7 @@ "required": [ "backend" ], - "title": "PoolEditable", - "description": "Pool schema to expose through API endpoint. " + "title": "PoolEditable" }, "PoolMinimal": { "properties": { @@ -9966,10 +11726,24 @@ "default": "" }, "status": { - "$ref": "#/components/schemas/PoolStatus" + "anyOf": [ + { + "$ref": "#/components/schemas/PoolStatus" + }, + { + "type": "null" + } + ] }, "download_type": { - "$ref": "#/components/schemas/DownloadType" + "anyOf": [ + { + "$ref": "#/components/schemas/DownloadType" + }, + { + "type": "null" + } + ] }, "enable_maintenance": { "type": "boolean", @@ -9981,7 +11755,14 @@ "title": "Backend" }, "default_platform": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Default Platform" }, "default_exec_timeout": { @@ -10013,12 +11794,7 @@ "default": {} }, "resources": { - "allOf": [ - { - "$ref": "#/components/schemas/PoolResources" - } - ], - "title": "Resources", + "$ref": "#/components/schemas/PoolResources", "default": {} }, "topology_keys": { @@ -10042,8 +11818,7 @@ "required": [ "backend" ], - "title": "PoolMinimal", - "description": "Pool schema to expose through API endpoint. 
" + "title": "PoolMinimal" }, "PoolNodeSetResourceUsage": { "properties": { @@ -10098,10 +11873,24 @@ "default": "" }, "status": { - "$ref": "#/components/schemas/PoolStatus" + "anyOf": [ + { + "$ref": "#/components/schemas/PoolStatus" + }, + { + "type": "null" + } + ] }, "download_type": { - "$ref": "#/components/schemas/DownloadType" + "anyOf": [ + { + "$ref": "#/components/schemas/DownloadType" + }, + { + "type": "null" + } + ] }, "enable_maintenance": { "type": "boolean", @@ -10113,7 +11902,14 @@ "title": "Backend" }, "default_platform": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Default Platform" }, "default_exec_timeout": { @@ -10145,12 +11941,7 @@ "default": {} }, "resources": { - "allOf": [ - { - "$ref": "#/components/schemas/PoolResources" - } - ], - "title": "Resources", + "$ref": "#/components/schemas/PoolResources", "default": {} }, "topology_keys": { @@ -10185,7 +11976,14 @@ "PoolResources": { "properties": { "gpu": { - "$ref": "#/components/schemas/PoolResourceCountable" + "anyOf": [ + { + "$ref": "#/components/schemas/PoolResourceCountable" + }, + { + "type": "null" + } + ] } }, "type": "object", @@ -10206,10 +12004,12 @@ "$ref": "#/components/schemas/PoolStatus" }, "usage_fields": { + "additionalProperties": true, "type": "object", "title": "Usage Fields" }, "allocatable_fields": { + "additionalProperties": true, "type": "object", "title": "Allocatable Fields" }, @@ -10272,6 +12072,7 @@ "description": "Object storing pool information. " }, "PoolStatus": { + "type": "string", "enum": [ "ONLINE", "OFFLINE", @@ -10283,14 +12084,28 @@ "PostBackendRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { @@ -10324,7 +12139,14 @@ "title": "Pools" }, "token": { - "$ref": "#/components/schemas/TokenIdentity" + "anyOf": [ + { + "$ref": "#/components/schemas/TokenIdentity" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, @@ -10340,14 +12162,28 @@ "PutBackendTestRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { @@ -10364,14 +12200,28 @@ "PutBackendTestsRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { @@ -10392,18 +12242,32 @@ "PutDatasetRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/DatasetConfig" + "$ref": 
"#/components/schemas/DatasetConfig-Input" } }, "type": "object", @@ -10416,17 +12280,32 @@ "PutGroupTemplateRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { + "additionalProperties": true, "type": "object", "title": "Configs" } @@ -10441,18 +12320,33 @@ "PutGroupTemplatesRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { "additionalProperties": { + "additionalProperties": true, "type": "object" }, "type": "object", @@ -10469,17 +12363,32 @@ "PutPodTemplateRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { + "additionalProperties": true, "type": "object", "title": "Configs" } @@ -10494,18 +12403,33 @@ "PutPodTemplatesRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { "additionalProperties": { + "additionalProperties": true, "type": "object" }, "type": "object", @@ -10522,18 +12446,32 @@ "PutPoolPlatformRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/Platform" + "$ref": "#/components/schemas/Platform-Input" } }, "type": "object", @@ -10546,18 +12484,32 @@ "PutPoolRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/Pool" + "$ref": "#/components/schemas/Pool-Input" } }, "type": "object", @@ -10570,19 +12522,33 @@ "PutPoolsRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { "additionalProperties": { - "$ref": "#/components/schemas/Pool" + "$ref": 
"#/components/schemas/Pool-Input" }, "type": "object", "title": "Configs" @@ -10598,18 +12564,33 @@ "PutResourceValidationRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { "items": { + "additionalProperties": true, "type": "object" }, "type": "array", @@ -10626,19 +12607,34 @@ "PutResourceValidationsRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs_dict": { "additionalProperties": { "items": { + "additionalProperties": true, "type": "object" }, "type": "array" @@ -10657,18 +12653,32 @@ "PutRoleRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/Role" + "$ref": "#/components/schemas/Role-Input" } }, "type": "object", @@ -10681,19 +12691,33 @@ "PutRolesRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { "items": { - "$ref": "#/components/schemas/Role" + "$ref": "#/components/schemas/Role-Input" }, "type": "array", "title": "Configs" @@ -10709,18 +12733,32 @@ "PutServiceRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/ServiceConfig" + "$ref": "#/components/schemas/ServiceConfig-Input" } }, "type": "object", @@ -10733,18 +12771,32 @@ "PutWorkflowRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "configs": { - "$ref": "#/components/schemas/WorkflowConfig" + "$ref": "#/components/schemas/WorkflowConfig-Input" } }, "type": "object", @@ -10785,14 +12837,28 @@ "RenamePoolPlatformRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + 
"type": "null" + } + ], "title": "Tags" }, "new_name": { @@ -10810,14 +12876,28 @@ "RenamePoolRequest": { "properties": { "description": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Description" }, "tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Tags" }, "new_name": { @@ -10908,49 +12988,93 @@ "title": "Hostname" }, "exposed_fields": { + "additionalProperties": true, "type": "object", "title": "Exposed Fields" }, "taints": { "items": { + "additionalProperties": true, "type": "object" }, "type": "array", "title": "Taints" }, "usage_fields": { + "additionalProperties": true, "type": "object", "title": "Usage Fields" }, "conditions": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Conditions" }, "non_workflow_usage_fields": { + "additionalProperties": true, "type": "object", "title": "Non Workflow Usage Fields" }, "allocatable_fields": { + "additionalProperties": true, "type": "object", "title": "Allocatable Fields" }, "platform_allocatable_fields": { - "type": "object", + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Platform Allocatable Fields" }, "platform_available_fields": { - "type": "object", + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Platform Available Fields" }, "platform_workflow_allocatable_fields": { - "type": "object", + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Platform Workflow Allocatable Fields" }, "config_fields": { - "type": "object", + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Config Fields" }, "backend": { @@ -10958,7 +13082,15 @@ "title": "Backend" }, "label_fields": { - "type": "object", + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], "title": "Label Fields" }, "pool_platform_labels": { @@ -11009,7 +13141,7 @@ "title": "ResourcesResponse", "description": "Object storing execution cluster node resource information. " }, - "Role": { + "Role-Input": { "properties": { "name": { "type": "string", @@ -11032,18 +13164,71 @@ "default": false }, "sync_mode": { - "allOf": [ + "$ref": "#/components/schemas/SyncMode", + "default": "import" + }, + "external_roles": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, { - "$ref": "#/components/schemas/SyncMode" + "type": "null" } ], - "default": "import" + "title": "External Roles" + } + }, + "type": "object", + "required": [ + "name", + "description", + "policies" + ], + "title": "Role", + "description": "Single Role Entry.\n\nNote: Authorization checking is now handled by the authz_sidecar (Go service).\nThis Python class is only used for role CRUD operations." 
+ },
+ "Role-Output": {
+ "properties": {
+ "name": {
+ "type": "string",
+ "title": "Name"
 },
- "external_roles": {
+ "description": {
+ "type": "string",
+ "title": "Description"
+ },
+ "policies": {
 "items": {
- "type": "string"
+ "$ref": "#/components/schemas/RolePolicy"
 },
 "type": "array",
+ "title": "Policies"
+ },
+ "immutable": {
+ "type": "boolean",
+ "title": "Immutable",
+ "default": false
+ },
+ "sync_mode": {
+ "$ref": "#/components/schemas/SyncMode",
+ "default": "import"
+ },
+ "external_roles": {
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
+ ],
 "title": "External Roles"
 }
 },
@@ -11059,11 +13244,7 @@
 "RolePolicy": {
 "properties": {
 "effect": {
- "allOf": [
- {
- "$ref": "#/components/schemas/PolicyEffect"
- }
- ],
+ "$ref": "#/components/schemas/PolicyEffect",
 "default": "Allow"
 },
 "actions": {
@@ -11096,6 +13277,7 @@
 },
 "users": {
 "items": {
+ "additionalProperties": true,
 "type": "object"
 },
 "type": "array",
@@ -11113,18 +13295,32 @@
 "RollbackConfigRequest": {
 "properties": {
 "description": {
- "type": "string",
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
 "title": "Description"
 },
 "tags": {
- "items": {
- "type": "string"
- },
- "type": "array",
+ "anyOf": [
+ {
+ "items": {
+ "type": "string"
+ },
+ "type": "array"
+ },
+ {
+ "type": "null"
+ }
+ ],
 "title": "Tags"
 },
 "config_type": {
- "$ref": "#/components/schemas/src__utils__connectors__postgres__ConfigHistoryType"
+ "$ref": "#/components/schemas/src__lib__utils__config_history__ConfigHistoryType"
 },
 "revision": {
 "type": "integer",
@@ -11247,6 +13443,7 @@
 "default": 2097152
 }
 },
+ "additionalProperties": false,
 "type": "object",
 "title": "RsyncConfig",
 "description": "Stores all configs for rsync "
@@ -11274,7 +13471,7 @@
 "type": "object",
 "title": "SMTPConfig"
 },
- "ServiceConfig": {
+ "ServiceConfig-Input": {
 "properties": {
 "service_base_url": {
 "type": "string",
@@ -11285,12 +13482,37 @@
 "$ref": "#/components/schemas/AuthenticationConfig"
 },
 "cli_config": {
- "allOf": [
- {
- "$ref": "#/components/schemas/CliConfig"
- }
- ],
- "title": "Cli Config",
+ "$ref": "#/components/schemas/CliConfig",
+ "default": {}
+ },
+ "max_pod_restart_limit": {
+ "type": "string",
+ "title": "Max Pod Restart Limit",
+ "default": "30m"
+ },
+ "agent_queue_size": {
+ "type": "integer",
+ "title": "Agent Queue Size",
+ "default": 1024
+ }
+ },
+ "additionalProperties": false,
+ "type": "object",
+ "title": "ServiceConfig",
+ "description": "Stores any configs OSMO Admins control "
+ },
+ "ServiceConfig-Output": {
+ "properties": {
+ "service_base_url": {
+ "type": "string",
+ "title": "Service Base Url",
+ "default": ""
+ },
+ "service_auth": {
+ "$ref": "#/components/schemas/AuthenticationConfig"
+ },
+ "cli_config": {
+ "$ref": "#/components/schemas/CliConfig",
 "default": {}
 },
 "max_pod_restart_limit": {
@@ -11304,6 +13526,7 @@
 "default": 1024
 }
 },
+ "additionalProperties": false,
 "type": "object",
 "title": "ServiceConfig",
 "description": "Stores any configs OSMO Admins control "
@@ -11316,12 +13539,26 @@
 "description": "The OSMO storage URI for the data service (e.g., s3://bucket)"
 },
 "region": {
- "type": "string",
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
 "title": "Region",
 "description": "The region for the data service"
 },
 "override_url": {
- "type": "string",
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
 "title": "Override Url",
 "description": "HTTP endpoint URL to override the storage URI 
(e.g., http://minio:9000)" }, @@ -11355,19 +13592,47 @@ "title": "Name" }, "overview": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Overview" }, "logs": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Logs" }, "spec": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Spec" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" } }, @@ -11400,24 +13665,45 @@ "title": "Task Name" }, "node": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Node" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "status": { "$ref": "#/components/schemas/TaskGroupStatus" }, "storage": { - "type": "integer", + "type": "number", "title": "Storage" }, "cpu": { @@ -11425,7 +13711,7 @@ "title": "Cpu" }, "memory": { - "type": "integer", + "type": "number", "title": "Memory" }, "gpu": { @@ -11448,6 +13734,7 @@ "description": "Entry for task GET API result. " }, "TaskGroupStatus": { + "type": "string", "enum": [ "SUBMITTING", "WAITING", @@ -11487,11 +13774,25 @@ "$ref": "#/components/schemas/TaskGroupStatus" }, "failure_message": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Failure Message" }, "exit_code": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Exit Code" }, "logs": { @@ -11499,22 +13800,50 @@ "title": "Logs" }, "error_logs": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Error Logs" }, "processing_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Processing Start Time" }, "scheduling_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Scheduling Start Time" }, "initializing_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Initializing Start Time" }, "events": { @@ -11522,32 +13851,74 @@ "title": "Events" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "input_download_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Input Download Start Time" }, "input_download_end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Input Download End Time" }, 
"output_upload_start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Output Upload Start Time" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" }, "pod_name": { @@ -11555,7 +13926,14 @@ "title": "Pod Name" }, "pod_ip": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pod Ip" }, "task_uuid": { @@ -11563,7 +13941,14 @@ "title": "Task Uuid" }, "node_name": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Node Name" }, "lead": { @@ -11608,7 +13993,14 @@ "default": [] }, "uploaded_templated_spec": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Uploaded Templated Spec" } }, @@ -11627,8 +14019,15 @@ "title": "Name" }, "expires_at": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Expires At" } }, @@ -11666,11 +14065,25 @@ "default": "Equal" }, "value": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Value" }, "effect": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Effect" } }, @@ -11703,18 +14116,32 @@ "UpdateConfigTagsRequest": { "properties": { "set_tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Set Tags", "description": "Tags to add to the config" }, "delete_tags": { - "items": { - "type": "string" - }, - "type": "array", + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], "title": "Delete Tags", "description": "Tags to remove from the config" } @@ -11730,12 +14157,26 @@ "title": "Id" }, "created_at": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Created At" }, "created_by": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Created By" } }, @@ -11773,12 +14214,26 @@ "description": "The OSMO storage URI for the data service (e.g., s3://bucket)" }, "region": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Region", "description": "The region for the data service" }, "override_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Override Url", "description": "HTTP endpoint URL override the storage URI (e.g., http://minio:9000)" }, @@ -11838,23 +14293,58 @@ "UserProfile": { "properties": { "username": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Username" }, "email_notification": { - "type": "boolean", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], "title": "Email Notification" }, "slack_notification": { - "type": "boolean", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], "title": "Slack Notification" }, "bucket": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Bucket" }, "pool": { - "type": "string", + "anyOf": [ + { 
+ "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" } }, @@ -11974,12 +14464,26 @@ "title": "Id" }, "created_at": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Created At" }, "created_by": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Created By" }, "roles": { @@ -12001,13 +14505,27 @@ "UserWorkflowLimitConfig": { "properties": { "max_num_workflows": { - "type": "integer", - "exclusiveMinimum": 0.0, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": 0.0 + }, + { + "type": "null" + } + ], "title": "Max Num Workflows" }, "max_num_tasks": { - "type": "integer", - "exclusiveMinimum": 0.0, + "anyOf": [ + { + "type": "integer", + "exclusiveMinimum": 0.0 + }, + { + "type": "null" + } + ], "title": "Max Num Tasks" }, "jinja_sandbox_workers": { @@ -12026,6 +14544,7 @@ "default": 104857600 } }, + "additionalProperties": false, "type": "object", "title": "UserWorkflowLimitConfig", "description": "Stores workflow limits per user. Default is None, which means no limit.\nIf a limit is set, it must be greater than 0." @@ -12067,7 +14586,7 @@ "properties": { "pools": { "additionalProperties": { - "$ref": "#/components/schemas/Pool" + "$ref": "#/components/schemas/Pool-Output" }, "type": "object", "title": "Pools", @@ -12089,33 +14608,189 @@ "title": "Minor", "default": "0" }, - "revision": { - "type": "string", - "title": "Revision", - "default": "0" + "revision": { + "type": "string", + "title": "Revision", + "default": "0" + }, + "hash": { + "type": "string", + "title": "Hash", + "default": "" + } + }, + "type": "object", + "required": [ + "major" + ], + "title": "Version", + "description": "A class to maintain version information. 
" + }, + "WorkflowConfig-Input": { + "properties": { + "workflow_data": { + "$ref": "#/components/schemas/DataConfig", + "default": { + "base_url": "", + "websocket_timeout": 1440, + "data_timeout": 10, + "download_type": "download" + } + }, + "workflow_log": { + "$ref": "#/components/schemas/LogConfig", + "default": {} + }, + "workflow_app": { + "$ref": "#/components/schemas/LogConfig", + "default": {} + }, + "workflow_info": { + "$ref": "#/components/schemas/WorkflowInfo", + "default": { + "tags": [], + "max_name_length": 64 + } + }, + "backend_images": { + "$ref": "#/components/schemas/OsmoImageConfig", + "default": { + "init": "", + "client": "", + "credential": { + "auth": "", + "registry": "", + "username": "" + } + } + }, + "workflow_alerts": { + "$ref": "#/components/schemas/NotificationConfig", + "default": { + "slack_token": "", + "smtp_settings": { + "host": "", + "password": "", + "sender": "" + } + } + }, + "credential_config": { + "$ref": "#/components/schemas/CredentialConfig", + "default": { + "disable_registry_validation": [], + "disable_data_validation": [] + } + }, + "user_workflow_limits": { + "$ref": "#/components/schemas/UserWorkflowLimitConfig", + "default": { + "jinja_sandbox_workers": 2, + "jinja_sandbox_max_time": 0.5, + "jinja_sandbox_memory_limit": 104857600 + } + }, + "plugins_config": { + "$ref": "#/components/schemas/PluginsConfig-Input", + "default": { + "rsync": { + "allowed_paths": {}, + "client_upload_rate_limit": 2097152, + "daemon_debounce_delay": 30.0, + "daemon_poll_interval": 120.0, + "daemon_reconcile_interval": 60.0, + "enable_telemetry": false, + "enabled": false, + "read_bandwidth_limit": 2621440, + "write_bandwidth_limit": 2621440 + } + } + }, + "max_num_tasks": { + "type": "integer", + "title": "Max Num Tasks", + "default": 20 + }, + "max_num_ports_per_task": { + "type": "integer", + "title": "Max Num Ports Per Task", + "default": 30 + }, + "max_retry_per_task": { + "type": "integer", + "title": "Max Retry Per Task", + "default": 0 + }, + "max_retry_per_job": { + "type": "integer", + "title": "Max Retry Per Job", + "default": 5 + }, + "default_schedule_timeout": { + "type": "integer", + "title": "Default Schedule Timeout", + "default": 30 + }, + "default_exec_timeout": { + "type": "string", + "title": "Default Exec Timeout", + "default": "60d" + }, + "default_queue_timeout": { + "type": "string", + "title": "Default Queue Timeout", + "default": "60d" + }, + "max_exec_timeout": { + "type": "string", + "title": "Max Exec Timeout", + "default": "60d" + }, + "max_queue_timeout": { + "type": "string", + "title": "Max Queue Timeout", + "default": "60d" + }, + "force_cleanup_delay": { + "type": "string", + "title": "Force Cleanup Delay", + "default": "1h" + }, + "max_log_lines": { + "type": "integer", + "title": "Max Log Lines", + "default": 10000 + }, + "max_task_log_lines": { + "type": "integer", + "title": "Max Task Log Lines", + "default": 1000 + }, + "max_error_log_lines": { + "type": "integer", + "title": "Max Error Log Lines", + "default": 100 }, - "hash": { + "max_event_log_lines": { + "type": "integer", + "title": "Max Event Log Lines", + "default": 100 + }, + "task_heartbeat_frequency": { "type": "string", - "title": "Hash", - "default": "" + "title": "Task Heartbeat Frequency", + "default": "10m" } }, + "additionalProperties": false, "type": "object", - "required": [ - "major" - ], - "title": "Version", - "description": "A class to maintain version information. 
" + "title": "WorkflowConfig", + "description": "Stores any workflow configs External Admins control " }, - "WorkflowConfig": { + "WorkflowConfig-Output": { "properties": { "workflow_data": { - "allOf": [ - { - "$ref": "#/components/schemas/DataConfig" - } - ], - "title": "Workflow Data", + "$ref": "#/components/schemas/DataConfig", "default": { "base_url": "", "websocket_timeout": 1440, @@ -12124,87 +14799,52 @@ } }, "workflow_log": { - "allOf": [ - { - "$ref": "#/components/schemas/LogConfig" - } - ], - "title": "Workflow Log", + "$ref": "#/components/schemas/LogConfig", "default": {} }, "workflow_app": { - "allOf": [ - { - "$ref": "#/components/schemas/LogConfig" - } - ], - "title": "Workflow App", + "$ref": "#/components/schemas/LogConfig", "default": {} }, "workflow_info": { - "allOf": [ - { - "$ref": "#/components/schemas/WorkflowInfo" - } - ], - "title": "Workflow Info", + "$ref": "#/components/schemas/WorkflowInfo", "default": { "tags": [], "max_name_length": 64 } }, "backend_images": { - "allOf": [ - { - "$ref": "#/components/schemas/OsmoImageConfig" - } - ], - "title": "Backend Images", + "$ref": "#/components/schemas/OsmoImageConfig", "default": { "init": "", "client": "", "credential": { + "auth": "", "registry": "", - "username": "", - "auth": "" + "username": "" } } }, "workflow_alerts": { - "allOf": [ - { - "$ref": "#/components/schemas/NotificationConfig" - } - ], - "title": "Workflow Alerts", + "$ref": "#/components/schemas/NotificationConfig", "default": { "slack_token": "", "smtp_settings": { "host": "", - "sender": "", - "password": "" + "password": "", + "sender": "" } } }, "credential_config": { - "allOf": [ - { - "$ref": "#/components/schemas/CredentialConfig" - } - ], - "title": "Credential Config", + "$ref": "#/components/schemas/CredentialConfig", "default": { "disable_registry_validation": [], "disable_data_validation": [] } }, "user_workflow_limits": { - "allOf": [ - { - "$ref": "#/components/schemas/UserWorkflowLimitConfig" - } - ], - "title": "User Workflow Limits", + "$ref": "#/components/schemas/UserWorkflowLimitConfig", "default": { "jinja_sandbox_workers": 2, "jinja_sandbox_max_time": 0.5, @@ -12212,23 +14852,18 @@ } }, "plugins_config": { - "allOf": [ - { - "$ref": "#/components/schemas/PluginsConfig" - } - ], - "title": "Plugins Config", + "$ref": "#/components/schemas/PluginsConfig-Output", "default": { "rsync": { - "enabled": false, - "enable_telemetry": false, - "read_bandwidth_limit": 2621440, - "write_bandwidth_limit": 2621440, "allowed_paths": {}, + "client_upload_rate_limit": 2097152, "daemon_debounce_delay": 30.0, "daemon_poll_interval": 120.0, "daemon_reconcile_interval": 60.0, - "client_upload_rate_limit": 2097152 + "enable_telemetry": false, + "enabled": false, + "read_bandwidth_limit": 2621440, + "write_bandwidth_limit": 2621440 } } }, @@ -12308,6 +14943,7 @@ "default": "10m" } }, + "additionalProperties": false, "type": "object", "title": "WorkflowConfig", "description": "Stores any workflow configs External Admins control " @@ -12328,6 +14964,7 @@ "default": 64 } }, + "additionalProperties": false, "type": "object", "title": "WorkflowInfo", "description": "Config for workflow storage info. 
" @@ -12369,7 +15006,14 @@ "title": "Submitted By" }, "cancelled_by": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Cancelled By" }, "spec": { @@ -12393,19 +15037,47 @@ "title": "Overview" }, "parent_name": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Parent Name" }, "parent_job_id": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "Parent Job Id" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" }, "grafana_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Grafana Url" }, "tags": { @@ -12422,33 +15094,64 @@ "title": "Submit Time" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "exec_timeout": { - "type": "number", - "format": "time-delta", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Exec Timeout" }, "queue_timeout": { - "type": "number", - "format": "time-delta", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Queue Timeout" }, "duration": { - "type": "number", - "format": "time-delta", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Duration" }, "queued_time": { "type": "number", - "format": "time-delta", "title": "Queued Time" }, "status": { @@ -12467,23 +15170,58 @@ "title": "Groups" }, "pool": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" }, "backend": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Backend" }, "app_owner": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App Owner" }, "app_name": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App Name" }, "app_version": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "App Version" }, "plugins": { @@ -12539,6 +15277,7 @@ "description": "Represents the status of a workflow. 
" }, "src__lib__utils__config_history__ConfigHistoryType": { + "type": "string", "enum": [ "SERVICE", "WORKFLOW", @@ -12578,7 +15317,7 @@ "title": "Owner" }, "latest_version": { - "type": "string", + "type": "integer", "title": "Latest Version" } }, @@ -12635,23 +15374,42 @@ "title": "Submit Time" }, "start_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "Start Time" }, "end_time": { - "type": "string", - "format": "date-time", + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], "title": "End Time" }, "queued_time": { "type": "number", - "format": "time-delta", "title": "Queued Time" }, "duration": { - "type": "number", - "format": "time-delta", + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], "title": "Duration" }, "status": { @@ -12666,31 +15424,80 @@ "title": "Logs" }, "error_logs": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Error Logs" }, "grafana_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Grafana Url" }, "dashboard_url": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Dashboard Url" }, "pool": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "Pool" }, "app_owner": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App Owner" }, "app_name": { - "type": "string", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], "title": "App Name" }, "app_version": { - "type": "integer", + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], "title": "App Version" }, "priority": { @@ -12735,23 +15542,7 @@ "more_entries" ], "title": "ListResponse" - }, - "src__utils__connectors__postgres__ConfigHistoryType": { - "enum": [ - "SERVICE", - "WORKFLOW", - "DATASET", - "BACKEND", - "POOL", - "POD_TEMPLATE", - "GROUP_TEMPLATE", - "RESOURCE_VALIDATION", - "BACKEND_TEST", - "ROLE" - ], - "title": "ConfigHistoryType", - "description": "Type of configs supported by config history " } } } -} +} \ No newline at end of file diff --git a/src/ui/playwright.config.ts b/src/ui/playwright.config.ts index e98e132d5..54ea589dd 100644 --- a/src/ui/playwright.config.ts +++ b/src/ui/playwright.config.ts @@ -77,6 +77,11 @@ export default defineConfig({ url: BASE_URL, reuseExistingServer: !process.env.CI, timeout: 60_000, - env: { PORT: String(PORT) }, + env: { + PORT: String(PORT), + // Override .env.local admin roles so E2E tests run as a regular user. + // Tests that need admin can override via Playwright fixtures. + DEV_USER_ROLES: "osmo-user", + }, }, }); diff --git a/src/ui/src/components/event-viewer/event-viewer-container.tsx b/src/ui/src/components/event-viewer/event-viewer-container.tsx index dfd4f9b7b..ae551bb26 100644 --- a/src/ui/src/components/event-viewer/event-viewer-container.tsx +++ b/src/ui/src/components/event-viewer/event-viewer-container.tsx @@ -37,9 +37,9 @@ import "@/components/event-viewer/event-viewer.css"; /** OSMO Postgres timestamps for a single task attempt. 
*/ export interface TaskTiming { /** ISO string from TaskQueryResponse.processing_start_time */ - processingStartTime?: string; + processingStartTime?: string | null; /** ISO string from TaskQueryResponse.end_time — absent for live tasks */ - endTime?: string; + endTime?: string | null; } interface EventViewerContainerProps { diff --git a/src/ui/src/features/workflows/detail/components/panel/ui/task/task-details.tsx b/src/ui/src/features/workflows/detail/components/panel/ui/task/task-details.tsx index 85101c0d7..c7f26177c 100644 --- a/src/ui/src/features/workflows/detail/components/panel/ui/task/task-details.tsx +++ b/src/ui/src/features/workflows/detail/components/panel/ui/task/task-details.tsx @@ -292,7 +292,7 @@ const OverviewTab = memo(function OverviewTab({ ) : null, copyable: true, - copyValue: task.node_name, + copyValue: task.node_name ?? undefined, mono: true, truncate: true, show: !!task.node_name, diff --git a/src/ui/src/lib/api/adapter/datasets.ts b/src/ui/src/lib/api/adapter/datasets.ts index 143558a2e..f05033910 100644 --- a/src/ui/src/lib/api/adapter/datasets.ts +++ b/src/ui/src/lib/api/adapter/datasets.ts @@ -223,7 +223,7 @@ export function transformDatasetListEntry(raw: DataListEntry): Dataset { created_at: raw.create_time, created_by: undefined, // Not available in list view updated_at: raw.last_created || raw.create_time, - size_bytes: ensureNumber(raw.hash_location_size), + size_bytes: ensureNumber(raw.hash_location_size ?? undefined), labels: {}, // Not available in list view }; } @@ -278,9 +278,9 @@ export function transformDatasetDetail(raw: DataInfoResponse): DetailResponse { type: DatasetType.COLLECTION, path: raw.hash_location || "", created_at: raw.created_date || "", - created_by: raw.created_by, + created_by: raw.created_by ?? undefined, updated_at: raw.created_date || "", - size_bytes: ensureNumber(raw.hash_location_size), + size_bytes: ensureNumber(raw.hash_location_size ?? undefined), labels, }, members: collectionEntries.map((e) => ({ @@ -314,9 +314,9 @@ export function transformDatasetDetail(raw: DataInfoResponse): DetailResponse { path: raw.hash_location || "", version: currentVersionNumber, created_at: raw.created_date || "", - created_by: raw.created_by, + created_by: raw.created_by ?? undefined, updated_at: latestVersion?.created_date || raw.created_date || "", - size_bytes: ensureNumber(raw.hash_location_size), + size_bytes: ensureNumber(raw.hash_location_size ?? undefined), labels, }, versions: datasetVersions as DatasetVersion[], diff --git a/src/ui/src/lib/api/generated.ts b/src/ui/src/lib/api/generated.ts index 57bb91a82..70b21d6fe 100644 --- a/src/ui/src/lib/api/generated.ts +++ b/src/ui/src/lib/api/generated.ts @@ -1,5 +1,5 @@ /** - * Generated by orval v8.5.3 🍺 + * Generated by orval v8.4.2 🍺 * Do not edit manually. 
* FastAPI * OpenAPI spec version: 0.1.0 @@ -75,12 +75,12 @@ export type AuthenticationConfigKeys = {[key: string]: AsymmetricKeyPair}; * Store info needed to login */ export interface LoginInfo { - device_endpoint?: string; - device_client_id?: string; - browser_endpoint?: string; - browser_client_id?: string; - token_endpoint?: string; - logout_endpoint?: string; + device_endpoint?: string | null; + device_client_id?: string | null; + browser_endpoint?: string | null; + browser_client_id?: string | null; + token_endpoint?: string | null; + logout_endpoint?: string | null; } /** @@ -116,7 +116,7 @@ export interface BackendSchedulerSettings { scheduler_timeout?: number; } -export type BackendNodeConditionsRules = {[key: string]: string}; +export type BackendNodeConditionsRules = {[key: string]: string} | null; /** * Settings for backend node conditions. @@ -150,14 +150,14 @@ export interface Backend { * Similar to connectors.Backend, but with optional fields. */ export interface BackendConfig { - description?: string; - k8s_uid?: string; - dashboard_url?: string; - grafana_url?: string; - tests?: string[]; - scheduler_settings?: BackendSchedulerSettings; - node_conditions?: BackendNodeConditions; - router_address?: string; + description?: string | null; + k8s_uid?: string | null; + dashboard_url?: string | null; + grafana_url?: string | null; + tests?: string[] | null; + scheduler_settings?: BackendSchedulerSettings | null; + node_conditions?: BackendNodeConditions | null; + router_address?: string | null; } /** @@ -221,9 +221,9 @@ export interface StaticDataCredential { /** The OSMO storage URI for the data service (e.g., s3://bucket) */ endpoint: string; /** The region for the data service */ - region?: string; + region?: string | null; /** HTTP endpoint URL override the storage URI (e.g., http://minio:9000) */ - override_url?: string; + override_url?: string | null; /** The authentication key for a data backend */ access_key_id: string; /** The encrypted authentication secret for a data backend */ @@ -239,7 +239,7 @@ export interface BucketConfig { region?: string; description?: string; mode?: string; - default_credential?: StaticDataCredential; + default_credential?: StaticDataCredential | null; } /** @@ -258,7 +258,7 @@ export type BucketInfoResponseBuckets = {[key: string]: BucketInfoEntry}; * Object storing Upload Response. */ export interface BucketInfoResponse { - default?: string; + default?: string | null; buckets: BucketInfoResponseBuckets; } @@ -290,9 +290,9 @@ export interface CancelResponse { * Config for storing information regarding CLI storage. */ export interface CliConfig { - latest_version?: string; - min_supported_version?: string; - client_install_url?: string; + latest_version?: string | null; + min_supported_version?: string | null; + client_install_url?: string | null; } /** @@ -332,7 +332,7 @@ export interface ConfigHistory { username: string; created_at: string; description: string; - tags?: string[]; + tags?: string[] | null; data?: unknown; } @@ -340,8 +340,8 @@ export interface ConfigHistory { * Request body for updating configurations with history tracking metadata. 
*/ export interface ConfigsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; } /** @@ -349,7 +349,7 @@ export interface ConfigsRequest { */ export interface CreateUserRequest { id: string; - roles?: string[]; + roles?: string[] | null; } /** @@ -360,7 +360,7 @@ export interface CredentialConfig { disable_data_validation?: string[]; } -export type CredentialGetResponseCredentialsItem = {[key: string]: string}; +export type CredentialGetResponseCredentialsItem = {[key: string]: string | null}; /** * Credential Response. @@ -388,9 +388,9 @@ export interface UserDataCredential { /** The OSMO storage URI for the data service (e.g., s3://bucket) */ endpoint: string; /** The region for the data service */ - region?: string; + region?: string | null; /** HTTP endpoint URL override the storage URI (e.g., http://minio:9000) */ - override_url?: string; + override_url?: string | null; /** The authentication key for a data backend */ access_key_id: string; /** The authentication secret for a data backend */ @@ -415,11 +415,11 @@ export interface UserCredential { */ export interface CredentialOptions { /** Authentication information for a Docker registry */ - registry_credential?: UserRegistryCredential; + registry_credential?: UserRegistryCredential | null; /** Authentication information for a data service */ - data_credential?: UserDataCredential; + data_credential?: UserDataCredential | null; /** Generic authentication information */ - generic_credential?: UserCredential; + generic_credential?: UserCredential | null; } /** @@ -443,9 +443,9 @@ export interface DataMetadataResponse { * Object storing Tag/Label/Metadata Response. */ export interface DataAttributeResponse { - tag_response?: DataTagResponse; - label_response?: DataMetadataResponse; - metadata_response?: DataMetadataResponse; + tag_response?: DataTagResponse | null; + label_response?: DataMetadataResponse | null; + metadata_response?: DataMetadataResponse | null; } /** @@ -462,7 +462,7 @@ export const DownloadType = { * Config for storing information about data. */ export interface DataConfig { - credential?: StaticDataCredential; + credential?: StaticDataCredential | null; base_url?: string; websocket_timeout?: number; data_timeout?: number; @@ -486,7 +486,7 @@ export interface DataInfoCollectionEntry { version: string; location: string; uri: string; - hash_location?: string; + hash_location?: string | null; size: number; } @@ -526,9 +526,6 @@ export interface DataInfoDatasetEntry { export type DataInfoResponseLabels = { [key: string]: unknown }; -/** - * An enumeration. 
- */ export type DatasetType = typeof DatasetType[keyof typeof DatasetType]; @@ -544,10 +541,10 @@ export interface DataInfoResponse { name: string; id: string; bucket: string; - created_by?: string; - created_date?: string; - hash_location?: string; - hash_location_size?: number; + created_by?: string | null; + created_date?: string | null; + hash_location?: string | null; + hash_location_size?: number | null; labels: DataInfoResponseLabels; type: DatasetType; versions: (DataInfoDatasetEntry | DataInfoCollectionEntry)[]; @@ -561,10 +558,10 @@ export interface DataListEntry { id: string; bucket: string; create_time: string; - last_created?: string; - hash_location?: string; - hash_location_size?: number; - version_id?: string; + last_created?: string | null; + hash_location?: string | null; + hash_location_size?: number | null; + version_id?: string | null; type: DatasetType; } @@ -575,9 +572,6 @@ export interface DataListResponse { datasets: DataListEntry[]; } -/** - * An enumeration. - */ export type DatasetQueryType = typeof DatasetQueryType[keyof typeof DatasetQueryType]; @@ -594,13 +588,23 @@ export interface DataQueryResponse { datasets: (DataInfoResponse | DataInfoDatasetEntry)[]; } -export type DatasetConfigBuckets = {[key: string]: BucketConfig}; +export type DatasetConfigInputBuckets = {[key: string]: BucketConfig}; /** * Stores any dataset configs External Admins control */ -export interface DatasetConfig { - buckets?: DatasetConfigBuckets; +export interface DatasetConfigInput { + buckets?: DatasetConfigInputBuckets; + default_bucket?: string; +} + +export type DatasetConfigOutputBuckets = {[key: string]: BucketConfig}; + +/** + * Stores any dataset configs External Admins control + */ +export interface DatasetConfigOutput { + buckets?: DatasetConfigOutputBuckets; default_bucket?: string; } @@ -608,8 +612,8 @@ export interface DatasetConfig { * Request body for deleting a backend with history tracking metadata. */ export interface DeleteBackendRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; force?: boolean; } @@ -647,7 +651,7 @@ export interface PoolResourceCountable { * Resources allocated to the pool, for schedulers that support this feature */ export interface PoolResources { - gpu?: PoolResourceCountable; + gpu?: PoolResourceCountable | null; } /** @@ -679,17 +683,14 @@ export type PoolEditableCommonDefaultVariables = { [key: string]: unknown }; export type PoolEditablePlatforms = {[key: string]: PlatformEditable}; -/** - * Pool schema to expose through API endpoint. 
- */ export interface PoolEditable { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -772,24 +773,24 @@ export interface TaskQueryResponse { name: string; retry_id: number; status: TaskGroupStatus; - failure_message?: string; - exit_code?: number; + failure_message?: string | null; + exit_code?: number | null; logs: string; - error_logs?: string; - processing_start_time?: string; - scheduling_start_time?: string; - initializing_start_time?: string; + error_logs?: string | null; + processing_start_time?: string | null; + scheduling_start_time?: string | null; + initializing_start_time?: string | null; events: string; - start_time?: string; - end_time?: string; - input_download_start_time?: string; - input_download_end_time?: string; - output_upload_start_time?: string; - dashboard_url?: string; + start_time?: string | null; + end_time?: string | null; + input_download_start_time?: string | null; + input_download_end_time?: string | null; + output_upload_start_time?: string | null; + dashboard_url?: string | null; pod_name: string; - pod_ip?: string; + pod_ip?: string | null; task_uuid: string; - node_name?: string; + node_name?: string | null; lead?: boolean; } @@ -799,14 +800,14 @@ export interface TaskQueryResponse { export interface GroupQueryResponse { name: string; status: TaskGroupStatus; - start_time?: string; - end_time?: string; - processing_start_time?: string; - scheduling_start_time?: string; - initializing_start_time?: string; - remaining_upstream_groups?: string[]; - downstream_groups?: string[]; - failure_message?: string; + start_time?: string | null; + end_time?: string | null; + processing_start_time?: string | null; + scheduling_start_time?: string | null; + initializing_start_time?: string | null; + remaining_upstream_groups?: string[] | null; + downstream_groups?: string[] | null; + failure_message?: string | null; tasks?: TaskQueryResponse[]; } @@ -824,9 +825,9 @@ export interface HTTPValidationError { * Response for JWT token creation endpoints. 
*/ export interface JwtTokenResponse { - token?: string; - expires_at?: number; - error?: string; + token?: string | null; + expires_at?: number | null; + error?: string | null; } /** @@ -852,7 +853,7 @@ export const ListOrder = { */ export interface ListTaskAggregatedEntry { user: string; - pool?: string; + pool?: string | null; storage: number; cpu: number; memory: number; @@ -874,17 +875,17 @@ export interface ListTaskEntry { workflow_uuid: string; task_name: string; retry_id: number; - pool?: string; - node?: string; - start_time?: string; - end_time?: string; - duration?: number; + pool?: string | null; + node?: string | null; + start_time?: string | null; + end_time?: string | null; + duration?: number | null; status: TaskGroupStatus; overview: string; logs: string; - error_logs?: string; - grafana_url?: string; - dashboard_url?: string; + error_logs?: string | null; + grafana_url?: string | null; + dashboard_url?: string | null; storage: number; cpu: number; memory: number; @@ -901,7 +902,7 @@ export interface ListTaskResponse { */ export interface ListTaskSummaryEntry { user: string; - pool?: string; + pool?: string | null; storage: number; cpu: number; memory: number; @@ -917,7 +918,7 @@ export interface ListTaskSummaryResponse { * Config for storing information about data. */ export interface LogConfig { - credential?: StaticDataCredential; + credential?: StaticDataCredential | null; } /** @@ -935,17 +936,14 @@ export type PoolMinimalDefaultExitActions = {[key: string]: string}; export type PoolMinimalPlatforms = {[key: string]: PlatformMinimal}; -/** - * Pool schema to expose through API endpoint. - */ export interface PoolMinimal { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -976,9 +974,6 @@ export interface NotificationConfig { smtp_settings?: SMTPConfig; } -/** - * An enumeration. - */ export type OperatorType = typeof OperatorType[keyof typeof OperatorType]; @@ -1018,8 +1013,8 @@ export type PatchBackendTestRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a test with history tracking metadata. */ export interface PatchBackendTestRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchBackendTestRequestConfigsDict; } @@ -1029,8 +1024,8 @@ export type PatchConfigRequestConfigsDict = { [key: string]: unknown }; * Request body for patching configurations with history tracking metadata. */ export interface PatchConfigRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchConfigRequestConfigsDict; } @@ -1040,8 +1035,8 @@ export type PatchDatasetRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a dataset bucket configuration with history tracking metadata. */ export interface PatchDatasetRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchDatasetRequestConfigsDict; } @@ -1051,16 +1046,16 @@ export type PatchPoolRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a pool with history tracking metadata. 
*/ export interface PatchPoolRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchPoolRequestConfigsDict; } -export type PlatformLabels = {[key: string]: string}; +export type PlatformInputLabels = {[key: string]: string}; -export type PlatformDefaultVariables = { [key: string]: unknown }; +export type PlatformInputDefaultVariables = { [key: string]: unknown }; -export type PlatformParsedPodTemplate = { [key: string]: unknown }; +export type PlatformInputParsedPodTemplate = { [key: string]: unknown }; /** * Single Toleration Entry @@ -1068,8 +1063,8 @@ export type PlatformParsedPodTemplate = { [key: string]: unknown }; export interface Toleration { key: string; operator?: string; - value?: string; - effect?: string; + value?: string | null; + effect?: string | null; } /** @@ -1085,19 +1080,43 @@ export interface ResourceAssertion { /** * Single Platform Entry */ -export interface Platform { +export interface PlatformInput { + description?: string; + host_network_allowed?: boolean; + privileged_allowed?: boolean; + allowed_mounts?: string[]; + default_mounts?: string[]; + tolerations?: Toleration[]; + labels?: PlatformInputLabels; + default_variables?: PlatformInputDefaultVariables; + resource_validations?: string[]; + parsed_resource_validations?: ResourceAssertion[]; + override_pod_template?: string[]; + parsed_pod_template?: PlatformInputParsedPodTemplate; +} + +export type PlatformOutputLabels = {[key: string]: string}; + +export type PlatformOutputDefaultVariables = { [key: string]: unknown }; + +export type PlatformOutputParsedPodTemplate = { [key: string]: unknown }; + +/** + * Single Platform Entry + */ +export interface PlatformOutput { description?: string; host_network_allowed?: boolean; privileged_allowed?: boolean; allowed_mounts?: string[]; default_mounts?: string[]; tolerations?: Toleration[]; - labels?: PlatformLabels; - default_variables?: PlatformDefaultVariables; + labels?: PlatformOutputLabels; + default_variables?: PlatformOutputDefaultVariables; resource_validations?: string[]; parsed_resource_validations?: ResourceAssertion[]; override_pod_template?: string[]; - parsed_pod_template?: PlatformParsedPodTemplate; + parsed_pod_template?: PlatformOutputParsedPodTemplate; } /** @@ -1152,7 +1171,14 @@ export interface RsyncConfig { /** * Stores any plugins configs */ -export interface PluginsConfig { +export interface PluginsConfigInput { + rsync?: RsyncConfig; +} + +/** + * Stores any plugins configs + */ +export interface PluginsConfigOutput { rsync?: RsyncConfig; } @@ -1167,43 +1193,82 @@ export const PolicyEffect = { Deny: 'Deny', } as const; -export type PoolDefaultExitActions = {[key: string]: string}; +export type PoolInputDefaultExitActions = {[key: string]: string}; -export type PoolCommonDefaultVariables = { [key: string]: unknown }; +export type PoolInputCommonDefaultVariables = { [key: string]: unknown }; -export type PoolParsedPodTemplate = { [key: string]: unknown }; +export type PoolInputParsedPodTemplate = { [key: string]: unknown }; -export type PoolParsedGroupTemplatesItem = { [key: string]: unknown }; +export type PoolInputParsedGroupTemplatesItem = { [key: string]: unknown }; -export type PoolPlatforms = {[key: string]: Platform}; +export type PoolInputPlatforms = {[key: string]: PlatformInput}; /** * Single Pool Entry */ -export interface Pool { +export interface PoolInput { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: 
PoolStatus | null; + download_type?: DownloadType | null; + enable_maintenance?: boolean; + backend: string; + default_platform?: string | null; + default_exec_timeout?: string; + default_queue_timeout?: string; + max_exec_timeout?: string; + max_queue_timeout?: string; + default_exit_actions?: PoolInputDefaultExitActions; + resources?: PoolResources; + topology_keys?: TopologyKey[]; + common_default_variables?: PoolInputCommonDefaultVariables; + common_resource_validations?: string[]; + parsed_resource_validations?: ResourceAssertion[]; + common_pod_template?: string[]; + parsed_pod_template?: PoolInputParsedPodTemplate; + common_group_templates?: string[]; + parsed_group_templates?: PoolInputParsedGroupTemplatesItem[]; + platforms?: PoolInputPlatforms; + last_heartbeat?: string | null; +} + +export type PoolOutputDefaultExitActions = {[key: string]: string}; + +export type PoolOutputCommonDefaultVariables = { [key: string]: unknown }; + +export type PoolOutputParsedPodTemplate = { [key: string]: unknown }; + +export type PoolOutputParsedGroupTemplatesItem = { [key: string]: unknown }; + +export type PoolOutputPlatforms = {[key: string]: PlatformOutput}; + +/** + * Single Pool Entry + */ +export interface PoolOutput { + name?: string; + description?: string; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; max_queue_timeout?: string; - default_exit_actions?: PoolDefaultExitActions; + default_exit_actions?: PoolOutputDefaultExitActions; resources?: PoolResources; topology_keys?: TopologyKey[]; - common_default_variables?: PoolCommonDefaultVariables; + common_default_variables?: PoolOutputCommonDefaultVariables; common_resource_validations?: string[]; parsed_resource_validations?: ResourceAssertion[]; common_pod_template?: string[]; - parsed_pod_template?: PoolParsedPodTemplate; + parsed_pod_template?: PoolOutputParsedPodTemplate; common_group_templates?: string[]; - parsed_group_templates?: PoolParsedGroupTemplatesItem[]; - platforms?: PoolPlatforms; - last_heartbeat?: string; + parsed_group_templates?: PoolOutputParsedGroupTemplatesItem[]; + platforms?: PoolOutputPlatforms; + last_heartbeat?: string | null; } /** @@ -1228,11 +1293,11 @@ export type PoolResourceUsagePlatforms = {[key: string]: PlatformMinimal}; export interface PoolResourceUsage { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -1286,8 +1351,8 @@ export interface PoolResponse { * Request body for creating a new backend with history tracking metadata. 
*/ export interface PostBackendRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: BackendConfig; } @@ -1295,11 +1360,11 @@ export interface PostBackendRequest { * Provides all User Profile Information */ export interface UserProfile { - username?: string; - email_notification?: boolean; - slack_notification?: boolean; - bucket?: string; - pool?: string; + username?: string | null; + email_notification?: boolean | null; + slack_notification?: boolean | null; + bucket?: string | null; + pool?: string | null; } /** @@ -1307,7 +1372,7 @@ export interface UserProfile { */ export interface TokenIdentity { name: string; - expires_at?: string; + expires_at?: string | null; } /** @@ -1318,15 +1383,15 @@ export interface ProfileResponse { profile: UserProfile; roles: string[]; pools: string[]; - token?: TokenIdentity; + token?: TokenIdentity | null; } /** * Request body for updating a test with history tracking metadata. */ export interface PutBackendTestRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: BackendTests; } @@ -1336,8 +1401,8 @@ export type PutBackendTestsRequestConfigs = {[key: string]: BackendTests}; * Request body for updating a test with history tracking metadata. */ export interface PutBackendTestsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutBackendTestsRequestConfigs; } @@ -1345,9 +1410,9 @@ export interface PutBackendTestsRequest { * Request body for updating dataset configurations with history tracking metadata. */ export interface PutDatasetRequest { - description?: string; - tags?: string[]; - configs: DatasetConfig; + description?: string | null; + tags?: string[] | null; + configs: DatasetConfigInput; } export type PutGroupTemplateRequestConfigs = { [key: string]: unknown }; @@ -1356,8 +1421,8 @@ export type PutGroupTemplateRequestConfigs = { [key: string]: unknown }; * Request body for updating a group template with history tracking metadata. */ export interface PutGroupTemplateRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutGroupTemplateRequestConfigs; } @@ -1367,8 +1432,8 @@ export type PutGroupTemplatesRequestConfigs = {[key: string]: { [key: string]: u * Request body for updating group templates with history tracking metadata. */ export interface PutGroupTemplatesRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutGroupTemplatesRequestConfigs; } @@ -1378,8 +1443,8 @@ export type PutPodTemplateRequestConfigs = { [key: string]: unknown }; * Request body for updating a pod template with history tracking metadata. */ export interface PutPodTemplateRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPodTemplateRequestConfigs; } @@ -1389,8 +1454,8 @@ export type PutPodTemplatesRequestConfigs = {[key: string]: { [key: string]: unk * Request body for updating pod templates with history tracking metadata. */ export interface PutPodTemplatesRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPodTemplatesRequestConfigs; } @@ -1398,28 +1463,28 @@ export interface PutPodTemplatesRequest { * Request body for updating a platform in a pool with history tracking metadata. 
*/ export interface PutPoolPlatformRequest { - description?: string; - tags?: string[]; - configs: Platform; + description?: string | null; + tags?: string[] | null; + configs: PlatformInput; } /** * Request body for updating a pool with history tracking metadata. */ export interface PutPoolRequest { - description?: string; - tags?: string[]; - configs: Pool; + description?: string | null; + tags?: string[] | null; + configs: PoolInput; } -export type PutPoolsRequestConfigs = {[key: string]: Pool}; +export type PutPoolsRequestConfigs = {[key: string]: PoolInput}; /** * Request body for updating pools with history tracking metadata. */ export interface PutPoolsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPoolsRequestConfigs; } @@ -1429,8 +1494,8 @@ export type PutResourceValidationRequestConfigsItem = { [key: string]: unknown } * Request body for updating a resource validation with history tracking metadata. */ export interface PutResourceValidationRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutResourceValidationRequestConfigsItem[]; } @@ -1442,8 +1507,8 @@ export type PutResourceValidationsRequestConfigsDict = {[key: string]: PutResour * Request body for updating resource validations with history tracking metadata. */ export interface PutResourceValidationsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PutResourceValidationsRequestConfigsDict; } @@ -1485,37 +1550,37 @@ export const SyncMode = { Note: Authorization checking is now handled by the authz_sidecar (Go service). This Python class is only used for role CRUD operations. */ -export interface Role { +export interface RoleInput { name: string; description: string; policies: RolePolicy[]; immutable?: boolean; sync_mode?: SyncMode; - external_roles?: string[]; + external_roles?: string[] | null; } /** * Request body for updating a role with history tracking metadata. */ export interface PutRoleRequest { - description?: string; - tags?: string[]; - configs: Role; + description?: string | null; + tags?: string[] | null; + configs: RoleInput; } /** * Request body for updating a test with history tracking metadata. */ export interface PutRolesRequest { - description?: string; - tags?: string[]; - configs: Role[]; + description?: string | null; + tags?: string[] | null; + configs: RoleInput[]; } /** * Stores any configs OSMO Admins control */ -export interface ServiceConfig { +export interface ServiceConfigInput { service_base_url?: string; service_auth?: AuthenticationConfig; cli_config?: CliConfig; @@ -1527,9 +1592,9 @@ export interface ServiceConfig { * Request body for updating service configurations with history tracking metadata. */ export interface PutServiceRequest { - description?: string; - tags?: string[]; - configs: ServiceConfig; + description?: string | null; + tags?: string[] | null; + configs: ServiceConfigInput; } /** @@ -1545,10 +1610,8 @@ export interface WorkflowInfo { If a limit is set, it must be greater than 0. 
*/ export interface UserWorkflowLimitConfig { - /** @exclusiveMinimum 0 */ - max_num_workflows?: number; - /** @exclusiveMinimum 0 */ - max_num_tasks?: number; + max_num_workflows?: number | null; + max_num_tasks?: number | null; jinja_sandbox_workers?: number; jinja_sandbox_max_time?: number; jinja_sandbox_memory_limit?: number; @@ -1557,7 +1620,7 @@ export interface UserWorkflowLimitConfig { /** * Stores any workflow configs External Admins control */ -export interface WorkflowConfig { +export interface WorkflowConfigInput { workflow_data?: DataConfig; workflow_log?: LogConfig; workflow_app?: LogConfig; @@ -1566,7 +1629,7 @@ export interface WorkflowConfig { workflow_alerts?: NotificationConfig; credential_config?: CredentialConfig; user_workflow_limits?: UserWorkflowLimitConfig; - plugins_config?: PluginsConfig; + plugins_config?: PluginsConfigInput; max_num_tasks?: number; max_num_ports_per_task?: number; max_retry_per_task?: number; @@ -1588,17 +1651,17 @@ export interface WorkflowConfig { * Request body for updating workflow configurations with history tracking metadata. */ export interface PutWorkflowRequest { - description?: string; - tags?: string[]; - configs: WorkflowConfig; + description?: string | null; + tags?: string[] | null; + configs: WorkflowConfigInput; } /** * Request body for renaming a platform in a pool with history tracking metadata. */ export interface RenamePoolPlatformRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; new_name: string; } @@ -1606,8 +1669,8 @@ export interface RenamePoolPlatformRequest { * Request body for renaming a pool with history tracking metadata. */ export interface RenamePoolRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; new_name: string; } @@ -1621,15 +1684,15 @@ export type ResourcesEntryNonWorkflowUsageFields = { [key: string]: unknown }; export type ResourcesEntryAllocatableFields = { [key: string]: unknown }; -export type ResourcesEntryPlatformAllocatableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformAllocatableFields = { [key: string]: unknown } | null; -export type ResourcesEntryPlatformAvailableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformAvailableFields = { [key: string]: unknown } | null; -export type ResourcesEntryPlatformWorkflowAllocatableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformWorkflowAllocatableFields = { [key: string]: unknown } | null; -export type ResourcesEntryConfigFields = { [key: string]: unknown }; +export type ResourcesEntryConfigFields = { [key: string]: unknown } | null; -export type ResourcesEntryLabelFields = { [key: string]: unknown }; +export type ResourcesEntryLabelFields = { [key: string]: unknown } | null; export type ResourcesEntryPoolPlatformLabels = {[key: string]: string[]}; @@ -1641,7 +1704,7 @@ export interface ResourcesEntry { exposed_fields: ResourcesEntryExposedFields; taints: ResourcesEntryTaintsItem[]; usage_fields: ResourcesEntryUsageFields; - conditions?: string[]; + conditions?: string[] | null; non_workflow_usage_fields: ResourcesEntryNonWorkflowUsageFields; allocatable_fields: ResourcesEntryAllocatableFields; platform_allocatable_fields?: ResourcesEntryPlatformAllocatableFields; @@ -1661,6 +1724,21 @@ export interface ResourcesResponse { resources: ResourcesEntry[]; } +/** + * Single Role Entry. + +Note: Authorization checking is now handled by the authz_sidecar (Go service). 
+This Python class is only used for role CRUD operations. + */ +export interface RoleOutput { + name: string; + description: string; + policies: RolePolicy[]; + immutable?: boolean; + sync_mode?: SyncMode; + external_roles?: string[] | null; +} + export type RoleUsersResponseUsersItem = { [key: string]: unknown }; /** @@ -1671,32 +1749,13 @@ export interface RoleUsersResponse { users: RoleUsersResponseUsersItem[]; } -/** - * Type of configs supported by config history - */ -export type SrcUtilsConnectorsPostgresConfigHistoryType = typeof SrcUtilsConnectorsPostgresConfigHistoryType[keyof typeof SrcUtilsConnectorsPostgresConfigHistoryType]; - - -export const SrcUtilsConnectorsPostgresConfigHistoryType = { - SERVICE: 'SERVICE', - WORKFLOW: 'WORKFLOW', - DATASET: 'DATASET', - BACKEND: 'BACKEND', - POOL: 'POOL', - POD_TEMPLATE: 'POD_TEMPLATE', - GROUP_TEMPLATE: 'GROUP_TEMPLATE', - RESOURCE_VALIDATION: 'RESOURCE_VALIDATION', - BACKEND_TEST: 'BACKEND_TEST', - ROLE: 'ROLE', -} as const; - /** * Request body for config rollback endpoint. */ export interface RollbackConfigRequest { - description?: string; - tags?: string[]; - config_type: SrcUtilsConnectorsPostgresConfigHistoryType; + description?: string | null; + tags?: string[] | null; + config_type: SrcLibUtilsConfigHistoryConfigHistoryType; /** * Revision to roll back to * @exclusiveMinimum 0 @@ -1713,15 +1772,26 @@ export interface RouterResponse { cookie: string; } +/** + * Stores any configs OSMO Admins control + */ +export interface ServiceConfigOutput { + service_base_url?: string; + service_auth?: AuthenticationConfig; + cli_config?: CliConfig; + max_pod_restart_limit?: string; + agent_queue_size?: number; +} + /** * Object storing workflow name, logs, and spec after submission. */ export interface SubmitResponse { name: string; - overview?: string; - logs?: string; - spec?: string; - dashboard_url?: string; + overview?: string | null; + logs?: string | null; + spec?: string | null; + dashboard_url?: string | null; } /** @@ -1730,9 +1800,9 @@ export interface SubmitResponse { export interface TaskEntry { workflow_id: string; task_name: string; - node?: string; - start_time?: string; - end_time?: string; + node?: string | null; + start_time?: string | null; + end_time?: string | null; status: TaskGroupStatus; storage: number; cpu: number; @@ -1747,7 +1817,7 @@ export interface TemplateSpec { file: string; set_variables?: string[]; set_string_variables?: string[]; - uploaded_templated_spec?: string; + uploaded_templated_spec?: string | null; } /** @@ -1762,9 +1832,9 @@ export interface TokenRequest { */ export interface UpdateConfigTagsRequest { /** Tags to add to the config */ - set_tags?: string[]; + set_tags?: string[] | null; /** Tags to remove from the config */ - delete_tags?: string[]; + delete_tags?: string[] | null; } /** @@ -1772,8 +1842,8 @@ export interface UpdateConfigTagsRequest { */ export interface User { id: string; - created_at?: string; - created_by?: string; + created_at?: string | null; + created_by?: string | null; } /** @@ -1818,12 +1888,12 @@ export interface UserRolesResponse { */ export interface UserWithRoles { id: string; - created_at?: string; - created_by?: string; + created_at?: string | null; + created_by?: string | null; roles?: UserRole[]; } -export type VerbosePoolConfigPools = {[key: string]: Pool}; +export type VerbosePoolConfigPools = {[key: string]: PoolOutput}; /** * Stores verbose pool configs. 
@@ -1842,6 +1912,36 @@ export interface Version { hash?: string; } +/** + * Stores any workflow configs External Admins control + */ +export interface WorkflowConfigOutput { + workflow_data?: DataConfig; + workflow_log?: LogConfig; + workflow_app?: LogConfig; + workflow_info?: WorkflowInfo; + backend_images?: OsmoImageConfig; + workflow_alerts?: NotificationConfig; + credential_config?: CredentialConfig; + user_workflow_limits?: UserWorkflowLimitConfig; + plugins_config?: PluginsConfigOutput; + max_num_tasks?: number; + max_num_ports_per_task?: number; + max_retry_per_task?: number; + max_retry_per_job?: number; + default_schedule_timeout?: number; + default_exec_timeout?: string; + default_queue_timeout?: string; + max_exec_timeout?: string; + max_queue_timeout?: string; + force_cleanup_delay?: string; + max_log_lines?: number; + max_task_log_lines?: number; + max_error_log_lines?: number; + max_event_log_lines?: number; + task_heartbeat_frequency?: string; +} + /** * Represents the state of plugins in a workflow upon submission. */ @@ -1893,32 +1993,32 @@ export interface WorkflowQueryResponse { name: string; uuid: string; submitted_by: string; - cancelled_by?: string; + cancelled_by?: string | null; spec: string; template_spec: string; logs: string; events: string; overview: string; - parent_name?: string; - parent_job_id?: number; - dashboard_url?: string; - grafana_url?: string; + parent_name?: string | null; + parent_job_id?: number | null; + dashboard_url?: string | null; + grafana_url?: string | null; tags?: string[]; submit_time: string; - start_time?: string; - end_time?: string; - exec_timeout?: number; - queue_timeout?: number; - duration?: number; + start_time?: string | null; + end_time?: string | null; + exec_timeout?: number | null; + queue_timeout?: number | null; + duration?: number | null; queued_time: number; status: WorkflowStatus; outputs?: string; groups: GroupQueryResponse[]; - pool?: string; - backend?: string; - app_owner?: string; - app_name?: string; - app_version?: number; + pool?: string | null; + backend?: string | null; + app_owner?: string | null; + app_name?: string | null; + app_version?: number | null; plugins: WorkflowPlugins; priority: string; } @@ -1929,7 +2029,7 @@ export interface SrcServiceCoreAppObjectsListEntry { description: string; created_date: string; owner: string; - latest_version: string; + latest_version: number; } export interface SrcServiceCoreAppObjectsListResponse { @@ -1945,20 +2045,20 @@ export interface SrcServiceCoreWorkflowObjectsListEntry { name: string; workflow_uuid: string; submit_time: string; - start_time?: string; - end_time?: string; + start_time?: string | null; + end_time?: string | null; queued_time: number; - duration?: number; + duration?: number | null; status: WorkflowStatus; overview: string; logs: string; - error_logs?: string; - grafana_url?: string; - dashboard_url?: string; - pool?: string; - app_owner?: string; - app_name?: string; - app_version?: number; + error_logs?: string | null; + grafana_url?: string | null; + dashboard_url?: string | null; + pool?: string | null; + app_owner?: string | null; + app_name?: string | null; + app_version?: number | null; priority: string; } @@ -1983,7 +2083,7 @@ export type PatchDatasetApiConfigsDatasetNamePatch200 = { [key: string]: unknown export type ListPoolsApiConfigsPoolGetParams = { verbose?: boolean; -backend?: string; +backend?: string | null; }; export type ReadPoolApiConfigsPoolNameGetParams = { @@ -1994,7 +2094,7 @@ export type 
ListPlatformsInPoolApiConfigsPoolNamePlatformGetParams = { verbose?: boolean; }; -export type ListPlatformsInPoolApiConfigsPoolNamePlatformGet200 = {[key: string]: PlatformMinimal | PlatformEditable | Platform}; +export type ListPlatformsInPoolApiConfigsPoolNamePlatformGet200 = {[key: string]: PlatformMinimal | PlatformEditable | PlatformOutput}; export type ReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetParams = { verbose?: boolean; @@ -2015,15 +2115,12 @@ export type ListBackendTestsApiConfigsBackendTestGet200 = {[key: string]: Backen export type GetConfigsHistoryApiConfigsHistoryGetParams = { /** * Number of records to skip - * @minimum 0 */ -offset?: number; +offset?: number | null; /** * Maximum number of records to return - * @maximum 1000 - * @exclusiveMinimum 0 */ -limit?: number; +limit?: number | null; /** * Sort order by creation time */ @@ -2031,32 +2128,31 @@ order?: ListOrder; /** * Filter by config types */ -config_types?: SrcLibUtilsConfigHistoryConfigHistoryType[]; +config_types?: SrcLibUtilsConfigHistoryConfigHistoryType[] | null; /** * Filter by config name */ -name?: string; +name?: string | null; /** * Filter by revision - * @exclusiveMinimum 0 */ -revision?: number; +revision?: number | null; /** * Filter by tags */ -tags?: string[]; +tags?: string[] | null; /** * Filter by creation time before */ -created_before?: string; +created_before?: string | null; /** * Filter by creation time after */ -created_after?: string; +created_after?: string | null; /** * Get config state at specific timestamp */ -at_timestamp?: string; +at_timestamp?: string | null; /** * Whether to omit data from the response */ @@ -2064,7 +2160,7 @@ omit_data?: boolean; }; export type GetConfigDiffApiConfigsDiffGetParams = { -config_type: SrcUtilsConnectorsPostgresConfigHistoryType; +config_type: SrcLibUtilsConfigHistoryConfigHistoryType; /** * First revision to compare * @exclusiveMinimum 0 @@ -2099,25 +2195,25 @@ access_token: string; export type CreateAccessTokenApiAuthAccessTokenTokenNamePostParams = { expires_at: string; description?: string; -roles?: string[]; +roles?: string[] | null; }; export type AdminCreateAccessTokenApiAuthUserUserIdAccessTokenTokenNamePostParams = { expires_at: string; description?: string; -roles?: string[]; +roles?: string[] | null; }; export type ListUsersApiAuthUserGetParams = { start_index?: number; count?: number; -id_prefix?: string; -roles?: string[]; +id_prefix?: string | null; +roles?: string[] | null; }; export type ListAppsApiAppGetParams = { -name?: string; -users?: string[]; +name?: string | null; +users?: string[] | null; all_users?: boolean; offset?: number; limit?: number; @@ -2125,7 +2221,7 @@ order?: ListOrder; }; export type GetAppApiAppUserNameGetParams = { -version?: number; +version?: number | null; limit?: number; order?: ListOrder; }; @@ -2135,54 +2231,54 @@ description: string; }; export type DeleteAppApiAppUserNameDeleteParams = { -version?: number; +version?: number | null; all_versions?: boolean; }; export type DeleteAppApiAppUserNameDelete200 = {[key: string]: number[]}; export type GetAppContentApiAppUserNameSpecGetParams = { -version?: number; +version?: number | null; }; export type CancelWorkflowApiWorkflowNameCancelPostParams = { -message?: string; +message?: string | null; force?: boolean; }; export type ListWorkflowApiWorkflowGetParams = { -users?: string[]; -name?: string; -statuses?: WorkflowStatus[]; +users?: string[] | null; +name?: string | null; +statuses?: WorkflowStatus[] | null; offset?: number; limit?: number; 
order?: ListOrder; all_users?: boolean; -pools?: string[]; +pools?: string[] | null; all_pools?: boolean; -submitted_before?: string; -submitted_after?: string; -tags?: string[]; -app?: string; -priority?: WorkflowPriority[]; +submitted_before?: string | null; +submitted_after?: string | null; +tags?: string[] | null; +app?: string | null; +priority?: WorkflowPriority[] | null; }; export type ListTaskApiTaskGetParams = { -workflow_id?: string; -statuses?: TaskGroupStatus[]; -users?: string[]; +workflow_id?: string | null; +statuses?: TaskGroupStatus[] | null; +users?: string[] | null; all_users?: boolean; -pools?: string[]; +pools?: string[] | null; all_pools?: boolean; -nodes?: string[]; -started_after?: string; -started_before?: string; +nodes?: string[] | null; +started_after?: string | null; +started_before?: string | null; offset?: number; limit?: number; order?: ListOrder; summary?: boolean; aggregate_by_workflow?: boolean; -priority?: WorkflowPriority[]; +priority?: WorkflowPriority[] | null; }; export type GetWorkflowApiWorkflowNameGetParams = { @@ -2191,22 +2287,22 @@ verbose?: boolean; }; export type GetWorkflowLogsApiWorkflowNameLogsGetParams = { -last_n_lines?: number; -task_name?: string; -retry_id?: number; -query?: string; +last_n_lines?: number | null; +task_name?: string | null; +retry_id?: number | null; +query?: string | null; }; export type GetWorkflowPodConditionsApiWorkflowNameEventsGetParams = { -task_name?: string; -retry_id?: number; +task_name?: string | null; +retry_id?: number | null; }; export type GetWorkflowErrorLogsApiWorkflowNameErrorLogsGetParams = { -last_n_lines?: number; -task_name?: string; -retry_id?: number; -query?: string; +last_n_lines?: number | null; +task_name?: string | null; +retry_id?: number | null; +query?: string | null; }; export type GetWorkflowSpecApiWorkflowNameSpecGetParams = { @@ -2214,8 +2310,8 @@ use_template?: boolean; }; export type TagWorkflowApiWorkflowNameTagPostParams = { -add?: string[]; -remove?: string[]; +add?: string[] | null; +remove?: string[] | null; }; export type ExecIntoGroupApiWorkflowNameExecGroupGroupNamePostParams = { @@ -2229,7 +2325,7 @@ entry_command: string; }; export type PortForwardTaskApiWorkflowNamePortforwardTaskNamePostParams = { -task_ports?: number[]; +task_ports?: number[] | null; use_udp?: boolean; }; @@ -2238,26 +2334,26 @@ task_port: number; }; export type GetResourcesApiResourcesGetParams = { -pools?: string[]; -platforms?: string[]; +pools?: string[] | null; +platforms?: string[] | null; all_pools?: boolean; concise?: boolean; }; export type GetPoolsApiPoolGetParams = { all_pools?: boolean; -pools?: string[]; +pools?: string[] | null; }; export type GetPoolQuotasApiPoolQuotaGetParams = { all_pools?: boolean; -pools?: string[]; +pools?: string[] | null; }; export type SubmitWorkflowApiPoolPoolNameWorkflowPostParams = { -workflow_id?: string; -app_uuid?: string; -app_version?: number; +workflow_id?: string | null; +app_uuid?: string | null; +app_version?: number | null; dry_run?: boolean; validation_only?: boolean; priority?: WorkflowPriority; @@ -2269,14 +2365,14 @@ default_only?: boolean; }; export type DeleteDatasetApiBucketBucketDatasetNameDeleteParams = { -tag?: string; +tag?: string | null; all_flag?: boolean; finish?: boolean; }; export type ChangeNameTagLabelMetadataApiBucketBucketDatasetNameAttributePostParams = { -tag?: string; -new_name?: string; +tag?: string | null; +new_name?: string | null; set_tag?: string[]; delete_tag?: string[]; delete_label?: string[]; @@ -2284,19 +2380,19 
@@ delete_metadata?: string[]; }; export type GetInfoApiBucketBucketDatasetNameInfoGetParams = { -tag?: string; +tag?: string | null; all_flag?: boolean; count?: number; order?: ListOrder; }; export type ListDatasetFromBucketApiBucketListDatasetGetParams = { -name?: string; -user?: string[]; +name?: string | null; +user?: string[] | null; buckets?: string[]; -dataset_type?: DatasetType; -latest_before?: string; -latest_after?: string; +dataset_type?: DatasetType | null; +latest_before?: string | null; +latest_after?: string | null; all_users?: boolean; order?: ListOrder; count?: number; @@ -2330,9 +2426,9 @@ export const getReadServiceConfigsApiConfigsServiceGetUrl = () => { return `/api/configs/service` } -export const readServiceConfigsApiConfigsServiceGet = async ( options?: RequestInit): Promise => { +export const readServiceConfigsApiConfigsServiceGet = async ( options?: RequestInit): Promise => { - return customFetch(getReadServiceConfigsApiConfigsServiceGetUrl(), + return customFetch(getReadServiceConfigsApiConfigsServiceGetUrl(), { ...options, method: 'GET' @@ -2585,9 +2681,9 @@ export const getReadWorkflowConfigsApiConfigsWorkflowGetUrl = () => { return `/api/configs/workflow` } -export const readWorkflowConfigsApiConfigsWorkflowGet = async ( options?: RequestInit): Promise => { +export const readWorkflowConfigsApiConfigsWorkflowGet = async ( options?: RequestInit): Promise => { - return customFetch(getReadWorkflowConfigsApiConfigsWorkflowGetUrl(), + return customFetch(getReadWorkflowConfigsApiConfigsWorkflowGetUrl(), { ...options, method: 'GET' @@ -2840,9 +2936,9 @@ export const getReadDatasetConfigsApiConfigsDatasetGetUrl = () => { return `/api/configs/dataset` } -export const readDatasetConfigsApiConfigsDatasetGet = async ( options?: RequestInit): Promise => { +export const readDatasetConfigsApiConfigsDatasetGet = async ( options?: RequestInit): Promise => { - return customFetch(getReadDatasetConfigsApiConfigsDatasetGetUrl(), + return customFetch(getReadDatasetConfigsApiConfigsDatasetGetUrl(), { ...options, method: 'GET' @@ -3084,10 +3180,10 @@ export const usePatchDatasetConfigsApiConfigsDatasetPatch = { +export const getPatchDatasetApiConfigsDatasetNamePatchUrl = (name: string,) => { @@ -3095,27 +3191,27 @@ export const getDeleteDatasetApiConfigsDatasetNameDeleteUrl = (name: string,) => return `/api/configs/dataset/${name}` } -export const deleteDatasetApiConfigsDatasetNameDelete = async (name: string, - configsRequest: ConfigsRequest, options?: RequestInit): Promise => { +export const patchDatasetApiConfigsDatasetNamePatch = async (name: string, + patchDatasetRequest: PatchDatasetRequest, options?: RequestInit): Promise => { - return customFetch(getDeleteDatasetApiConfigsDatasetNameDeleteUrl(name), + return customFetch(getPatchDatasetApiConfigsDatasetNamePatchUrl(name), { ...options, - method: 'DELETE', + method: 'PATCH', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - configsRequest,) + patchDatasetRequest,) } );} -export const getDeleteDatasetApiConfigsDatasetNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { +export const getPatchDatasetApiConfigsDatasetNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchDatasetRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, 
TError,{name: string;data: PatchDatasetRequest}, TContext> => { -const mutationKey = ['deleteDatasetApiConfigsDatasetNameDelete']; +const mutationKey = ['patchDatasetApiConfigsDatasetNamePatch']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -3125,10 +3221,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: PatchDatasetRequest}> = (props) => { const {name,data} = props ?? {}; - return deleteDatasetApiConfigsDatasetNameDelete(name,data,requestOptions) + return patchDatasetApiConfigsDatasetNamePatch(name,data,requestOptions) } @@ -3138,29 +3234,29 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type DeleteDatasetApiConfigsDatasetNameDeleteMutationResult = NonNullable>> - export type DeleteDatasetApiConfigsDatasetNameDeleteMutationBody = ConfigsRequest - export type DeleteDatasetApiConfigsDatasetNameDeleteMutationError = HTTPValidationError + export type PatchDatasetApiConfigsDatasetNamePatchMutationResult = NonNullable>> + export type PatchDatasetApiConfigsDatasetNamePatchMutationBody = PatchDatasetRequest + export type PatchDatasetApiConfigsDatasetNamePatchMutationError = HTTPValidationError /** - * @summary Delete Dataset + * @summary Patch Dataset */ -export const useDeleteDatasetApiConfigsDatasetNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +export const usePatchDatasetApiConfigsDatasetNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchDatasetRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: ConfigsRequest}, + {name: string;data: PatchDatasetRequest}, TContext > => { - return useMutation(getDeleteDatasetApiConfigsDatasetNameDeleteMutationOptions(options), queryClient); + return useMutation(getPatchDatasetApiConfigsDatasetNamePatchMutationOptions(options), queryClient); } /** - * Patch dataset configuration for a specific bucket - * @summary Patch Dataset + * Delete dataset configuration for a specific bucket + * @summary Delete Dataset */ -export const getPatchDatasetApiConfigsDatasetNamePatchUrl = (name: string,) => { +export const getDeleteDatasetApiConfigsDatasetNameDeleteUrl = (name: string,) => { @@ -3168,27 +3264,27 @@ export const getPatchDatasetApiConfigsDatasetNamePatchUrl = (name: string,) => { return `/api/configs/dataset/${name}` } -export const patchDatasetApiConfigsDatasetNamePatch = async (name: string, - patchDatasetRequest: PatchDatasetRequest, options?: RequestInit): Promise => { +export const deleteDatasetApiConfigsDatasetNameDelete = async (name: string, + configsRequest: ConfigsRequest, options?: RequestInit): Promise => { - return customFetch(getPatchDatasetApiConfigsDatasetNamePatchUrl(name), + return customFetch(getDeleteDatasetApiConfigsDatasetNameDeleteUrl(name), { ...options, - method: 'PATCH', + method: 'DELETE', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - patchDatasetRequest,) + configsRequest,) } );} -export const getPatchDatasetApiConfigsDatasetNamePatchMutationOptions = (options?: { 
mutation?:UseMutationOptions>, TError,{name: string;data: PatchDatasetRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: PatchDatasetRequest}, TContext> => { +export const getDeleteDatasetApiConfigsDatasetNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { -const mutationKey = ['patchDatasetApiConfigsDatasetNamePatch']; +const mutationKey = ['deleteDatasetApiConfigsDatasetNameDelete']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -3198,10 +3294,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;data: PatchDatasetRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { const {name,data} = props ?? {}; - return patchDatasetApiConfigsDatasetNamePatch(name,data,requestOptions) + return deleteDatasetApiConfigsDatasetNameDelete(name,data,requestOptions) } @@ -3211,22 +3307,22 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type PatchDatasetApiConfigsDatasetNamePatchMutationResult = NonNullable>> - export type PatchDatasetApiConfigsDatasetNamePatchMutationBody = PatchDatasetRequest - export type PatchDatasetApiConfigsDatasetNamePatchMutationError = HTTPValidationError + export type DeleteDatasetApiConfigsDatasetNameDeleteMutationResult = NonNullable>> + export type DeleteDatasetApiConfigsDatasetNameDeleteMutationBody = ConfigsRequest + export type DeleteDatasetApiConfigsDatasetNameDeleteMutationError = HTTPValidationError /** - * @summary Patch Dataset + * @summary Delete Dataset */ -export const usePatchDatasetApiConfigsDatasetNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchDatasetRequest}, TContext>, request?: SecondParameter} +export const useDeleteDatasetApiConfigsDatasetNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: PatchDatasetRequest}, + {name: string;data: ConfigsRequest}, TContext > => { - return useMutation(getPatchDatasetApiConfigsDatasetNamePatchMutationOptions(options), queryClient); + return useMutation(getDeleteDatasetApiConfigsDatasetNameDeleteMutationOptions(options), queryClient); } /** @@ -3333,13 +3429,86 @@ export const invalidateListBackendsApiConfigsBackendGet = async ( queryClient: QueryClient, options?: InvalidateOptions ): Promise => { - await queryClient.invalidateQueries({ queryKey: getListBackendsApiConfigsBackendGetQueryKey() }, options); + await queryClient.invalidateQueries({ queryKey: getListBackendsApiConfigsBackendGetQueryKey() }, options); + + return queryClient; +} + + + +/** + * Override the config for a specific backend. 
+ * @summary Update Backend + */ +export const getUpdateBackendApiConfigsBackendNamePostUrl = (name: string,) => { + + + + + return `/api/configs/backend/${name}` +} + +export const updateBackendApiConfigsBackendNamePost = async (name: string, + postBackendRequest: PostBackendRequest, options?: RequestInit): Promise => { + + return customFetch(getUpdateBackendApiConfigsBackendNamePostUrl(name), + { + ...options, + method: 'POST', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify( + postBackendRequest,) + } +);} + + + + +export const getUpdateBackendApiConfigsBackendNamePostMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext> => { + +const mutationKey = ['updateBackendApiConfigsBackendNamePost']; +const {mutation: mutationOptions, request: requestOptions} = options ? + options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? + options + : {...options, mutation: {...options.mutation, mutationKey}} + : {mutation: { mutationKey, }, request: undefined}; + + + + + const mutationFn: MutationFunction>, {name: string;data: PostBackendRequest}> = (props) => { + const {name,data} = props ?? {}; + + return updateBackendApiConfigsBackendNamePost(name,data,requestOptions) + } + - return queryClient; -} + + + + return { mutationFn, ...mutationOptions }} + export type UpdateBackendApiConfigsBackendNamePostMutationResult = NonNullable>> + export type UpdateBackendApiConfigsBackendNamePostMutationBody = PostBackendRequest + export type UpdateBackendApiConfigsBackendNamePostMutationError = HTTPValidationError + /** + * @summary Update Backend + */ +export const useUpdateBackendApiConfigsBackendNamePost = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext>, request?: SecondParameter} + , queryClient?: QueryClient): UseMutationResult< + Awaited>, + TError, + {name: string;data: PostBackendRequest}, + TContext + > => { + return useMutation(getUpdateBackendApiConfigsBackendNamePostMutationOptions(options), queryClient); + } + /** * Get info for a specific backend. * @summary Get Backend @@ -3451,79 +3620,6 @@ export const invalidateGetBackendApiConfigsBackendNameGet = async ( -/** - * Override the config for a specific backend. - * @summary Update Backend - */ -export const getUpdateBackendApiConfigsBackendNamePostUrl = (name: string,) => { - - - - - return `/api/configs/backend/${name}` -} - -export const updateBackendApiConfigsBackendNamePost = async (name: string, - postBackendRequest: PostBackendRequest, options?: RequestInit): Promise => { - - return customFetch(getUpdateBackendApiConfigsBackendNamePostUrl(name), - { - ...options, - method: 'POST', - headers: { 'Content-Type': 'application/json', ...options?.headers }, - body: JSON.stringify( - postBackendRequest,) - } -);} - - - - -export const getUpdateBackendApiConfigsBackendNamePostMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext> => { - -const mutationKey = ['updateBackendApiConfigsBackendNamePost']; -const {mutation: mutationOptions, request: requestOptions} = options ? - options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? 
- options - : {...options, mutation: {...options.mutation, mutationKey}} - : {mutation: { mutationKey, }, request: undefined}; - - - - - const mutationFn: MutationFunction>, {name: string;data: PostBackendRequest}> = (props) => { - const {name,data} = props ?? {}; - - return updateBackendApiConfigsBackendNamePost(name,data,requestOptions) - } - - - - - - - return { mutationFn, ...mutationOptions }} - - export type UpdateBackendApiConfigsBackendNamePostMutationResult = NonNullable>> - export type UpdateBackendApiConfigsBackendNamePostMutationBody = PostBackendRequest - export type UpdateBackendApiConfigsBackendNamePostMutationError = HTTPValidationError - - /** - * @summary Update Backend - */ -export const useUpdateBackendApiConfigsBackendNamePost = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PostBackendRequest}, TContext>, request?: SecondParameter} - , queryClient?: QueryClient): UseMutationResult< - Awaited>, - TError, - {name: string;data: PostBackendRequest}, - TContext - > => { - return useMutation(getUpdateBackendApiConfigsBackendNamePostMutationOptions(options), queryClient); - } - /** * Remove a backend. * @summary Delete Backend @@ -3811,9 +3907,9 @@ export const getReadPoolApiConfigsPoolNameGetUrl = (name: string, } export const readPoolApiConfigsPoolNameGet = async (name: string, - params?: ReadPoolApiConfigsPoolNameGetParams, options?: RequestInit): Promise => { + params?: ReadPoolApiConfigsPoolNameGetParams, options?: RequestInit): Promise => { - return customFetch(getReadPoolApiConfigsPoolNameGetUrl(name,params), + return customFetch(getReadPoolApiConfigsPoolNameGetUrl(name,params), { ...options, method: 'GET' @@ -3991,10 +4087,10 @@ export const usePutPoolApiConfigsPoolNamePut = { +export const getPatchPoolApiConfigsPoolNamePatchUrl = (name: string,) => { @@ -4002,27 +4098,27 @@ export const getDeletePoolApiConfigsPoolNameDeleteUrl = (name: string,) => { return `/api/configs/pool/${name}` } -export const deletePoolApiConfigsPoolNameDelete = async (name: string, - configsRequest: ConfigsRequest, options?: RequestInit): Promise => { +export const patchPoolApiConfigsPoolNamePatch = async (name: string, + patchPoolRequest: PatchPoolRequest, options?: RequestInit): Promise => { - return customFetch(getDeletePoolApiConfigsPoolNameDeleteUrl(name), + return customFetch(getPatchPoolApiConfigsPoolNamePatchUrl(name), { ...options, - method: 'DELETE', + method: 'PATCH', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - configsRequest,) + patchPoolRequest,) } );} -export const getDeletePoolApiConfigsPoolNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { +export const getPatchPoolApiConfigsPoolNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext> => { -const mutationKey = ['deletePoolApiConfigsPoolNameDelete']; +const mutationKey = ['patchPoolApiConfigsPoolNamePatch']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -4032,10 +4128,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? 
- const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: PatchPoolRequest}> = (props) => { const {name,data} = props ?? {}; - return deletePoolApiConfigsPoolNameDelete(name,data,requestOptions) + return patchPoolApiConfigsPoolNamePatch(name,data,requestOptions) } @@ -4045,29 +4141,29 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type DeletePoolApiConfigsPoolNameDeleteMutationResult = NonNullable>> - export type DeletePoolApiConfigsPoolNameDeleteMutationBody = ConfigsRequest - export type DeletePoolApiConfigsPoolNameDeleteMutationError = HTTPValidationError + export type PatchPoolApiConfigsPoolNamePatchMutationResult = NonNullable>> + export type PatchPoolApiConfigsPoolNamePatchMutationBody = PatchPoolRequest + export type PatchPoolApiConfigsPoolNamePatchMutationError = HTTPValidationError /** - * @summary Delete Pool + * @summary Patch Pool */ -export const useDeletePoolApiConfigsPoolNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +export const usePatchPoolApiConfigsPoolNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: ConfigsRequest}, + {name: string;data: PatchPoolRequest}, TContext > => { - return useMutation(getDeletePoolApiConfigsPoolNameDeleteMutationOptions(options), queryClient); + return useMutation(getPatchPoolApiConfigsPoolNamePatchMutationOptions(options), queryClient); } /** - * Patch Pool configurations - * @summary Patch Pool + * Delete Pool configurations + * @summary Delete Pool */ -export const getPatchPoolApiConfigsPoolNamePatchUrl = (name: string,) => { +export const getDeletePoolApiConfigsPoolNameDeleteUrl = (name: string,) => { @@ -4075,27 +4171,27 @@ export const getPatchPoolApiConfigsPoolNamePatchUrl = (name: string,) => { return `/api/configs/pool/${name}` } -export const patchPoolApiConfigsPoolNamePatch = async (name: string, - patchPoolRequest: PatchPoolRequest, options?: RequestInit): Promise => { +export const deletePoolApiConfigsPoolNameDelete = async (name: string, + configsRequest: ConfigsRequest, options?: RequestInit): Promise => { - return customFetch(getPatchPoolApiConfigsPoolNamePatchUrl(name), + return customFetch(getDeletePoolApiConfigsPoolNameDeleteUrl(name), { ...options, - method: 'PATCH', + method: 'DELETE', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - patchPoolRequest,) + configsRequest,) } );} -export const getPatchPoolApiConfigsPoolNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext> => { +export const getDeletePoolApiConfigsPoolNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { -const mutationKey = ['patchPoolApiConfigsPoolNamePatch']; +const mutationKey = ['deletePoolApiConfigsPoolNameDelete']; const {mutation: mutationOptions, request: requestOptions} = options ? 
options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -4105,10 +4201,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;data: PatchPoolRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { const {name,data} = props ?? {}; - return patchPoolApiConfigsPoolNamePatch(name,data,requestOptions) + return deletePoolApiConfigsPoolNameDelete(name,data,requestOptions) } @@ -4118,22 +4214,22 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type PatchPoolApiConfigsPoolNamePatchMutationResult = NonNullable>> - export type PatchPoolApiConfigsPoolNamePatchMutationBody = PatchPoolRequest - export type PatchPoolApiConfigsPoolNamePatchMutationError = HTTPValidationError + export type DeletePoolApiConfigsPoolNameDeleteMutationResult = NonNullable>> + export type DeletePoolApiConfigsPoolNameDeleteMutationBody = ConfigsRequest + export type DeletePoolApiConfigsPoolNameDeleteMutationError = HTTPValidationError /** - * @summary Patch Pool + * @summary Delete Pool */ -export const usePatchPoolApiConfigsPoolNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchPoolRequest}, TContext>, request?: SecondParameter} +export const useDeletePoolApiConfigsPoolNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: PatchPoolRequest}, + {name: string;data: ConfigsRequest}, TContext > => { - return useMutation(getPatchPoolApiConfigsPoolNamePatchMutationOptions(options), queryClient); + return useMutation(getDeletePoolApiConfigsPoolNameDeleteMutationOptions(options), queryClient); } /** @@ -4359,9 +4455,9 @@ export const getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetUrl = export const readPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGet = async (name: string, platformName: string, - params?: ReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetParams, options?: RequestInit): Promise => { + params?: ReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetParams, options?: RequestInit): Promise => { - return customFetch(getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetUrl(name,platformName,params), + return customFetch(getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetUrl(name,platformName,params), { ...options, method: 'GET' @@ -5954,9 +6050,9 @@ export const getListRolesApiConfigsRoleGetUrl = () => { return `/api/configs/role` } -export const listRolesApiConfigsRoleGet = async ( options?: RequestInit): Promise => { +export const listRolesApiConfigsRoleGet = async ( options?: RequestInit): Promise => { - return customFetch(getListRolesApiConfigsRoleGetUrl(), + return customFetch(getListRolesApiConfigsRoleGetUrl(), { ...options, method: 'GET' @@ -6137,9 +6233,9 @@ export const getReadRoleApiConfigsRoleNameGetUrl = (name: string,) => { return `/api/configs/role/${name}` } -export const readRoleApiConfigsRoleNameGet = async (name: string, options?: RequestInit): Promise => { +export const readRoleApiConfigsRoleNameGet = async (name: string, options?: RequestInit): Promise => { - return customFetch(getReadRoleApiConfigsRoleNameGetUrl(name), + return 
customFetch(getReadRoleApiConfigsRoleNameGetUrl(name), { ...options, method: 'GET' @@ -6750,10 +6846,10 @@ export const usePutBackendTestApiConfigsBackendTestNamePut = { +export const getPatchBackendTestApiConfigsBackendTestNamePatchUrl = (name: string,) => { @@ -6761,27 +6857,27 @@ export const getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl = (name: str return `/api/configs/backend_test/${name}` } -export const deleteBackendTestApiConfigsBackendTestNameDelete = async (name: string, - configsRequest: ConfigsRequest, options?: RequestInit): Promise => { +export const patchBackendTestApiConfigsBackendTestNamePatch = async (name: string, + patchBackendTestRequest: PatchBackendTestRequest, options?: RequestInit): Promise => { - return customFetch(getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl(name), + return customFetch(getPatchBackendTestApiConfigsBackendTestNamePatchUrl(name), { ...options, - method: 'DELETE', + method: 'PATCH', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - configsRequest,) + patchBackendTestRequest,) } );} -export const getDeleteBackendTestApiConfigsBackendTestNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { +export const getPatchBackendTestApiConfigsBackendTestNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext> => { -const mutationKey = ['deleteBackendTestApiConfigsBackendTestNameDelete']; +const mutationKey = ['patchBackendTestApiConfigsBackendTestNamePatch']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -6791,10 +6887,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: PatchBackendTestRequest}> = (props) => { const {name,data} = props ?? {}; - return deleteBackendTestApiConfigsBackendTestNameDelete(name,data,requestOptions) + return patchBackendTestApiConfigsBackendTestNamePatch(name,data,requestOptions) } @@ -6804,29 +6900,29 @@ const {mutation: mutationOptions, request: requestOptions} = options ? 
return { mutationFn, ...mutationOptions }} - export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationResult = NonNullable>> - export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationBody = ConfigsRequest - export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationError = HTTPValidationError + export type PatchBackendTestApiConfigsBackendTestNamePatchMutationResult = NonNullable>> + export type PatchBackendTestApiConfigsBackendTestNamePatchMutationBody = PatchBackendTestRequest + export type PatchBackendTestApiConfigsBackendTestNamePatchMutationError = HTTPValidationError /** - * @summary Delete Backend Test + * @summary Patch Backend Test */ -export const useDeleteBackendTestApiConfigsBackendTestNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +export const usePatchBackendTestApiConfigsBackendTestNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: ConfigsRequest}, + {name: string;data: PatchBackendTestRequest}, TContext > => { - return useMutation(getDeleteBackendTestApiConfigsBackendTestNameDeleteMutationOptions(options), queryClient); + return useMutation(getPatchBackendTestApiConfigsBackendTestNamePatchMutationOptions(options), queryClient); } /** - * Patch backend test configuration - * @summary Patch Backend Test + * Delete test configuration + * @summary Delete Backend Test */ -export const getPatchBackendTestApiConfigsBackendTestNamePatchUrl = (name: string,) => { +export const getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl = (name: string,) => { @@ -6834,27 +6930,27 @@ export const getPatchBackendTestApiConfigsBackendTestNamePatchUrl = (name: strin return `/api/configs/backend_test/${name}` } -export const patchBackendTestApiConfigsBackendTestNamePatch = async (name: string, - patchBackendTestRequest: PatchBackendTestRequest, options?: RequestInit): Promise => { +export const deleteBackendTestApiConfigsBackendTestNameDelete = async (name: string, + configsRequest: ConfigsRequest, options?: RequestInit): Promise => { - return customFetch(getPatchBackendTestApiConfigsBackendTestNamePatchUrl(name), + return customFetch(getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl(name), { ...options, - method: 'PATCH', + method: 'DELETE', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - patchBackendTestRequest,) + configsRequest,) } );} -export const getPatchBackendTestApiConfigsBackendTestNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext> => { +export const getDeleteBackendTestApiConfigsBackendTestNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext> => { -const mutationKey = ['patchBackendTestApiConfigsBackendTestNamePatch']; +const mutationKey = ['deleteBackendTestApiConfigsBackendTestNameDelete']; const {mutation: mutationOptions, request: requestOptions} = options ? 
options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -6864,10 +6960,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;data: PatchBackendTestRequest}> = (props) => { + const mutationFn: MutationFunction>, {name: string;data: ConfigsRequest}> = (props) => { const {name,data} = props ?? {}; - return patchBackendTestApiConfigsBackendTestNamePatch(name,data,requestOptions) + return deleteBackendTestApiConfigsBackendTestNameDelete(name,data,requestOptions) } @@ -6877,22 +6973,22 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type PatchBackendTestApiConfigsBackendTestNamePatchMutationResult = NonNullable>> - export type PatchBackendTestApiConfigsBackendTestNamePatchMutationBody = PatchBackendTestRequest - export type PatchBackendTestApiConfigsBackendTestNamePatchMutationError = HTTPValidationError + export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationResult = NonNullable>> + export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationBody = ConfigsRequest + export type DeleteBackendTestApiConfigsBackendTestNameDeleteMutationError = HTTPValidationError /** - * @summary Patch Backend Test + * @summary Delete Backend Test */ -export const usePatchBackendTestApiConfigsBackendTestNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: PatchBackendTestRequest}, TContext>, request?: SecondParameter} +export const useDeleteBackendTestApiConfigsBackendTestNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: ConfigsRequest}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: PatchBackendTestRequest}, + {name: string;data: ConfigsRequest}, TContext > => { - return useMutation(getPatchBackendTestApiConfigsBackendTestNamePatchMutationOptions(options), queryClient); + return useMutation(getDeleteBackendTestApiConfigsBackendTestNameDeleteMutationOptions(options), queryClient); } /** @@ -6903,15 +6999,7 @@ export const getGetConfigsHistoryApiConfigsHistoryGetUrl = (params?: GetConfigsH const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["config_types","tags"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -7793,15 +7881,7 @@ export const getCreateAccessTokenApiAuthAccessTokenTokenNamePostUrl = (tokenName const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["roles"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 
'null' : value.toString()) } @@ -8201,15 +8281,7 @@ export const getAdminCreateAccessTokenApiAuthUserUserIdAccessTokenTokenNamePostU const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["roles"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -8493,15 +8565,7 @@ export const getListUsersApiAuthUserGetUrl = (params?: ListUsersApiAuthUserGetPa const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["roles"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -9364,15 +9428,7 @@ export const getListAppsApiAppGetUrl = (params?: ListAppsApiAppGetParams,) => { const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["users"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -9690,44 +9746,37 @@ export const useCreateAppApiAppUserNamePost = { - const normalizedParams = new URLSearchParams(); +export const getUpdateAppApiAppUserNamePatchUrl = (name: string,) => { - Object.entries(params || {}).forEach(([key, value]) => { - - if (value !== undefined) { - normalizedParams.append(key, value === null ? 'null' : value.toString()) - } - }); - const stringifiedParams = normalizedParams.toString(); + - return stringifiedParams.length > 0 ? 
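
The hunks above remove the explodeParameters special case from the generated URL builders, so array-valued query params (roles, users, and the rest) no longer serialize as repeated keys; they fall through to value.toString(), which joins elements with commas. A minimal sketch of the difference, assuming standard URLSearchParams semantics; the function names are illustrative, not part of the generated client:

// Old behavior: one key per element, as the removed explode branch did.
function buildExploded(key: string, values: string[]): string {
  const qs = new URLSearchParams();
  values.forEach((v) => qs.append(key, v));
  return qs.toString(); // "roles=admin&roles=viewer"
}

// New behavior: Array.prototype.toString() joins elements with commas.
function buildCollapsed(key: string, values: string[]): string {
  const qs = new URLSearchParams();
  qs.append(key, values.toString());
  return qs.toString(); // "roles=admin%2Cviewer" (comma percent-encoded)
}

console.log(buildExploded("roles", ["admin", "viewer"]));
console.log(buildCollapsed("roles", ["admin", "viewer"]));

Whether the FastAPI endpoints accept the comma-joined form depends on how the server declares these list parameters, so this is worth verifying against endpoints such as ListUsers and ListWorkflow above.
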
`/api/app/user/${name}?${stringifiedParams}` : `/api/app/user/${name}` + return `/api/app/user/${name}` } -export const deleteAppApiAppUserNameDelete = async (name: string, - params?: DeleteAppApiAppUserNameDeleteParams, options?: RequestInit): Promise => { +export const updateAppApiAppUserNamePatch = async (name: string, + updateAppApiAppUserNamePatchBody: string, options?: RequestInit): Promise => { - return customFetch(getDeleteAppApiAppUserNameDeleteUrl(name,params), + return customFetch(getUpdateAppApiAppUserNamePatchUrl(name), { ...options, - method: 'DELETE' - - + method: 'PATCH', + headers: { 'Content-Type': 'application/json', ...options?.headers }, + body: JSON.stringify( + updateAppApiAppUserNamePatchBody,) } );} -export const getDeleteAppApiAppUserNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext> => { +export const getUpdateAppApiAppUserNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: string}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;data: string}, TContext> => { -const mutationKey = ['deleteAppApiAppUserNameDelete']; +const mutationKey = ['updateAppApiAppUserNamePatch']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -9737,10 +9786,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {name: string;params?: DeleteAppApiAppUserNameDeleteParams}> = (props) => { - const {name,params} = props ?? {}; + const mutationFn: MutationFunction>, {name: string;data: string}> = (props) => { + const {name,data} = props ?? {}; - return deleteAppApiAppUserNameDelete(name,params,requestOptions) + return updateAppApiAppUserNamePatch(name,data,requestOptions) } @@ -9750,56 +9799,63 @@ const {mutation: mutationOptions, request: requestOptions} = options ? 
return { mutationFn, ...mutationOptions }} - export type DeleteAppApiAppUserNameDeleteMutationResult = NonNullable>> - - export type DeleteAppApiAppUserNameDeleteMutationError = HTTPValidationError + export type UpdateAppApiAppUserNamePatchMutationResult = NonNullable>> + export type UpdateAppApiAppUserNamePatchMutationBody = string + export type UpdateAppApiAppUserNamePatchMutationError = HTTPValidationError /** - * @summary Delete App + * @summary Update App */ -export const useDeleteAppApiAppUserNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext>, request?: SecondParameter} +export const useUpdateAppApiAppUserNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: string}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;params?: DeleteAppApiAppUserNameDeleteParams}, + {name: string;data: string}, TContext > => { - return useMutation(getDeleteAppApiAppUserNameDeleteMutationOptions(options), queryClient); + return useMutation(getUpdateAppApiAppUserNamePatchMutationOptions(options), queryClient); } /** - * @summary Update App + * @summary Delete App */ -export const getUpdateAppApiAppUserNamePatchUrl = (name: string,) => { +export const getDeleteAppApiAppUserNameDeleteUrl = (name: string, + params?: DeleteAppApiAppUserNameDeleteParams,) => { + const normalizedParams = new URLSearchParams(); + Object.entries(params || {}).forEach(([key, value]) => { + + if (value !== undefined) { + normalizedParams.append(key, value === null ? 'null' : value.toString()) + } + }); - + const stringifiedParams = normalizedParams.toString(); - return `/api/app/user/${name}` + return stringifiedParams.length > 0 ? `/api/app/user/${name}?${stringifiedParams}` : `/api/app/user/${name}` } -export const updateAppApiAppUserNamePatch = async (name: string, - updateAppApiAppUserNamePatchBody: string, options?: RequestInit): Promise => { +export const deleteAppApiAppUserNameDelete = async (name: string, + params?: DeleteAppApiAppUserNameDeleteParams, options?: RequestInit): Promise => { - return customFetch(getUpdateAppApiAppUserNamePatchUrl(name), + return customFetch(getDeleteAppApiAppUserNameDeleteUrl(name,params), { ...options, - method: 'PATCH', - headers: { 'Content-Type': 'application/json', ...options?.headers }, - body: JSON.stringify( - updateAppApiAppUserNamePatchBody,) + method: 'DELETE' + + } );} -export const getUpdateAppApiAppUserNamePatchMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: string}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{name: string;data: string}, TContext> => { +export const getDeleteAppApiAppUserNameDeleteMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext> => { -const mutationKey = ['updateAppApiAppUserNamePatch']; +const mutationKey = ['deleteAppApiAppUserNameDelete']; const {mutation: mutationOptions, request: requestOptions} = options ? options.mutation && 'mutationKey' in options.mutation && options.mutation.mutationKey ? options @@ -9809,10 +9865,10 @@ const {mutation: mutationOptions, request: requestOptions} = options ? 
- const mutationFn: MutationFunction>, {name: string;data: string}> = (props) => { - const {name,data} = props ?? {}; + const mutationFn: MutationFunction>, {name: string;params?: DeleteAppApiAppUserNameDeleteParams}> = (props) => { + const {name,params} = props ?? {}; - return updateAppApiAppUserNamePatch(name,data,requestOptions) + return deleteAppApiAppUserNameDelete(name,params,requestOptions) } @@ -9822,22 +9878,22 @@ const {mutation: mutationOptions, request: requestOptions} = options ? return { mutationFn, ...mutationOptions }} - export type UpdateAppApiAppUserNamePatchMutationResult = NonNullable>> - export type UpdateAppApiAppUserNamePatchMutationBody = string - export type UpdateAppApiAppUserNamePatchMutationError = HTTPValidationError + export type DeleteAppApiAppUserNameDeleteMutationResult = NonNullable>> + + export type DeleteAppApiAppUserNameDeleteMutationError = HTTPValidationError /** - * @summary Update App + * @summary Delete App */ -export const useUpdateAppApiAppUserNamePatch = (options?: { mutation?:UseMutationOptions>, TError,{name: string;data: string}, TContext>, request?: SecondParameter} +export const useDeleteAppApiAppUserNameDelete = (options?: { mutation?:UseMutationOptions>, TError,{name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< - Awaited>, + Awaited>, TError, - {name: string;data: string}, + {name: string;params?: DeleteAppApiAppUserNameDeleteParams}, TContext > => { - return useMutation(getUpdateAppApiAppUserNamePatchMutationOptions(options), queryClient); + return useMutation(getDeleteAppApiAppUserNameDeleteMutationOptions(options), queryClient); } /** @@ -10125,15 +10181,7 @@ export const getListWorkflowApiWorkflowGetUrl = (params?: ListWorkflowApiWorkflo const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["users","statuses","pools","tags","priority"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -10370,15 +10418,7 @@ export const getListTaskApiTaskGetUrl = (params?: ListTaskApiTaskGetParams,) => const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["statuses","users","pools","nodes","priority"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -11132,15 +11172,7 @@ export const getTagWorkflowApiWorkflowNameTagPostUrl = (name: string, const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["add","remove"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 
'null' : value.toString()) } @@ -11385,15 +11417,7 @@ export const getPortForwardTaskApiWorkflowNamePortforwardTaskNamePostUrl = (name const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["task_ports"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -11883,15 +11907,7 @@ export const getGetResourcesApiResourcesGetUrl = (params?: GetResourcesApiResour const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["pools","platforms"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -12124,15 +12140,7 @@ export const getGetPoolsApiPoolGetUrl = (params?: GetPoolsApiPoolGetParams,) => const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["pools"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 'null' : value.toString()) } @@ -12249,15 +12257,7 @@ export const getGetPoolQuotasApiPoolQuotaGetUrl = (params?: GetPoolQuotasApiPool const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["pools"]; - - if (Array.isArray(value) && explodeParameters.includes(key)) { - value.forEach((v) => { - normalizedParams.append(key, v === null ? 'null' : v.toString()); - }); - return; - } - + if (value !== undefined) { normalizedParams.append(key, value === null ? 
'null' : value.toString()) } @@ -12396,7 +12396,7 @@ export const getSubmitWorkflowApiPoolPoolNameWorkflowPostUrl = (poolName: string } export const submitWorkflowApiPoolPoolNameWorkflowPost = async (poolName: string, - templateSpec: TemplateSpec, + templateSpecNull: TemplateSpec | null, params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams, options?: RequestInit): Promise => { return customFetch(getSubmitWorkflowApiPoolPoolNameWorkflowPostUrl(poolName,params), @@ -12405,7 +12405,7 @@ export const submitWorkflowApiPoolPoolNameWorkflowPost = async (poolName: string method: 'POST', headers: { 'Content-Type': 'application/json', ...options?.headers }, body: JSON.stringify( - templateSpec,) + templateSpecNull,) } );} @@ -12413,8 +12413,8 @@ export const submitWorkflowApiPoolPoolNameWorkflowPost = async (poolName: string export const getSubmitWorkflowApiPoolPoolNameWorkflowPostMutationOptions = (options?: { mutation?:UseMutationOptions>, TError,{poolName: string;data: TemplateSpec;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext>, request?: SecondParameter} -): UseMutationOptions>, TError,{poolName: string;data: TemplateSpec;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext> => { + TContext = unknown>(options?: { mutation?:UseMutationOptions>, TError,{poolName: string;data: TemplateSpec | null;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext>, request?: SecondParameter} +): UseMutationOptions>, TError,{poolName: string;data: TemplateSpec | null;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext> => { const mutationKey = ['submitWorkflowApiPoolPoolNameWorkflowPost']; const {mutation: mutationOptions, request: requestOptions} = options ? @@ -12426,7 +12426,7 @@ const {mutation: mutationOptions, request: requestOptions} = options ? - const mutationFn: MutationFunction>, {poolName: string;data: TemplateSpec;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}> = (props) => { + const mutationFn: MutationFunction>, {poolName: string;data: TemplateSpec | null;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}> = (props) => { const {poolName,data,params} = props ?? {}; return submitWorkflowApiPoolPoolNameWorkflowPost(poolName,data,params,requestOptions) @@ -12440,18 +12440,18 @@ const {mutation: mutationOptions, request: requestOptions} = options ? 
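
This hunk widens the SubmitWorkflow request body from TemplateSpec to TemplateSpec | null. One consequence worth noting: JSON.stringify(null) produces the literal body "null", which is a valid JSON document but may not be what the endpoint expects. A minimal sketch with a stand-in TemplateSpec shape (the real type is defined elsewhere in this file):

// Stand-in shape for illustration only; the generated TemplateSpec differs.
type TemplateSpec = { name: string; tasks: string[] };

function encodeBody(spec: TemplateSpec | null): string {
  return JSON.stringify(spec); // null serializes to the string "null"
}

console.log(encodeBody(null)); // "null"
console.log(encodeBody({ name: "train", tasks: ["prep"] })); // '{"name":"train","tasks":["prep"]}'
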
return { mutationFn, ...mutationOptions }} export type SubmitWorkflowApiPoolPoolNameWorkflowPostMutationResult = NonNullable>> - export type SubmitWorkflowApiPoolPoolNameWorkflowPostMutationBody = TemplateSpec + export type SubmitWorkflowApiPoolPoolNameWorkflowPostMutationBody = TemplateSpec | null export type SubmitWorkflowApiPoolPoolNameWorkflowPostMutationError = HTTPValidationError /** * @summary Submit Workflow */ export const useSubmitWorkflowApiPoolPoolNameWorkflowPost = (options?: { mutation?:UseMutationOptions>, TError,{poolName: string;data: TemplateSpec;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext>, request?: SecondParameter} + TContext = unknown>(options?: { mutation?:UseMutationOptions>, TError,{poolName: string;data: TemplateSpec | null;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext>, request?: SecondParameter} , queryClient?: QueryClient): UseMutationResult< Awaited>, TError, - {poolName: string;data: TemplateSpec;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, + {poolName: string;data: TemplateSpec | null;params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams}, TContext > => { return useMutation(getSubmitWorkflowApiPoolPoolNameWorkflowPostMutationOptions(options), queryClient); @@ -12967,7 +12967,7 @@ export const getListDatasetFromBucketApiBucketListDatasetGetUrl = (params?: List const normalizedParams = new URLSearchParams(); Object.entries(params || {}).forEach(([key, value]) => { - const explodeParameters = ["user","buckets"]; + const explodeParameters = ["buckets"]; if (Array.isArray(value) && explodeParameters.includes(key)) { value.forEach((v) => { @@ -14042,9 +14042,9 @@ export const getGetWorkflowPluginsConfigsApiPluginsConfigsGetUrl = () => { return `/api/plugins/configs` } -export const getWorkflowPluginsConfigsApiPluginsConfigsGet = async ( options?: RequestInit): Promise => { +export const getWorkflowPluginsConfigsApiPluginsConfigsGet = async ( options?: RequestInit): Promise => { - return customFetch(getGetWorkflowPluginsConfigsApiPluginsConfigsGetUrl(), + return customFetch(getGetWorkflowPluginsConfigsApiPluginsConfigsGetUrl(), { ...options, method: 'GET' diff --git a/src/ui/src/mocks/generated-mocks.ts b/src/ui/src/mocks/generated-mocks.ts index 85f5b105c..c97cd6a24 100644 --- a/src/ui/src/mocks/generated-mocks.ts +++ b/src/ui/src/mocks/generated-mocks.ts @@ -1,5 +1,5 @@ /** - * Generated by orval v8.5.3 🍺 + * Generated by orval v8.4.2 🍺 * Do not edit manually. * FastAPI * OpenAPI spec version: 0.1.0 @@ -59,12 +59,12 @@ export type AuthenticationConfigKeys = { [key: string]: AsymmetricKeyPair }; * Store info needed to login */ export interface LoginInfo { - device_endpoint?: string; - device_client_id?: string; - browser_endpoint?: string; - browser_client_id?: string; - token_endpoint?: string; - logout_endpoint?: string; + device_endpoint?: string | null; + device_client_id?: string | null; + browser_endpoint?: string | null; + browser_client_id?: string | null; + token_endpoint?: string | null; + logout_endpoint?: string | null; } /** @@ -99,7 +99,7 @@ export interface BackendSchedulerSettings { scheduler_timeout?: number; } -export type BackendNodeConditionsRules = { [key: string]: string }; +export type BackendNodeConditionsRules = { [key: string]: string } | null; /** * Settings for backend node conditions. @@ -133,14 +133,14 @@ export interface Backend { * Similar to connectors.Backend, but with optional fields. 
*/ export interface BackendConfig { - description?: string; - k8s_uid?: string; - dashboard_url?: string; - grafana_url?: string; - tests?: string[]; - scheduler_settings?: BackendSchedulerSettings; - node_conditions?: BackendNodeConditions; - router_address?: string; + description?: string | null; + k8s_uid?: string | null; + dashboard_url?: string | null; + grafana_url?: string | null; + tests?: string[] | null; + scheduler_settings?: BackendSchedulerSettings | null; + node_conditions?: BackendNodeConditions | null; + router_address?: string | null; } /** @@ -205,9 +205,9 @@ export interface StaticDataCredential { /** The OSMO storage URI for the data service (e.g., s3://bucket) */ endpoint: string; /** The region for the data service */ - region?: string; + region?: string | null; /** HTTP endpoint URL override the storage URI (e.g., http://minio:9000) */ - override_url?: string; + override_url?: string | null; /** The authentication key for a data backend */ access_key_id: string; /** The encrypted authentication secret for a data backend */ @@ -223,7 +223,7 @@ export interface BucketConfig { region?: string; description?: string; mode?: string; - default_credential?: StaticDataCredential; + default_credential?: StaticDataCredential | null; } /** @@ -242,7 +242,7 @@ export type BucketInfoResponseBuckets = { [key: string]: BucketInfoEntry }; * Object storing Upload Response. */ export interface BucketInfoResponse { - default?: string; + default?: string | null; buckets: BucketInfoResponseBuckets; } @@ -274,9 +274,9 @@ export interface CancelResponse { * Config for storing information regarding CLI storage. */ export interface CliConfig { - latest_version?: string; - min_supported_version?: string; - client_install_url?: string; + latest_version?: string | null; + min_supported_version?: string | null; + client_install_url?: string | null; } /** @@ -316,7 +316,7 @@ export interface ConfigHistory { username: string; created_at: string; description: string; - tags?: string[]; + tags?: string[] | null; data?: unknown; } @@ -324,8 +324,8 @@ export interface ConfigHistory { * Request body for updating configurations with history tracking metadata. */ export interface ConfigsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; } /** @@ -333,7 +333,7 @@ export interface ConfigsRequest { */ export interface CreateUserRequest { id: string; - roles?: string[]; + roles?: string[] | null; } /** @@ -344,7 +344,7 @@ export interface CredentialConfig { disable_data_validation?: string[]; } -export type CredentialGetResponseCredentialsItem = { [key: string]: string }; +export type CredentialGetResponseCredentialsItem = { [key: string]: string | null }; /** * Credential Response. 
@@ -372,9 +372,9 @@ export interface UserDataCredential { /** The OSMO storage URI for the data service (e.g., s3://bucket) */ endpoint: string; /** The region for the data service */ - region?: string; + region?: string | null; /** HTTP endpoint URL override the storage URI (e.g., http://minio:9000) */ - override_url?: string; + override_url?: string | null; /** The authentication key for a data backend */ access_key_id: string; /** The authentication secret for a data backend */ @@ -399,11 +399,11 @@ export interface UserCredential { */ export interface CredentialOptions { /** Authentication information for a Docker registry */ - registry_credential?: UserRegistryCredential; + registry_credential?: UserRegistryCredential | null; /** Authentication information for a data service */ - data_credential?: UserDataCredential; + data_credential?: UserDataCredential | null; /** Generic authentication information */ - generic_credential?: UserCredential; + generic_credential?: UserCredential | null; } /** @@ -427,9 +427,9 @@ export interface DataMetadataResponse { * Object storing Tag/Label/Metadata Response. */ export interface DataAttributeResponse { - tag_response?: DataTagResponse; - label_response?: DataMetadataResponse; - metadata_response?: DataMetadataResponse; + tag_response?: DataTagResponse | null; + label_response?: DataMetadataResponse | null; + metadata_response?: DataMetadataResponse | null; } /** @@ -445,7 +445,7 @@ export const DownloadType = { * Config for storing information about data. */ export interface DataConfig { - credential?: StaticDataCredential; + credential?: StaticDataCredential | null; base_url?: string; websocket_timeout?: number; data_timeout?: number; @@ -469,7 +469,7 @@ export interface DataInfoCollectionEntry { version: string; location: string; uri: string; - hash_location?: string; + hash_location?: string | null; size: number; } @@ -508,9 +508,6 @@ export interface DataInfoDatasetEntry { export type DataInfoResponseLabels = { [key: string]: unknown }; -/** - * An enumeration. - */ export type DatasetType = (typeof DatasetType)[keyof typeof DatasetType]; export const DatasetType = { @@ -525,10 +522,10 @@ export interface DataInfoResponse { name: string; id: string; bucket: string; - created_by?: string; - created_date?: string; - hash_location?: string; - hash_location_size?: number; + created_by?: string | null; + created_date?: string | null; + hash_location?: string | null; + hash_location_size?: number | null; labels: DataInfoResponseLabels; type: DatasetType; versions: (DataInfoDatasetEntry | DataInfoCollectionEntry)[]; @@ -542,10 +539,10 @@ export interface DataListEntry { id: string; bucket: string; create_time: string; - last_created?: string; - hash_location?: string; - hash_location_size?: number; - version_id?: string; + last_created?: string | null; + hash_location?: string | null; + hash_location_size?: number | null; + version_id?: string | null; type: DatasetType; } @@ -556,9 +553,6 @@ export interface DataListResponse { datasets: DataListEntry[]; } -/** - * An enumeration. 
- */ export type DatasetQueryType = (typeof DatasetQueryType)[keyof typeof DatasetQueryType]; export const DatasetQueryType = { @@ -574,13 +568,23 @@ export interface DataQueryResponse { datasets: (DataInfoResponse | DataInfoDatasetEntry)[]; } -export type DatasetConfigBuckets = { [key: string]: BucketConfig }; +export type DatasetConfigInputBuckets = { [key: string]: BucketConfig }; + +/** + * Stores any dataset configs External Admins control + */ +export interface DatasetConfigInput { + buckets?: DatasetConfigInputBuckets; + default_bucket?: string; +} + +export type DatasetConfigOutputBuckets = { [key: string]: BucketConfig }; /** * Stores any dataset configs External Admins control */ -export interface DatasetConfig { - buckets?: DatasetConfigBuckets; +export interface DatasetConfigOutput { + buckets?: DatasetConfigOutputBuckets; default_bucket?: string; } @@ -588,8 +592,8 @@ export interface DatasetConfig { * Request body for deleting a backend with history tracking metadata. */ export interface DeleteBackendRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; force?: boolean; } @@ -626,7 +630,7 @@ export interface PoolResourceCountable { * Resources allocated to the pool, for schedulers that support this feature */ export interface PoolResources { - gpu?: PoolResourceCountable; + gpu?: PoolResourceCountable | null; } /** @@ -658,17 +662,14 @@ export type PoolEditableCommonDefaultVariables = { [key: string]: unknown }; export type PoolEditablePlatforms = { [key: string]: PlatformEditable }; -/** - * Pool schema to expose through API endpoint. - */ export interface PoolEditable { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -750,24 +751,24 @@ export interface TaskQueryResponse { name: string; retry_id: number; status: TaskGroupStatus; - failure_message?: string; - exit_code?: number; + failure_message?: string | null; + exit_code?: number | null; logs: string; - error_logs?: string; - processing_start_time?: string; - scheduling_start_time?: string; - initializing_start_time?: string; + error_logs?: string | null; + processing_start_time?: string | null; + scheduling_start_time?: string | null; + initializing_start_time?: string | null; events: string; - start_time?: string; - end_time?: string; - input_download_start_time?: string; - input_download_end_time?: string; - output_upload_start_time?: string; - dashboard_url?: string; + start_time?: string | null; + end_time?: string | null; + input_download_start_time?: string | null; + input_download_end_time?: string | null; + output_upload_start_time?: string | null; + dashboard_url?: string | null; pod_name: string; - pod_ip?: string; + pod_ip?: string | null; task_uuid: string; - node_name?: string; + node_name?: string | null; lead?: boolean; } @@ -777,14 +778,14 @@ export interface TaskQueryResponse { export interface GroupQueryResponse { name: string; status: TaskGroupStatus; - start_time?: string; - end_time?: string; - processing_start_time?: string; - scheduling_start_time?: string; - initializing_start_time?: string; - remaining_upstream_groups?: string[]; - downstream_groups?: string[]; - failure_message?: string; + start_time?: string | null; + 
end_time?: string | null; + processing_start_time?: string | null; + scheduling_start_time?: string | null; + initializing_start_time?: string | null; + remaining_upstream_groups?: string[] | null; + downstream_groups?: string[] | null; + failure_message?: string | null; tasks?: TaskQueryResponse[]; } @@ -802,9 +803,9 @@ export interface HTTPValidationError { * Response for JWT token creation endpoints. */ export interface JwtTokenResponse { - token?: string; - expires_at?: number; - error?: string; + token?: string | null; + expires_at?: number | null; + error?: string | null; } /** @@ -829,7 +830,7 @@ export const ListOrder = { */ export interface ListTaskAggregatedEntry { user: string; - pool?: string; + pool?: string | null; storage: number; cpu: number; memory: number; @@ -851,17 +852,17 @@ export interface ListTaskEntry { workflow_uuid: string; task_name: string; retry_id: number; - pool?: string; - node?: string; - start_time?: string; - end_time?: string; - duration?: number; + pool?: string | null; + node?: string | null; + start_time?: string | null; + end_time?: string | null; + duration?: number | null; status: TaskGroupStatus; overview: string; logs: string; - error_logs?: string; - grafana_url?: string; - dashboard_url?: string; + error_logs?: string | null; + grafana_url?: string | null; + dashboard_url?: string | null; storage: number; cpu: number; memory: number; @@ -878,7 +879,7 @@ export interface ListTaskResponse { */ export interface ListTaskSummaryEntry { user: string; - pool?: string; + pool?: string | null; storage: number; cpu: number; memory: number; @@ -894,7 +895,7 @@ export interface ListTaskSummaryResponse { * Config for storing information about data. */ export interface LogConfig { - credential?: StaticDataCredential; + credential?: StaticDataCredential | null; } /** @@ -912,17 +913,14 @@ export type PoolMinimalDefaultExitActions = { [key: string]: string }; export type PoolMinimalPlatforms = { [key: string]: PlatformMinimal }; -/** - * Pool schema to expose through API endpoint. - */ export interface PoolMinimal { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -953,9 +951,6 @@ export interface NotificationConfig { smtp_settings?: SMTPConfig; } -/** - * An enumeration. - */ export type OperatorType = (typeof OperatorType)[keyof typeof OperatorType]; export const OperatorType = { @@ -994,8 +989,8 @@ export type PatchBackendTestRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a test with history tracking metadata. */ export interface PatchBackendTestRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchBackendTestRequestConfigsDict; } @@ -1005,8 +1000,8 @@ export type PatchConfigRequestConfigsDict = { [key: string]: unknown }; * Request body for patching configurations with history tracking metadata. 
*/ export interface PatchConfigRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchConfigRequestConfigsDict; } @@ -1016,8 +1011,8 @@ export type PatchDatasetRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a dataset bucket configuration with history tracking metadata. */ export interface PatchDatasetRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchDatasetRequestConfigsDict; } @@ -1027,16 +1022,16 @@ export type PatchPoolRequestConfigsDict = { [key: string]: unknown }; * Request body for patching a pool with history tracking metadata. */ export interface PatchPoolRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PatchPoolRequestConfigsDict; } -export type PlatformLabels = { [key: string]: string }; +export type PlatformInputLabels = { [key: string]: string }; -export type PlatformDefaultVariables = { [key: string]: unknown }; +export type PlatformInputDefaultVariables = { [key: string]: unknown }; -export type PlatformParsedPodTemplate = { [key: string]: unknown }; +export type PlatformInputParsedPodTemplate = { [key: string]: unknown }; /** * Single Toleration Entry @@ -1044,8 +1039,8 @@ export type PlatformParsedPodTemplate = { [key: string]: unknown }; export interface Toleration { key: string; operator?: string; - value?: string; - effect?: string; + value?: string | null; + effect?: string | null; } /** @@ -1061,19 +1056,43 @@ export interface ResourceAssertion { /** * Single Platform Entry */ -export interface Platform { +export interface PlatformInput { + description?: string; + host_network_allowed?: boolean; + privileged_allowed?: boolean; + allowed_mounts?: string[]; + default_mounts?: string[]; + tolerations?: Toleration[]; + labels?: PlatformInputLabels; + default_variables?: PlatformInputDefaultVariables; + resource_validations?: string[]; + parsed_resource_validations?: ResourceAssertion[]; + override_pod_template?: string[]; + parsed_pod_template?: PlatformInputParsedPodTemplate; +} + +export type PlatformOutputLabels = { [key: string]: string }; + +export type PlatformOutputDefaultVariables = { [key: string]: unknown }; + +export type PlatformOutputParsedPodTemplate = { [key: string]: unknown }; + +/** + * Single Platform Entry + */ +export interface PlatformOutput { description?: string; host_network_allowed?: boolean; privileged_allowed?: boolean; allowed_mounts?: string[]; default_mounts?: string[]; tolerations?: Toleration[]; - labels?: PlatformLabels; - default_variables?: PlatformDefaultVariables; + labels?: PlatformOutputLabels; + default_variables?: PlatformOutputDefaultVariables; resource_validations?: string[]; parsed_resource_validations?: ResourceAssertion[]; override_pod_template?: string[]; - parsed_pod_template?: PlatformParsedPodTemplate; + parsed_pod_template?: PlatformOutputParsedPodTemplate; } /** @@ -1128,7 +1147,14 @@ export interface RsyncConfig { /** * Stores any plugins configs */ -export interface PluginsConfig { +export interface PluginsConfigInput { + rsync?: RsyncConfig; +} + +/** + * Stores any plugins configs + */ +export interface PluginsConfigOutput { rsync?: RsyncConfig; } @@ -1142,43 +1168,82 @@ export const PolicyEffect = { Deny: "Deny", } as const; -export type PoolDefaultExitActions = { [key: string]: string }; +export type PoolInputDefaultExitActions = { [key: string]: string }; 
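The `Platform` → `PlatformInput`/`PlatformOutput` split above (and the parallel `Pool`, `DatasetConfig`, `PluginsConfig`, `Role`, `ServiceConfig`, and `WorkflowConfig` splits elsewhere in this diff) reflects Pydantic v2 emitting separate validation and serialization schemas: request bodies are typed with the `*Input` models, parsed responses with `*Output`. A rough sketch of a caller on each side of the split (field values are illustrative):

```ts
// Sending: request bodies reference the *Input models, e.g.
// PutPoolPlatformRequest.configs is a PlatformInput.
const body: PutPoolPlatformRequest = {
  description: "tighten platform defaults", // history-tracking metadata
  configs: {
    description: "A100 nodes",
    privileged_allowed: false,
  }, // validated on the way in as PlatformInput
};

// Receiving: response types are declared with the output schemas, e.g.
// the platform read endpoint now returns PlatformOutput in its union.
type PlatformRead = PlatformMinimal | PlatformEditable | PlatformOutput;
```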
-export type PoolCommonDefaultVariables = { [key: string]: unknown }; +export type PoolInputCommonDefaultVariables = { [key: string]: unknown }; -export type PoolParsedPodTemplate = { [key: string]: unknown }; +export type PoolInputParsedPodTemplate = { [key: string]: unknown }; -export type PoolParsedGroupTemplatesItem = { [key: string]: unknown }; +export type PoolInputParsedGroupTemplatesItem = { [key: string]: unknown }; -export type PoolPlatforms = { [key: string]: Platform }; +export type PoolInputPlatforms = { [key: string]: PlatformInput }; /** * Single Pool Entry */ -export interface Pool { +export interface PoolInput { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; + enable_maintenance?: boolean; + backend: string; + default_platform?: string | null; + default_exec_timeout?: string; + default_queue_timeout?: string; + max_exec_timeout?: string; + max_queue_timeout?: string; + default_exit_actions?: PoolInputDefaultExitActions; + resources?: PoolResources; + topology_keys?: TopologyKey[]; + common_default_variables?: PoolInputCommonDefaultVariables; + common_resource_validations?: string[]; + parsed_resource_validations?: ResourceAssertion[]; + common_pod_template?: string[]; + parsed_pod_template?: PoolInputParsedPodTemplate; + common_group_templates?: string[]; + parsed_group_templates?: PoolInputParsedGroupTemplatesItem[]; + platforms?: PoolInputPlatforms; + last_heartbeat?: string | null; +} + +export type PoolOutputDefaultExitActions = { [key: string]: string }; + +export type PoolOutputCommonDefaultVariables = { [key: string]: unknown }; + +export type PoolOutputParsedPodTemplate = { [key: string]: unknown }; + +export type PoolOutputParsedGroupTemplatesItem = { [key: string]: unknown }; + +export type PoolOutputPlatforms = { [key: string]: PlatformOutput }; + +/** + * Single Pool Entry + */ +export interface PoolOutput { + name?: string; + description?: string; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: string; default_queue_timeout?: string; max_exec_timeout?: string; max_queue_timeout?: string; - default_exit_actions?: PoolDefaultExitActions; + default_exit_actions?: PoolOutputDefaultExitActions; resources?: PoolResources; topology_keys?: TopologyKey[]; - common_default_variables?: PoolCommonDefaultVariables; + common_default_variables?: PoolOutputCommonDefaultVariables; common_resource_validations?: string[]; parsed_resource_validations?: ResourceAssertion[]; common_pod_template?: string[]; - parsed_pod_template?: PoolParsedPodTemplate; + parsed_pod_template?: PoolOutputParsedPodTemplate; common_group_templates?: string[]; - parsed_group_templates?: PoolParsedGroupTemplatesItem[]; - platforms?: PoolPlatforms; - last_heartbeat?: string; + parsed_group_templates?: PoolOutputParsedGroupTemplatesItem[]; + platforms?: PoolOutputPlatforms; + last_heartbeat?: string | null; } /** @@ -1203,11 +1268,11 @@ export type PoolResourceUsagePlatforms = { [key: string]: PlatformMinimal }; export interface PoolResourceUsage { name?: string; description?: string; - status?: PoolStatus; - download_type?: DownloadType; + status?: PoolStatus | null; + download_type?: DownloadType | null; enable_maintenance?: boolean; backend: string; - default_platform?: string; + default_platform?: string | null; default_exec_timeout?: 
string; default_queue_timeout?: string; max_exec_timeout?: string; @@ -1261,8 +1326,8 @@ export interface PoolResponse { * Request body for creating a new backend with history tracking metadata. */ export interface PostBackendRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: BackendConfig; } @@ -1270,11 +1335,11 @@ export interface PostBackendRequest { * Provides all User Profile Information */ export interface UserProfile { - username?: string; - email_notification?: boolean; - slack_notification?: boolean; - bucket?: string; - pool?: string; + username?: string | null; + email_notification?: boolean | null; + slack_notification?: boolean | null; + bucket?: string | null; + pool?: string | null; } /** @@ -1282,7 +1347,7 @@ export interface UserProfile { */ export interface TokenIdentity { name: string; - expires_at?: string; + expires_at?: string | null; } /** @@ -1293,15 +1358,15 @@ export interface ProfileResponse { profile: UserProfile; roles: string[]; pools: string[]; - token?: TokenIdentity; + token?: TokenIdentity | null; } /** * Request body for updating a test with history tracking metadata. */ export interface PutBackendTestRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: BackendTests; } @@ -1311,8 +1376,8 @@ export type PutBackendTestsRequestConfigs = { [key: string]: BackendTests }; * Request body for updating a test with history tracking metadata. */ export interface PutBackendTestsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutBackendTestsRequestConfigs; } @@ -1320,9 +1385,9 @@ export interface PutBackendTestsRequest { * Request body for updating dataset configurations with history tracking metadata. */ export interface PutDatasetRequest { - description?: string; - tags?: string[]; - configs: DatasetConfig; + description?: string | null; + tags?: string[] | null; + configs: DatasetConfigInput; } export type PutGroupTemplateRequestConfigs = { [key: string]: unknown }; @@ -1331,8 +1396,8 @@ export type PutGroupTemplateRequestConfigs = { [key: string]: unknown }; * Request body for updating a group template with history tracking metadata. */ export interface PutGroupTemplateRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutGroupTemplateRequestConfigs; } @@ -1342,8 +1407,8 @@ export type PutGroupTemplatesRequestConfigs = { [key: string]: { [key: string]: * Request body for updating group templates with history tracking metadata. */ export interface PutGroupTemplatesRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutGroupTemplatesRequestConfigs; } @@ -1353,8 +1418,8 @@ export type PutPodTemplateRequestConfigs = { [key: string]: unknown }; * Request body for updating a pod template with history tracking metadata. */ export interface PutPodTemplateRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPodTemplateRequestConfigs; } @@ -1364,8 +1429,8 @@ export type PutPodTemplatesRequestConfigs = { [key: string]: { [key: string]: un * Request body for updating pod templates with history tracking metadata. 
*/ export interface PutPodTemplatesRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPodTemplatesRequestConfigs; } @@ -1373,28 +1438,28 @@ export interface PutPodTemplatesRequest { * Request body for updating a platform in a pool with history tracking metadata. */ export interface PutPoolPlatformRequest { - description?: string; - tags?: string[]; - configs: Platform; + description?: string | null; + tags?: string[] | null; + configs: PlatformInput; } /** * Request body for updating a pool with history tracking metadata. */ export interface PutPoolRequest { - description?: string; - tags?: string[]; - configs: Pool; + description?: string | null; + tags?: string[] | null; + configs: PoolInput; } -export type PutPoolsRequestConfigs = { [key: string]: Pool }; +export type PutPoolsRequestConfigs = { [key: string]: PoolInput }; /** * Request body for updating pools with history tracking metadata. */ export interface PutPoolsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutPoolsRequestConfigs; } @@ -1404,8 +1469,8 @@ export type PutResourceValidationRequestConfigsItem = { [key: string]: unknown } * Request body for updating a resource validation with history tracking metadata. */ export interface PutResourceValidationRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs: PutResourceValidationRequestConfigsItem[]; } @@ -1419,8 +1484,8 @@ export type PutResourceValidationsRequestConfigsDict = { * Request body for updating resource validations with history tracking metadata. */ export interface PutResourceValidationsRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; configs_dict: PutResourceValidationsRequestConfigsDict; } @@ -1461,37 +1526,37 @@ export const SyncMode = { Note: Authorization checking is now handled by the authz_sidecar (Go service). This Python class is only used for role CRUD operations. */ -export interface Role { +export interface RoleInput { name: string; description: string; policies: RolePolicy[]; immutable?: boolean; sync_mode?: SyncMode; - external_roles?: string[]; + external_roles?: string[] | null; } /** * Request body for updating a role with history tracking metadata. */ export interface PutRoleRequest { - description?: string; - tags?: string[]; - configs: Role; + description?: string | null; + tags?: string[] | null; + configs: RoleInput; } /** * Request body for updating a test with history tracking metadata. */ export interface PutRolesRequest { - description?: string; - tags?: string[]; - configs: Role[]; + description?: string | null; + tags?: string[] | null; + configs: RoleInput[]; } /** * Stores any configs OSMO Admins control */ -export interface ServiceConfig { +export interface ServiceConfigInput { service_base_url?: string; service_auth?: AuthenticationConfig; cli_config?: CliConfig; @@ -1503,9 +1568,9 @@ export interface ServiceConfig { * Request body for updating service configurations with history tracking metadata. */ export interface PutServiceRequest { - description?: string; - tags?: string[]; - configs: ServiceConfig; + description?: string | null; + tags?: string[] | null; + configs: ServiceConfigInput; } /** @@ -1521,10 +1586,8 @@ export interface WorkflowInfo { If a limit is set, it must be greater than 0. 
*/ export interface UserWorkflowLimitConfig { - /** @exclusiveMinimum 0 */ - max_num_workflows?: number; - /** @exclusiveMinimum 0 */ - max_num_tasks?: number; + max_num_workflows?: number | null; + max_num_tasks?: number | null; jinja_sandbox_workers?: number; jinja_sandbox_max_time?: number; jinja_sandbox_memory_limit?: number; @@ -1533,7 +1596,7 @@ export interface UserWorkflowLimitConfig { /** * Stores any workflow configs External Admins control */ -export interface WorkflowConfig { +export interface WorkflowConfigInput { workflow_data?: DataConfig; workflow_log?: LogConfig; workflow_app?: LogConfig; @@ -1542,7 +1605,7 @@ export interface WorkflowConfig { workflow_alerts?: NotificationConfig; credential_config?: CredentialConfig; user_workflow_limits?: UserWorkflowLimitConfig; - plugins_config?: PluginsConfig; + plugins_config?: PluginsConfigInput; max_num_tasks?: number; max_num_ports_per_task?: number; max_retry_per_task?: number; @@ -1564,17 +1627,17 @@ export interface WorkflowConfig { * Request body for updating workflow configurations with history tracking metadata. */ export interface PutWorkflowRequest { - description?: string; - tags?: string[]; - configs: WorkflowConfig; + description?: string | null; + tags?: string[] | null; + configs: WorkflowConfigInput; } /** * Request body for renaming a platform in a pool with history tracking metadata. */ export interface RenamePoolPlatformRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; new_name: string; } @@ -1582,8 +1645,8 @@ export interface RenamePoolPlatformRequest { * Request body for renaming a pool with history tracking metadata. */ export interface RenamePoolRequest { - description?: string; - tags?: string[]; + description?: string | null; + tags?: string[] | null; new_name: string; } @@ -1597,15 +1660,15 @@ export type ResourcesEntryNonWorkflowUsageFields = { [key: string]: unknown }; export type ResourcesEntryAllocatableFields = { [key: string]: unknown }; -export type ResourcesEntryPlatformAllocatableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformAllocatableFields = { [key: string]: unknown } | null; -export type ResourcesEntryPlatformAvailableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformAvailableFields = { [key: string]: unknown } | null; -export type ResourcesEntryPlatformWorkflowAllocatableFields = { [key: string]: unknown }; +export type ResourcesEntryPlatformWorkflowAllocatableFields = { [key: string]: unknown } | null; -export type ResourcesEntryConfigFields = { [key: string]: unknown }; +export type ResourcesEntryConfigFields = { [key: string]: unknown } | null; -export type ResourcesEntryLabelFields = { [key: string]: unknown }; +export type ResourcesEntryLabelFields = { [key: string]: unknown } | null; export type ResourcesEntryPoolPlatformLabels = { [key: string]: string[] }; @@ -1617,7 +1680,7 @@ export interface ResourcesEntry { exposed_fields: ResourcesEntryExposedFields; taints: ResourcesEntryTaintsItem[]; usage_fields: ResourcesEntryUsageFields; - conditions?: string[]; + conditions?: string[] | null; non_workflow_usage_fields: ResourcesEntryNonWorkflowUsageFields; allocatable_fields: ResourcesEntryAllocatableFields; platform_allocatable_fields?: ResourcesEntryPlatformAllocatableFields; @@ -1637,6 +1700,21 @@ export interface ResourcesResponse { resources: ResourcesEntry[]; } +/** + * Single Role Entry. + +Note: Authorization checking is now handled by the authz_sidecar (Go service). 
+This Python class is only used for role CRUD operations. + */ +export interface RoleOutput { + name: string; + description: string; + policies: RolePolicy[]; + immutable?: boolean; + sync_mode?: SyncMode; + external_roles?: string[] | null; +} + export type RoleUsersResponseUsersItem = { [key: string]: unknown }; /** @@ -1647,32 +1725,13 @@ export interface RoleUsersResponse { users: RoleUsersResponseUsersItem[]; } -/** - * Type of configs supported by config history - */ -export type SrcUtilsConnectorsPostgresConfigHistoryType = - (typeof SrcUtilsConnectorsPostgresConfigHistoryType)[keyof typeof SrcUtilsConnectorsPostgresConfigHistoryType]; - -export const SrcUtilsConnectorsPostgresConfigHistoryType = { - SERVICE: "SERVICE", - WORKFLOW: "WORKFLOW", - DATASET: "DATASET", - BACKEND: "BACKEND", - POOL: "POOL", - POD_TEMPLATE: "POD_TEMPLATE", - GROUP_TEMPLATE: "GROUP_TEMPLATE", - RESOURCE_VALIDATION: "RESOURCE_VALIDATION", - BACKEND_TEST: "BACKEND_TEST", - ROLE: "ROLE", -} as const; - /** * Request body for config rollback endpoint. */ export interface RollbackConfigRequest { - description?: string; - tags?: string[]; - config_type: SrcUtilsConnectorsPostgresConfigHistoryType; + description?: string | null; + tags?: string[] | null; + config_type: SrcLibUtilsConfigHistoryConfigHistoryType; /** * Revision to roll back to * @exclusiveMinimum 0 @@ -1689,15 +1748,26 @@ export interface RouterResponse { cookie: string; } +/** + * Stores any configs OSMO Admins control + */ +export interface ServiceConfigOutput { + service_base_url?: string; + service_auth?: AuthenticationConfig; + cli_config?: CliConfig; + max_pod_restart_limit?: string; + agent_queue_size?: number; +} + /** * Object storing workflow name, logs, and spec after submission. */ export interface SubmitResponse { name: string; - overview?: string; - logs?: string; - spec?: string; - dashboard_url?: string; + overview?: string | null; + logs?: string | null; + spec?: string | null; + dashboard_url?: string | null; } /** @@ -1706,9 +1776,9 @@ export interface SubmitResponse { export interface TaskEntry { workflow_id: string; task_name: string; - node?: string; - start_time?: string; - end_time?: string; + node?: string | null; + start_time?: string | null; + end_time?: string | null; status: TaskGroupStatus; storage: number; cpu: number; @@ -1723,7 +1793,7 @@ export interface TemplateSpec { file: string; set_variables?: string[]; set_string_variables?: string[]; - uploaded_templated_spec?: string; + uploaded_templated_spec?: string | null; } /** @@ -1738,9 +1808,9 @@ export interface TokenRequest { */ export interface UpdateConfigTagsRequest { /** Tags to add to the config */ - set_tags?: string[]; + set_tags?: string[] | null; /** Tags to remove from the config */ - delete_tags?: string[]; + delete_tags?: string[] | null; } /** @@ -1748,8 +1818,8 @@ export interface UpdateConfigTagsRequest { */ export interface User { id: string; - created_at?: string; - created_by?: string; + created_at?: string | null; + created_by?: string | null; } /** @@ -1794,12 +1864,12 @@ export interface UserRolesResponse { */ export interface UserWithRoles { id: string; - created_at?: string; - created_by?: string; + created_at?: string | null; + created_by?: string | null; roles?: UserRole[]; } -export type VerbosePoolConfigPools = { [key: string]: Pool }; +export type VerbosePoolConfigPools = { [key: string]: PoolOutput }; /** * Stores verbose pool configs. 
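One rename in this stretch is breaking for importers: the duplicated `SrcUtilsConnectorsPostgresConfigHistoryType` enum is dropped, and `RollbackConfigRequest.config_type` (like the history-listing params) now uses `SrcLibUtilsConfigHistoryConfigHistoryType`. Assuming the surviving enum follows the same `as const` object pattern and keeps the same members as the removed duplicate, migration is a name swap:

```ts
// Before (removed by this diff):
//   config_type: SrcUtilsConnectorsPostgresConfigHistoryType.POOL
// After: assuming the surviving enum keeps the same members
// (SERVICE, WORKFLOW, ..., POOL, ..., ROLE) and const-object shape:
const configType: SrcLibUtilsConfigHistoryConfigHistoryType =
  SrcLibUtilsConfigHistoryConfigHistoryType.POOL;
```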
@@ -1818,6 +1888,36 @@ export interface Version { hash?: string; } +/** + * Stores any workflow configs External Admins control + */ +export interface WorkflowConfigOutput { + workflow_data?: DataConfig; + workflow_log?: LogConfig; + workflow_app?: LogConfig; + workflow_info?: WorkflowInfo; + backend_images?: OsmoImageConfig; + workflow_alerts?: NotificationConfig; + credential_config?: CredentialConfig; + user_workflow_limits?: UserWorkflowLimitConfig; + plugins_config?: PluginsConfigOutput; + max_num_tasks?: number; + max_num_ports_per_task?: number; + max_retry_per_task?: number; + max_retry_per_job?: number; + default_schedule_timeout?: number; + default_exec_timeout?: string; + default_queue_timeout?: string; + max_exec_timeout?: string; + max_queue_timeout?: string; + force_cleanup_delay?: string; + max_log_lines?: number; + max_task_log_lines?: number; + max_error_log_lines?: number; + max_event_log_lines?: number; + task_heartbeat_frequency?: string; +} + /** * Represents the state of plugins in a workflow upon submission. */ @@ -1867,32 +1967,32 @@ export interface WorkflowQueryResponse { name: string; uuid: string; submitted_by: string; - cancelled_by?: string; + cancelled_by?: string | null; spec: string; template_spec: string; logs: string; events: string; overview: string; - parent_name?: string; - parent_job_id?: number; - dashboard_url?: string; - grafana_url?: string; + parent_name?: string | null; + parent_job_id?: number | null; + dashboard_url?: string | null; + grafana_url?: string | null; tags?: string[]; submit_time: string; - start_time?: string; - end_time?: string; - exec_timeout?: number; - queue_timeout?: number; - duration?: number; + start_time?: string | null; + end_time?: string | null; + exec_timeout?: number | null; + queue_timeout?: number | null; + duration?: number | null; queued_time: number; status: WorkflowStatus; outputs?: string; groups: GroupQueryResponse[]; - pool?: string; - backend?: string; - app_owner?: string; - app_name?: string; - app_version?: number; + pool?: string | null; + backend?: string | null; + app_owner?: string | null; + app_name?: string | null; + app_version?: number | null; plugins: WorkflowPlugins; priority: string; } @@ -1903,7 +2003,7 @@ export interface SrcServiceCoreAppObjectsListEntry { description: string; created_date: string; owner: string; - latest_version: string; + latest_version: number; } export interface SrcServiceCoreAppObjectsListResponse { @@ -1919,20 +2019,20 @@ export interface SrcServiceCoreWorkflowObjectsListEntry { name: string; workflow_uuid: string; submit_time: string; - start_time?: string; - end_time?: string; + start_time?: string | null; + end_time?: string | null; queued_time: number; - duration?: number; + duration?: number | null; status: WorkflowStatus; overview: string; logs: string; - error_logs?: string; - grafana_url?: string; - dashboard_url?: string; - pool?: string; - app_owner?: string; - app_name?: string; - app_version?: number; + error_logs?: string | null; + grafana_url?: string | null; + dashboard_url?: string | null; + pool?: string | null; + app_owner?: string | null; + app_name?: string | null; + app_version?: number | null; priority: string; } @@ -1957,7 +2057,7 @@ export type PatchDatasetApiConfigsDatasetNamePatch200 = { [key: string]: unknown export type ListPoolsApiConfigsPoolGetParams = { verbose?: boolean; - backend?: string; + backend?: string | null; }; export type ReadPoolApiConfigsPoolNameGetParams = { @@ -1969,7 +2069,7 @@ export type 
ListPlatformsInPoolApiConfigsPoolNamePlatformGetParams = { }; export type ListPlatformsInPoolApiConfigsPoolNamePlatformGet200 = { - [key: string]: PlatformMinimal | PlatformEditable | Platform; + [key: string]: PlatformMinimal | PlatformEditable | PlatformOutput; }; export type ReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetParams = { @@ -1991,15 +2091,12 @@ export type ListBackendTestsApiConfigsBackendTestGet200 = { [key: string]: Backe export type GetConfigsHistoryApiConfigsHistoryGetParams = { /** * Number of records to skip - * @minimum 0 */ - offset?: number; + offset?: number | null; /** * Maximum number of records to return - * @maximum 1000 - * @exclusiveMinimum 0 */ - limit?: number; + limit?: number | null; /** * Sort order by creation time */ @@ -2007,32 +2104,31 @@ export type GetConfigsHistoryApiConfigsHistoryGetParams = { /** * Filter by config types */ - config_types?: SrcLibUtilsConfigHistoryConfigHistoryType[]; + config_types?: SrcLibUtilsConfigHistoryConfigHistoryType[] | null; /** * Filter by config name */ - name?: string; + name?: string | null; /** * Filter by revision - * @exclusiveMinimum 0 */ - revision?: number; + revision?: number | null; /** * Filter by tags */ - tags?: string[]; + tags?: string[] | null; /** * Filter by creation time before */ - created_before?: string; + created_before?: string | null; /** * Filter by creation time after */ - created_after?: string; + created_after?: string | null; /** * Get config state at specific timestamp */ - at_timestamp?: string; + at_timestamp?: string | null; /** * Whether to omit data from the response */ @@ -2040,7 +2136,7 @@ export type GetConfigsHistoryApiConfigsHistoryGetParams = { }; export type GetConfigDiffApiConfigsDiffGetParams = { - config_type: SrcUtilsConnectorsPostgresConfigHistoryType; + config_type: SrcLibUtilsConfigHistoryConfigHistoryType; /** * First revision to compare * @exclusiveMinimum 0 @@ -2075,25 +2171,25 @@ export type GetJwtTokenFromAccessTokenApiAuthJwtAccessTokenGetParams = { export type CreateAccessTokenApiAuthAccessTokenTokenNamePostParams = { expires_at: string; description?: string; - roles?: string[]; + roles?: string[] | null; }; export type AdminCreateAccessTokenApiAuthUserUserIdAccessTokenTokenNamePostParams = { expires_at: string; description?: string; - roles?: string[]; + roles?: string[] | null; }; export type ListUsersApiAuthUserGetParams = { start_index?: number; count?: number; - id_prefix?: string; - roles?: string[]; + id_prefix?: string | null; + roles?: string[] | null; }; export type ListAppsApiAppGetParams = { - name?: string; - users?: string[]; + name?: string | null; + users?: string[] | null; all_users?: boolean; offset?: number; limit?: number; @@ -2101,7 +2197,7 @@ export type ListAppsApiAppGetParams = { }; export type GetAppApiAppUserNameGetParams = { - version?: number; + version?: number | null; limit?: number; order?: ListOrder; }; @@ -2111,54 +2207,54 @@ export type CreateAppApiAppUserNamePostParams = { }; export type DeleteAppApiAppUserNameDeleteParams = { - version?: number; + version?: number | null; all_versions?: boolean; }; export type DeleteAppApiAppUserNameDelete200 = { [key: string]: number[] }; export type GetAppContentApiAppUserNameSpecGetParams = { - version?: number; + version?: number | null; }; export type CancelWorkflowApiWorkflowNameCancelPostParams = { - message?: string; + message?: string | null; force?: boolean; }; export type ListWorkflowApiWorkflowGetParams = { - users?: string[]; - name?: string; - statuses?: 
WorkflowStatus[]; + users?: string[] | null; + name?: string | null; + statuses?: WorkflowStatus[] | null; offset?: number; limit?: number; order?: ListOrder; all_users?: boolean; - pools?: string[]; + pools?: string[] | null; all_pools?: boolean; - submitted_before?: string; - submitted_after?: string; - tags?: string[]; - app?: string; - priority?: WorkflowPriority[]; + submitted_before?: string | null; + submitted_after?: string | null; + tags?: string[] | null; + app?: string | null; + priority?: WorkflowPriority[] | null; }; export type ListTaskApiTaskGetParams = { - workflow_id?: string; - statuses?: TaskGroupStatus[]; - users?: string[]; + workflow_id?: string | null; + statuses?: TaskGroupStatus[] | null; + users?: string[] | null; all_users?: boolean; - pools?: string[]; + pools?: string[] | null; all_pools?: boolean; - nodes?: string[]; - started_after?: string; - started_before?: string; + nodes?: string[] | null; + started_after?: string | null; + started_before?: string | null; offset?: number; limit?: number; order?: ListOrder; summary?: boolean; aggregate_by_workflow?: boolean; - priority?: WorkflowPriority[]; + priority?: WorkflowPriority[] | null; }; export type GetWorkflowApiWorkflowNameGetParams = { @@ -2167,22 +2263,22 @@ export type GetWorkflowApiWorkflowNameGetParams = { }; export type GetWorkflowLogsApiWorkflowNameLogsGetParams = { - last_n_lines?: number; - task_name?: string; - retry_id?: number; - query?: string; + last_n_lines?: number | null; + task_name?: string | null; + retry_id?: number | null; + query?: string | null; }; export type GetWorkflowPodConditionsApiWorkflowNameEventsGetParams = { - task_name?: string; - retry_id?: number; + task_name?: string | null; + retry_id?: number | null; }; export type GetWorkflowErrorLogsApiWorkflowNameErrorLogsGetParams = { - last_n_lines?: number; - task_name?: string; - retry_id?: number; - query?: string; + last_n_lines?: number | null; + task_name?: string | null; + retry_id?: number | null; + query?: string | null; }; export type GetWorkflowSpecApiWorkflowNameSpecGetParams = { @@ -2190,8 +2286,8 @@ export type GetWorkflowSpecApiWorkflowNameSpecGetParams = { }; export type TagWorkflowApiWorkflowNameTagPostParams = { - add?: string[]; - remove?: string[]; + add?: string[] | null; + remove?: string[] | null; }; export type ExecIntoGroupApiWorkflowNameExecGroupGroupNamePostParams = { @@ -2205,7 +2301,7 @@ export type ExecIntoTaskApiWorkflowNameExecTaskTaskNamePostParams = { }; export type PortForwardTaskApiWorkflowNamePortforwardTaskNamePostParams = { - task_ports?: number[]; + task_ports?: number[] | null; use_udp?: boolean; }; @@ -2214,26 +2310,26 @@ export type PortForwardWebserverApiWorkflowNameWebserverTaskNamePostParams = { }; export type GetResourcesApiResourcesGetParams = { - pools?: string[]; - platforms?: string[]; + pools?: string[] | null; + platforms?: string[] | null; all_pools?: boolean; concise?: boolean; }; export type GetPoolsApiPoolGetParams = { all_pools?: boolean; - pools?: string[]; + pools?: string[] | null; }; export type GetPoolQuotasApiPoolQuotaGetParams = { all_pools?: boolean; - pools?: string[]; + pools?: string[] | null; }; export type SubmitWorkflowApiPoolPoolNameWorkflowPostParams = { - workflow_id?: string; - app_uuid?: string; - app_version?: number; + workflow_id?: string | null; + app_uuid?: string | null; + app_version?: number | null; dry_run?: boolean; validation_only?: boolean; priority?: WorkflowPriority; @@ -2245,14 +2341,14 @@ export type GetBucketInfoApiBucketGetParams = { }; 
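The query-parameter types in this stretch widen the same way (`pools?: string[] | null`, `name?: string | null`, and so on), but note how the URL builders later in this diff serialize them: `value === null ? "null" : value.toString()`, so an explicit `null` becomes the literal string `"null"` in the query string, and arrays now collapse through `toString()` (comma-joined) because the exploded-parameter branches are removed. To leave a filter unset, omit the key rather than passing `null`; a small illustrative params object:

```ts
// Illustrative only: omit keys to skip filters. Passing `pools: null`
// would serialize as pools=null, and an array value now serializes
// comma-joined via toString() rather than as repeated pools=... entries.
const params: ListWorkflowApiWorkflowGetParams = {
  name: "train-run",
  // pools deliberately omitted, not set to null
};
```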
export type DeleteDatasetApiBucketBucketDatasetNameDeleteParams = { - tag?: string; + tag?: string | null; all_flag?: boolean; finish?: boolean; }; export type ChangeNameTagLabelMetadataApiBucketBucketDatasetNameAttributePostParams = { - tag?: string; - new_name?: string; + tag?: string | null; + new_name?: string | null; set_tag?: string[]; delete_tag?: string[]; delete_label?: string[]; @@ -2260,19 +2356,19 @@ export type ChangeNameTagLabelMetadataApiBucketBucketDatasetNameAttributePostPar }; export type GetInfoApiBucketBucketDatasetNameInfoGetParams = { - tag?: string; + tag?: string | null; all_flag?: boolean; count?: number; order?: ListOrder; }; export type ListDatasetFromBucketApiBucketListDatasetGetParams = { - name?: string; - user?: string[]; + name?: string | null; + user?: string[] | null; buckets?: string[]; - dataset_type?: DatasetType; - latest_before?: string; - latest_after?: string; + dataset_type?: DatasetType | null; + latest_before?: string | null; + latest_after?: string | null; all_users?: boolean; order?: ListOrder; count?: number; @@ -2295,7 +2391,7 @@ export type GetAvailableWorkflowTagsApiTagGet200 = { [key: string]: string[] }; * @summary Read Service Configs */ export type readServiceConfigsApiConfigsServiceGetResponse200 = { - data: ServiceConfig; + data: ServiceConfigOutput; status: 200; }; @@ -2422,7 +2518,7 @@ export const patchServiceConfigsApiConfigsServicePatch = async ( * @summary Read Workflow Configs */ export type readWorkflowConfigsApiConfigsWorkflowGetResponse200 = { - data: WorkflowConfig; + data: WorkflowConfigOutput; status: 200; }; @@ -2551,7 +2647,7 @@ export const patchWorkflowConfigsApiConfigsWorkflowPatch = async ( * @summary Read Dataset Configs */ export type readDatasetConfigsApiConfigsDatasetGetResponse200 = { - data: DatasetConfig; + data: DatasetConfigOutput; status: 200; }; @@ -2674,100 +2770,100 @@ export const patchDatasetConfigsApiConfigsDatasetPatch = async ( }; /** - * Delete dataset configuration for a specific bucket - * @summary Delete Dataset + * Patch dataset configuration for a specific bucket + * @summary Patch Dataset */ -export type deleteDatasetApiConfigsDatasetNameDeleteResponse200 = { - data: unknown; +export type patchDatasetApiConfigsDatasetNamePatchResponse200 = { + data: PatchDatasetApiConfigsDatasetNamePatch200; status: 200; }; -export type deleteDatasetApiConfigsDatasetNameDeleteResponse422 = { +export type patchDatasetApiConfigsDatasetNamePatchResponse422 = { data: HTTPValidationError; status: 422; }; -export type deleteDatasetApiConfigsDatasetNameDeleteResponseSuccess = - deleteDatasetApiConfigsDatasetNameDeleteResponse200 & { - headers: Headers; - }; -export type deleteDatasetApiConfigsDatasetNameDeleteResponseError = - deleteDatasetApiConfigsDatasetNameDeleteResponse422 & { +export type patchDatasetApiConfigsDatasetNamePatchResponseSuccess = + patchDatasetApiConfigsDatasetNamePatchResponse200 & { headers: Headers; }; +export type patchDatasetApiConfigsDatasetNamePatchResponseError = patchDatasetApiConfigsDatasetNamePatchResponse422 & { + headers: Headers; +}; -export type deleteDatasetApiConfigsDatasetNameDeleteResponse = - | deleteDatasetApiConfigsDatasetNameDeleteResponseSuccess - | deleteDatasetApiConfigsDatasetNameDeleteResponseError; +export type patchDatasetApiConfigsDatasetNamePatchResponse = + | patchDatasetApiConfigsDatasetNamePatchResponseSuccess + | patchDatasetApiConfigsDatasetNamePatchResponseError; -export const getDeleteDatasetApiConfigsDatasetNameDeleteUrl = (name: string) => { +export const 
getPatchDatasetApiConfigsDatasetNamePatchUrl = (name: string) => { return `/api/configs/dataset/${name}`; }; -export const deleteDatasetApiConfigsDatasetNameDelete = async ( +export const patchDatasetApiConfigsDatasetNamePatch = async ( name: string, - configsRequest: ConfigsRequest, + patchDatasetRequest: PatchDatasetRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getDeleteDatasetApiConfigsDatasetNameDeleteUrl(name), { +): Promise => { + const res = await fetch(getPatchDatasetApiConfigsDatasetNamePatchUrl(name), { ...options, - method: "DELETE", + method: "PATCH", headers: { "Content-Type": "application/json", ...options?.headers }, - body: JSON.stringify(configsRequest), + body: JSON.stringify(patchDatasetRequest), }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: deleteDatasetApiConfigsDatasetNameDeleteResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as deleteDatasetApiConfigsDatasetNameDeleteResponse; + const data: patchDatasetApiConfigsDatasetNamePatchResponse["data"] = body ? JSON.parse(body) : {}; + return { data, status: res.status, headers: res.headers } as patchDatasetApiConfigsDatasetNamePatchResponse; }; /** - * Patch dataset configuration for a specific bucket - * @summary Patch Dataset + * Delete dataset configuration for a specific bucket + * @summary Delete Dataset */ -export type patchDatasetApiConfigsDatasetNamePatchResponse200 = { - data: PatchDatasetApiConfigsDatasetNamePatch200; +export type deleteDatasetApiConfigsDatasetNameDeleteResponse200 = { + data: unknown; status: 200; }; -export type patchDatasetApiConfigsDatasetNamePatchResponse422 = { +export type deleteDatasetApiConfigsDatasetNameDeleteResponse422 = { data: HTTPValidationError; status: 422; }; -export type patchDatasetApiConfigsDatasetNamePatchResponseSuccess = - patchDatasetApiConfigsDatasetNamePatchResponse200 & { +export type deleteDatasetApiConfigsDatasetNameDeleteResponseSuccess = + deleteDatasetApiConfigsDatasetNameDeleteResponse200 & { + headers: Headers; + }; +export type deleteDatasetApiConfigsDatasetNameDeleteResponseError = + deleteDatasetApiConfigsDatasetNameDeleteResponse422 & { headers: Headers; }; -export type patchDatasetApiConfigsDatasetNamePatchResponseError = patchDatasetApiConfigsDatasetNamePatchResponse422 & { - headers: Headers; -}; -export type patchDatasetApiConfigsDatasetNamePatchResponse = - | patchDatasetApiConfigsDatasetNamePatchResponseSuccess - | patchDatasetApiConfigsDatasetNamePatchResponseError; +export type deleteDatasetApiConfigsDatasetNameDeleteResponse = + | deleteDatasetApiConfigsDatasetNameDeleteResponseSuccess + | deleteDatasetApiConfigsDatasetNameDeleteResponseError; -export const getPatchDatasetApiConfigsDatasetNamePatchUrl = (name: string) => { +export const getDeleteDatasetApiConfigsDatasetNameDeleteUrl = (name: string) => { return `/api/configs/dataset/${name}`; }; -export const patchDatasetApiConfigsDatasetNamePatch = async ( +export const deleteDatasetApiConfigsDatasetNameDelete = async ( name: string, - patchDatasetRequest: PatchDatasetRequest, + configsRequest: ConfigsRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getPatchDatasetApiConfigsDatasetNamePatchUrl(name), { +): Promise => { + const res = await fetch(getDeleteDatasetApiConfigsDatasetNameDeleteUrl(name), { ...options, - method: "PATCH", + method: "DELETE", headers: { "Content-Type": "application/json", ...options?.headers }, - body: 
JSON.stringify(patchDatasetRequest), + body: JSON.stringify(configsRequest), }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: patchDatasetApiConfigsDatasetNamePatchResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as patchDatasetApiConfigsDatasetNamePatchResponse; + const data: deleteDatasetApiConfigsDatasetNameDeleteResponse["data"] = body ? JSON.parse(body) : {}; + return { data, status: res.status, headers: res.headers } as deleteDatasetApiConfigsDatasetNameDeleteResponse; }; /** @@ -2803,95 +2899,95 @@ export const listBackendsApiConfigsBackendGet = async ( }; /** - * Get info for a specific backend. - * @summary Get Backend + * Override the config for a specific backend. + * @summary Update Backend */ -export type getBackendApiConfigsBackendNameGetResponse200 = { - data: Backend; +export type updateBackendApiConfigsBackendNamePostResponse200 = { + data: unknown; status: 200; }; -export type getBackendApiConfigsBackendNameGetResponse422 = { +export type updateBackendApiConfigsBackendNamePostResponse422 = { data: HTTPValidationError; status: 422; }; -export type getBackendApiConfigsBackendNameGetResponseSuccess = getBackendApiConfigsBackendNameGetResponse200 & { - headers: Headers; -}; -export type getBackendApiConfigsBackendNameGetResponseError = getBackendApiConfigsBackendNameGetResponse422 & { +export type updateBackendApiConfigsBackendNamePostResponseSuccess = + updateBackendApiConfigsBackendNamePostResponse200 & { + headers: Headers; + }; +export type updateBackendApiConfigsBackendNamePostResponseError = updateBackendApiConfigsBackendNamePostResponse422 & { headers: Headers; }; -export type getBackendApiConfigsBackendNameGetResponse = - | getBackendApiConfigsBackendNameGetResponseSuccess - | getBackendApiConfigsBackendNameGetResponseError; +export type updateBackendApiConfigsBackendNamePostResponse = + | updateBackendApiConfigsBackendNamePostResponseSuccess + | updateBackendApiConfigsBackendNamePostResponseError; -export const getGetBackendApiConfigsBackendNameGetUrl = (name: string) => { +export const getUpdateBackendApiConfigsBackendNamePostUrl = (name: string) => { return `/api/configs/backend/${name}`; }; -export const getBackendApiConfigsBackendNameGet = async ( +export const updateBackendApiConfigsBackendNamePost = async ( name: string, + postBackendRequest: PostBackendRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getGetBackendApiConfigsBackendNameGetUrl(name), { +): Promise => { + const res = await fetch(getUpdateBackendApiConfigsBackendNamePostUrl(name), { ...options, - method: "GET", + method: "POST", + headers: { "Content-Type": "application/json", ...options?.headers }, + body: JSON.stringify(postBackendRequest), }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: getBackendApiConfigsBackendNameGetResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as getBackendApiConfigsBackendNameGetResponse; + const data: updateBackendApiConfigsBackendNamePostResponse["data"] = body ? JSON.parse(body) : {}; + return { data, status: res.status, headers: res.headers } as updateBackendApiConfigsBackendNamePostResponse; }; /** - * Override the config for a specific backend. - * @summary Update Backend + * Get info for a specific backend. 
+ * @summary Get Backend */ -export type updateBackendApiConfigsBackendNamePostResponse200 = { - data: unknown; +export type getBackendApiConfigsBackendNameGetResponse200 = { + data: Backend; status: 200; }; -export type updateBackendApiConfigsBackendNamePostResponse422 = { +export type getBackendApiConfigsBackendNameGetResponse422 = { data: HTTPValidationError; status: 422; }; -export type updateBackendApiConfigsBackendNamePostResponseSuccess = - updateBackendApiConfigsBackendNamePostResponse200 & { - headers: Headers; - }; -export type updateBackendApiConfigsBackendNamePostResponseError = updateBackendApiConfigsBackendNamePostResponse422 & { +export type getBackendApiConfigsBackendNameGetResponseSuccess = getBackendApiConfigsBackendNameGetResponse200 & { + headers: Headers; +}; +export type getBackendApiConfigsBackendNameGetResponseError = getBackendApiConfigsBackendNameGetResponse422 & { headers: Headers; }; -export type updateBackendApiConfigsBackendNamePostResponse = - | updateBackendApiConfigsBackendNamePostResponseSuccess - | updateBackendApiConfigsBackendNamePostResponseError; +export type getBackendApiConfigsBackendNameGetResponse = + | getBackendApiConfigsBackendNameGetResponseSuccess + | getBackendApiConfigsBackendNameGetResponseError; -export const getUpdateBackendApiConfigsBackendNamePostUrl = (name: string) => { +export const getGetBackendApiConfigsBackendNameGetUrl = (name: string) => { return `/api/configs/backend/${name}`; }; -export const updateBackendApiConfigsBackendNamePost = async ( +export const getBackendApiConfigsBackendNameGet = async ( name: string, - postBackendRequest: PostBackendRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getUpdateBackendApiConfigsBackendNamePostUrl(name), { +): Promise => { + const res = await fetch(getGetBackendApiConfigsBackendNameGetUrl(name), { ...options, - method: "POST", - headers: { "Content-Type": "application/json", ...options?.headers }, - body: JSON.stringify(postBackendRequest), + method: "GET", }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: updateBackendApiConfigsBackendNamePostResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as updateBackendApiConfigsBackendNamePostResponse; + const data: getBackendApiConfigsBackendNameGetResponse["data"] = body ? 
JSON.parse(body) : {}; + return { data, status: res.status, headers: res.headers } as getBackendApiConfigsBackendNameGetResponse; }; /** @@ -3051,7 +3147,7 @@ Should return Pool or PoolEditable objects * @summary Read Pool */ export type readPoolApiConfigsPoolNameGetResponse200 = { - data: Pool | PoolEditable; + data: PoolOutput | PoolEditable; status: 200; }; @@ -3149,97 +3245,97 @@ export const putPoolApiConfigsPoolNamePut = async ( }; /** - * Delete Pool configurations - * @summary Delete Pool + * Patch Pool configurations + * @summary Patch Pool */ -export type deletePoolApiConfigsPoolNameDeleteResponse200 = { +export type patchPoolApiConfigsPoolNamePatchResponse200 = { data: unknown; status: 200; }; -export type deletePoolApiConfigsPoolNameDeleteResponse422 = { +export type patchPoolApiConfigsPoolNamePatchResponse422 = { data: HTTPValidationError; status: 422; }; -export type deletePoolApiConfigsPoolNameDeleteResponseSuccess = deletePoolApiConfigsPoolNameDeleteResponse200 & { +export type patchPoolApiConfigsPoolNamePatchResponseSuccess = patchPoolApiConfigsPoolNamePatchResponse200 & { headers: Headers; }; -export type deletePoolApiConfigsPoolNameDeleteResponseError = deletePoolApiConfigsPoolNameDeleteResponse422 & { +export type patchPoolApiConfigsPoolNamePatchResponseError = patchPoolApiConfigsPoolNamePatchResponse422 & { headers: Headers; }; -export type deletePoolApiConfigsPoolNameDeleteResponse = - | deletePoolApiConfigsPoolNameDeleteResponseSuccess - | deletePoolApiConfigsPoolNameDeleteResponseError; +export type patchPoolApiConfigsPoolNamePatchResponse = + | patchPoolApiConfigsPoolNamePatchResponseSuccess + | patchPoolApiConfigsPoolNamePatchResponseError; -export const getDeletePoolApiConfigsPoolNameDeleteUrl = (name: string) => { +export const getPatchPoolApiConfigsPoolNamePatchUrl = (name: string) => { return `/api/configs/pool/${name}`; }; -export const deletePoolApiConfigsPoolNameDelete = async ( +export const patchPoolApiConfigsPoolNamePatch = async ( name: string, - configsRequest: ConfigsRequest, + patchPoolRequest: PatchPoolRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getDeletePoolApiConfigsPoolNameDeleteUrl(name), { +): Promise => { + const res = await fetch(getPatchPoolApiConfigsPoolNamePatchUrl(name), { ...options, - method: "DELETE", + method: "PATCH", headers: { "Content-Type": "application/json", ...options?.headers }, - body: JSON.stringify(configsRequest), + body: JSON.stringify(patchPoolRequest), }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: deletePoolApiConfigsPoolNameDeleteResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as deletePoolApiConfigsPoolNameDeleteResponse; + const data: patchPoolApiConfigsPoolNamePatchResponse["data"] = body ? 
JSON.parse(body) : {}; + return { data, status: res.status, headers: res.headers } as patchPoolApiConfigsPoolNamePatchResponse; }; /** - * Patch Pool configurations - * @summary Patch Pool + * Delete Pool configurations + * @summary Delete Pool */ -export type patchPoolApiConfigsPoolNamePatchResponse200 = { +export type deletePoolApiConfigsPoolNameDeleteResponse200 = { data: unknown; status: 200; }; -export type patchPoolApiConfigsPoolNamePatchResponse422 = { +export type deletePoolApiConfigsPoolNameDeleteResponse422 = { data: HTTPValidationError; status: 422; }; -export type patchPoolApiConfigsPoolNamePatchResponseSuccess = patchPoolApiConfigsPoolNamePatchResponse200 & { +export type deletePoolApiConfigsPoolNameDeleteResponseSuccess = deletePoolApiConfigsPoolNameDeleteResponse200 & { headers: Headers; }; -export type patchPoolApiConfigsPoolNamePatchResponseError = patchPoolApiConfigsPoolNamePatchResponse422 & { +export type deletePoolApiConfigsPoolNameDeleteResponseError = deletePoolApiConfigsPoolNameDeleteResponse422 & { headers: Headers; }; -export type patchPoolApiConfigsPoolNamePatchResponse = - | patchPoolApiConfigsPoolNamePatchResponseSuccess - | patchPoolApiConfigsPoolNamePatchResponseError; +export type deletePoolApiConfigsPoolNameDeleteResponse = + | deletePoolApiConfigsPoolNameDeleteResponseSuccess + | deletePoolApiConfigsPoolNameDeleteResponseError; -export const getPatchPoolApiConfigsPoolNamePatchUrl = (name: string) => { +export const getDeletePoolApiConfigsPoolNameDeleteUrl = (name: string) => { return `/api/configs/pool/${name}`; }; -export const patchPoolApiConfigsPoolNamePatch = async ( +export const deletePoolApiConfigsPoolNameDelete = async ( name: string, - patchPoolRequest: PatchPoolRequest, + configsRequest: ConfigsRequest, options?: RequestInit, -): Promise => { - const res = await fetch(getPatchPoolApiConfigsPoolNamePatchUrl(name), { +): Promise => { + const res = await fetch(getDeletePoolApiConfigsPoolNameDeleteUrl(name), { ...options, - method: "PATCH", + method: "DELETE", headers: { "Content-Type": "application/json", ...options?.headers }, - body: JSON.stringify(patchPoolRequest), + body: JSON.stringify(configsRequest), }); const body = [204, 205, 304].includes(res.status) ? null : await res.text(); - const data: patchPoolApiConfigsPoolNamePatchResponse["data"] = body ? JSON.parse(body) : {}; - return { data, status: res.status, headers: res.headers } as patchPoolApiConfigsPoolNamePatchResponse; + const data: deletePoolApiConfigsPoolNameDeleteResponse["data"] = body ? 
 /**
@@ -3356,7 +3452,7 @@ export const listPlatformsInPoolApiConfigsPoolNamePlatformGet = async (
  * @summary Read Platform In Pool
  */
 export type readPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetResponse200 = {
-  data: PlatformMinimal | PlatformEditable | Platform;
+  data: PlatformMinimal | PlatformEditable | PlatformOutput;
   status: 200;
 };

@@ -4251,7 +4347,7 @@ export const deleteResourceValidationApiConfigsResourceValidationNameDelete = async (
  * @summary List Roles
  */
 export type listRolesApiConfigsRoleGetResponse200 = {
-  data: Role[];
+  data: RoleOutput[];
   status: 200;
 };

@@ -4329,7 +4425,7 @@ export const putRolesApiConfigsRolePut = async (
  * @summary Read Role
  */
 export type readRoleApiConfigsRoleNameGetResponse200 = {
-  data: Role;
+  data: RoleOutput;
   status: 200;
 };

@@ -4638,55 +4734,6 @@ export const putBackendTestApiConfigsBackendTestNamePut = async (
   return { data, status: res.status, headers: res.headers } as putBackendTestApiConfigsBackendTestNamePutResponse;
 };

-/**
- * Delete test configuration
- * @summary Delete Backend Test
- */
-export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse200 = {
-  data: unknown;
-  status: 200;
-};
-
-export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse422 = {
-  data: HTTPValidationError;
-  status: 422;
-};
-
-export type deleteBackendTestApiConfigsBackendTestNameDeleteResponseSuccess =
-  deleteBackendTestApiConfigsBackendTestNameDeleteResponse200 & {
-    headers: Headers;
-  };
-export type deleteBackendTestApiConfigsBackendTestNameDeleteResponseError =
-  deleteBackendTestApiConfigsBackendTestNameDeleteResponse422 & {
-    headers: Headers;
-  };
-
-export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse =
-  | deleteBackendTestApiConfigsBackendTestNameDeleteResponseSuccess
-  | deleteBackendTestApiConfigsBackendTestNameDeleteResponseError;
-
-export const getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl = (name: string) => {
-  return `/api/configs/backend_test/${name}`;
-};
-
-export const deleteBackendTestApiConfigsBackendTestNameDelete = async (
-  name: string,
-  configsRequest: ConfigsRequest,
-  options?: RequestInit,
-): Promise<deleteBackendTestApiConfigsBackendTestNameDeleteResponse> => {
-  const res = await fetch(getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl(name), {
-    ...options,
-    method: "DELETE",
-    headers: { "Content-Type": "application/json", ...options?.headers },
-    body: JSON.stringify(configsRequest),
-  });
-
-  const body = [204, 205, 304].includes(res.status) ? null : await res.text();
-
-  const data: deleteBackendTestApiConfigsBackendTestNameDeleteResponse["data"] = body ? JSON.parse(body) : {};
-  return { data, status: res.status, headers: res.headers } as deleteBackendTestApiConfigsBackendTestNameDeleteResponse;
-};
-
 /**
  * Patch backend test configuration
  * @summary Patch Backend Test

@@ -4737,8 +4784,57 @@ export const patchBackendTestApiConfigsBackendTestNamePatch = async (
 };

 /**
- * List history of all configs
- * @summary Get Configs History
+ * Delete test configuration
+ * @summary Delete Backend Test
+ */
+export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse200 = {
+  data: unknown;
+  status: 200;
+};
+
+export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse422 = {
+  data: HTTPValidationError;
+  status: 422;
+};
+
+export type deleteBackendTestApiConfigsBackendTestNameDeleteResponseSuccess =
+  deleteBackendTestApiConfigsBackendTestNameDeleteResponse200 & {
+    headers: Headers;
+  };
+export type deleteBackendTestApiConfigsBackendTestNameDeleteResponseError =
+  deleteBackendTestApiConfigsBackendTestNameDeleteResponse422 & {
+    headers: Headers;
+  };
+
+export type deleteBackendTestApiConfigsBackendTestNameDeleteResponse =
+  | deleteBackendTestApiConfigsBackendTestNameDeleteResponseSuccess
+  | deleteBackendTestApiConfigsBackendTestNameDeleteResponseError;
+
+export const getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl = (name: string) => {
+  return `/api/configs/backend_test/${name}`;
+};
+
+export const deleteBackendTestApiConfigsBackendTestNameDelete = async (
+  name: string,
+  configsRequest: ConfigsRequest,
+  options?: RequestInit,
+): Promise<deleteBackendTestApiConfigsBackendTestNameDeleteResponse> => {
+  const res = await fetch(getDeleteBackendTestApiConfigsBackendTestNameDeleteUrl(name), {
+    ...options,
+    method: "DELETE",
+    headers: { "Content-Type": "application/json", ...options?.headers },
+    body: JSON.stringify(configsRequest),
+  });
+
+  const body = [204, 205, 304].includes(res.status) ? null : await res.text();
+
+  const data: deleteBackendTestApiConfigsBackendTestNameDeleteResponse["data"] = body ? JSON.parse(body) : {};
+  return { data, status: res.status, headers: res.headers } as deleteBackendTestApiConfigsBackendTestNameDeleteResponse;
+};
+
+/**
+ * List history of all configs
+ * @summary Get Configs History
  */
 export type getConfigsHistoryApiConfigsHistoryGetResponse200 = {
   data: GetConfigsHistoryResponse;
   status: 200;
 };

@@ -4765,15 +4861,6 @@ export const getGetConfigsHistoryApiConfigsHistoryGetUrl = (params?: GetConfigsHistoryApiConfigsHistoryGetParams
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["config_types", "tags"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -5332,15 +5419,6 @@ export const getCreateAccessTokenApiAuthAccessTokenTokenNamePostUrl = (
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["roles"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -5573,15 +5651,6 @@ export const getAdminCreateAccessTokenApiAuthUserUserIdAccessTokenTokenNamePostUrl = (
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["roles"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -5775,15 +5844,6 @@ export const getListUsersApiAuthUserGetUrl = (params?: ListUsersApiAuthUserGetParams) => {
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["roles"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -6260,15 +6320,6 @@ export const getListAppsApiAppGetUrl = (params?: ListAppsApiAppGetParams) => {
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["users"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }
@@ -6405,6 +6456,52 @@ export const createAppApiAppUserNamePost = async (
   return { data, status: res.status, headers: res.headers } as createAppApiAppUserNamePostResponse;
 };

+/**
+ * @summary Update App
+ */
+export type updateAppApiAppUserNamePatchResponse200 = {
+  data: EditResponse;
+  status: 200;
+};
+
+export type updateAppApiAppUserNamePatchResponse422 = {
+  data: HTTPValidationError;
+  status: 422;
+};
+
+export type updateAppApiAppUserNamePatchResponseSuccess = updateAppApiAppUserNamePatchResponse200 & {
+  headers: Headers;
+};
+export type updateAppApiAppUserNamePatchResponseError = updateAppApiAppUserNamePatchResponse422 & {
+  headers: Headers;
+};
+
+export type updateAppApiAppUserNamePatchResponse =
+  | updateAppApiAppUserNamePatchResponseSuccess
+  | updateAppApiAppUserNamePatchResponseError;
+
+export const getUpdateAppApiAppUserNamePatchUrl = (name: string) => {
+  return `/api/app/user/${name}`;
+};
+
+export const updateAppApiAppUserNamePatch = async (
+  name: string,
+  updateAppApiAppUserNamePatchBody: string,
+  options?: RequestInit,
+): Promise<updateAppApiAppUserNamePatchResponse> => {
+  const res = await fetch(getUpdateAppApiAppUserNamePatchUrl(name), {
+    ...options,
+    method: "PATCH",
+    headers: { "Content-Type": "application/json", ...options?.headers },
+    body: JSON.stringify(updateAppApiAppUserNamePatchBody),
+  });
+
+  const body = [204, 205, 304].includes(res.status) ? null : await res.text();
+
+  const data: updateAppApiAppUserNamePatchResponse["data"] = body ? JSON.parse(body) : {};
+  return { data, status: res.status, headers: res.headers } as updateAppApiAppUserNamePatchResponse;
+};
+
 /**
  * @summary Delete App
  */

@@ -6459,52 +6556,6 @@ export const deleteAppApiAppUserNameDelete = async (
   return { data, status: res.status, headers: res.headers } as deleteAppApiAppUserNameDeleteResponse;
 };

-/**
- * @summary Update App
- */
-export type updateAppApiAppUserNamePatchResponse200 = {
-  data: EditResponse;
-  status: 200;
-};
-
-export type updateAppApiAppUserNamePatchResponse422 = {
-  data: HTTPValidationError;
-  status: 422;
-};
-
-export type updateAppApiAppUserNamePatchResponseSuccess = updateAppApiAppUserNamePatchResponse200 & {
-  headers: Headers;
-};
-export type updateAppApiAppUserNamePatchResponseError = updateAppApiAppUserNamePatchResponse422 & {
-  headers: Headers;
-};
-
-export type updateAppApiAppUserNamePatchResponse =
-  | updateAppApiAppUserNamePatchResponseSuccess
-  | updateAppApiAppUserNamePatchResponseError;
-
-export const getUpdateAppApiAppUserNamePatchUrl = (name: string) => {
-  return `/api/app/user/${name}`;
-};
-
-export const updateAppApiAppUserNamePatch = async (
-  name: string,
-  updateAppApiAppUserNamePatchBody: string,
-  options?: RequestInit,
-): Promise<updateAppApiAppUserNamePatchResponse> => {
-  const res = await fetch(getUpdateAppApiAppUserNamePatchUrl(name), {
-    ...options,
-    method: "PATCH",
-    headers: { "Content-Type": "application/json", ...options?.headers },
-    body: JSON.stringify(updateAppApiAppUserNamePatchBody),
-  });
-
-  const body = [204, 205, 304].includes(res.status) ? null : await res.text();
-
-  const data: updateAppApiAppUserNamePatchResponse["data"] = body ? JSON.parse(body) : {};
-  return { data, status: res.status, headers: res.headers } as updateAppApiAppUserNamePatchResponse;
-};
-
 /**
  * @summary Get App Content
  */

@@ -6700,15 +6751,6 @@ export const getListWorkflowApiWorkflowGetUrl = (params?: ListWorkflowApiWorkflowGetParams
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["users", "statuses", "pools", "tags", "priority"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -6807,15 +6849,6 @@ export const getListTaskApiTaskGetUrl = (params?: ListTaskApiTaskGetParams) => {
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["statuses", "users", "pools", "nodes", "priority"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7172,15 +7205,6 @@ export const getTagWorkflowApiWorkflowNameTagPostUrl = (
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["add", "remove"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7374,15 +7398,6 @@ export const getPortForwardTaskApiWorkflowNamePortforwardTaskNamePostUrl = (
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["task_ports"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7702,15 +7717,6 @@ export const getGetResourcesApiResourcesGetUrl = (params?: GetResourcesApiResourcesGetParams
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["pools", "platforms"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7811,15 +7817,6 @@ export const getGetPoolsApiPoolGetUrl = (params?: GetPoolsApiPoolGetParams) => {
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["pools"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7873,15 +7870,6 @@ export const getGetPoolQuotasApiPoolQuotaGetUrl = (params?: GetPoolQuotasApiPoolQuotaGetParams
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["pools"];
-
-    if (Array.isArray(value) && explodeParameters.includes(key)) {
-      value.forEach((v) => {
-        normalizedParams.append(key, v === null ? "null" : v.toString());
-      });
-      return;
-    }
-
     if (value !== undefined) {
       normalizedParams.append(key, value === null ? "null" : value.toString());
     }

@@ -7964,7 +7952,7 @@ export const getSubmitWorkflowApiPoolPoolNameWorkflowPostUrl = (

 export const submitWorkflowApiPoolPoolNameWorkflowPost = async (
   poolName: string,
-  templateSpec: TemplateSpec,
+  templateSpecNull: TemplateSpec | null,
   params?: SubmitWorkflowApiPoolPoolNameWorkflowPostParams,
   options?: RequestInit,
 ): Promise<submitWorkflowApiPoolPoolNameWorkflowPostResponse> => {
@@ -7972,7 +7960,7 @@ export const submitWorkflowApiPoolPoolNameWorkflowPost = async (
     ...options,
     method: "POST",
     headers: { "Content-Type": "application/json", ...options?.headers },
-    body: JSON.stringify(templateSpec),
+    body: JSON.stringify(templateSpecNull),
   });

   const body = [204, 205, 304].includes(res.status) ? null : await res.text();
@@ -8338,7 +8326,7 @@ export const getListDatasetFromBucketApiBucketListDatasetGetUrl = (
   const normalizedParams = new URLSearchParams();

   Object.entries(params || {}).forEach(([key, value]) => {
-    const explodeParameters = ["user", "buckets"];
+    const explodeParameters = ["buckets"];

     if (Array.isArray(value) && explodeParameters.includes(key)) {
       value.forEach((v) => {

@@ -8748,7 +8736,7 @@ export const getAvailableWorkflowTagsApiTagGet = async (
  * @summary Get Workflow Plugins Configs
  */
 export type getWorkflowPluginsConfigsApiPluginsConfigsGetResponse200 = {
-  data: PluginsConfig;
+  data: PluginsConfigOutput;
   status: 200;
 };

@@ -8778,8 +8766,8 @@ export const getWorkflowPluginsConfigsApiPluginsConfigsGet = async (
 };

 export const getReadServiceConfigsApiConfigsServiceGetResponseMock = (
-  overrideResponse: Partial<ServiceConfig> = {},
-): ServiceConfig => ({
+  overrideResponse: Partial<ServiceConfigOutput> = {},
+): ServiceConfigOutput => ({
   service_base_url: faker.string.alpha({ length: { min: 10, max: 20 } }),
   service_auth: faker.helpers.arrayElement([
     {
@@ -8799,43 +8787,48 @@ export const getReadServiceConfigsApiConfigsServiceGetResponseMock = (
         faker.string.alpha({ length: { min: 10, max: 20 } }),
       ),
       login_info: {
-        ...{
-          device_endpoint: faker.helpers.arrayElement([
-            faker.string.alpha({ length: { min: 10, max: 20 } }),
-            undefined,
-          ]),
-          device_client_id: faker.helpers.arrayElement([
-            faker.string.alpha({ length: { min: 10, max: 20 } }),
-            undefined,
-          ]),
-          browser_endpoint: faker.helpers.arrayElement([
-            faker.string.alpha({ length: { min: 10, max: 20 } }),
-            undefined,
-          ]),
-          browser_client_id: faker.helpers.arrayElement([
-            faker.string.alpha({ length: { min: 10, max: 20 } }),
-            undefined,
-          ]),
-          token_endpoint: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          logout_endpoint: faker.helpers.arrayElement([
-            faker.string.alpha({ length: { min: 10, max: 20 } }),
-            undefined,
-          ]),
-        },
-      },
-      max_token_duration: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    },
+        device_endpoint: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        device_client_id: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        browser_endpoint: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        browser_client_id: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        token_endpoint: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        logout_endpoint: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+      },
+      max_token_duration: faker.string.alpha({ length: { min: 10, max: 20 } }),
+    },
     undefined,
   ]),
   cli_config: {
-    ...{
-      latest_version: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-      min_supported_version: faker.helpers.arrayElement([
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
-        undefined,
-      ]),
-      client_install_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    },
+    latest_version: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    min_supported_version: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    client_install_url: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
   },
   max_pod_restart_limit: faker.string.alpha({ length: { min: 10, max: 20 } }),
   agent_queue_size: faker.number.int(),
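The mock rewrites above (and all the similar ones below) follow one mechanical pattern: a field previously mocked as `value | undefined` is now mocked as `(value | null) | undefined` via a nested `faker.helpers.arrayElement`. That matches Pydantic v2 emitting `Optional[X]` as an explicitly nullable schema, so the generated TypeScript property type becomes `X | null`, and an optional property has three states. A helper expressing the same distribution; this is my own sketch, not part of the generated output:

```ts
import { faker } from "@faker-js/faker";

// Picks undefined (property absent), null (present but null), or a value:
// the three states of an optional, nullable field under Pydantic v2 schemas.
function maybeNullable<T>(make: () => T): T | null | undefined {
  return faker.helpers.arrayElement([faker.helpers.arrayElement([make(), null]), undefined]);
}

// Equivalent to the generated device_endpoint mock above.
const deviceEndpoint = maybeNullable(() => faker.string.alpha({ length: { min: 10, max: 20 } }));
```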
@@ -8849,126 +8842,135 @@ export const getPatchServiceConfigsApiConfigsServicePatchResponseMock =
   (): PatchServiceConfigsApiConfigsServicePatch200 => ({});

 export const getReadWorkflowConfigsApiConfigsWorkflowGetResponseMock = (
-  overrideResponse: Partial<WorkflowConfig> = {},
-): WorkflowConfig => ({
+  overrideResponse: Partial<WorkflowConfigOutput> = {},
+): WorkflowConfigOutput => ({
   workflow_data: {
-    ...{
-      credential: faker.helpers.arrayElement([
+    credential: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([
         {
           endpoint: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          region: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          override_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+          region: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
+          override_url: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
           access_key_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
           access_key: faker.internet.password(),
         },
-        undefined,
+        null,
       ]),
-      base_url: faker.string.alpha({ length: { min: 10, max: 20 } }),
-      websocket_timeout: faker.number.int(),
-      data_timeout: faker.number.int(),
-      download_type: faker.helpers.arrayElement(Object.values(DownloadType)),
-    },
+      undefined,
+    ]),
+    base_url: faker.string.alpha({ length: { min: 10, max: 20 } }),
+    websocket_timeout: faker.number.int(),
+    data_timeout: faker.number.int(),
+    download_type: faker.helpers.arrayElement(Object.values(DownloadType)),
   },
   workflow_log: {
-    ...{
-      credential: faker.helpers.arrayElement([
+    credential: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([
         {
           endpoint: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          region: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          override_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+          region: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
+          override_url: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
           access_key_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
           access_key: faker.internet.password(),
         },
-        undefined,
+        null,
       ]),
-    },
+      undefined,
+    ]),
   },
   workflow_app: {
-    ...{
-      credential: faker.helpers.arrayElement([
+    credential: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([
         {
           endpoint: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          region: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          override_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+          region: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
+          override_url: faker.helpers.arrayElement([
+            faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+            undefined,
+          ]),
           access_key_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
           access_key: faker.internet.password(),
         },
-        undefined,
+        null,
       ]),
-    },
+      undefined,
+    ]),
   },
   workflow_info: {
-    ...{
-      tags: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
-      ),
-      max_name_length: faker.number.int(),
-    },
+    tags: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
+      faker.string.alpha({ length: { min: 10, max: 20 } }),
+    ),
+    max_name_length: faker.number.int(),
   },
   backend_images: {
-    ...{
-      init: faker.string.alpha({ length: { min: 10, max: 20 } }),
-      client: faker.string.alpha({ length: { min: 10, max: 20 } }),
-      credential: {
-        ...{
-          registry: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          username: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          auth: faker.internet.password(),
-        },
-      },
-    },
+    init: faker.string.alpha({ length: { min: 10, max: 20 } }),
+    client: faker.string.alpha({ length: { min: 10, max: 20 } }),
+    credential: {
+      registry: faker.string.alpha({ length: { min: 10, max: 20 } }),
+      username: faker.string.alpha({ length: { min: 10, max: 20 } }),
+      auth: faker.internet.password(),
+    },
   },
   workflow_alerts: {
-    ...{
-      slack_token: faker.internet.password(),
-      smtp_settings: {
-        ...{
-          host: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          sender: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          password: faker.internet.password(),
-        },
-      },
-    },
+    slack_token: faker.internet.password(),
+    smtp_settings: {
+      host: faker.string.alpha({ length: { min: 10, max: 20 } }),
+      sender: faker.string.alpha({ length: { min: 10, max: 20 } }),
+      password: faker.internet.password(),
+    },
   },
   credential_config: {
-    ...{
-      disable_registry_validation: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(
-        () => faker.string.alpha({ length: { min: 10, max: 20 } }),
-      ),
-      disable_data_validation: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
-      ),
-    },
+    disable_registry_validation: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(
+      () => faker.string.alpha({ length: { min: 10, max: 20 } }),
+    ),
+    disable_data_validation: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
+      faker.string.alpha({ length: { min: 10, max: 20 } }),
+    ),
   },
   user_workflow_limits: {
-    ...{
-      max_num_workflows: faker.helpers.arrayElement([faker.number.int({ min: 0 }), undefined]),
-      max_num_tasks: faker.helpers.arrayElement([faker.number.int({ min: 0 }), undefined]),
-      jinja_sandbox_workers: faker.number.int(),
-      jinja_sandbox_max_time: faker.number.float({ fractionDigits: 2 }),
-      jinja_sandbox_memory_limit: faker.number.int(),
-    },
+    max_num_workflows: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.number.int({ min: 0 }), null]),
+      undefined,
+    ]),
+    max_num_tasks: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.number.int({ min: 0 }), null]),
+      undefined,
+    ]),
+    jinja_sandbox_workers: faker.number.int(),
+    jinja_sandbox_max_time: faker.number.float({ fractionDigits: 2 }),
+    jinja_sandbox_memory_limit: faker.number.int(),
   },
   plugins_config: {
-    ...{
-      rsync: {
-        ...{
-          enabled: faker.datatype.boolean(),
-          enable_telemetry: faker.datatype.boolean(),
-          read_bandwidth_limit: faker.number.int({ min: 0 }),
-          write_bandwidth_limit: faker.number.int({ min: 0 }),
-          allowed_paths: {
-            [faker.string.alphanumeric(5)]: {
-              path: faker.string.alpha({ length: { min: 10, max: 20 } }),
-              writable: faker.datatype.boolean(),
-            },
-          },
-          daemon_debounce_delay: faker.number.float({ min: 0, fractionDigits: 2 }),
-          daemon_poll_interval: faker.number.float({ min: 0, fractionDigits: 2 }),
-          daemon_reconcile_interval: faker.number.float({ min: 0, fractionDigits: 2 }),
-          client_upload_rate_limit: faker.number.int({ min: 0 }),
-        },
-      },
+    rsync: {
+      enabled: faker.datatype.boolean(),
+      enable_telemetry: faker.datatype.boolean(),
+      read_bandwidth_limit: faker.number.int({ min: 0 }),
+      write_bandwidth_limit: faker.number.int({ min: 0 }),
+      allowed_paths: {
+        [faker.string.alphanumeric(5)]: {
+          path: faker.string.alpha({ length: { min: 10, max: 20 } }),
+          writable: faker.datatype.boolean(),
+        },
+      },
+      daemon_debounce_delay: faker.number.float({ min: 0, fractionDigits: 2 }),
+      daemon_poll_interval: faker.number.float({ min: 0, fractionDigits: 2 }),
+      daemon_reconcile_interval: faker.number.float({ min: 0, fractionDigits: 2 }),
+      client_upload_rate_limit: faker.number.int({ min: 0 }),
     },
   },
   max_num_tasks: faker.number.int(),

@@ -8996,8 +8998,8 @@ export const getPatchWorkflowConfigsApiConfigsWorkflowPatchResponseMock =
   (): PatchWorkflowConfigsApiConfigsWorkflowPatch200 => ({});

 export const getReadDatasetConfigsApiConfigsDatasetGetResponseMock = (
-  overrideResponse: Partial<DatasetConfig> = {},
-): DatasetConfig => ({
+  overrideResponse: Partial<DatasetConfigOutput> = {},
+): DatasetConfigOutput => ({
   buckets: {
     [faker.string.alphanumeric(5)]: {
       dataset_path: faker.helpers.fromRegExp(
@@ -9007,13 +9009,22 @@ export const getReadDatasetConfigsApiConfigsDatasetGetResponseMock = (
       description: "A test resource",
       mode: faker.string.alpha({ length: { min: 10, max: 20 } }),
       default_credential: faker.helpers.arrayElement([
-        {
-          endpoint: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          region: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          override_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-          access_key_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          access_key: faker.internet.password(),
-        },
+        faker.helpers.arrayElement([
+          {
+            endpoint: faker.string.alpha({ length: { min: 10, max: 20 } }),
+            region: faker.helpers.arrayElement([
+              faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+              undefined,
+            ]),
+            override_url: faker.helpers.arrayElement([
+              faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+              undefined,
+            ]),
+            access_key_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
+            access_key: faker.internet.password(),
+          },
+          null,
+        ]),
         undefined,
       ]),
     },

@@ -9052,9 +9063,12 @@ export const getListBackendsApiConfigsBackendGetResponseMock = (
       },
       node_conditions: {
         rules: faker.helpers.arrayElement([
-          {
-            [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          },
+          faker.helpers.arrayElement([
+            {
+              [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
+            },
+            null,
+          ]),
           undefined,
         ]),
         prefix: faker.string.alpha({ length: { min: 10, max: 20 } }),
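`WorkflowConfig` and `DatasetConfig` here, like `ServiceConfig`, `PluginsConfig`, `Role`, `Pool`, and `Platform` elsewhere in the diff, are replaced by `*Output` variants. This is consistent with Pydantic v2 emitting separate input and output JSON schemas for a model whose validation and serialization shapes differ, which the OpenAPI-to-TypeScript generator then names `XInput`/`XOutput`. For read paths the rename appears purely mechanical, so a transitional alias can keep old call sites compiling during migration; a sketch under that assumption, not something present in the diff:

```ts
// Transitional alias while call sites migrate to the new generated name.
// Assumes (as this diff suggests) that only the type's name changed for reads.
import type { WorkflowConfigOutput } from "./generated/api.schemas"; // path assumed
type WorkflowConfig = WorkflowConfigOutput;
```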
@@ -9087,9 +9101,12 @@ export const getGetBackendApiConfigsBackendNameGetResponseMock = (
       },
       node_conditions: {
         rules: faker.helpers.arrayElement([
-          {
-            [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
-          },
+          faker.helpers.arrayElement([
+            {
+              [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
+            },
+            null,
+          ]),
           undefined,
         ]),
         prefix: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9108,15 +9125,18 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
     [faker.string.alphanumeric(5)]: {
       name: faker.string.alpha({ length: { min: 10, max: 20 } }),
       description: "A test resource",
-      status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]),
+      status: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]),
+        undefined,
+      ]),
       download_type: faker.helpers.arrayElement([
-        faker.helpers.arrayElement(Object.values(DownloadType)),
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]),
         undefined,
       ]),
       enable_maintenance: faker.datatype.boolean(),
       backend: faker.string.alpha({ length: { min: 10, max: 20 } }),
       default_platform: faker.helpers.arrayElement([
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
+        faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
         undefined,
       ]),
       default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9127,12 +9147,13 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
       },
       resources: {
-        ...{
-          gpu: faker.helpers.arrayElement([
+        gpu: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([
             { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() },
-            undefined,
+            null,
           ]),
-        },
+          undefined,
+        ]),
       },
       topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9176,8 +9197,14 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
       tolerations: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),
         operator: faker.string.alpha({ length: { min: 10, max: 20 } }),
-        value: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-        effect: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+        value: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        effect: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
       })),
       labels: {
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9201,7 +9228,10 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
           parsed_pod_template: {},
         },
       },
-      last_heartbeat: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
+      last_heartbeat: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+        undefined,
+      ]),
     },
   },
 },

@@ -9210,15 +9240,18 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
     [faker.string.alphanumeric(5)]: {
       name: faker.string.alpha({ length: { min: 10, max: 20 } }),
       description: "A test resource",
-      status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]),
+      status: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]),
+        undefined,
+      ]),
       download_type: faker.helpers.arrayElement([
-        faker.helpers.arrayElement(Object.values(DownloadType)),
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]),
         undefined,
       ]),
       enable_maintenance: faker.datatype.boolean(),
       backend: faker.string.alpha({ length: { min: 10, max: 20 } }),
       default_platform: faker.helpers.arrayElement([
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
+        faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
         undefined,
       ]),
       default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9229,12 +9262,13 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
       },
       resources: {
-        ...{
-          gpu: faker.helpers.arrayElement([
+        gpu: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([
             { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() },
-            undefined,
+            null,
           ]),
-        },
+          undefined,
+        ]),
       },
       topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9273,16 +9307,25 @@ export const getListPoolsApiConfigsPoolGetResponseMock = (): VerbosePoolConfig |
     },
   ]);

-export const getReadPoolApiConfigsPoolNameGetResponseMock = (): Pool | PoolEditable =>
+export const getReadPoolApiConfigsPoolNameGetResponseMock = (): PoolOutput | PoolEditable =>
   faker.helpers.arrayElement([
     {
       name: faker.string.alpha({ length: { min: 10, max: 20 } }),
       description: "A test resource",
-      status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]),
-      download_type: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), undefined]),
+      status: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]),
+        undefined,
+      ]),
+      download_type: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]),
+        undefined,
+      ]),
       enable_maintenance: faker.datatype.boolean(),
       backend: faker.string.alpha({ length: { min: 10, max: 20 } }),
-      default_platform: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+      default_platform: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+        undefined,
+      ]),
       default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),
       default_queue_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),
       max_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9291,12 +9334,13 @@ export const getReadPoolApiConfigsPoolNameGetResponseMock = (): Pool | PoolEditable =>
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
       },
       resources: {
-        ...{
-          gpu: faker.helpers.arrayElement([
+        gpu: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([
             { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() },
-            undefined,
+            null,
           ]),
-        },
+          undefined,
+        ]),
       },
       topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9338,8 +9382,14 @@ export const getReadPoolApiConfigsPoolNameGetResponseMock = (): Pool | PoolEditable =>
      tolerations: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),
         operator: faker.string.alpha({ length: { min: 10, max: 20 } }),
-        value: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-        effect: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+        value: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        effect: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
       })),
       labels: {
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9363,16 +9413,28 @@ export const getReadPoolApiConfigsPoolNameGetResponseMock = (): Pool | PoolEditable =>
           parsed_pod_template: {},
         },
       },
-      last_heartbeat: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
+      last_heartbeat: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+        undefined,
+      ]),
     },
     {
       name: faker.string.alpha({ length: { min: 10, max: 20 } }),
       description: "A test resource",
-      status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]),
-      download_type: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), undefined]),
+      status: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]),
+        undefined,
+      ]),
+      download_type: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]),
+        undefined,
+      ]),
       enable_maintenance: faker.datatype.boolean(),
       backend: faker.string.alpha({ length: { min: 10, max: 20 } }),
-      default_platform: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+      default_platform: faker.helpers.arrayElement([
+        faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+        undefined,
+      ]),
       default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),
       default_queue_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),
       max_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9381,12 +9443,13 @@ export const getReadPoolApiConfigsPoolNameGetResponseMock = (): Pool | PoolEditable =>
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),
       },
       resources: {
-        ...{
-          gpu: faker.helpers.arrayElement([
+        gpu: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([
             { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() },
-            undefined,
+            null,
          ]),
-        },
+          undefined,
+        ]),
       },
       topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),
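A small detail worth noting in the pool mocks above: timestamps such as `last_heartbeat` are built as `faker.date.past().toISOString().slice(0, 19) + "Z"`, i.e. an ISO-8601 string with the millisecond component stripped, which appears to match the second-resolution datetimes the backend serializes. For example:

```ts
import { faker } from "@faker-js/faker";

const ts = faker.date.past().toISOString(); // e.g. "2024-05-01T12:34:56.789Z"
const truncated = ts.slice(0, 19) + "Z";    // e.g. "2024-05-01T12:34:56Z"
```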
@@ -9464,8 +9527,14 @@ export const getListPlatformsInPoolApiConfigsPoolNamePlatformGetResponseMock =
       tolerations: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),
         operator: faker.string.alpha({ length: { min: 10, max: 20 } }),
-        value: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-        effect: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+        value: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        effect: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
       })),
       labels: {
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9493,7 +9562,7 @@ export const getListPlatformsInPoolApiConfigsPoolNamePlatformGetResponseMock =
 export const getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetResponseMock = ():
   | PlatformMinimal
   | PlatformEditable
-  | Platform =>
+  | PlatformOutput =>
   faker.helpers.arrayElement([
     {
       description: "A test resource",

@@ -9534,8 +9603,14 @@ export const getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetResponseMock
       tolerations: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
         key: faker.string.alpha({ length: { min: 10, max: 20 } }),
         operator: faker.string.alpha({ length: { min: 10, max: 20 } }),
-        value: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-        effect: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+        value: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
+        effect: faker.helpers.arrayElement([
+          faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+          undefined,
+        ]),
       })),
       labels: {
         [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9593,7 +9668,7 @@ export const getReadResourceValidationApiConfigsResourceValidationNameGetResponseMock
     assert_message: faker.string.alpha({ length: { min: 10, max: 20 } }),
   }));

-export const getListRolesApiConfigsRoleGetResponseMock = (): Role[] =>
+export const getListRolesApiConfigsRoleGetResponseMock = (): RoleOutput[] =>
   Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
     name: faker.string.alpha({ length: { min: 10, max: 20 } }),
     description: "A test resource",

@@ -9612,16 +9687,19 @@ export const getListRolesApiConfigsRoleGetResponseMock = (): Role[] =>
     immutable: faker.datatype.boolean(),
     sync_mode: faker.helpers.arrayElement(Object.values(SyncMode)),
     external_roles: faker.helpers.arrayElement([
-      Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
-      ),
+      faker.helpers.arrayElement([
+        Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
+          faker.string.alpha({ length: { min: 10, max: 20 } }),
+        ),
+        null,
+      ]),
       undefined,
     ]),
   }));

 export const getReadRoleApiConfigsRoleNameGetResponseMock = (
-  overrideResponse: Partial<Role> = {},
-): Role => ({
+  overrideResponse: Partial<RoleOutput> = {},
+): RoleOutput => ({
   name: faker.string.alpha({ length: { min: 10, max: 20 } }),
   description: "A test resource",
   policies: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({

@@ -9639,9 +9717,12 @@ export const getReadRoleApiConfigsRoleNameGetResponseMock = (
   immutable: faker.datatype.boolean(),
   sync_mode: faker.helpers.arrayElement(Object.values(SyncMode)),
   external_roles: faker.helpers.arrayElement([
-    Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
-      faker.string.alpha({ length: { min: 10, max: 20 } }),
-    ),
+    faker.helpers.arrayElement([
+      Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
+        faker.string.alpha({ length: { min: 10, max: 20 } }),
+      ),
+      null,
+    ]),
     undefined,
   ]),
   ...overrideResponse,
 });
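Object-shaped mocks like `getReadRoleApiConfigsRoleNameGetResponseMock` above accept an `overrideResponse: Partial<RoleOutput>` that is spread last over the randomized fields, so a test can pin the properties it asserts on while leaving everything else generated. A short usage sketch (the test framing is assumed):

```ts
// Pin the name; every other RoleOutput field stays faker-generated.
const role = getReadRoleApiConfigsRoleNameGetResponseMock({ name: "admin" });
console.log(role.name); // "admin"
```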
@@ -9692,9 +9773,12 @@ export const getGetConfigsHistoryApiConfigsHistoryGetResponseMock = (
     created_at: faker.date.past().toISOString().slice(0, 19) + "Z",
     description: "A test resource",
     tags: faker.helpers.arrayElement([
-      Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
-        faker.string.alpha({ length: { min: 10, max: 20 } }),
-      ),
+      faker.helpers.arrayElement([
+        Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() =>
+          faker.string.alpha({ length: { min: 10, max: 20 } }),
+        ),
+        null,
+      ]),
       undefined,
     ]),
     data: faker.helpers.arrayElement([{}, undefined]),

@@ -9713,36 +9797,60 @@ export const getGetConfigDiffApiConfigsDiffGetResponseMock = (

 export const getGetNewJwtTokenApiAuthJwtRefreshTokenGetResponseMock = (
   overrideResponse: Partial<JwtTokenResponse> = {},
 ): JwtTokenResponse => ({
-  token: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-  expires_at: faker.helpers.arrayElement([faker.number.int(), undefined]),
-  error: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  token: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
+  expires_at: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]),
+  error: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   ...overrideResponse,
 });

 export const getPostNewJwtTokenApiAuthJwtRefreshTokenPostResponseMock = (
   overrideResponse: Partial<JwtTokenResponse> = {},
 ): JwtTokenResponse => ({
-  token: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-  expires_at: faker.helpers.arrayElement([faker.number.int(), undefined]),
-  error: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  token: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
+  expires_at: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]),
+  error: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   ...overrideResponse,
 });

 export const getGetJwtTokenFromAccessTokenApiAuthJwtAccessTokenGetResponseMock = (
   overrideResponse: Partial<JwtTokenResponse> = {},
 ): JwtTokenResponse => ({
-  token: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-  expires_at: faker.helpers.arrayElement([faker.number.int(), undefined]),
-  error: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  token: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
+  expires_at: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]),
+  error: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   ...overrideResponse,
 });

 export const getPostJwtTokenFromAccessTokenApiAuthJwtAccessTokenPostResponseMock = (
   overrideResponse: Partial<JwtTokenResponse> = {},
 ): JwtTokenResponse => ({
-  token: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-  expires_at: faker.helpers.arrayElement([faker.number.int(), undefined]),
-  error: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  token: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
+  expires_at: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]),
+  error: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   ...overrideResponse,
 });

@@ -9794,8 +9902,14 @@ export const getListUsersApiAuthUserGetResponseMock = (
   items_per_page: faker.number.int(),
   users: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
     id: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    created_at: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-    created_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+    created_at: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+      undefined,
+    ]),
+    created_by: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
   })),
   ...overrideResponse,
 });

@@ -9804,8 +9918,14 @@ export const getCreateUserApiAuthUserPostResponseMock = (
   overrideResponse: Partial<User> = {},
 ): User => ({
   id: faker.string.alpha({ length: { min: 10, max: 20 } }),
-  created_at: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-  created_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  created_at: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+    undefined,
+  ]),
+  created_by: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   ...overrideResponse,
 });

@@ -9813,8 +9933,14 @@ export const getGetUserApiAuthUserUserIdGetResponseMock = (
   overrideResponse: Partial<UserWithRoles> = {},
 ): UserWithRoles => ({
   id: faker.string.alpha({ length: { min: 10, max: 20 } }),
-  created_at: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-  created_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
+  created_at: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+    undefined,
+  ]),
+  created_by: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
   roles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
     role_name: faker.string.alpha({ length: { min: 10, max: 20 } }),
     assigned_by: faker.string.alpha({ length: { min: 10, max: 20 } }),

@@ -9878,7 +10004,7 @@ export const getListAppsApiAppGetResponseMock = (
     description: "A test resource",
     created_date: faker.date.past().toISOString().slice(0, 19) + "Z",
     owner: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    latest_version: faker.string.alpha({ length: { min: 10, max: 20 } }),
+    latest_version: faker.number.int(),
   })),
   more_entries: faker.datatype.boolean(),
   ...overrideResponse,
 });

@@ -9901,12 +10027,6 @@ export const getGetAppApiAppUserNameGetResponseMock = (
   ...overrideResponse,
 });

-export const getDeleteAppApiAppUserNameDeleteResponseMock = (): DeleteAppApiAppUserNameDelete200 => ({
-  [faker.string.alphanumeric(5)]: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(
-    () => faker.number.int(),
-  ),
-});
-
 export const getUpdateAppApiAppUserNamePatchResponseMock = (
   overrideResponse: Partial<EditResponse> = {},
 ): EditResponse => ({

@@ -9918,6 +10038,12 @@ export const getUpdateAppApiAppUserNamePatchResponseMock = (
   ...overrideResponse,
 });

+export const getDeleteAppApiAppUserNameDeleteResponseMock = (): DeleteAppApiAppUserNameDelete200 => ({
+  [faker.string.alphanumeric(5)]: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(
+    () => faker.number.int(),
+  ),
+});
+
 export const getRenameAppApiAppUserNameRenamePostResponseMock = (): string => faker.word.sample();

 export const getCancelWorkflowApiWorkflowNameCancelPostResponseMock = (

@@ -9932,20 +10058,47 @@ export const getListWorkflowApiWorkflowGetResponseMock = (
     name: faker.string.alpha({ length: { min: 10, max: 20 } }),
     workflow_uuid: faker.string.alpha({ length: { min: 10, max: 20 } }),
     submit_time: faker.date.past().toISOString().slice(0, 19) + "Z",
-    start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-    end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
+    start_time: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+      undefined,
+    ]),
+    end_time: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+      undefined,
+    ]),
     queued_time: faker.number.float({ fractionDigits: 2 }),
-    duration: faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), undefined]),
+    duration: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), null]),
+      undefined,
+    ]),
     status: faker.helpers.arrayElement(Object.values(WorkflowStatus)),
     overview: faker.string.alpha({ length: { min: 10, max: 20 } }),
     logs: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    error_logs: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    grafana_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    app_owner: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    app_name: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    app_version: faker.helpers.arrayElement([faker.number.int(), undefined]),
+    error_logs: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    grafana_url: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    dashboard_url: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    pool: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    app_owner: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    app_name: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    app_version: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]),
     priority: faker.string.alpha({ length: { min: 10, max: 20 } }),
   })),
   more_entries: faker.datatype.boolean(),

@@ -9957,13 +10110,22 @@ export const getGetWorkflowTaskApiWorkflowNameTaskTaskNameGetResponseMock = (
 ): TaskEntry => ({
   workflow_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
   task_name: faker.string.alpha({ length: { min: 10, max: 20 } }),
-  node: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-  start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-  end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
+  node: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+    undefined,
+  ]),
+  start_time: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+    undefined,
+  ]),
+  end_time: faker.helpers.arrayElement([
+    faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+    undefined,
+  ]),
   status: faker.helpers.arrayElement(Object.values(TaskGroupStatus)),
-  storage: faker.number.int(),
+  storage: faker.number.float({ fractionDigits: 2 }),
   cpu: faker.number.int(),
-  memory: faker.number.int(),
+  memory: faker.number.float({ fractionDigits: 2 }),
   gpu: faker.number.int(),
   ...overrideResponse,
 });

@@ -9976,10 +10138,13 @@ export const getListTaskApiTaskGetResponseMock = (): {
   summaries: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
     user: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    storage: faker.number.int(),
+    pool: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    storage: faker.number.float({ fractionDigits: 2 }),
     cpu: faker.number.int(),
-    memory: faker.number.int(),
+    memory: faker.number.float({ fractionDigits: 2 }),
     gpu: faker.number.int(),
     priority: faker.string.alpha({ length: { min: 10, max: 20 } }),
   })),

@@ -9991,20 +10156,44 @@ export const getListTaskApiTaskGetResponseMock = ():
     workflow_uuid: faker.string.alpha({ length: { min: 10, max: 20 } }),
     task_name: faker.string.alpha({ length: { min: 10, max: 20 } }),
     retry_id: faker.number.int(),
-    pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    node: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-    end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]),
-    duration: faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), undefined]),
+    pool: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    node: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    start_time: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+      undefined,
+    ]),
+    end_time: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]),
+      undefined,
+    ]),
+    duration: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), null]),
+      undefined,
+    ]),
     status: faker.helpers.arrayElement(Object.values(TaskGroupStatus)),
     overview: faker.string.alpha({ length: { min: 10, max: 20 } }),
     logs: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    error_logs: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    grafana_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    storage: faker.number.int(),
+    error_logs: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    grafana_url: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    dashboard_url: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    storage: faker.number.float({ fractionDigits: 2 }),
     cpu: faker.number.int(),
-    memory: faker.number.int(),
+    memory: faker.number.float({ fractionDigits: 2 }),
     gpu: faker.number.int(),
     priority: faker.string.alpha({ length: { min: 10, max: 20 } }),
   })),

@@ -10012,10 +10201,13 @@ export const getListTaskApiTaskGetResponseMock = (): {
   summaries: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({
     user: faker.string.alpha({ length: { min: 10, max: 20 } }),
-    pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]),
-    storage: faker.number.int(),
+    pool: faker.helpers.arrayElement([
+      faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]),
+      undefined,
+    ]),
+    storage: faker.number.float({ fractionDigits: 2 }),
     cpu: faker.number.int(),
-    memory: faker.number.int(),
+    memory: faker.number.float({ fractionDigits: 2 }),
     gpu: faker.number.int(),
     priority: faker.string.alpha({ length: { min: 10, max: 20 } }),
     workflow_id: faker.string.alpha({ length: { min: 10, max: 20 } }),
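Besides the null-wrapping, the task mocks above change `storage` and `memory` from `faker.number.int()` to `faker.number.float({ fractionDigits: 2 })`, tracking a schema change from integer to number for those fields, while `cpu` and `gpu` stay integral. Tests that asserted integral storage or memory values will see fractional mocks now, e.g.:

```ts
// storage/memory are floats with two fraction digits in the regenerated mocks.
const entry = getGetWorkflowTaskApiWorkflowNameTaskTaskNameGetResponseMock();
console.log(Number.isInteger(entry.cpu));    // true
console.log(Number.isInteger(entry.memory)); // usually false after this change
```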
+10221,181 @@ export const getGetWorkflowApiWorkflowNameGetResponseMock = ( name: faker.string.alpha({ length: { min: 10, max: 20 } }), uuid: faker.string.alpha({ length: { min: 10, max: 20 } }), submitted_by: faker.string.alpha({ length: { min: 10, max: 20 } }), - cancelled_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + cancelled_by: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), spec: faker.string.alpha({ length: { min: 10, max: 20 } }), template_spec: faker.string.alpha({ length: { min: 10, max: 20 } }), logs: faker.string.alpha({ length: { min: 10, max: 20 } }), events: faker.string.alpha({ length: { min: 10, max: 20 } }), overview: faker.string.alpha({ length: { min: 10, max: 20 } }), - parent_name: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - parent_job_id: faker.helpers.arrayElement([faker.number.int(), undefined]), - dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - grafana_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + parent_name: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + parent_job_id: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]), + dashboard_url: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + grafana_url: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), tags: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => faker.string.alpha({ length: { min: 10, max: 20 } }), ), submit_time: faker.date.past().toISOString().slice(0, 19) + "Z", - start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - exec_timeout: faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), undefined]), - queue_timeout: faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), undefined]), - duration: faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), undefined]), + start_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + end_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + exec_timeout: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), null]), + undefined, + ]), + queue_timeout: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), null]), + undefined, + ]), + duration: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.number.float({ fractionDigits: 2 }), null]), + undefined, + ]), queued_time: faker.number.float({ fractionDigits: 2 }), status: faker.helpers.arrayElement(Object.values(WorkflowStatus)), outputs: faker.string.alpha({ length: { min: 10, max: 20 } }), groups: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, 
(_, i) => i + 1).map(() => ({ name: faker.string.alpha({ length: { min: 10, max: 20 } }), status: faker.helpers.arrayElement(Object.values(TaskGroupStatus)), - start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - processing_start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - scheduling_start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), + start_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + end_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + processing_start_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + scheduling_start_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), initializing_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), remaining_upstream_groups: faker.helpers.arrayElement([ - Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => - faker.string.alpha({ length: { min: 10, max: 20 } }), - ), + faker.helpers.arrayElement([ + Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => + faker.string.alpha({ length: { min: 10, max: 20 } }), + ), + null, + ]), undefined, ]), downstream_groups: faker.helpers.arrayElement([ - Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => - faker.string.alpha({ length: { min: 10, max: 20 } }), - ), + faker.helpers.arrayElement([ + Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => + faker.string.alpha({ length: { min: 10, max: 20 } }), + ), + null, + ]), + undefined, + ]), + failure_message: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), undefined, ]), - failure_message: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), tasks: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ name: faker.string.alpha({ length: { min: 10, max: 20 } }), retry_id: faker.number.int(), status: faker.helpers.arrayElement(Object.values(TaskGroupStatus)), - failure_message: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - exit_code: faker.helpers.arrayElement([faker.number.int(), undefined]), + failure_message: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + exit_code: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]), logs: faker.string.alpha({ length: { min: 10, max: 20 } }), - error_logs: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + error_logs: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 
20 } }), null]), + undefined, + ]), processing_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), scheduling_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), initializing_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), events: faker.string.alpha({ length: { min: 10, max: 20 } }), - start_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - end_time: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), + start_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + end_time: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), input_download_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), input_download_end_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), undefined, ]), output_upload_start_time: faker.helpers.arrayElement([ - faker.date.past().toISOString().slice(0, 19) + "Z", + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + dashboard_url: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), undefined, ]), - dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), pod_name: faker.string.alpha({ length: { min: 10, max: 20 } }), - pod_ip: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + pod_ip: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), task_uuid: faker.string.alpha({ length: { min: 10, max: 20 } }), - node_name: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + node_name: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), lead: faker.datatype.boolean(), })), })), - pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - backend: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - app_owner: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - app_name: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - app_version: faker.helpers.arrayElement([faker.number.int(), undefined]), + pool: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + backend: faker.helpers.arrayElement([ + 
faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + app_owner: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + app_name: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + app_version: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]), plugins: { rsync: faker.datatype.boolean() }, priority: faker.string.alpha({ length: { min: 10, max: 20 } }), ...overrideResponse, @@ -10192,7 +10465,10 @@ export const getGetUserCredentialApiCredentialsGetResponseMock = ( overrideResponse: Partial> = {}, ): CredentialGetResponse => ({ credentials: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ - [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }), + [faker.string.alphanumeric(5)]: faker.helpers.arrayElement([ + faker.string.alpha({ length: { min: 10, max: 20 } }), + null, + ]), })), ...overrideResponse, }); @@ -10201,7 +10477,10 @@ export const getDeleteUsersCredentialApiCredentialsCredNameDeleteResponseMock = overrideResponse: Partial> = {}, ): CredentialGetResponse => ({ credentials: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ - [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }), + [faker.string.alphanumeric(5)]: faker.helpers.arrayElement([ + faker.string.alpha({ length: { min: 10, max: 20 } }), + null, + ]), })), ...overrideResponse, }); @@ -10215,19 +10494,25 @@ export const getGetResourcesApiResourcesGetResponseMock = (): ResourcesResponse taints: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({})), usage_fields: {}, conditions: faker.helpers.arrayElement([ - Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => - faker.string.alpha({ length: { min: 10, max: 20 } }), - ), + faker.helpers.arrayElement([ + Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => + faker.string.alpha({ length: { min: 10, max: 20 } }), + ), + null, + ]), undefined, ]), non_workflow_usage_fields: {}, allocatable_fields: {}, - platform_allocatable_fields: faker.helpers.arrayElement([{}, undefined]), - platform_available_fields: faker.helpers.arrayElement([{}, undefined]), - platform_workflow_allocatable_fields: faker.helpers.arrayElement([{}, undefined]), - config_fields: faker.helpers.arrayElement([{}, undefined]), + platform_allocatable_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), + platform_available_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), + platform_workflow_allocatable_fields: faker.helpers.arrayElement([ + faker.helpers.arrayElement([null]), + undefined, + ]), + config_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), backend: faker.string.alpha({ length: { min: 10, max: 20 } }), - label_fields: faker.helpers.arrayElement([{}, undefined]), + label_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), pool_platform_labels: { [faker.string.alphanumeric(5)]: Array.from( { length: faker.number.int({ min: 1, max: 10 }) }, @@ -10258,19 +10543,22 @@ export const getGetOneResourceApiResourcesNameGetResponseMock = ( taints: 
Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({})), usage_fields: {}, conditions: faker.helpers.arrayElement([ - Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => - faker.string.alpha({ length: { min: 10, max: 20 } }), - ), + faker.helpers.arrayElement([ + Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => + faker.string.alpha({ length: { min: 10, max: 20 } }), + ), + null, + ]), undefined, ]), non_workflow_usage_fields: {}, allocatable_fields: {}, - platform_allocatable_fields: faker.helpers.arrayElement([{}, undefined]), - platform_available_fields: faker.helpers.arrayElement([{}, undefined]), - platform_workflow_allocatable_fields: faker.helpers.arrayElement([{}, undefined]), - config_fields: faker.helpers.arrayElement([{}, undefined]), + platform_allocatable_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), + platform_available_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), + platform_workflow_allocatable_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), + config_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), backend: faker.string.alpha({ length: { min: 10, max: 20 } }), - label_fields: faker.helpers.arrayElement([{}, undefined]), + label_fields: faker.helpers.arrayElement([faker.helpers.arrayElement([null]), undefined]), pool_platform_labels: { [faker.string.alphanumeric(5)]: Array.from( { length: faker.number.int({ min: 1, max: 10 }) }, @@ -10289,11 +10577,20 @@ export const getGetPoolsApiPoolGetResponseMock = ( [faker.string.alphanumeric(5)]: { name: faker.string.alpha({ length: { min: 10, max: 20 } }), description: "A test resource", - status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]), - download_type: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), undefined]), + status: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]), + undefined, + ]), + download_type: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]), + undefined, + ]), enable_maintenance: faker.datatype.boolean(), backend: faker.string.alpha({ length: { min: 10, max: 20 } }), - default_platform: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + default_platform: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), default_queue_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), max_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), @@ -10302,12 +10599,13 @@ export const getGetPoolsApiPoolGetResponseMock = ( [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }), }, resources: { - ...{ - gpu: faker.helpers.arrayElement([ + gpu: faker.helpers.arrayElement([ + faker.helpers.arrayElement([ { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() }, - undefined, + null, ]), - }, + undefined, + ]), }, topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ key: faker.string.alpha({ length: { 
min: 10, max: 20 } }), @@ -10338,11 +10636,20 @@ export const getGetPoolQuotasApiPoolQuotaGetResponseMock = ( pools: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ name: faker.string.alpha({ length: { min: 10, max: 20 } }), description: "A test resource", - status: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), undefined]), - download_type: faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), undefined]), + status: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(PoolStatus)), null]), + undefined, + ]), + download_type: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.helpers.arrayElement(Object.values(DownloadType)), null]), + undefined, + ]), enable_maintenance: faker.datatype.boolean(), backend: faker.string.alpha({ length: { min: 10, max: 20 } }), - default_platform: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + default_platform: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), default_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), default_queue_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), max_exec_timeout: faker.string.alpha({ length: { min: 10, max: 20 } }), @@ -10351,12 +10658,13 @@ export const getGetPoolQuotasApiPoolQuotaGetResponseMock = ( [faker.string.alphanumeric(5)]: faker.string.alpha({ length: { min: 10, max: 20 } }), }, resources: { - ...{ - gpu: faker.helpers.arrayElement([ + gpu: faker.helpers.arrayElement([ + faker.helpers.arrayElement([ { guarantee: faker.number.int(), maximum: faker.number.int(), weight: faker.number.int() }, - undefined, + null, ]), - }, + undefined, + ]), }, topology_keys: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => ({ key: faker.string.alpha({ length: { min: 10, max: 20 } }), @@ -10400,10 +10708,22 @@ export const getSubmitWorkflowApiPoolPoolNameWorkflowPostResponseMock = ( overrideResponse: Partial> = {}, ): SubmitResponse => ({ name: faker.string.alpha({ length: { min: 10, max: 20 } }), - overview: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - logs: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - spec: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + overview: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + logs: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + spec: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + dashboard_url: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), ...overrideResponse, }); @@ -10411,17 +10731,32 @@ export const getRestartWorkflowApiPoolPoolNameWorkflowWorkflowIdRestartPostRespo overrideResponse: Partial> = {}, ): SubmitResponse => ({ name: faker.string.alpha({ length: { min: 10, max: 20 } }), - 
overview: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - logs: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - spec: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - dashboard_url: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + overview: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + logs: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + spec: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + dashboard_url: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), ...overrideResponse, }); export const getGetBucketInfoApiBucketGetResponseMock = ( overrideResponse: Partial> = {}, ): BucketInfoResponse => ({ - default: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + default: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), buckets: { [faker.string.alphanumeric(5)]: { path: faker.string.alpha({ length: { min: 10, max: 20 } }), @@ -10450,16 +10785,19 @@ export const getChangeNameTagLabelMetadataApiBucketBucketDatasetNameAttributePos overrideResponse: Partial> = {}, ): DataAttributeResponse => ({ tag_response: faker.helpers.arrayElement([ - { - version_id: faker.string.alpha({ length: { min: 10, max: 20 } }), - tags: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => - faker.string.alpha({ length: { min: 10, max: 20 } }), - ), - }, + faker.helpers.arrayElement([ + { + version_id: faker.string.alpha({ length: { min: 10, max: 20 } }), + tags: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => + faker.string.alpha({ length: { min: 10, max: 20 } }), + ), + }, + null, + ]), undefined, ]), - label_response: faker.helpers.arrayElement([{ metadata: {} }, undefined]), - metadata_response: faker.helpers.arrayElement([{ metadata: {} }, undefined]), + label_response: faker.helpers.arrayElement([faker.helpers.arrayElement([{ metadata: {} }, null]), undefined]), + metadata_response: faker.helpers.arrayElement([faker.helpers.arrayElement([{ metadata: {} }, null]), undefined]), ...overrideResponse, }); @@ -10469,10 +10807,19 @@ export const getGetInfoApiBucketBucketDatasetNameInfoGetResponseMock = ( name: faker.string.alpha({ length: { min: 10, max: 20 } }), id: faker.string.alpha({ length: { min: 10, max: 20 } }), bucket: faker.string.alpha({ length: { min: 10, max: 20 } }), - created_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - created_date: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - hash_location: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - hash_location_size: faker.helpers.arrayElement([faker.number.int(), undefined]), + created_by: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + created_date: 
faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + hash_location: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + hash_location_size: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]), labels: {}, type: faker.helpers.arrayElement(Object.values(DatasetType)), versions: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => @@ -10501,7 +10848,10 @@ export const getGetInfoApiBucketBucketDatasetNameInfoGetResponseMock = ( version: faker.string.alpha({ length: { min: 10, max: 20 } }), location: faker.string.alpha({ length: { min: 10, max: 20 } }), uri: faker.string.alpha({ length: { min: 10, max: 20 } }), - hash_location: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + hash_location: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), size: faker.number.int(), }, ]), @@ -10517,10 +10867,19 @@ export const getListDatasetFromBucketApiBucketListDatasetGetResponseMock = ( id: faker.string.alpha({ length: { min: 10, max: 20 } }), bucket: faker.string.alpha({ length: { min: 10, max: 20 } }), create_time: faker.date.past().toISOString().slice(0, 19) + "Z", - last_created: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - hash_location: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - hash_location_size: faker.helpers.arrayElement([faker.number.int(), undefined]), - version_id: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + last_created: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + hash_location: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + hash_location_size: faker.helpers.arrayElement([faker.helpers.arrayElement([faker.number.int(), null]), undefined]), + version_id: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), type: faker.helpers.arrayElement(Object.values(DatasetType)), })), ...overrideResponse, @@ -10536,10 +10895,22 @@ export const getQueryDatasetApiBucketBucketQueryGetResponseMock = ( name: faker.string.alpha({ length: { min: 10, max: 20 } }), id: faker.string.alpha({ length: { min: 10, max: 20 } }), bucket: faker.string.alpha({ length: { min: 10, max: 20 } }), - created_by: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - created_date: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - hash_location: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - hash_location_size: faker.helpers.arrayElement([faker.number.int(), undefined]), + created_by: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + created_date: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + 
hash_location: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + hash_location_size: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.number.int(), null]), + undefined, + ]), labels: {}, type: faker.helpers.arrayElement(Object.values(DatasetType)), versions: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => @@ -10569,7 +10940,7 @@ export const getQueryDatasetApiBucketBucketQueryGetResponseMock = ( location: faker.string.alpha({ length: { min: 10, max: 20 } }), uri: faker.string.alpha({ length: { min: 10, max: 20 } }), hash_location: faker.helpers.arrayElement([ - faker.string.alpha({ length: { min: 10, max: 20 } }), + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), undefined, ]), size: faker.number.int(), @@ -10605,11 +10976,26 @@ export const getGetNotificationSettingsApiProfileSettingsGetResponseMock = ( overrideResponse: Partial> = {}, ): ProfileResponse => ({ profile: { - username: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - email_notification: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]), - slack_notification: faker.helpers.arrayElement([faker.datatype.boolean(), undefined]), - bucket: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), - pool: faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), undefined]), + username: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + email_notification: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.datatype.boolean(), null]), + undefined, + ]), + slack_notification: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.datatype.boolean(), null]), + undefined, + ]), + bucket: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), + pool: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.string.alpha({ length: { min: 10, max: 20 } }), null]), + undefined, + ]), }, roles: Array.from({ length: faker.number.int({ min: 1, max: 10 }) }, (_, i) => i + 1).map(() => faker.string.alpha({ length: { min: 10, max: 20 } }), @@ -10618,10 +11004,16 @@ export const getGetNotificationSettingsApiProfileSettingsGetResponseMock = ( faker.string.alpha({ length: { min: 10, max: 20 } }), ), token: faker.helpers.arrayElement([ - { - name: faker.string.alpha({ length: { min: 10, max: 20 } }), - expires_at: faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", undefined]), - }, + faker.helpers.arrayElement([ + { + name: faker.string.alpha({ length: { min: 10, max: 20 } }), + expires_at: faker.helpers.arrayElement([ + faker.helpers.arrayElement([faker.date.past().toISOString().slice(0, 19) + "Z", null]), + undefined, + ]), + }, + null, + ]), undefined, ]), ...overrideResponse, @@ -10651,33 +11043,31 @@ export const getGetAvailableWorkflowTagsApiTagGetResponseMock = (): GetAvailable }); export const getGetWorkflowPluginsConfigsApiPluginsConfigsGetResponseMock = ( - overrideResponse: Partial> = {}, -): PluginsConfig => ({ + overrideResponse: Partial> = {}, +): PluginsConfigOutput => ({ rsync: { - ...{ - enabled: faker.datatype.boolean(), - enable_telemetry: faker.datatype.boolean(), - read_bandwidth_limit: 
faker.number.int({ min: 0 }), - write_bandwidth_limit: faker.number.int({ min: 0 }), - allowed_paths: { - [faker.string.alphanumeric(5)]: { - path: faker.string.alpha({ length: { min: 10, max: 20 } }), - writable: faker.datatype.boolean(), - }, + enabled: faker.datatype.boolean(), + enable_telemetry: faker.datatype.boolean(), + read_bandwidth_limit: faker.number.int({ min: 0 }), + write_bandwidth_limit: faker.number.int({ min: 0 }), + allowed_paths: { + [faker.string.alphanumeric(5)]: { + path: faker.string.alpha({ length: { min: 10, max: 20 } }), + writable: faker.datatype.boolean(), }, - daemon_debounce_delay: faker.number.float({ min: 0, fractionDigits: 2 }), - daemon_poll_interval: faker.number.float({ min: 0, fractionDigits: 2 }), - daemon_reconcile_interval: faker.number.float({ min: 0, fractionDigits: 2 }), - client_upload_rate_limit: faker.number.int({ min: 0 }), }, + daemon_debounce_delay: faker.number.float({ min: 0, fractionDigits: 2 }), + daemon_poll_interval: faker.number.float({ min: 0, fractionDigits: 2 }), + daemon_reconcile_interval: faker.number.float({ min: 0, fractionDigits: 2 }), + client_upload_rate_limit: faker.number.int({ min: 0 }), }, ...overrideResponse, }); export const getReadServiceConfigsApiConfigsServiceGetMockHandler = ( overrideResponse?: - | ServiceConfig - | ((info: Parameters[1]>[0]) => Promise | ServiceConfig), + | ServiceConfigOutput + | ((info: Parameters[1]>[0]) => Promise | ServiceConfigOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -10752,8 +11142,8 @@ export const getPatchServiceConfigsApiConfigsServicePatchMockHandler = ( export const getReadWorkflowConfigsApiConfigsWorkflowGetMockHandler = ( overrideResponse?: - | WorkflowConfig - | ((info: Parameters[1]>[0]) => Promise | WorkflowConfig), + | WorkflowConfigOutput + | ((info: Parameters[1]>[0]) => Promise | WorkflowConfigOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -10828,8 +11218,8 @@ export const getPatchWorkflowConfigsApiConfigsWorkflowPatchMockHandler = ( export const getReadDatasetConfigsApiConfigsDatasetGetMockHandler = ( overrideResponse?: - | DatasetConfig - | ((info: Parameters[1]>[0]) => Promise | DatasetConfig), + | DatasetConfigOutput + | ((info: Parameters[1]>[0]) => Promise | DatasetConfigOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -10902,24 +11292,6 @@ export const getPatchDatasetConfigsApiConfigsDatasetPatchMockHandler = ( ); }; -export const getDeleteDatasetApiConfigsDatasetNameDeleteMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), - options?: RequestHandlerOptions, -) => { - return http.delete( - "*/api/configs/dataset/:name", - async (info: Parameters[1]>[0]) => { - await delay(0); - if (typeof overrideResponse === "function") { - await overrideResponse(info); - } - - return new HttpResponse(null, { status: 200 }); - }, - options, - ); -}; - export const getPatchDatasetApiConfigsDatasetNamePatchMockHandler = ( overrideResponse?: | PatchDatasetApiConfigsDatasetNamePatch200 @@ -10946,6 +11318,24 @@ export const getPatchDatasetApiConfigsDatasetNamePatchMockHandler = ( ); }; +export const getDeleteDatasetApiConfigsDatasetNameDeleteMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), + options?: RequestHandlerOptions, +) => { + return http.delete( + "*/api/configs/dataset/:name", + async (info: Parameters[1]>[0]) => { + await delay(0); + if (typeof overrideResponse === "function") { + await overrideResponse(info); + } + + 
return new HttpResponse(null, { status: 200 }); + }, + options, + ); +}; + export const getListBackendsApiConfigsBackendGetMockHandler = ( overrideResponse?: | ListBackendsResponse @@ -10970,6 +11360,24 @@ export const getListBackendsApiConfigsBackendGetMockHandler = ( ); }; +export const getUpdateBackendApiConfigsBackendNamePostMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), + options?: RequestHandlerOptions, +) => { + return http.post( + "*/api/configs/backend/:name", + async (info: Parameters[1]>[0]) => { + await delay(0); + if (typeof overrideResponse === "function") { + await overrideResponse(info); + } + + return new HttpResponse(null, { status: 200 }); + }, + options, + ); +}; + export const getGetBackendApiConfigsBackendNameGetMockHandler = ( overrideResponse?: Backend | ((info: Parameters[1]>[0]) => Promise | Backend), options?: RequestHandlerOptions, @@ -10992,24 +11400,6 @@ export const getGetBackendApiConfigsBackendNameGetMockHandler = ( ); }; -export const getUpdateBackendApiConfigsBackendNamePostMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), - options?: RequestHandlerOptions, -) => { - return http.post( - "*/api/configs/backend/:name", - async (info: Parameters[1]>[0]) => { - await delay(0); - if (typeof overrideResponse === "function") { - await overrideResponse(info); - } - - return new HttpResponse(null, { status: 200 }); - }, - options, - ); -}; - export const getDeleteBackendApiConfigsBackendNameDeleteMockHandler = ( overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), options?: RequestHandlerOptions, @@ -11075,9 +11465,11 @@ export const getPutPoolsApiConfigsPoolPutMockHandler = ( export const getReadPoolApiConfigsPoolNameGetMockHandler = ( overrideResponse?: - | Pool + | PoolOutput | PoolEditable - | ((info: Parameters[1]>[0]) => Promise | Pool | PoolEditable), + | (( + info: Parameters[1]>[0], + ) => Promise | PoolOutput | PoolEditable), options?: RequestHandlerOptions, ) => { return http.get( @@ -11116,13 +11508,13 @@ export const getPutPoolApiConfigsPoolNamePutMockHandler = ( ); }; -export const getDeletePoolApiConfigsPoolNameDeleteMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), +export const getPatchPoolApiConfigsPoolNamePatchMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), options?: RequestHandlerOptions, ) => { - return http.delete( + return http.patch( "*/api/configs/pool/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); if (typeof overrideResponse === "function") { await overrideResponse(info); @@ -11134,13 +11526,13 @@ export const getDeletePoolApiConfigsPoolNameDeleteMockHandler = ( ); }; -export const getPatchPoolApiConfigsPoolNamePatchMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), +export const getDeletePoolApiConfigsPoolNameDeleteMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), options?: RequestHandlerOptions, ) => { - return http.patch( + return http.delete( "*/api/configs/pool/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); if (typeof overrideResponse === "function") { await overrideResponse(info); @@ -11202,10 +11594,14 @@ export const getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetMockH overrideResponse?: | 
PlatformMinimal | PlatformEditable - | Platform + | PlatformOutput | (( info: Parameters[1]>[0], - ) => Promise | PlatformMinimal | PlatformEditable | Platform), + ) => + | Promise + | PlatformMinimal + | PlatformEditable + | PlatformOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -11583,7 +11979,9 @@ export const getDeleteResourceValidationApiConfigsResourceValidationNameDeleteMo }; export const getListRolesApiConfigsRoleGetMockHandler = ( - overrideResponse?: Role[] | ((info: Parameters[1]>[0]) => Promise | Role[]), + overrideResponse?: + | RoleOutput[] + | ((info: Parameters[1]>[0]) => Promise | RoleOutput[]), options?: RequestHandlerOptions, ) => { return http.get( @@ -11623,7 +12021,9 @@ export const getPutRolesApiConfigsRolePutMockHandler = ( }; export const getReadRoleApiConfigsRoleNameGetMockHandler = ( - overrideResponse?: Role | ((info: Parameters[1]>[0]) => Promise | Role), + overrideResponse?: + | RoleOutput + | ((info: Parameters[1]>[0]) => Promise | RoleOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -11766,13 +12166,13 @@ export const getPutBackendTestApiConfigsBackendTestNamePutMockHandler = ( ); }; -export const getDeleteBackendTestApiConfigsBackendTestNameDeleteMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), +export const getPatchBackendTestApiConfigsBackendTestNamePatchMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), options?: RequestHandlerOptions, ) => { - return http.delete( + return http.patch( "*/api/configs/backend_test/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); if (typeof overrideResponse === "function") { await overrideResponse(info); @@ -11784,13 +12184,13 @@ export const getDeleteBackendTestApiConfigsBackendTestNameDeleteMockHandler = ( ); }; -export const getPatchBackendTestApiConfigsBackendTestNamePatchMockHandler = ( - overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), +export const getDeleteBackendTestApiConfigsBackendTestNameDeleteMockHandler = ( + overrideResponse?: unknown | ((info: Parameters[1]>[0]) => Promise | unknown), options?: RequestHandlerOptions, ) => { - return http.patch( + return http.delete( "*/api/configs/backend_test/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); if (typeof overrideResponse === "function") { await overrideResponse(info); @@ -12430,17 +12830,15 @@ export const getCreateAppApiAppUserNamePostMockHandler = ( ); }; -export const getDeleteAppApiAppUserNameDeleteMockHandler = ( +export const getUpdateAppApiAppUserNamePatchMockHandler = ( overrideResponse?: - | DeleteAppApiAppUserNameDelete200 - | (( - info: Parameters[1]>[0], - ) => Promise | DeleteAppApiAppUserNameDelete200), + | EditResponse + | ((info: Parameters[1]>[0]) => Promise | EditResponse), options?: RequestHandlerOptions, ) => { - return http.delete( + return http.patch( "*/api/app/user/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); return HttpResponse.json( @@ -12448,7 +12846,7 @@ export const getDeleteAppApiAppUserNameDeleteMockHandler = ( ? typeof overrideResponse === "function" ? 
await overrideResponse(info) : overrideResponse - : getDeleteAppApiAppUserNameDeleteResponseMock(), + : getUpdateAppApiAppUserNamePatchResponseMock(), { status: 200 }, ); }, @@ -12456,15 +12854,17 @@ export const getDeleteAppApiAppUserNameDeleteMockHandler = ( ); }; -export const getUpdateAppApiAppUserNamePatchMockHandler = ( +export const getDeleteAppApiAppUserNameDeleteMockHandler = ( overrideResponse?: - | EditResponse - | ((info: Parameters[1]>[0]) => Promise | EditResponse), + | DeleteAppApiAppUserNameDelete200 + | (( + info: Parameters[1]>[0], + ) => Promise | DeleteAppApiAppUserNameDelete200), options?: RequestHandlerOptions, ) => { - return http.patch( + return http.delete( "*/api/app/user/:name", - async (info: Parameters[1]>[0]) => { + async (info: Parameters[1]>[0]) => { await delay(0); return HttpResponse.json( @@ -12472,7 +12872,7 @@ export const getUpdateAppApiAppUserNamePatchMockHandler = ( ? typeof overrideResponse === "function" ? await overrideResponse(info) : overrideResponse - : getUpdateAppApiAppUserNamePatchResponseMock(), + : getDeleteAppApiAppUserNameDeleteResponseMock(), { status: 200 }, ); }, @@ -13418,8 +13818,8 @@ export const getGetAvailableWorkflowTagsApiTagGetMockHandler = ( export const getGetWorkflowPluginsConfigsApiPluginsConfigsGetMockHandler = ( overrideResponse?: - | PluginsConfig - | ((info: Parameters[1]>[0]) => Promise | PluginsConfig), + | PluginsConfigOutput + | ((info: Parameters[1]>[0]) => Promise | PluginsConfigOutput), options?: RequestHandlerOptions, ) => { return http.get( @@ -13449,18 +13849,18 @@ export const getFastAPIMock = () => [ getReadDatasetConfigsApiConfigsDatasetGetMockHandler(), getPutDatasetConfigsApiConfigsDatasetPutMockHandler(), getPatchDatasetConfigsApiConfigsDatasetPatchMockHandler(), - getDeleteDatasetApiConfigsDatasetNameDeleteMockHandler(), getPatchDatasetApiConfigsDatasetNamePatchMockHandler(), + getDeleteDatasetApiConfigsDatasetNameDeleteMockHandler(), getListBackendsApiConfigsBackendGetMockHandler(), - getGetBackendApiConfigsBackendNameGetMockHandler(), getUpdateBackendApiConfigsBackendNamePostMockHandler(), + getGetBackendApiConfigsBackendNameGetMockHandler(), getDeleteBackendApiConfigsBackendNameDeleteMockHandler(), getListPoolsApiConfigsPoolGetMockHandler(), getPutPoolsApiConfigsPoolPutMockHandler(), getReadPoolApiConfigsPoolNameGetMockHandler(), getPutPoolApiConfigsPoolNamePutMockHandler(), - getDeletePoolApiConfigsPoolNameDeleteMockHandler(), getPatchPoolApiConfigsPoolNamePatchMockHandler(), + getDeletePoolApiConfigsPoolNameDeleteMockHandler(), getRenamePoolApiConfigsPoolNameRenamePutMockHandler(), getListPlatformsInPoolApiConfigsPoolNamePlatformGetMockHandler(), getReadPlatformInPoolApiConfigsPoolNamePlatformPlatformNameGetMockHandler(), @@ -13490,8 +13890,8 @@ export const getFastAPIMock = () => [ getPutBackendTestsApiConfigsBackendTestPutMockHandler(), getReadBackendTestApiConfigsBackendTestNameGetMockHandler(), getPutBackendTestApiConfigsBackendTestNamePutMockHandler(), - getDeleteBackendTestApiConfigsBackendTestNameDeleteMockHandler(), getPatchBackendTestApiConfigsBackendTestNamePatchMockHandler(), + getDeleteBackendTestApiConfigsBackendTestNameDeleteMockHandler(), getGetConfigsHistoryApiConfigsHistoryGetMockHandler(), getRollbackConfigApiConfigsHistoryRollbackPostMockHandler(), getDeleteConfigHistoryRevisionApiConfigsHistoryConfigTypeRevisionRevisionDeleteMockHandler(), @@ -13520,8 +13920,8 @@ export const getFastAPIMock = () => [ getListAppsApiAppGetMockHandler(), getGetAppApiAppUserNameGetMockHandler(), 
getCreateAppApiAppUserNamePostMockHandler(), - getDeleteAppApiAppUserNameDeleteMockHandler(), getUpdateAppApiAppUserNamePatchMockHandler(), + getDeleteAppApiAppUserNameDeleteMockHandler(), getGetAppContentApiAppUserNameSpecGetMockHandler(), getRenameAppApiAppUserNameRenamePostMockHandler(), getCancelWorkflowApiWorkflowNameCancelPostMockHandler(), diff --git a/src/ui/src/mocks/generators/event-generator.ts b/src/ui/src/mocks/generators/event-generator.ts index e3013f944..423054020 100644 --- a/src/ui/src/mocks/generators/event-generator.ts +++ b/src/ui/src/mocks/generators/event-generator.ts @@ -37,13 +37,13 @@ const BASE_SEED = 22222; export interface EventWorkflowInput { name: string; submit_time: string; - end_time?: string; + end_time?: string | null; groups: Array<{ tasks?: Array<{ name: string; status: TaskGroupStatus; - start_time?: string; - node_name?: string; + start_time?: string | null; + node_name?: string | null; }>; }>; } @@ -85,7 +85,7 @@ export class EventGenerator { new Date(task.start_time || workflow.submit_time), lifecycleStatus, task.status, - task.node_name, + task.node_name ?? undefined, ); events.push(...taskEvents); } @@ -673,7 +673,7 @@ export class EventGenerator { const events = this.generateEventsForWorkflow(workflow, taskName ?? undefined); const lines = this.formatEventLines(events); - if (workflow.end_time !== undefined) { + if (workflow.end_time !== undefined && workflow.end_time !== null) { return new HttpResponse(buildChunkedStream(lines.join("\n")), { headers: EVENT_HEADERS }); } diff --git a/src/ui/src/mocks/generators/log-generator.ts b/src/ui/src/mocks/generators/log-generator.ts index 17308f7ed..732f623d6 100644 --- a/src/ui/src/mocks/generators/log-generator.ts +++ b/src/ui/src/mocks/generators/log-generator.ts @@ -37,8 +37,8 @@ const LOG_RESPONSE_HEADERS = { /** Minimal workflow shape needed by log handlers — satisfied by MockWorkflow and WorkflowQueryResponse. */ export interface LogWorkflowInput { name: string; - start_time?: string; - end_time?: string; + start_time?: string | null; + end_time?: string | null; groups: Array<{ name: string; tasks?: Array<{ name: string; task_uuid?: string }>; @@ -48,8 +48,8 @@ export interface LogWorkflowInput { /** Minimal task shape needed by handleTaskLogs. */ export interface LogTaskInput { name: string; - start_time?: string; - end_time?: string; + start_time?: string | null; + end_time?: string | null; } const BASE_SEED = 11111; @@ -330,7 +330,7 @@ export class LogGenerator { const workflowStartTime = workflow.start_time ? 
new Date(workflow.start_time) : undefined; - if (workflow.end_time !== undefined) { + if (workflow.end_time != null) { const allLogs = this.generateForWorkflow({ workflowName: name, taskNames, diff --git a/src/utils/auth.py b/src/utils/auth.py index 7c8c4ea1c..24fcfa038 100644 --- a/src/utils/auth.py +++ b/src/utils/auth.py @@ -37,22 +37,18 @@ class AsymmetricKeyPair(pydantic.BaseModel): public_key: str private_key: pydantic.SecretStr - class Config: - keep_untouched = (property,) + model_config = pydantic.ConfigDict(ignored_types=(property,)) @classmethod def generate(cls) -> 'AsymmetricKeyPair': return AsymmetricKeyPair() # type: ignore - @pydantic.root_validator() - @classmethod - def validate_valid_key(cls, values): - - public = values['public_key'] + @pydantic.model_validator(mode='after') + def validate_valid_key(self) -> 'AsymmetricKeyPair': # Make sure the keys are valid - jwk.JWK.from_json(public) + jwk.JWK.from_json(self.public_key) # TODO: Properly validate the private/public key match - return values + return self def _get_cached_pem_key(self) -> bytes: cached = self.__dict__.get('_pem_key_cache') @@ -98,13 +94,13 @@ class AuthenticationConfig(pydantic.BaseModel): # The maximum duration of a token max_token_duration: str = '365d' - @pydantic.validator('max_token_duration') + @pydantic.field_validator('max_token_duration') @classmethod def validate_max_token_duration(cls, value: str) -> str: try: common.to_timedelta(value) except ValueError as e: - raise osmo_errors.OSMOUserError(f'Invalid max_token_duration format: {str(e)}') + raise osmo_errors.OSMOUserError(f'Invalid max_token_duration format: {str(e)}') from e return value @classmethod @@ -121,14 +117,11 @@ def generate_default(cls) -> 'AuthenticationConfig': issuer=issuer, audience=issuer) - @pydantic.root_validator() - @classmethod - def validate_active_key(cls, values): - active_key = values.get('active_key') - keys = values.get('keys', []) - if active_key not in keys: - raise ValueError(f'active_key "{active_key}" not in keys') - return values + @pydantic.model_validator(mode='after') + def validate_active_key(self) -> 'AuthenticationConfig': + if self.active_key not in self.keys: + raise ValueError(f'active_key "{self.active_key}" not in keys') + return self def get_keyset(self) -> Dict: return {'keys': [ diff --git a/src/utils/backend_messages.py b/src/utils/backend_messages.py index 912a7372f..f41d8a1c6 100644 --- a/src/utils/backend_messages.py +++ b/src/utils/backend_messages.py @@ -19,7 +19,7 @@ import datetime import enum import logging -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional import pydantic @@ -60,14 +60,14 @@ class MessageType(enum.Enum): ACK = 'ack' NODE_CONDITIONS = 'node_conditions' -class LoggingBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class LoggingBody(pydantic.BaseModel, extra='forbid'): """ Represents the container log body. """ type: LoggingType text: str workflow_uuid: str | None = None -class MessageBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class MessageBody(pydantic.BaseModel, extra='forbid'): """ Used for Message Structure """ @@ -75,8 +75,20 @@ class MessageBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): body: Dict | LoggingBody uuid: str = pydantic.Field(default_factory=common.generate_unique_id) + @pydantic.field_validator('body', mode='before') + @classmethod + def coerce_model_to_dict(cls, value: Any) -> Any: + """Coerce BaseModel instances to dicts for the Dict branch of the union. 
-class InitBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): + In Pydantic v1, passing a model to a Dict field auto-coerced via .dict(). + v2 requires an explicit dict, so we convert non-LoggingBody models here. + """ + if isinstance(value, pydantic.BaseModel) and not isinstance(value, LoggingBody): + return value.model_dump() + return value + + +class InitBody(pydantic.BaseModel, extra='forbid'): """ Represents the log body. """ k8s_uid: str k8s_namespace: str @@ -84,7 +96,7 @@ class InitBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): node_condition_prefix: str -class PodLogBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PodLogBody(pydantic.BaseModel, extra='forbid'): """ Represents the log body. """ text: str task: str # task_uuid @@ -92,16 +104,16 @@ class PodLogBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): mask: bool = True -class ConditionMessage(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ConditionMessage(pydantic.BaseModel, extra='forbid'): """ Represents the condition message body. """ reason: str | None = None message: str | None = None timestamp: datetime.datetime - status: bool + status: str type: str -class UpdatePodBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class UpdatePodBody(pydantic.BaseModel, extra='forbid'): """ Represents the update pod body. """ workflow_uuid: str task_uuid: str @@ -111,12 +123,12 @@ class UpdatePodBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): pod_ip: str | None = None message: str = '' status: str - exit_code: int | None + exit_code: int | None = None backend: str conditions: List[ConditionMessage] = [] -class ResourceBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourceBody(pydantic.BaseModel, extra='forbid'): """ Represents the resource body. """ hostname: str available: bool @@ -127,29 +139,29 @@ class ResourceBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): taints: List[Dict] = [] -class ResourceUsageBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourceUsageBody(pydantic.BaseModel, extra='forbid'): """ Represents the resource usage body. """ hostname: str usage_fields: Dict non_workflow_usage_fields: Dict -class DeleteResourceBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DeleteResourceBody(pydantic.BaseModel, extra='forbid'): """ Represents the delete resource body. """ resource: str -class NodeBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class NodeBody(pydantic.BaseModel, extra='forbid'): """ Represents the node body. """ node_hashes: List[str] -class TaskListBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskListBody(pydantic.BaseModel, extra='forbid'): """ Represents the list of pod names. """ task_list: List[str] -class MonitorPodBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class MonitorPodBody(pydantic.BaseModel, extra='forbid'): """ Represents the container log body. """ workflow_uuid: str task_uuid: str @@ -157,12 +169,12 @@ class MonitorPodBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): message: str = '' -class HeartbeatBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class HeartbeatBody(pydantic.BaseModel, extra='forbid'): """ Represents the container log body. """ time: datetime.datetime -class MetricsBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class MetricsBody(pydantic.BaseModel, extra='forbid'): """ Represents the container log body. 
""" type: MetricsType value: float @@ -171,7 +183,7 @@ class MetricsBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): description: str -class PodConditionsBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PodConditionsBody(pydantic.BaseModel, extra='forbid'): """ Represents the pod conditions body. """ workflow_uuid: str task_uuid: str @@ -179,7 +191,7 @@ class PodConditionsBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): conditions: List[ConditionMessage] = [] -class PodEventBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PodEventBody(pydantic.BaseModel, extra='forbid'): """ Represents the pod event body. """ pod_name: str reason: str @@ -187,14 +199,14 @@ class PodEventBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): message: str -class NodeConditionsBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class NodeConditionsBody(pydantic.BaseModel, extra='forbid'): """ Body for node conditions messages from service to backend listener. """ rules: Dict[str, str]|None = None -class AckBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class AckBody(pydantic.BaseModel, extra='forbid'): """ Body for acknowledgment messages from service back to backend listener. """ @@ -204,46 +216,50 @@ class AckBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): class MessageOptions(pydantic.BaseModel): """ Message options """ init: Optional[InitBody] = pydantic.Field( - description='Message for websocket init') + default=None, description='Message for websocket init') pod_log: Optional[PodLogBody] = pydantic.Field( - description='Message for error_logs') + default=None, description='Message for error_logs') update_pod: Optional[UpdatePodBody] = pydantic.Field( - description='Message for events') + default=None, description='Message for events') monitor_pod: Optional[MonitorPodBody] = pydantic.Field( - description='Message for monitoring pod') + default=None, description='Message for monitoring pod') resource: Optional[ResourceBody] = pydantic.Field( - description='Message for resource change') + default=None, description='Message for resource change') resource_usage: Optional[ResourceUsageBody] = pydantic.Field( - description='Message for resource usage change') + default=None, description='Message for resource usage change') delete_resource: Optional[DeleteResourceBody] = pydantic.Field( - description='Message for resource change') + default=None, description='Message for resource change') node_hash: Optional[NodeBody] = pydantic.Field( - description='Message for list of current nodes') + default=None, description='Message for list of current nodes') task_list: Optional[TaskListBody] = pydantic.Field( + default=None, description='Message for list of current pods in backend based on the task_uuids') heartbeat: Optional[HeartbeatBody] = pydantic.Field( - description='Message for service heartbeat') + default=None, description='Message for service heartbeat') job_status: Optional[jobs_base.JobResult] = pydantic.Field( - description='Message of job status') + default=None, description='Message of job status') metrics: Optional[MetricsBody] = pydantic.Field( - description='Message to send metrics') + default=None, description='Message to send metrics') logging: Optional[LoggingBody] = pydantic.Field( - description='Message to send logs') + default=None, description='Message to send logs') pod_conditions: Optional[PodConditionsBody] = pydantic.Field( - description='Message to send pod conditions') + default=None, description='Message to send pod conditions') 
pod_event: Optional[PodEventBody] = pydantic.Field( - description='Message to send pod event') + default=None, description='Message to send pod event') ack: Optional[AckBody] = pydantic.Field( - description='Message for acknowledgment') + default=None, description='Message for acknowledgment') node_conditions: Optional[NodeConditionsBody] = pydantic.Field( - description='Message for node conditions') + default=None, description='Message for node conditions') - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') + @classmethod def validate(cls, values): # pylint: disable=no-self-argument """ A valid message can only be one of the two types """ + if not isinstance(values, dict): + return values num_fields_set = sum(1 for value in values.values() if value is not None) if num_fields_set != 1: raise osmo_errors.OSMOUserError( - f'Exactly one of the following must be set {cls.__fields__.keys()}') + f'Exactly one of the following must be set {cls.model_fields.keys()}') return values diff --git a/src/utils/connectors/cluster.py b/src/utils/connectors/cluster.py index 5894a635b..703ab1586 100644 --- a/src/utils/connectors/cluster.py +++ b/src/utils/connectors/cluster.py @@ -20,7 +20,7 @@ import pydantic -class ClusterConfig(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ClusterConfig(pydantic.BaseModel, extra='forbid'): """ A class for managing the config for the execution cluster. """ cluster_host: str = pydantic.Field( default='https://localhost:6443', diff --git a/src/utils/connectors/postgres.py b/src/utils/connectors/postgres.py index eaebf43d5..986eb87cd 100644 --- a/src/utils/connectors/postgres.py +++ b/src/utils/connectors/postgres.py @@ -45,7 +45,7 @@ from src.lib.data import storage from src.lib.data.storage import constants from src.lib.utils import (common, credentials, jinja_sandbox, login, - osmo_errors, role, validation) + osmo_errors, role) from src.utils import auth, notify from src.utils.secret_manager import Encrypted, SecretManager @@ -54,12 +54,6 @@ def backend_action_queue_name(backend_name: str) -> str: return f'backend-connections:{backend_name}' -class ExtraType(enum.Enum): - """ Setting for Pydantic Extra """ - ALLOW = pydantic.Extra.allow - FORBID = pydantic.Extra.forbid - IGNORE = pydantic.Extra.ignore - class CredentialType(enum.Enum): """ User profile type / table name if exist """ @@ -128,82 +122,83 @@ class ClusterResources(pydantic.BaseModel): class PostgresConfig(pydantic.BaseModel): """ Manages the config for the postgres database. 
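The `validate` method just above moves from `@pydantic.root_validator(pre=True)` to `@pydantic.model_validator(mode='before')`. Two v2 details drive the added code: a before-validator can receive non-dict input (for instance an existing model instance handed to `model_validate`), and `cls.__fields__` is now `cls.model_fields`. A condensed sketch with illustrative field names:

```python
from typing import Optional

import pydantic


class OneOf(pydantic.BaseModel):
    init: Optional[str] = None
    ack: Optional[str] = None

    @pydantic.model_validator(mode='before')
    @classmethod
    def exactly_one(cls, values):
        if not isinstance(values, dict):  # v2 may hand a model instance straight through
            return values
        if sum(1 for v in values.values() if v is not None) != 1:
            raise ValueError(f'Exactly one of {list(cls.model_fields)} must be set')
        return values


print(OneOf(init='x'))
```

One nit on the hunk itself: interpolating `cls.model_fields.keys()` into the error message renders as a `dict_keys(...)` repr; `list(cls.model_fields)` prints cleaner.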
""" postgres_host: str = pydantic.Field( - command_line='postgres_host', - env='OSMO_POSTGRES_HOST', default='localhost', - description='The hostname of the postgres server to connect to.') + description='The hostname of the postgres server to connect to.', + json_schema_extra={'command_line': 'postgres_host', 'env': 'OSMO_POSTGRES_HOST'}) postgres_port: int = pydantic.Field( - command_line='postgres_port', - env='OSMO_POSTGRES_PORT', default=5432, - description='The port of the postgres server to connect to.') + description='The port of the postgres server to connect to.', + json_schema_extra={'command_line': 'postgres_port', 'env': 'OSMO_POSTGRES_PORT'}) postgres_user: str = pydantic.Field( - command_line='postgres_user', - env='OSMO_POSTGRES_USER', default='postgres', - description='The user of the postgres server.') + description='The user of the postgres server.', + json_schema_extra={'command_line': 'postgres_user', 'env': 'OSMO_POSTGRES_USER'}) postgres_password: str = pydantic.Field( - command_line='postgres_password', - env='OSMO_POSTGRES_PASSWORD', - description='The password to connect to the postgres server.') + description='The password to connect to the postgres server.', + json_schema_extra={'command_line': 'postgres_password', 'env': 'OSMO_POSTGRES_PASSWORD'}) postgres_database_name: str = pydantic.Field( - command_line='postgres_database_name', - env='OSMO_POSTGRES_DATABASE_NAME', default='osmo_db', - description='The database name for postgres server.') + description='The database name for postgres server.', + json_schema_extra={ + 'command_line': 'postgres_database_name', + 'env': 'OSMO_POSTGRES_DATABASE_NAME' + }) postgres_reconnect_retry: int = pydantic.Field( - command_line='postgres_reconnect_retry', - env='OSMO_POSTGRES_RECONNECT_RETRY', - type=validation.positive_integer, default=5, - description='Reconnect try count after connection error') + gt=0, + description='Reconnect try count after connection error', + json_schema_extra={ + 'command_line': 'postgres_reconnect_retry', + 'env': 'OSMO_POSTGRES_RECONNECT_RETRY' + }) mek_file: str = pydantic.Field( - command_line='mek_file', - env='OSMO_MEK_FILE', default='/home/osmo/vault-agent/secrets/vault-secrets.yaml', - description='Path to the file that stores master encryption keys' - ) + description='Path to the file that stores master encryption keys', + json_schema_extra={'command_line': 'mek_file', 'env': 'OSMO_MEK_FILE'}) method: Literal['dev'] | None = pydantic.Field( - command_line='method', default=None, description='If set to "dev", use the default local mek file' - 'ingoring `mek_file` field.') + 'ingoring `mek_file` field.', + json_schema_extra={'command_line': 'method'}) dev_user: str = pydantic.Field( - command_line='dev_user', default='testuser', description='If method is set to "dev", the browser flow to the service will use this ' - 'user name.') + 'user name.', + json_schema_extra={'command_line': 'dev_user'}) # Deployment configuration fields from Helm values for auto-initialization osmo_image_location: str | None = pydantic.Field( - command_line='osmo_image_location', default=None, - description='The image registry location for OSMO images') + description='The image registry location for OSMO images', + json_schema_extra={'command_line': 'osmo_image_location'}) osmo_image_tag: str | None = pydantic.Field( - command_line='osmo_image_tag', default=None, - description='The image tag for OSMO images') + description='The image tag for OSMO images', + json_schema_extra={'command_line': 'osmo_image_tag'}) 
service_hostname: str | None = pydantic.Field( - command_line='service_hostname', default=None, - description='The public hostname for the OSMO service (used for URL generation)') + description='The public hostname for the OSMO service (used for URL generation)', + json_schema_extra={'command_line': 'service_hostname'}) postgres_pool_minconn: int = pydantic.Field( - command_line='postgres_pool_minconn', - type=validation.positive_integer, - env='OSMO_POSTGRES_POOL_MINCONN', default=1, - description='Minimum number of connections to keep in the connection pool') + gt=0, + description='Minimum number of connections to keep in the connection pool', + json_schema_extra={ + 'command_line': 'postgres_pool_minconn', + 'env': 'OSMO_POSTGRES_POOL_MINCONN' + }) postgres_pool_maxconn: int = pydantic.Field( - command_line='postgres_pool_maxconn', - type=validation.positive_integer, - env='OSMO_POSTGRES_POOL_MAXCONN', default=10, - description='Maximum number of connections allowed in the connection pool') + gt=0, + description='Maximum number of connections allowed in the connection pool', + json_schema_extra={ + 'command_line': 'postgres_pool_maxconn', + 'env': 'OSMO_POSTGRES_POOL_MAXCONN' + }) schema_version: str = pydantic.Field( - command_line='schema_version', - env='OSMO_SCHEMA_VERSION', default='public', description='pgroll schema version to use. ' - 'Set to "public" to use the default schema without pgroll versioning.') + 'Set to "public" to use the default schema without pgroll versioning.', + json_schema_extra={'command_line': 'schema_version', 'env': 'OSMO_SCHEMA_VERSION'}) def retry(func=None, *, reconnect: bool = True): @@ -379,7 +374,6 @@ def __init__(self, config: PostgresConfig): PostgresConnector._instance = self mek_file = self.config.mek_file if self.config.method == 'dev': - ExtraArgBaseModel.set_extra(ExtraType.ALLOW) mek_file = os.path.join(os.path.dirname(__file__), '..', 'secret_manager', 'mek.yaml') self.secret_manager = SecretManager( mek_file, @@ -1329,7 +1323,7 @@ def set_default_values(configs: 'DynamicConfig', config_type: ConfigType): ) elif config_type == ConfigHistoryType.BACKEND: data = [ - backend.dict(by_alias=True, exclude_unset=True) + backend.model_dump(by_alias=True, exclude_unset=True) for backend in Backend.list_from_db(self) ] elif config_type == ConfigHistoryType.POOL: @@ -1365,7 +1359,7 @@ def set_default_values(configs: 'DynamicConfig', config_type: ConfigType): 'system', # username ['initial-config'], # tags 'Initial configuration', # description - json.dumps(data, default=pydantic.json.pydantic_encoder), # data + json.dumps(data, default=common.pydantic_encoder), # data config_type.value.lower(), # for WHERE NOT EXISTS ), ) @@ -1578,7 +1572,7 @@ def create_config_history_entry( username, tags, description, - json.dumps(data, default=pydantic.json.pydantic_encoder), + json.dumps(data, default=common.pydantic_encoder), ), ) @@ -1739,13 +1733,31 @@ def fetch_from_db(cls, database: PostgresConnector, ) class ExtraArgBaseModel(pydantic.BaseModel): - """ BaseModel class which can be used to enable validation """ - class Config: - extra = ExtraType.IGNORE.value + """BaseModel that rejects unknown fields by default. + User input is validated strictly (extra='forbid'). Database reads go + through ``from_db`` which constructs with extra='ignore' so that legacy + columns that no longer exist in code are silently dropped. 
+ """ + model_config = pydantic.ConfigDict(extra='forbid', populate_by_name=True) + + @classmethod + def from_db(cls, data: Dict): + """Construct from database data, tolerating unknown fields at all nesting levels.""" + return cls.model_validate(data, context={'_from_db': True}) + + @pydantic.model_validator(mode='before') @classmethod - def set_extra(cls, extra_type: ExtraType): - cls.__config__.extra = extra_type.value + def _strip_extra_from_db(cls, values: Any, info: pydantic.ValidationInfo) -> Any: + if not isinstance(values, dict): + return values + if info.context and info.context.get('_from_db'): + allowed_keys = set(cls.model_fields.keys()) + for field_info in cls.model_fields.values(): + if field_info.alias: + allowed_keys.add(field_info.alias) + return {k: v for k, v in values.items() if k in allowed_keys} + return values class OsmoImageConfig(ExtraArgBaseModel): @@ -1765,15 +1777,17 @@ class TopologyRequirementType(str, enum.Enum): PREFERRED = 'preferred' -class TopologyRequirement(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TopologyRequirement(pydantic.BaseModel, extra='forbid'): """Single topology requirement for a resource""" key: str # References pool's topology_keys[].key group: str = 'default' # Logical grouping of tasks requirementType: TopologyRequirementType = TopologyRequirementType.REQUIRED # pylint: disable=invalid-name -class ResourceSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourceSpec(pydantic.BaseModel): """ Represents the resource spec in an OSMO2 workflow. """ + model_config = pydantic.ConfigDict(extra='forbid', coerce_numbers_to_str=True) + cpu: int | None = None storage: str | None = None memory: str | None = None @@ -1784,8 +1798,8 @@ class ResourceSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): def update(self, other: 'ResourceSpec') -> 'ResourceSpec': """ Apply all fields from the other resource spec to this one """ - self_dict = self.dict(exclude_none=True) - other_dict = other.dict(exclude_none=True) + self_dict = self.model_dump(exclude_none=True) + other_dict = other.model_dump(exclude_none=True) return ResourceSpec(**common.recursive_dict_update(self_dict, other_dict)) @classmethod @@ -1810,12 +1824,12 @@ def validate_unit_value(cls, value: str | None, allocatable: str) -> str | None: value = f'{common.convert_resource_value_str(value, target="Ki")}Ki' return value - @pydantic.validator('memory') + @pydantic.field_validator('memory') @classmethod def validate_memory(cls, value: str | None) -> str | None: return cls.validate_unit_value(value, 'memory') - @pydantic.validator('storage') + @pydantic.field_validator('storage') @classmethod def validate_storage(cls, value: str | None) -> str | None: return cls.validate_unit_value(value, 'storage') @@ -1966,9 +1980,7 @@ def get_comparison_function(self, value) -> Callable[[float | str, float | str], right_operand: str assert_message: str - class Config: - use_enum_values = True - extra = 'forbid' + model_config = pydantic.ConfigDict(use_enum_values=True, extra='forbid') def evaluate(self, tokens: Dict[str, Any], task_name: str): @@ -2005,8 +2017,8 @@ def process_operand(operand: str) -> int | float | str | None: ) comparison_function = self.get_comparison_function(self.operator) - assert comparison_function(processed_left_operand, \ - processed_right_operand), processed_assert_msg + if not comparison_function(processed_left_operand, processed_right_operand): + raise AssertionError(processed_assert_msg) class BackendResourceConfig(pydantic.BaseModel): @@ -2032,7 
+2044,7 @@ class BackendResource(pydantic.BaseModel): usage_fields: Dict[str, str] non_workflow_usage_fields: Dict[str, str] taint_fields: List[Dict] - config_fields: Dict[str, Dict[str, BackendResourceConfig]] | None + config_fields: Dict[str, Dict[str, BackendResourceConfig]] | None = None pool_platform_labels: Dict[str, List[str]] updated_allocatable_fields: Dict[str, Dict[str, Dict]] # Allocatable field accounting for osmo-ctrl usage and non-workflow pod usage @@ -2381,7 +2393,7 @@ def list_from_db(cls, backends: List[str] | None = None, config_fields = cls._create_config_fields( pool_platform_labels, pool_config) \ if pool_config else None - all_resources.append(BackendResource.construct( + all_resources.append(BackendResource.model_construct( label_fields=label_fields, taint_fields=taints, allocatable_fields=allocatable_fields, @@ -2607,7 +2619,6 @@ class BucketConfig(ExtraArgBaseModel): description: str = '' # Mode for read-only or read-write or write-only mode: str = BucketMode.READ_WRITE.value - # Default cred to use doesn't have one # Only applies to workflow operations, NOT user cli since we cannot forward the credential # to the user @@ -2625,8 +2636,7 @@ def valid_access(self, bucket_name: str, access_type: BucketModeAccess): class DynamicConfig(ExtraArgBaseModel): """ Manages the dynamic configs for the postgres database. """ - class Config: - validate_assignment = True + model_config = pydantic.ConfigDict(validate_assignment=True) @classmethod def deserialize(cls, config_dict: Dict, postgres: PostgresConnector): @@ -2702,9 +2712,9 @@ def _decrypt(result_data: Any, else: return encrypted_data, None - dynamic_config = cls(**config_dict) - encrypted_dict = dynamic_config.dict(exclude_unset=True) - decrypted_dict = dynamic_config.dict(exclude_unset=True) + dynamic_config = cls.from_db(config_dict) + encrypted_dict = dynamic_config.model_dump(exclude_unset=True) + decrypted_dict = dynamic_config.model_dump(exclude_unset=True) for key in config_dict: if not hasattr(dynamic_config, key): @@ -2757,13 +2767,13 @@ def serialize_helper(self, config_dict: Dict, postgres: PostgresConnector, def serialize(self, postgres: PostgresConnector, exclude_unset=True) -> Dict[str, str | None]: """Encrypts all secret fields and returns a dictionary """ - config_dict = self.dict(by_alias=True, exclude_unset=exclude_unset) + config_dict = self.model_dump(by_alias=True, exclude_unset=exclude_unset) result = self.serialize_helper(config_dict, postgres, top_level=True) return result def plaintext_dict(self, *args, **kwargs): """Returns as a dictionary with all SecretStrs converted to str""" - data = self.dict(*args, **kwargs) + data = self.model_dump(*args, **kwargs) def _convert_secrets(node): # Recurse for dict and list if isinstance(node, dict): @@ -2981,7 +2991,13 @@ def list_from_db(cls, database: PostgresConnector, names: Optional[List[str]] = fetch_cmd = f'SELECT * FROM resource_validations {list_of_names} ORDER BY name;' spec_rows = database.execute_fetch_command(fetch_cmd, fetch_input, True) - return {spec_row['name']: spec_row['resource_validations'] for spec_row in spec_rows} + return { + spec_row['name']: [ + item if isinstance(item, ResourceAssertion) else ResourceAssertion(**item) + for item in spec_row['resource_validations'] + ] + for spec_row in spec_rows + } @classmethod def fetch_from_db(cls, database: PostgresConnector, name: str) -> List[ResourceAssertion]: @@ -2993,7 +3009,10 @@ def fetch_from_db(cls, database: PostgresConnector, name: str) -> List[ResourceA spec_row = spec_rows[0] 
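The `BackendResource.construct(...)` call above becomes `model_construct(...)`; both build an instance without running validation, which keeps the hot resource-listing path cheap, and the rename is mechanical. Sketch:

```python
import pydantic


class Resource(pydantic.BaseModel):
    hostname: str
    available: bool


# model_construct skips validation entirely; only safe for trusted, pre-shaped data.
r = Resource.model_construct(hostname='node-1', available=True)
print(r.hostname)
```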
- return spec_row['resource_validations'] + return [ + item if isinstance(item, ResourceAssertion) else ResourceAssertion(**item) + for item in spec_row['resource_validations'] + ] @classmethod def get_pools(cls, database: PostgresConnector, name: str) -> List[Dict]: @@ -3033,7 +3052,9 @@ def insert_into_db(self, database: PostgresConnector, name: str): ''' database.execute_commit_command( insert_cmd, - (name,[json.dumps(validation.dict()) for validation in self.resource_validations])) + (name, + [json.dumps(validation.model_dump()) + for validation in self.resource_validations])) for pool_info in ResourceValidation.get_pools(database, name): Pool.update_resource_validations(database, pool_info['name']) @@ -3232,9 +3253,11 @@ class PlatformMinimal(PlatformBase): default_mounts: List[str] = [] -class PlatformEditable(PlatformBase, extra=pydantic.Extra.ignore): +class PlatformEditable(PlatformBase): """ Single Platform Entry """ + model_config = pydantic.ConfigDict(extra='ignore') + default_variables: Dict = {} resource_validations: List[str] = [] override_pod_template: List[str] = [] @@ -3314,7 +3337,7 @@ class PoolMinimal(PoolBase): platforms: Dict[str, PlatformMinimal] = {} -class PoolEditable(PoolBase, extra=pydantic.Extra.ignore): +class PoolEditable(PoolBase, extra='ignore'): common_default_variables: Dict = {} common_resource_validations: List[str] = [] common_pod_template: List[str] = [] @@ -3322,7 +3345,7 @@ class PoolEditable(PoolBase, extra=pydantic.Extra.ignore): platforms: Dict[str, PlatformEditable] = {} -class Pool(PoolBase, extra=pydantic.Extra.ignore): +class Pool(PoolBase, extra='ignore'): """ Single Pool Entry """ common_default_variables: Dict = {} common_resource_validations: List[str] = [] @@ -3366,7 +3389,7 @@ def update_resource_validations(cls, database: PostgresConnector, name: str): database.execute_commit_command( insert_cmd, (json.dumps(pool_info.platforms, default=common.pydantic_encoder), - json.dumps(pool_info.parsed_resource_validations), + json.dumps(pool_info.parsed_resource_validations, default=common.pydantic_encoder), name)) @classmethod @@ -3693,7 +3716,9 @@ def insert_into_db(self, database: PostgresConnector, name: str): self.max_exec_timeout, self.max_queue_timeout, json.dumps(self.default_exit_actions), json.dumps(self.common_default_variables), - self.common_resource_validations, json.dumps(self.parsed_resource_validations), + self.common_resource_validations, + json.dumps(self.parsed_resource_validations, + default=common.pydantic_encoder), self.common_pod_template, json.dumps(self.parsed_pod_template), self.common_group_templates, json.dumps(self.parsed_group_templates), self.enable_maintenance, @@ -3768,10 +3793,10 @@ def fetch_platform_config( if pool_type == PoolType.VERBOSE: return platforms elif pool_type == PoolType.EDITABLE: - return {platform_name: PlatformEditable(**platform.dict()) + return {platform_name: PlatformEditable(**platform.model_dump()) for platform_name, platform in platforms.items()} elif pool_type == PoolType.MINIMAL: - return {platform_name: PlatformMinimal(**platform.dict()) + return {platform_name: PlatformMinimal(**platform.model_dump()) for platform_name, platform in platforms.items()} else: raise osmo_errors.OSMOServerError(f'Unknown pool type: {pool_type.name}') @@ -3783,7 +3808,7 @@ class ListOrder(enum.Enum): DESC = 'DESC' -class PostgresUpdateCommand(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PostgresUpdateCommand(pydantic.BaseModel, extra='forbid'): """ A class for creating database updating 
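`pydantic.json.pydantic_encoder` no longer exists in v2, so the `json.dumps(..., default=...)` call sites in this file switch to a project-local `common.pydantic_encoder`. That helper is not shown in the diff; a minimal hypothetical stand-in consistent with how it is used here might look like:

```python
import datetime
import json
from typing import Any

import pydantic


def pydantic_encoder(obj: Any):
    """Hypothetical stand-in for common.pydantic_encoder, which this diff does not show."""
    if isinstance(obj, pydantic.BaseModel):
        return obj.model_dump()
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    if isinstance(obj, datetime.timedelta):
        return obj.total_seconds()
    raise TypeError(f'Object of type {type(obj).__name__} is not JSON serializable')


class Assertion(pydantic.BaseModel):
    left_operand: str
    right_operand: str


print(json.dumps([Assertion(left_operand='cpu', right_operand='4')],
                 default=pydantic_encoder))
```

The companion change in `fetch_from_db`/`list_from_db` re-wraps JSONB rows as `ResourceAssertion` models, since callers now receive typed objects rather than raw dicts.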
command. """ table: str conditions: List[str] = [] @@ -3839,7 +3864,7 @@ def get_args(self) -> Tuple[str, Tuple[Any, ...]]: return command, tuple(args) -class PostgresSelectCommand(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class PostgresSelectCommand(pydantic.BaseModel, extra='forbid'): """ A class for creating database selecting command. """ table: str conditions: List[str] = [] @@ -3937,9 +3962,9 @@ class BackendTestBase(pydantic.BaseModel): description: str cron_schedule: str = pydantic.Field(..., min_length=1) test_timeout: str = pydantic.Field(default='300s') - node_conditions: List[str] = pydantic.Field(min_items=1) + node_conditions: List[str] = pydantic.Field(min_length=1) - @pydantic.validator('name') + @pydantic.field_validator('name') @classmethod def validate_name_rfc1123(cls, v: str) -> str: """ @@ -3963,7 +3988,7 @@ def validate_name_rfc1123(cls, v: str) -> str: return v - @pydantic.validator('cron_schedule') + @pydantic.field_validator('cron_schedule') @classmethod def validate_cron_schedule(cls, v: str) -> str: """ @@ -4018,7 +4043,7 @@ def validate_cron_schedule(cls, v: str) -> str: return v - @pydantic.validator('test_timeout') + @pydantic.field_validator('test_timeout') @classmethod def validate_test_timeout(cls, v: str) -> str: """ @@ -4053,7 +4078,7 @@ def validate_test_timeout(cls, v: str) -> str: return v - @pydantic.validator('node_conditions') + @pydantic.field_validator('node_conditions') @classmethod def validate_node_conditions(cls, v: List[str]) -> List[str]: """ @@ -4176,7 +4201,7 @@ def _parse_duration_to_seconds(duration: str) -> int: class BackendTests(BackendTestBase): """ Represents a test config. """ - common_pod_template: List[str] = pydantic.Field(min_items=1) + common_pod_template: List[str] = pydantic.Field(min_length=1) parsed_pod_template: Dict = {} @classmethod diff --git a/src/utils/connectors/redis.py b/src/utils/connectors/redis.py index 428ab82b0..bcb758263 100644 --- a/src/utils/connectors/redis.py +++ b/src/utils/connectors/redis.py @@ -94,30 +94,25 @@ class RedisConfig(pydantic.BaseModel): """Manages the configuration for the redis database""" redis_host: str = pydantic.Field( - command_line='redis_host', - env='OSMO_REDIS_HOST', default='localhost', - description='The hostname of the redis server to connect to.') + description='The hostname of the redis server to connect to.', + json_schema_extra={'command_line': 'redis_host', 'env': 'OSMO_REDIS_HOST'}) redis_port: int = pydantic.Field( - command_line='redis_port', - env='OSMO_REDIS_PORT', default=6379, - description='The port of the redis server to connect to.') + description='The port of the redis server to connect to.', + json_schema_extra={'command_line': 'redis_port', 'env': 'OSMO_REDIS_PORT'}) redis_password: Optional[str] = pydantic.Field( - command_line='redis_password', - env='OSMO_REDIS_PASSWORD', default=None, - description='The password, if any, to authenticate with the redis server') + description='The password, if any, to authenticate with the redis server', + json_schema_extra={'command_line': 'redis_password', 'env': 'OSMO_REDIS_PASSWORD'}) redis_tls_enable: bool = pydantic.Field( - command_line='redis_tls_enable', - env='OSMO_REDIS_TLS_ENABLE', default=False, - description='Flag to connect to redis server using TLS, false by default') + description='Flag to connect to redis server using TLS, false by default', + json_schema_extra={'command_line': 'redis_tls_enable', 'env': 'OSMO_REDIS_TLS_ENABLE'}) redis_db_number: int = pydantic.Field( - 
command_line='redis_db_number', - env='OSMO_REDIS_DB_NUMBER', default=0, - description='Redis database number to connect to. Default value is 0') + description='Redis database number to connect to. Default value is 0', + json_schema_extra={'command_line': 'redis_db_number', 'env': 'OSMO_REDIS_DB_NUMBER'}) @property def redis_url(self): @@ -187,7 +182,7 @@ def workflow_logs(self) -> bool: return self.name in ('STDOUT', 'STDERR', 'DOWNLOAD', 'UPLOAD') -class LogStreamBody(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class LogStreamBody(pydantic.BaseModel, extra='forbid'): """ Represents the log stream body. """ source: str retry_id: int diff --git a/src/utils/job/backend_jobs.py b/src/utils/job/backend_jobs.py index ca3b427a4..64bedd4ed 100644 --- a/src/utils/job/backend_jobs.py +++ b/src/utils/job/backend_jobs.py @@ -456,7 +456,7 @@ def _get_allowed_job_type(cls): def _get_job_id(cls, values): return f'{values["backend"]}-modify-queues-{common.generate_unique_id()}' - @pydantic.validator('job_id', check_fields=False) + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -590,7 +590,7 @@ def _get_allowed_job_type(cls): def _get_job_id(cls, values): return f'{values["backend"]}-sync-tests-{common.generate_unique_id()}' - @pydantic.validator('job_id', check_fields=False) + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ diff --git a/src/utils/job/common.py b/src/utils/job/common.py index 7df6ada29..95b2d5e35 100644 --- a/src/utils/job/common.py +++ b/src/utils/job/common.py @@ -34,8 +34,8 @@ USER_BIN_LOCATION = '/osmo/usr/bin' RUN_LOCATION = '/osmo/run' -NamePattern = Annotated[str, pydantic.Field(regex=f'^{NAMEREGEX}$')] -TaskNamePattern = Annotated[str, pydantic.Field(regex=f'^{TASKNAMEREGEX}$')] +NamePattern = Annotated[str, pydantic.Field(pattern=f'^{NAMEREGEX}$')] +TaskNamePattern = Annotated[str, pydantic.Field(pattern=f'^{TASKNAMEREGEX}$')] def get_log_path(workflow_config: connectors.WorkflowConfig) -> storage.StoragePath: diff --git a/src/utils/job/jobs.py b/src/utils/job/jobs.py index 27f5d4379..d202d6a34 100644 --- a/src/utils/job/jobs.py +++ b/src/utils/job/jobs.py @@ -59,9 +59,7 @@ class JobExecutionContext(pydantic.BaseModel): postgres: connectors.PostgresConnector redis: connectors.RedisConfig - class Config: - arbitrary_types_allowed = True - extra = 'forbid' + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True, extra='forbid') def cleanup_workflow_group(context: JobExecutionContext, workflow_id: str, workflow_uuid: str, @@ -134,7 +132,7 @@ def send_delayed_job_to_queue(self, delay_duration: datetime.timedelta): job into the job queue. 
""" redis_client = connectors.RedisConnector.get_instance().client - serialized_job = self.json() + serialized_job = self.model_dump_json() timeout_time = time.time() + delay_duration.total_seconds() redis_client.zadd(DELAYED_JOB_QUEUE, {serialized_job: timeout_time}) self.log_delayed_submission(delay_duration) @@ -178,7 +176,7 @@ class SubmitWorkflow(WorkflowJob): spec: workflow.WorkflowSpec original_spec: Dict group_and_task_uuids: Dict[str, common.UuidPattern] - parent_workflow_id: task_common.NamePattern | None + parent_workflow_id: task_common.NamePattern | None = None app_uuid: str | None = None app_version: int | None = None task_db_keys: Dict[str, str] | None = None @@ -188,7 +186,7 @@ class SubmitWorkflow(WorkflowJob): def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-submit' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -232,11 +230,11 @@ def execute(self, context: JobExecutionContext, self.group_and_task_uuids) group_entries.append(( group_obj.workflow_id_internal, group_obj.name, - group_obj.group_uuid, group_obj.spec.json(), + group_obj.group_uuid, group_obj.spec.model_dump_json(), task.TaskGroupStatus.SUBMITTING.name, None, _encode_hstore(group_obj.remaining_upstream_groups), _encode_hstore(group_obj.downstream_groups), - group_obj.scheduler_settings.json() + group_obj.scheduler_settings.model_dump_json() if group_obj.scheduler_settings else None, json.dumps(group_obj.group_template_resource_types), )) @@ -295,7 +293,7 @@ def execute(self, context: JobExecutionContext, ready_group_names.append(group_obj.name) if backend.scheduler_settings is not None: scheduler_settings_by_group[group_obj.name] = ( - backend.scheduler_settings.json()) + backend.scheduler_settings.model_dump_json()) transitioned_names = task.TaskGroup.batch_set_groups_to_processing( context.postgres, workflow_obj.workflow_id, @@ -333,7 +331,7 @@ def handle_failure(self, context: JobExecutionContext, error: str): time=common.current_time(), io_type=connectors.redis.IOType.OSMO_CTRL, source='OSMO', retry_id=0, text='Failed SubmitWorkflow for workflow ' + f'{workflow_obj.workflow_id} with error: {error}') - redis_client.xadd(f'{self.workflow_id}-logs', json.loads(logs.json())) + redis_client.xadd(f'{self.workflow_id}-logs', json.loads(logs.model_dump_json())) redis_client.expire(f'{self.workflow_id}-logs', connectors.MAX_LOG_TTL, nx=True) for group_obj in workflow_obj.get_group_objs(): @@ -376,7 +374,7 @@ def _get_job_id(cls, values): digest = hashlib.sha256(all_paths.encode('utf-8')).hexdigest()[:32] return f'{values["workflow_uuid"]}-{digest}-upload-files' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -468,7 +466,7 @@ class CreateGroup(BackendJob, WorkflowJob, backend_job_defs.BackendCreateGroupMi def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-{values["group_name"]}-submit' - @pydantic.validator('job_id', check_fields=False) + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -556,7 +554,7 @@ class CleanupGroup(BackendJob, WorkflowJob, backend_job_defs.BackendCleanupGroup def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-{values["group_name"]}-backend-cleanup' - @pydantic.validator('job_id', check_fields=False) + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -625,7 +623,7 @@ def 
_get_job_id(cls, values): return '-'.join(name_list) - @pydantic.root_validator + @pydantic.model_validator(mode='before') @classmethod def validate_job_id(cls, values): """ @@ -645,7 +643,7 @@ def validate_job_id(cls, values): f'Job id for an UpdateGroup is in valid: {job_id} should ends with {suffix}.') return values - @pydantic.root_validator + @pydantic.model_validator(mode='before') @classmethod def validate_retry_id(cls, values): """ @@ -898,8 +896,10 @@ def execute(self, context: JobExecutionContext, # Need to check status here because the status could have changed due to fetch_status if group_obj.status == task.TaskGroupStatus.PROCESSING: delayed_job = copy.deepcopy(self) + job_id_suffix = UpdateGroup._get_job_id( + delayed_job.model_dump()) delayed_job.job_id = \ - f'{common.generate_unique_id(5)}-{UpdateGroup._get_job_id(delayed_job.dict())}' + f'{common.generate_unique_id(5)}-{job_id_suffix}' delayed_job.send_delayed_job_to_queue( datetime.timedelta(minutes=1)) @@ -1036,7 +1036,7 @@ def execute(self, context: JobExecutionContext, if backend.scheduler_settings is not None: for group_name in downstream_names: scheduler_settings_by_group[group_name] = ( - backend.scheduler_settings.json()) + backend.scheduler_settings.model_dump_json()) transitioned_names = task.TaskGroup.batch_set_groups_to_processing( context.postgres, self.workflow_id, @@ -1282,7 +1282,7 @@ class RescheduleTask(BackendJob, WorkflowJob): def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-{values["task_name"]}-{values["retry_id"]}-reschedule' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -1294,7 +1294,7 @@ def validate_job_id(cls, value: str) -> str: return value def _delay_cleanup_pod(self): - cleanup_job = CleanupGroup(**self.cleanup_job.dict()) + cleanup_job = CleanupGroup(**self.cleanup_job.model_dump()) # Update retry id label if cleanup_job.error_log_spec: cleanup_job.error_log_spec.labels['osmo.retry_id'] = str(self.retry_id) @@ -1380,7 +1380,7 @@ class CleanupWorkflow(WorkflowJob): def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-cleanup' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -1425,17 +1425,21 @@ def execute(self, context: JobExecutionContext, f'abnormally, view task status at:\n{status_url}\n\n' +\ f'View task error logs at:\n{error_logs_url}\n{end_delimiter}' logs = connectors.redis.LogStreamBody( - time=common.current_time(), io_type=connectors.redis.IOType.DUMP, - source='OSMO', retry_id=0, text=log_message) - redis_batch_pipeline.xadd(f'{self.workflow_id}-logs', json.loads(logs.json())) + time=common.current_time(), + io_type=connectors.redis.IOType.DUMP, + source='OSMO', retry_id=0, + text=log_message) + log_key = f'{self.workflow_id}-logs' + redis_batch_pipeline.xadd( + log_key, json.loads(logs.model_dump_json())) logs = connectors.redis.LogStreamBody( time=common.current_time(), io_type=connectors.redis.IOType.END_FLAG, source='', retry_id=0, text='') - redis_batch_pipeline.xadd(f'{self.workflow_id}-logs', json.loads(logs.json())) + redis_batch_pipeline.xadd(f'{self.workflow_id}-logs', json.loads(logs.model_dump_json())) redis_batch_pipeline.expire(f'{self.workflow_id}-logs', connectors.MAX_LOG_TTL, nx=True) redis_batch_pipeline.xadd(common.get_workflow_events_redis_name(self.workflow_uuid), - json.loads(logs.json())) + json.loads(logs.model_dump_json())) 
redis_batch_pipeline.expire(common.get_workflow_events_redis_name(self.workflow_uuid), connectors.MAX_LOG_TTL, nx=True) for group in workflow_obj.groups: @@ -1444,14 +1448,14 @@ def execute(self, context: JobExecutionContext, redis_batch_pipeline.xadd( common.get_redis_task_log_name( self.workflow_id, task_obj.name, retry_idx), - json.loads(logs.json())) + json.loads(logs.model_dump_json())) redis_batch_pipeline.expire( common.get_redis_task_log_name( self.workflow_id, task_obj.name, retry_idx), connectors.MAX_LOG_TTL, nx=True) redis_batch_pipeline.xadd( f'{self.workflow_id}-{task_obj.task_uuid}-{task_obj.retry_id}-error-logs', - json.loads(logs.json())) + json.loads(logs.model_dump_json())) redis_batch_pipeline.expire( f'{self.workflow_id}-{task_obj.task_uuid}-{task_obj.retry_id}-error-logs', connectors.MAX_LOG_TTL, nx=True) @@ -1603,7 +1607,7 @@ class CancelWorkflow(WorkflowJob): def _get_job_id(cls, values): return f'{values["workflow_uuid"]}-cancel' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -1784,7 +1788,7 @@ class UploadApp(FrontendJob): def _get_job_id(cls, values): return f'{values["app_uuid"]}-{values["app_version"]}-upload-app' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ @@ -1845,7 +1849,7 @@ class DeleteApp(FrontendJob): def _get_job_id(cls, values): return f'{values["app_uuid"]}-{values["app_versions"]}-delete-app' - @pydantic.validator('job_id') + @pydantic.field_validator('job_id') @classmethod def validate_job_id(cls, value: str) -> str: """ diff --git a/src/utils/job/jobs_base.py b/src/utils/job/jobs_base.py index fbb489901..1155c7a48 100644 --- a/src/utils/job/jobs_base.py +++ b/src/utils/job/jobs_base.py @@ -21,11 +21,11 @@ import enum import json import logging -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional import uuid import pydantic -import kombu # type: ignore +import kombu # type: ignore import kombu.pools # type: ignore from src.lib.utils import osmo_errors @@ -50,7 +50,7 @@ class JobStatus(enum.Enum): class JobResult(pydantic.BaseModel): """ Describes the result of a job """ status: JobStatus = JobStatus.SUCCESS - message: Optional[str] + message: Optional[str] = None @property def retry(self): @@ -63,7 +63,6 @@ def __str__(self) -> str: return self.status.name - class Job(pydantic.BaseModel): """ Represents some task that needs to be executed by a worker. Pydantic @@ -73,7 +72,7 @@ class Job(pydantic.BaseModel): super_type: str = 'frontend' job_type: str | None = None job_id: str | None = None - job_uuid: str = '' + job_uuid: str = pydantic.Field(default_factory=lambda: str(uuid.uuid4())) @classmethod def _get_job_id(cls, values): @@ -88,22 +87,24 @@ def _get_allowed_job_type(cls) -> List[str]: def _get_allowed_super_type(cls) -> List[str]: return ['frontend', 'backend'] - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') @classmethod - def validate_job_type_and_id(cls, values) -> str: + def validate_job_type_and_id(cls, values) -> Any: """ Validates job_type. Returns the value of job_type if valid. """ + if not isinstance(values, dict): + return values # If no value is provided, then this is a newly created job. Set the job type based on the # class name if 'job_type' not in values or values['job_type'] is None: values['job_type'] = cls.__name__ # If a value is provided, make sure it is correct. 
# values['job_type'] not in cls.__name__ and - elif not (values['job_type'] == cls.__name__ or \ - values['job_type'] in cls._get_allowed_job_type()): + elif not (values['job_type'] == cls.__name__ or + values['job_type'] in cls._get_allowed_job_type()): raise osmo_errors.OSMOServerError( - f'Tried to initialize a {cls.__name__} instance with ' \ + f'Tried to initialize a {cls.__name__} instance with ' f'job_type as {values["job_type"]} or not in {cls._get_allowed_job_type()}') if 'job_id' not in values or values['job_id'] is None: @@ -111,7 +112,7 @@ def validate_job_type_and_id(cls, values) -> str: return values - @pydantic.validator('super_type', always=True) + @pydantic.field_validator('super_type') @classmethod def validate_super_type(cls, value) -> str: """ @@ -122,7 +123,7 @@ def validate_super_type(cls, value) -> str: f'Tried to initialize a {cls.__name__} instance with super_type as {value}') return value - @pydantic.validator('job_uuid', always=True) + @pydantic.field_validator('job_uuid') @classmethod def validate_job_uuid(cls, value: str) -> str: """ @@ -141,10 +142,6 @@ def get_metadata(self) -> Dict[str, str]: def __str__(self) -> str: return f'(type={self.job_type}, id={self.job_id})' - class Config: - allow_extra = False - ignore_extra = False - def log_submission(self): logging.info('Submitted new job %s to the job queue', self) @@ -163,9 +160,9 @@ def send_job(self, redis_client, redis_config: connectors.RedisConfig, key_name: priority = connectors.JOB_PRIORITY.get( self.job_type or '', connectors.DEFAULT_JOB_PRIORITY) with kombu.Connection(redis_config.redis_url, - transport_options=options) as conn: + transport_options=options) as conn: with kombu.pools.producers[conn].acquire(block=True) as producer: - producer.publish(json.loads(self.json()), exchange=exchange, + producer.publish(json.loads(self.model_dump_json()), exchange=exchange, declare=jobs, routing_key=self.job_type, priority=priority) self.log_submission() diff --git a/src/utils/job/kb_objects.py b/src/utils/job/kb_objects.py index d13ea92df..c5bb5d41f 100644 --- a/src/utils/job/kb_objects.py +++ b/src/utils/job/kb_objects.py @@ -620,11 +620,9 @@ class FileMount(pydantic.BaseModel): digest: str = '' k8s_factory: K8sObjectFactory - class Config: - arbitrary_types_allowed = True - extra = 'forbid' + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True, extra='forbid') - @pydantic.root_validator(pre=True) + @pydantic.model_validator(mode='before') @classmethod def digest_validator(cls, values): """By default, build the digest from the content and path""" @@ -665,7 +663,7 @@ def secret(self, labels: Dict[str, str]) -> Dict: self.name, labels, {os.path.basename(self.path): self.content}, {}) -class HostMount(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class HostMount(pydantic.BaseModel, extra='forbid'): """ Encodes text contents to uniformly support text and binary files. 
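In the `Job` base model above, `job_uuid` previously defaulted to `''` and relied on `@pydantic.validator(..., always=True)` to fill it in; v2 field validators do not run on defaults unless `validate_default=True` is set, so the PR moves generation into `default_factory`. A sketch of both v2 options (class names illustrative):

```python
import uuid

import pydantic


class JobA(pydantic.BaseModel):
    # The option this PR takes: generate the value up front.
    job_uuid: str = pydantic.Field(default_factory=lambda: str(uuid.uuid4()))


class JobB(pydantic.BaseModel):
    # Alternative: keep a sentinel default but force the validator to run on it.
    job_uuid: str = pydantic.Field(default='', validate_default=True)

    @pydantic.field_validator('job_uuid')
    @classmethod
    def fill(cls, value: str) -> str:
        return value or str(uuid.uuid4())


print(JobA().job_uuid != JobB().job_uuid)  # True; both are populated independently
```

With `default_factory`, the retained `validate_job_uuid` validator now fires only for explicitly supplied values, which matches its role as a format check. The deleted `Config` block (`allow_extra`/`ignore_extra`) used option names that v1 had already replaced with the single `extra` setting, so dropping it loses nothing.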
""" name: str path: str diff --git a/src/utils/job/task.py b/src/utils/job/task.py index 089097e1f..6eb3c08d8 100644 --- a/src/utils/job/task.py +++ b/src/utils/job/task.py @@ -264,12 +264,12 @@ def has_error_logs(self) -> bool: and not self.canceled()) -class TaskInputOutput(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskInputOutput(pydantic.BaseModel, extra='forbid'): """ Represents an input/output that is another task """ task: task_common.TaskNamePattern regex: str = '' - @pydantic.validator('regex') + @pydantic.field_validator('regex') @classmethod def validate_regex(cls, regex: str) -> str | None: """ @@ -314,9 +314,9 @@ def __hash__(self): return hash((self.__class__.__name__, self.task)) -class DatasetInputOutput(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class DatasetInputOutput(pydantic.BaseModel, extra='forbid'): """ Represents an input/output that is a dataset """ - class _Dataset(pydantic.BaseModel, extra=pydantic.Extra.forbid): + class _Dataset(pydantic.BaseModel, extra='forbid'): """ Represents dataset info """ name: str path: str = '' @@ -325,7 +325,7 @@ class _Dataset(pydantic.BaseModel, extra=pydantic.Extra.forbid): regex: str = '' localpath: str | None = None - @pydantic.validator('name') + @pydantic.field_validator('name') @classmethod def validate_name(cls, name: str) -> str: """ @@ -340,7 +340,7 @@ def validate_name(cls, name: str) -> str: raise ValueError(f'Invalid name: {err}') from err return name - @pydantic.validator('path') + @pydantic.field_validator('path') @classmethod def validate_path(cls, path: str) -> str: """ @@ -349,13 +349,13 @@ def validate_path(cls, path: str) -> str: Raises: ValueError: path fails validation. """ - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if not path: + return path + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return path - @pydantic.validator('metadata') + @pydantic.field_validator('metadata') @classmethod def validate_metadata(cls, metadata: List[str]) -> List[str]: """ @@ -365,13 +365,11 @@ def validate_metadata(cls, metadata: List[str]) -> List[str]: ValueError: metadata fails validation. """ for path in metadata: - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return metadata - @pydantic.validator('labels') + @pydantic.field_validator('labels') @classmethod def validate_labels(cls, labels: List[str]) -> List[str]: """ @@ -381,13 +379,11 @@ def validate_labels(cls, labels: List[str]) -> List[str]: ValueError: labels fails validation. 
""" for path in labels: - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return labels - @pydantic.validator('regex') + @pydantic.field_validator('regex') @classmethod def validate_regex(cls, regex: str) -> str | None: """ @@ -411,16 +407,16 @@ def __hash__(self): return hash((self.__class__.__name__, self.dataset.name, self.dataset.path)) -class UpdateDatasetOutput(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class UpdateDatasetOutput(pydantic.BaseModel, extra='forbid'): """ Represents an input/output that is a dataset """ - class _Dataset(pydantic.BaseModel, extra=pydantic.Extra.forbid): + class _Dataset(pydantic.BaseModel, extra='forbid'): """ Represents dataset info """ name: str paths: List[str] = [] metadata: List[str] = [] labels: List[str] = [] - @pydantic.validator('name') + @pydantic.field_validator('name') @classmethod def validate_name(cls, name: str) -> str: """ @@ -435,7 +431,7 @@ def validate_name(cls, name: str) -> str: raise ValueError(f'Invalid name: {err}') from err return name - @pydantic.validator('paths') + @pydantic.field_validator('paths') @classmethod def validate_paths(cls, paths: List[str]) -> List[str]: """ @@ -445,13 +441,11 @@ def validate_paths(cls, paths: List[str]) -> List[str]: ValueError: paths fails validation. """ for path in paths: - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return paths - @pydantic.validator('metadata') + @pydantic.field_validator('metadata') @classmethod def validate_metadata(cls, metadata: List[str]) -> List[str]: """ @@ -461,13 +455,11 @@ def validate_metadata(cls, metadata: List[str]) -> List[str]: ValueError: metadata fails validation. """ for path in metadata: - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return metadata - @pydantic.validator('labels') + @pydantic.field_validator('labels') @classmethod def validate_labels(cls, labels: List[str]) -> List[str]: """ @@ -477,10 +469,8 @@ def validate_labels(cls, labels: List[str]) -> List[str]: ValueError: labels fails validation. 
""" for path in labels: - try: - re.fullmatch(PATH_REGEX, path) - except re.error as err: - raise ValueError(f'Invalid path: {path}') from err + if re.fullmatch(PATH_REGEX, path) is None: + raise ValueError(f'Invalid path: {path}') return labels update_dataset: _Dataset @@ -489,12 +479,12 @@ def __hash__(self): return hash((self.__class__.__name__, self.update_dataset.name)) -class URLInputOutput(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class URLInputOutput(pydantic.BaseModel, extra='forbid'): """ Represents a url used for input/output """ url: str regex: str = '' - @pydantic.validator('regex') + @pydantic.field_validator('regex') @classmethod def validate_regex(cls, regex: str) -> str | None: """ @@ -522,23 +512,25 @@ def __hash__(self): OutputType = DatasetInputOutput | URLInputOutput | UpdateDatasetOutput -class CheckpointSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class CheckpointSpec(pydantic.BaseModel, extra='forbid'): """ Represents a checkpoint spec """ path: str url: constants.StorageBackendPattern frequency: datetime.timedelta regex: str = '' - @pydantic.validator('frequency', pre=True) + @pydantic.field_validator('frequency', mode='before') @classmethod - def validate_frequency(cls, value) ->datetime.timedelta: + def validate_frequency(cls, value) -> datetime.timedelta: + if isinstance(value, bool): + raise ValueError('Checkpoint frequency must be a duration, not a boolean') if isinstance(value, (int, float)): return datetime.timedelta(seconds=value) if isinstance(value, datetime.timedelta): return value return common.to_timedelta(value) - @pydantic.validator('regex') + @pydantic.field_validator('regex') @classmethod def validate_regex(cls, regex: str) -> str | None: """ @@ -557,19 +549,23 @@ def validate_regex(cls, regex: str) -> str | None: raise ValueError(f'Invalid regex: {regex}') from err -class TaskKPI(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskKPI(pydantic.BaseModel): """ Represents a KPI stored in a task """ + model_config = pydantic.ConfigDict(extra='forbid', coerce_numbers_to_str=True) + index: str path: str -class File(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class File(pydantic.BaseModel): """ Encodes text contents to uniformly support text and binary files. """ + model_config = pydantic.ConfigDict(extra='forbid', coerce_numbers_to_str=True) + base64: bool = False path: str contents: str - @pydantic.validator('path') + @pydantic.field_validator('path') @classmethod def validate_path(cls, path: str) -> str: """ @@ -594,8 +590,14 @@ def encoded_contents(self) -> str: return self.contents -class TaskSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskSpec(pydantic.BaseModel): """ Represents the container spec in a task spec. """ + # Pydantic v2 is strict about str types. YAML users naturally write unquoted + # integers (e.g. exitActions: {RESCHEDULE: 3}, args: [echo, 42]) or booleans + # (e.g. environment: {DEBUG: true}) which parse as int/bool, not str. + # coerce_numbers_to_str restores v1 behavior for int/float→str coercion. 
+ model_config = pydantic.ConfigDict(extra='forbid', coerce_numbers_to_str=True) + name: task_common.NamePattern image: str command: List[str] @@ -619,10 +621,64 @@ class TaskSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): # A simplified resource representation in the workflow spec resource: str = 'default' - @pydantic.validator('downloadType', pre=True) + @pydantic.field_validator('environment', 'exitActions', mode='before') + @classmethod + def coerce_dict_str_values(cls, value: Any) -> Any: + """Coerce non-string scalar dict values (e.g. YAML booleans/ints) to strings. + + coerce_numbers_to_str handles int/float but not bool. YAML users write + environment: {DEBUG: true} or exitActions: {RESCHEDULE: 3} which parse + as bool/int, so we coerce here. Non-scalar values (None, list, dict) are + left as-is so they fail downstream validation. + """ + if isinstance(value, dict): + return { + str(k): str(v) if isinstance(v, (str, int, float, bool)) else v + for k, v in value.items() + } + return value + + @pydantic.field_validator('credentials', mode='before') + @classmethod + def coerce_credential_values(cls, value: Any) -> Any: + """Coerce credential values to their expected types. + + Credential values can be str (path) or Dict[str, str] (key mappings). + YAML may parse values as int/bool instead of str. + + Raises: + ValueError: If value is not a str or dict, or contains invalid + inner types (None, list, object). + """ + if isinstance(value, str): + return value + if isinstance(value, dict): + result: Dict[str, Union[str, Dict[str, str]]] = {} + for k, v in value.items(): + if isinstance(v, dict): + for dk, dv in v.items(): + if not isinstance(dv, (str, int, float, bool)): + raise ValueError( + f'credential key mapping value for {k}.{dk} must be a scalar, ' + f'got {type(dv).__name__}' + ) + result[str(k)] = {str(dk): str(dv) for dk, dv in v.items()} + elif isinstance(v, (str, int, float, bool)): + result[str(k)] = str(v) + else: + raise ValueError( + f'credential value for {k} must be a str or dict, ' + f'got {type(v).__name__}' + ) + return result + raise ValueError( + f'credentials must be a str or dict, got {type(value).__name__}' + ) + + @pydantic.field_validator('downloadType', mode='before') @classmethod def validate_download_type(cls, download_type: Optional[Union[str, connectors.DownloadType]], - values: Dict) -> Optional[connectors.DownloadType]: + info: pydantic.ValidationInfo) -> Optional[connectors.DownloadType]: """ Validates downloadType. Converts string values to DownloadType enum. @@ -631,7 +687,7 @@ def validate_download_type(cls, download_type: Optional[Union[str, connectors.Do """ if download_type is None: return None - name = values.get('name', '') + name = info.data.get('name', '') if isinstance(download_type, connectors.DownloadType): return download_type if isinstance(download_type, str): @@ -641,9 +697,11 @@ def validate_download_type(cls, download_type: Optional[Union[str, connectors.Do valid_types = [dt.value for dt in connectors.DownloadType] raise ValueError(f'Task "{name}" uses invalid downloadType "{download_type}". 
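As the inline comment in `TaskSpec` explains, `coerce_numbers_to_str=True` restores v1's int/float-to-str behavior but deliberately excludes `bool`, which is why the new `coerce_dict_str_values` before-validator stringifies scalar dict values itself. A sketch of the interaction (field names trimmed to the relevant two):

```python
from typing import Any, Dict

import pydantic


class EnvDemo(pydantic.BaseModel):
    model_config = pydantic.ConfigDict(coerce_numbers_to_str=True)

    image: str
    environment: Dict[str, str] = {}

    @pydantic.field_validator('environment', mode='before')
    @classmethod
    def stringify_values(cls, value: Any) -> Any:
        if isinstance(value, dict):
            return {str(k): str(v) if isinstance(v, (str, int, float, bool)) else v
                    for k, v in value.items()}
        return value


# YAML 'image: 42' parses to int 42; 'DEBUG: true' parses to bool True.
demo = EnvDemo(image=42, environment={'DEBUG': True, 'RETRIES': 3})
print(demo.image, demo.environment)  # 42 {'DEBUG': 'True', 'RETRIES': '3'}
```

Note that `str(True)` yields `'True'`, not YAML-style `'true'`; whether that matters depends on the downstream consumers of these environment values.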
' f'Valid types are: {valid_types}') + raise ValueError( + f'Task "{name}" has unsupported downloadType type: {type(download_type).__name__}' + ) - - @pydantic.validator('name') + @pydantic.field_validator('name') @classmethod def validate_name(cls, name: task_common.NamePattern) -> task_common.NamePattern: """ @@ -657,30 +715,30 @@ def validate_name(cls, name: task_common.NamePattern) -> task_common.NamePattern 'This is a restricted name.') return name - @pydantic.validator('command') + @pydantic.field_validator('command') @classmethod - def validate_command(cls, command: List[str], values: Dict) -> List[str]: + def validate_command(cls, command: List[str], info: pydantic.ValidationInfo) -> List[str]: """ Validates command. Returns the value of command if valid. Raises: ValueError: Containers fails validation. """ - name = values.get('name', '') + name = info.data.get('name', '') if not command: raise ValueError(f'Container {name} should have at least one command.') return command - @pydantic.validator('files') + @pydantic.field_validator('files') @classmethod - def validate_files(cls, files: List[File], values: Dict) -> List[File]: + def validate_files(cls, files: List[File], info: pydantic.ValidationInfo) -> List[File]: """ Validates that all file paths are unique. Returns the list if valid Raises: ValueError: There are duplicate file paths """ - name = values.get('name', '') + name = info.data.get('name', '') all_paths: Set[str] = set() for file in files: if file.path in all_paths: @@ -701,10 +759,12 @@ def propagate_resource_values(self, resources: Dict[str, connectors.ResourceSpec f'Requesting undefined resource {self.resource}.') self.resources = resource_spec - @pydantic.validator('exitActions') + @pydantic.field_validator('exitActions') @classmethod - def validate_exit_actions(cls, exit_actions: Dict[str, str], values: Dict) -> Dict[str, str]: - name = values.get('name', '') + def validate_exit_actions( + cls, exit_actions: Dict[str, str], + info: pydantic.ValidationInfo) -> Dict[str, str]: + name = info.data.get('name', '') regex = re.compile(CODE_REGEX) for key, value in exit_actions.items(): try: @@ -769,7 +829,7 @@ def get_resource_from_spec(self, resource: Dict[str, Any], def to_pod_resource_spec(self, resource: connectors.ResourceSpec) -> Dict: """ Convert the resource spec from WorkflowSpec to the K8 pod resource spec. 
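Cross-field access changes shape throughout these validators: instead of a `values: Dict` argument, v2 field validators take `info: pydantic.ValidationInfo` and read previously validated fields from `info.data` (only fields declared earlier in the model are available, as before). Sketch mirroring `validate_command`:

```python
from typing import List

import pydantic


class TaskDemo(pydantic.BaseModel):
    name: str
    command: List[str]

    @pydantic.field_validator('command')
    @classmethod
    def validate_command(cls, command: List[str],
                         info: pydantic.ValidationInfo) -> List[str]:
        name = info.data.get('name', '')  # v1: values.get('name', '')
        if not command:
            raise ValueError(f'Container {name} should have at least one command.')
        return command


try:
    TaskDemo(name='train', command=[])
except pydantic.ValidationError as err:
    print(err.errors()[0]['msg'])
```

Using `info.data.get('name', '')` rather than indexing also hardens against the case where `name` itself failed validation and is absent from `info.data`.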
""" - resource_spec = resource.dict() + resource_spec = resource.model_dump() pod_resource_spec = {} for resource_type in common.ALLOCATABLE_RESOURCES_LABELS: resource = self.get_resource_from_spec( @@ -894,14 +954,14 @@ def parse(self, workflow_id: str, host_tokens: Dict[str, str]) -> 'TaskSpec': else: raise osmo_errors.OSMOUsageError('Unknown Input Type') - parsed_json = self.json() + parsed_json = self.model_dump_json() for key, value in tokens.items(): parsed_json = re.sub('{{[ ]*' + key + '[ ]*}}', value, parsed_json) return TaskSpec(**json.loads(parsed_json)) def saved_spec(self) -> Dict: - base_spec = self.dict(exclude_defaults=True) + base_spec = self.model_dump(exclude_defaults=True) if 'resources' in base_spec: del base_spec['resources'] if 'backend' in base_spec: @@ -916,9 +976,7 @@ class TaskGroupSpec(pydantic.BaseModel): ignoreNonleadStatus: bool = True # pylint: disable=invalid-name tasks: List[TaskSpec] - class Config: - use_enum_values = True - extra = 'forbid' + model_config = pydantic.ConfigDict(use_enum_values=True, extra='forbid') @property def inputs(self) -> List[InputType]: @@ -927,16 +985,16 @@ def inputs(self) -> List[InputType]: inputs |= set(task.inputs) return list(inputs) - @pydantic.validator('tasks') + @pydantic.field_validator('tasks') @classmethod - def validate_tasks(cls, value: List[TaskSpec], values: Dict) -> List[TaskSpec]: + def validate_tasks(cls, value: List[TaskSpec], info: pydantic.ValidationInfo) -> List[TaskSpec]: """ Validates tasks. Returns the value of tasks if valid. Raises: ValueError: Containers fails validation. """ - group_name = values['name'] + group_name = info.data.get('name', '') # Need at least one task if not value: @@ -1005,12 +1063,12 @@ def parse(self, database: connectors.PostgresConnector, tasks=tasks) def saved_spec(self) -> Dict: - base_spec = self.dict(exclude_defaults=True) + base_spec = self.model_dump(exclude_defaults=True) base_spec['tasks'] = [task.saved_spec() for task in self.tasks] return base_spec -class TaskGroupMetrics(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskGroupMetrics(pydantic.BaseModel, extra='forbid'): """ Represents metrics submitted by each user task in a workflow """ retry_id: int = 0 @@ -1034,12 +1092,11 @@ class Task(pydantic.BaseModel): failure_message: str | None = None database: connectors.PostgresConnector exit_actions: Dict[str, str] - node_name: str | None - pod_ip: str | None + node_name: str | None = None + pod_ip: str | None = None lead: bool - class Config: - arbitrary_types_allowed = True + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True) @staticmethod def batch_insert_to_db( @@ -1598,14 +1655,12 @@ class TaskGroup(pydantic.BaseModel): # Used by cleanup to avoid dependency on the current pool config. group_template_resource_types: List[Dict[str, Any]] = [] - class Config: - arbitrary_types_allowed = True - extra = 'forbid' + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True, extra='forbid') def insert_to_db(self, status: TaskGroupStatus = TaskGroupStatus.SUBMITTING, failure_message: str | None = None): """ Creates an entry in the database for the group. 
""" - spec = self.spec.json() + spec = self.spec.model_dump_json() insert_cmd = ''' INSERT INTO groups (workflow_id, name, group_uuid, spec, status, failure_message, @@ -1619,7 +1674,7 @@ def insert_to_db(self, status: TaskGroupStatus = TaskGroupStatus.SUBMITTING, failure_message, _encode_hstore(self.remaining_upstream_groups), _encode_hstore(self.downstream_groups), - self.scheduler_settings.json() if self.scheduler_settings else None, + self.scheduler_settings.model_dump_json() if self.scheduler_settings else None, json.dumps(self.group_template_resource_types))) @staticmethod @@ -1972,7 +2027,7 @@ def update_status_to_db(self, update_time: datetime.datetime, status: TaskGroupS update_cmd = connectors.PostgresUpdateCommand(table='groups') update_cmd.add_condition('workflow_id = %s AND name = %s', [self.workflow_id, self.name]) if scheduler_settings is not None: - update_cmd.add_field('scheduler_settings', scheduler_settings.json()) + update_cmd.add_field('scheduler_settings', scheduler_settings.model_dump_json()) if group_status == TaskGroupStatus.WAITING: update_cmd.add_condition("status IN ('SUBMITTING')", []) if group_status == TaskGroupStatus.PROCESSING: diff --git a/src/utils/job/task_io.py b/src/utils/job/task_io.py index a9d3e41fe..e68a5619d 100644 --- a/src/utils/job/task_io.py +++ b/src/utils/job/task_io.py @@ -34,7 +34,7 @@ class DownloadTypeMetrics(enum.Enum): NOT_APPLICABLE = 'N/A' -class TaskIOMetrics(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TaskIOMetrics(pydantic.BaseModel, extra='forbid'): """ Represents metrics submitted by each user task in a workflow """ group_name: task_common.NamePattern @@ -50,8 +50,9 @@ class TaskIOMetrics(pydantic.BaseModel, extra=pydantic.Extra.forbid): download_type: DownloadTypeMetrics -class TaskIO(pydantic.BaseModel, extra=pydantic.Extra.forbid, arbitrary_types_allowed=True): +class TaskIO(pydantic.BaseModel): """ Represents the task object . 
""" + model_config = pydantic.ConfigDict(extra='forbid', arbitrary_types_allowed=True) workflow_id: task_common.NamePattern group_name: task_common.NamePattern task_name: task_common.NamePattern diff --git a/src/utils/job/tests/test_task_db.py b/src/utils/job/tests/test_task_db.py index cb4c95515..fd49bc1df 100644 --- a/src/utils/job/tests/test_task_db.py +++ b/src/utils/job/tests/test_task_db.py @@ -88,7 +88,7 @@ def _insert_group(self, group_name: str = GROUP_NAME, (workflow_id, name, group_uuid, spec, status, cleaned_up, remaining_upstream_groups, downstream_groups) VALUES (%s, %s, %s, %s, %s, FALSE, NULL, NULL)''', - (WORKFLOW_ID, group_name, group_uuid, spec.json(), status)) + (WORKFLOW_ID, group_name, group_uuid, spec.model_dump_json(), status)) def _insert_task(self, task_name: str, retry_id: int = 0, status: str = 'RUNNING', lead: bool = False, @@ -363,7 +363,7 @@ def test_batch_insert_creates_all_groups(self): for name in ['group1', 'group2', 'group3']: entries.append(( WORKFLOW_ID, name, common.generate_unique_id(), - spec.json(), 'SUBMITTING', None, '', '', None, '[]', + spec.model_dump_json(), 'SUBMITTING', None, '', '', None, '[]', )) task.TaskGroup.batch_insert_to_db(self._get_db(), entries) @@ -394,9 +394,9 @@ def test_batch_insert_skips_duplicates(self): ) entries = [ (WORKFLOW_ID, 'group1', common.generate_unique_id(), - spec.json(), 'SUBMITTING', None, '', '', None, '[]'), + spec.model_dump_json(), 'SUBMITTING', None, '', '', None, '[]'), (WORKFLOW_ID, 'group2', common.generate_unique_id(), - spec.json(), 'SUBMITTING', None, '', '', None, '[]'), + spec.model_dump_json(), 'SUBMITTING', None, '', '', None, '[]'), ] task.TaskGroup.batch_insert_to_db(self._get_db(), entries) diff --git a/src/utils/job/workflow.py b/src/utils/job/workflow.py index 026c6c314..daf59ac9f 100644 --- a/src/utils/job/workflow.py +++ b/src/utils/job/workflow.py @@ -100,28 +100,28 @@ def failed(self) -> bool: return not self.alive() and self.name != 'COMPLETED' -class ResourcesEntry(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class ResourcesEntry(pydantic.BaseModel, extra='forbid'): """ Entry for resources API results. """ hostname: str exposed_fields: Dict taints: List[Dict] usage_fields: Dict - conditions: List[str] | None + conditions: List[str] | None = None non_workflow_usage_fields: Dict allocatable_fields: Dict - platform_allocatable_fields: Dict | None - platform_available_fields: Dict | None - platform_workflow_allocatable_fields: Dict | None - config_fields: Dict | None + platform_allocatable_fields: Dict | None = None + platform_available_fields: Dict | None = None + platform_workflow_allocatable_fields: Dict | None = None + config_fields: Dict | None = None backend: str - label_fields: Dict | None + label_fields: Dict | None = None pool_platform_labels: Dict[str, List[str]] resource_type: connectors.BackendResourceType @classmethod def from_backend_resource(cls, resource: connectors.BackendResource, verbose: bool) -> 'ResourcesEntry': - return ResourcesEntry.construct( + return ResourcesEntry.model_construct( hostname=resource.name, backend=resource.backend, usage_fields=resource.converted_usage_fields, @@ -185,12 +185,12 @@ def build_resource_lookup_table(resource_entry: ResourcesEntry, return mapping -class TimeoutSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TimeoutSpec(pydantic.BaseModel, extra='forbid'): """ Represents the timeout spec. 
""" exec_timeout: datetime.timedelta | None = None queue_timeout: datetime.timedelta | None = None - @pydantic.validator('exec_timeout', 'queue_timeout', pre=True) + @pydantic.field_validator('exec_timeout', 'queue_timeout', mode='before') @classmethod def validate_timeout(cls, value) -> Optional[datetime.timedelta]: if isinstance(value, (int, float)): @@ -242,7 +242,7 @@ def split_assertion_rules(assertions: List[connectors.ResourceAssertion]) -> \ return static_assertions, k8_assertions -class WorkflowSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class WorkflowSpec(pydantic.BaseModel, extra='forbid'): """ Represents the workflow spec from the workflow service. """ name: task_common.NamePattern pool: str = '' @@ -252,7 +252,7 @@ class WorkflowSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): timeout: TimeoutSpec = TimeoutSpec() backend: str = '' - @pydantic.root_validator() + @pydantic.model_validator(mode='before') @classmethod def validate_tasks_groups(cls, values): """ @@ -280,12 +280,33 @@ def _validate_name(spec_name: str): name_set.add(name) for task_spec in values.get('tasks', []): - _validate_name(task_spec.name) + if hasattr(task_spec, 'name'): + spec_name = task_spec.name + elif isinstance(task_spec, dict) and 'name' in task_spec: + spec_name = task_spec['name'] + else: + continue + _validate_name(spec_name) for group_spec in values.get('groups', []): - _validate_name(group_spec.name) - for task_spec in group_spec.tasks: - _validate_name(task_spec.name) + if hasattr(group_spec, 'name'): + group_name = group_spec.name + elif isinstance(group_spec, dict) and 'name' in group_spec: + group_name = group_spec['name'] + else: + continue + _validate_name(group_name) + group_tasks = (group_spec.tasks + if hasattr(group_spec, 'tasks') + else group_spec.get('tasks', [])) + for task_spec in group_tasks: + if hasattr(task_spec, 'name'): + spec_name = task_spec.name + elif isinstance(task_spec, dict) and 'name' in task_spec: + spec_name = task_spec['name'] + else: + continue + _validate_name(spec_name) return values @@ -367,7 +388,7 @@ def parse(self, database: connectors.PostgresConnector, try: groups = [group.initialize_group_tasks(group_and_task_uuids, self.resources) for group in self.groups] - if 'timeout' in self.dict(exclude_defaults=True): + if 'timeout' in self.model_fields_set: return WorkflowSpec(name=self.name, groups=groups, timeout=self.timeout, resources=self.resources, backend=self.backend, pool=self.pool) return WorkflowSpec(name=self.name, groups=groups, @@ -744,29 +765,48 @@ def saved_spec(self) -> Dict: base_spec = { 'name': self.name, 'groups': [group.saved_spec() for group in self.groups], - 'resources': {key: resource.dict(exclude_defaults=True) + 'resources': {key: resource.model_dump(exclude_defaults=True) for key, resource in self.resources.items()} } - if 'timeout' in self.dict(exclude_defaults=True): - base_spec['timeout'] = self.timeout.dict() + if 'timeout' in self.model_fields_set: + base_spec['timeout'] = self.timeout.model_dump() return base_spec -class VersionedWorkflowSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class VersionedWorkflowSpec(pydantic.BaseModel, extra='forbid'): """Control the WorkflowSpec version. """ version: int = 2 # Default to OSMO workflow spec version 2 workflow: WorkflowSpec - @pydantic.validator('version', pre=True, always=True) + @pydantic.field_validator('version', mode='before') @classmethod - def validate_version(cls, value: int) -> int: - """ Validates that the version is supported. 
""" - if value != 2: + def validate_version(cls, value: Any) -> int: + """Validates that the version is supported. + + mode='before' receives raw input (may be str from YAML/JSON), + so we must coerce before comparing. + """ + if isinstance(value, bool): + raise ValueError(f'Unsupported workflow version: {value}.') + if isinstance(value, float): + if not value.is_integer(): + raise ValueError(f'Unsupported workflow version: {value}.') + coerced = int(value) + elif isinstance(value, str): + try: + coerced = int(value) + except ValueError as exc: + raise ValueError(f'Unsupported workflow version: {value}.') from exc + elif isinstance(value, int): + coerced = value + else: + raise ValueError(f'Unsupported workflow version: {value}.') + if coerced != 2: raise ValueError(f'Unsupported workflow version: {value}.') - return value + return coerced -class TemplateSpec(pydantic.BaseModel, extra=pydantic.Extra.forbid): +class TemplateSpec(pydantic.BaseModel, extra='forbid'): """ Template Spec. """ file: str set_variables: List[str] = [] @@ -878,11 +918,11 @@ class Workflow(pydantic.BaseModel): status: WorkflowStatus = WorkflowStatus.PENDING timeout: TimeoutSpec = TimeoutSpec() priority: wf_priority.WorkflowPriority - cancelled_by: str | None + cancelled_by: str | None = None outputs: str = '' backend: str # TODO make pool not None - pool: str | None + pool: str | None = None version: int | None = 0 failure_message: str | None = '' parent_name: task_common.NamePattern | None = None @@ -891,9 +931,7 @@ class Workflow(pydantic.BaseModel): parent_job_id: int | None = None plugins: task_common.WorkflowPlugins = task_common.WorkflowPlugins() - class Config: - arbitrary_types_allowed = True - extra = 'forbid' + model_config = pydantic.ConfigDict(arbitrary_types_allowed=True, extra='forbid') def insert_to_db(self, version: int = 2): """ Creates an entry in the database for the overall workflow. """ @@ -957,10 +995,16 @@ def insert_to_db(self, version: int = 2): insert_cmd, (self.workflow_name, self.workflow_name, self.workflow_name, self.workflow_uuid, - self.user, self.submit_time, self.start_time, self.end_time, - self.status.name, self.logs, exec_timeout, queue_timeout, self.backend, - self.pool, version, self.failure_message, self.parent_name, - self.parent_job_id, self.app_uuid, self.app_version, self.plugins.json(), + self.user, self.submit_time, + self.start_time, self.end_time, + self.status.name, self.logs, + exec_timeout, queue_timeout, + self.backend, + self.pool, version, + self.failure_message, self.parent_name, + self.parent_job_id, self.app_uuid, + self.app_version, + self.plugins.model_dump_json(), self.priority.value)) break except osmo_errors.OSMODatabaseError as err: diff --git a/src/utils/metrics/metrics.py b/src/utils/metrics/metrics.py index bb4464d18..f95dab30c 100644 --- a/src/utils/metrics/metrics.py +++ b/src/utils/metrics/metrics.py @@ -47,20 +47,23 @@ class InstrumentType(enum.Enum): class MetricsCreatorConfig(pydantic.BaseModel): """ Manages the config for the Metrics Creator. 
""" metrics_prometheus_port: int = pydantic.Field( - command_line='metrics_prometheus_port', - env='METRICS_PROMETHEUS_PORT', default=9464, - description='The port on which the Prometheus scrape endpoint is exposed.') + description='The port on which the Prometheus scrape endpoint is exposed.', + json_schema_extra={ + 'command_line': 'metrics_prometheus_port', + 'env': 'METRICS_PROMETHEUS_PORT' + }) metrics_otel_collector_component: str = pydantic.Field( - command_line='metrics_otel_collector_component', - env='METRICS_OTEL_COLLECTOR_COMPONENT', default='osmo_service_component', - description='The osmo service component') + description='The osmo service component', + json_schema_extra={ + 'command_line': 'metrics_otel_collector_component', + 'env': 'METRICS_OTEL_COLLECTOR_COMPONENT' + }) metrics_otel_enable: bool = pydantic.Field( - command_line='metrics_otel_enable', - env='METRICS_OTEL_ENABLE', default=False, - description='If set false, will disable metrics') + description='If set false, will disable metrics', + json_schema_extra={'command_line': 'metrics_otel_enable', 'env': 'METRICS_OTEL_ENABLE'}) class MetricCreator: """ diff --git a/src/utils/progress_check/progress_check.py b/src/utils/progress_check/progress_check.py index 7ff4664c3..8712d916a 100644 --- a/src/utils/progress_check/progress_check.py +++ b/src/utils/progress_check/progress_check.py @@ -26,18 +26,16 @@ class ProgressCheckConfig(static_config.StaticConfig): progress_interval: str = pydantic.Field( - command_line='progress_interval', - env='OSMO_PROGRESS_INTERVAL', default='10', description='Check for progress within the last seconds. Exit with ' + 'code 0 if there was progress, otherwise exit with code 1. To check multiple ' + - 'files, it may be a list of intervals separated by ":"') + 'files, it may be a list of intervals separated by ":"', + json_schema_extra={'command_line': 'progress_interval', 'env': 'OSMO_PROGRESS_INTERVAL'}) progress_file: str = pydantic.Field( - command_line='progress_file', - env='OSMO_PROGRESS_FILE', default='/var/run/osmo/last_progress', description='The file to read progress timestamps from (For liveness/startup probes). To ' + - 'check multiple files, a list may be provided delimited by ":"') + 'check multiple files, a list may be provided delimited by ":"', + json_schema_extra={'command_line': 'progress_file', 'env': 'OSMO_PROGRESS_FILE'}) def main(): diff --git a/src/utils/static_config.py b/src/utils/static_config.py index 3b98a8311..ae3ea4121 100644 --- a/src/utils/static_config.py +++ b/src/utils/static_config.py @@ -20,15 +20,25 @@ import os import sys import typing +from typing import Any, ClassVar, Dict, Optional import pydantic +from pydantic.fields import FieldInfo import yaml +def _get_field_extras(field: FieldInfo) -> Dict[str, Any]: + """Get json_schema_extra as a dict, handling Callable and None cases.""" + extra = field.json_schema_extra + if isinstance(extra, dict): + return extra + return {} + + class StaticConfig(pydantic.BaseModel): """ A class for reading in config information from either command line, files, or environment variables """ - _instance = None + _instance: ClassVar[Optional[Any]] = None @classmethod def load(cls): if cls._instance is not None: @@ -43,22 +53,23 @@ def load(cls): 'times. 
@@ -43,22 +53,23 @@ def load(cls):
                                 'times. If a config parameter is duplicated in more than one ' \
                                 'file, the value in the last file is used.')

-        for name, field in cls.__fields__.items():
-            if 'command_line' in field.field_info.extra:
-                help_message = field.field_info.description
+        for _, field in cls.model_fields.items():
+            extras = _get_field_extras(field)
+            if 'command_line' in extras:
+                help_message = field.description or ''
                 if field.default is not None:
-                    help_message += f' (default: {str(field.default)})'
-                parser.add_argument(f'--{field.field_info.extra["command_line"]}',
-                                    action=field.field_info.extra.get('action', 'store'),
+                    help_message += f' (default: {field.default!s})'
+                parser.add_argument(f'--{extras["command_line"]}',
+                                    action=extras.get('action', 'store'),
                                     help=help_message)
         args = parser.parse_args()

         # Initialize config with default values
         config = {}
-        for name, field in cls.__fields__.items():
+        for name, field in cls.model_fields.items():
             # If the default is None and its not optional, then dont set the default because the
             # user must provide this value
-            if not field.required:
+            if not field.is_required():
                 config[name] = field.default

         # Load any config files. The later files override anything from the earlier files
@@ -66,7 +77,7 @@ def load(cls):
             with open(config_file, encoding='utf-8') as file:
                 config.update(yaml.safe_load(file))
             for key in config:
-                if key not in cls.__fields__.keys():
+                if key not in cls.model_fields.keys():
                     raise ValueError(f'Unrecognized key "{key}" in config file {config_file}')
         args_dict = vars(args)
         args_dict.pop('config')
@@ -76,10 +87,11 @@ def load(cls):
         # 2. Command line argument
         # 3. Config file
         # 4. Default
-        for name, field in cls.__fields__.items():
-            env_name = field.field_info.extra.get('env')
-            arg_name = field.field_info.extra.get('command_line')
-            is_list = typing.get_origin(field.outer_type_) is list
+        for name, field in cls.model_fields.items():
+            extras = _get_field_extras(field)
+            env_name = extras.get('env')
+            arg_name = extras.get('command_line')
+            is_list = typing.get_origin(field.annotation) is list
             # Do we have an environment variable? If so, use that
             if env_name is not None and env_name in os.environ:
                 if is_list:
@@ -98,18 +110,21 @@ def load(cls):
         except pydantic.ValidationError as error:
             # Parse through errors and print them in a more user friendly manner
             for type_error in error.errors():
-                if type_error['type'] not in ('type_error.none.not_allowed', 'value_error.missing'):
+                if type_error['type'] not in ('type_error.none.not_allowed', 'value_error.missing',
+                                              'missing', 'none_required'):
                     print(type_error)
                 else:
-                    field = cls.__fields__[str(type_error['loc'][0])]  # pylint: disable=E1136
-                    print(f'ERROR: No value provided for config {field.name} ' \
+                    field_name = str(type_error['loc'][0])
+                    field = cls.model_fields[field_name]  # pylint: disable=E1136
+                    extras = _get_field_extras(field)
+                    print(f'ERROR: No value provided for config {field_name} ' \
                           'via any of the following methods:')
-                    print(f'- Config file key: {field.name}')
-                    if 'command_line' in field.field_info.extra:
-                        command_line = field.field_info.extra['command_line']
+                    print(f'- Config file key: {field_name}')
+                    if 'command_line' in extras:
+                        command_line = extras['command_line']
                         print(f'- Command line argument: --{command_line}')
-                    if 'env' in field.field_info.extra:
-                        env = field.field_info.extra['env']
+                    if 'env' in extras:
+                        env = extras['env']
                         print(f'- Environment variable: {env}')
                     sys.exit(1)
         return cls._instance
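[Migration note] The error-type strings in `load()` changed shape in v2: a missing required field now reports `'missing'`, and a `None` passed to a non-optional field reports `'none_required'`, so the v1 strings `'value_error.missing'` and `'type_error.none.not_allowed'` kept in the tuple are dead once v1 is gone and could be dropped in a follow-up. A quick check of the v2 shape (the `Cfg` model is invented):

    import pydantic

    class Cfg(pydantic.BaseModel):
        name: str

    try:
        Cfg()
    except pydantic.ValidationError as error:
        err = error.errors()[0]
        print(err['type'], err['loc'])  # missing ('name',)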