From 8dc6c1020ce825fd5c76ad52b16e96279c8fab0c Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Tue, 28 Apr 2026 22:05:30 +0200 Subject: [PATCH 01/17] update env (dependabot alert) --- uv.lock | 706 ++++++++++++++++++++++++++++---------------------------- 1 file changed, 351 insertions(+), 355 deletions(-) diff --git a/uv.lock b/uv.lock index 3b8c477f..d71bc633 100644 --- a/uv.lock +++ b/uv.lock @@ -226,11 +226,11 @@ wheels = [ [[package]] name = "certifi" -version = "2026.2.25" +version = "2026.4.22" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +sdist = { url = "https://files.pythonhosted.org/packages/25/ee/6caf7a40c36a1220410afe15a1cc64993a1f864871f698c0f93acb72842a/certifi-2026.4.22.tar.gz", hash = "sha256:8d455352a37b71bf76a79caa83a3d6c25afee4a385d632127b6afb3963f1c580", size = 137077, upload-time = "2026-04-22T11:26:11.191Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, + { url = "https://files.pythonhosted.org/packages/22/30/7cd8fdcdfbc5b869528b079bfb76dcdf6056b1a2097a662e5e8c04f42965/certifi-2026.4.22-py3-none-any.whl", hash = "sha256:3cb2210c8f88ba2318d29b0388d1023c8492ff72ecdde4ebdaddbb13a31b1c4a", size = 135707, upload-time = "2026-04-22T11:26:09.372Z" }, ] [[package]] @@ -367,14 +367,14 @@ wheels = [ [[package]] name = "click" -version = "8.3.2" +version = "8.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { 
url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/63/f9e1ea081ce35720d8b92acde70daaedace594dc93b693c869e0d5910718/click-8.3.3.tar.gz", hash = "sha256:398329ad4837b2ff7cbe1dd166a4c0f8900c3ca3a218de04466f38f6497f18a2", size = 328061, upload-time = "2026-04-22T15:11:27.506Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, + { url = "https://files.pythonhosted.org/packages/ae/44/c1221527f6a71a01ec6fbad7fa78f1d50dfa02217385cf0fa3eec7087d59/click-8.3.3-py3-none-any.whl", hash = "sha256:a2bf429bb3033c89fa4936ffb35d5cb471e3719e1f3c8a7c3fff0b8314305613", size = 110502, upload-time = "2026-04-22T15:11:25.044Z" }, ] [[package]] @@ -583,7 +583,7 @@ wheels = [ [[package]] name = "datasets" -version = "4.8.4" +version = "4.8.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dill" }, @@ -601,9 +601,9 @@ dependencies = [ { name = "tqdm" }, { name = "xxhash" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/22/73e46ac7a8c25e7ef0b3bd6f10da3465021d90219a32eb0b4d2afea4c56e/datasets-4.8.4.tar.gz", hash = "sha256:a1429ed853275ce7943a01c6d2e25475b4501eb758934362106a280470df3a52", size = 604382, upload-time = "2026-03-23T14:21:17.987Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/34/14cd8e76f907f7d4dca2334cfeec9f81d30fd15c25a015f99aaea694eaed/datasets-4.8.5.tar.gz", hash = "sha256:0f0c1c3d56ffff2c93b2f4c63c95bac94f3d7e8621aea2a2a576275233bba772", size = 605649, upload-time = 
"2026-04-27T15:43:57.384Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/e5/247d094108e42ac26363ab8dc57f168840cf7c05774b40ffeb0d78868fcc/datasets-4.8.4-py3-none-any.whl", hash = "sha256:cdc8bee4698e549d78bf1fed6aea2eebc760b22b084f07e6fc020c6577a6ce6d", size = 526991, upload-time = "2026-03-23T14:21:15.89Z" }, + { url = "https://files.pythonhosted.org/packages/65/99/00f3196036501b53032c4b1ab8337a0b978dee832ed276dae3815df4e8b5/datasets-4.8.5-py3-none-any.whl", hash = "sha256:5079900781719c0e063a8efdd2cd95a31ad0c63209178669cd23cf1b926149ff", size = 528973, upload-time = "2026-04-27T15:43:53.702Z" }, ] [[package]] @@ -677,28 +677,28 @@ wheels = [ [[package]] name = "eigency" -version = "3.4.0.7" +version = "5.0.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/3f/603c67e1e4e30aecbd3d9bf7be02675b23004534d77c63b4c042d7d1b1bd/eigency-3.4.0.7.tar.gz", hash = "sha256:4f123342f0740b2d50d5cad4f2a89594200c55896c2de6c53864d90a96a0e651", size = 1254175, upload-time = "2026-02-25T07:55:01.01Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/db/ad7e9c8fdd43040ce4ebe23761a9810e38c26e422b117188bffe880eef22/eigency-5.0.1.0.tar.gz", hash = "sha256:aba3a16eb2bb1a42be2983abb95c11e58f92e277645a77dd35db1cfcec333f88", size = 1254353, upload-time = "2026-04-25T13:36:16.941Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/45/35/e2a31f3039083443dc349c86207ac11a6fbfddfccc7356bddb386427b13f/eigency-3.4.0.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0f019389258b8a887afc750b676a20b7a7de39a4c1301433b0936055a79dadf7", size = 1621592, upload-time = "2026-02-25T07:54:20.436Z" }, - { url = "https://files.pythonhosted.org/packages/76/1f/53007228f0fbf7f9f7801a95ba3beb403bc2993bcd39f3b6c6638fba0198/eigency-3.4.0.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d78be187e30be3a50a2fbd4c38282401ce48851a4f856ba1ca6924b75228e4d7", size = 2581366, upload-time = "2026-02-25T07:54:22.228Z" }, - { url = "https://files.pythonhosted.org/packages/a3/bc/38fac7543c269b7adf37b8e398fbc0e6ad0e081263b26601d8bacc92244a/eigency-3.4.0.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62645b24a0aed8fe15440bf13e0471c905fe207f810706dd0ef205a9c58aa7ba", size = 2428651, upload-time = "2026-02-25T07:54:24.092Z" }, - { url = "https://files.pythonhosted.org/packages/c4/2e/1802e74626b2ff34e8db03c32948c822ea9091f7a4e49efa1ab5f4c64888/eigency-3.4.0.7-cp311-cp311-win32.whl", hash = "sha256:0ee34edc310fabe04515a384c7ddb2342d5fc6c29acd07f563725ebb3c1df436", size = 1593342, upload-time = "2026-02-25T07:54:27.652Z" }, - { url = "https://files.pythonhosted.org/packages/40/db/508e024e6bb5e40acb777100ecbd9e8da55ced94b6f5f39e39f11c0f6c46/eigency-3.4.0.7-cp311-cp311-win_amd64.whl", hash = "sha256:3eb32ca1758fa03d52899f36acfdd18607195920cf1c06d77634933a7061384c", size = 1618609, upload-time = "2026-02-25T07:54:29.417Z" }, - { url = "https://files.pythonhosted.org/packages/4c/89/efb1c04cf656061b6acc088da6911388594beabf15a52dc9fe365b4bc7c9/eigency-3.4.0.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9f866c2b9ee6c81c3cc335876bc0cd9f455ea595bb8e12de8e436e535033dd99", size = 1620245, upload-time = "2026-02-25T07:54:31.385Z" }, - { url = "https://files.pythonhosted.org/packages/7f/a1/b2a24bb526c1ead9bdfc16a7d336c83b61d18cc5e144c9fa3c4dd7c79293/eigency-3.4.0.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:97131f1b3dc722c46a6ed443f07521b1982571bd9af03efd3798568f5d260358", size = 2556830, upload-time = "2026-02-25T07:54:33.511Z" }, - { url = "https://files.pythonhosted.org/packages/55/bd/27afd65a41f4eb68f70f81acbcfbf7ada8bed7bae29241171376501aaa44/eigency-3.4.0.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bc98d3b46bac63f5dadae94a7e9a2e7907c59db739d98ff09a6bbead9507d6c7", size = 2396840, 
upload-time = "2026-02-25T07:54:35.257Z" }, - { url = "https://files.pythonhosted.org/packages/61/57/51160b6cdcef2ee46c72d108729010b54b7e4e0b9b318754a3664d3b6102/eigency-3.4.0.7-cp312-cp312-win32.whl", hash = "sha256:708bab5af3041a5731b3bc9d00cee7234c2ff417d01da7a6c799c85632553397", size = 1585879, upload-time = "2026-02-25T07:54:37.365Z" }, - { url = "https://files.pythonhosted.org/packages/9f/4c/8c65e972a9685471dbe38880ad5cf8df03451dad798056566d0fd3538444/eigency-3.4.0.7-cp312-cp312-win_amd64.whl", hash = "sha256:96e1a447e131424d662d48c2936d50e037bf928327398e415a72ccd26984dbe7", size = 1608924, upload-time = "2026-02-25T07:54:39.217Z" }, - { url = "https://files.pythonhosted.org/packages/af/9f/867229dd7e4b885a1eaa5f7d0c17bbca876df4fb0c87a977b0176b38d296/eigency-3.4.0.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1e78f068055da30934eede33960968468ec0883b6b52f7c1658cbdb41e50131a", size = 1619459, upload-time = "2026-02-25T07:54:41.18Z" }, - { url = "https://files.pythonhosted.org/packages/38/e8/4ee7f74c3be0b3c202a27a9cb6e0ba39d79b752bdb7592c2480b1515d60d/eigency-3.4.0.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030417445bad9acd44c3ee7ee90e02424f42a60781cd914a09d3e23e430968f8", size = 2544291, upload-time = "2026-02-25T07:54:43.138Z" }, - { url = "https://files.pythonhosted.org/packages/bd/40/48ef7bac1dee5013aa71d9115c046906c7aa3725f8c3b500f8f1d34ecf90/eigency-3.4.0.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c9d67f62724e635a3cfc59c46c3dca0efde73a10527cb37b63839522349ed381", size = 2382996, upload-time = "2026-02-25T07:54:44.815Z" }, - { url = "https://files.pythonhosted.org/packages/86/35/ad9403497ad582dfc3f084bc9b0f517b78c2dbc93dc85e8290625a568132/eigency-3.4.0.7-cp313-cp313-win32.whl", hash = "sha256:48512e223fc362c016dcddb382fe5a62e749d5b2ae5cb27936901a4d4caf379a", size = 1585867, upload-time = "2026-02-25T07:54:46.541Z" }, - { url = 
"https://files.pythonhosted.org/packages/57/9b/5d0abd45b014edbd9114a55385f82061bf3f95a08bccb9bf1c0588e5773e/eigency-3.4.0.7-cp313-cp313-win_amd64.whl", hash = "sha256:a74eb3c218ec1785c0be44d671807bc739755d27b34c7497f0af3b6ad2ac4d5d", size = 1609933, upload-time = "2026-02-25T07:54:48.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/ba/bd4f30f825e8a9bfb4950a0682e8e6f7b258615beaa65b712c6b767481cc/eigency-5.0.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27795b5dc4564cf00d42bb3e1d777df0e76b01a65a248e786dceb9bd297aa3cb", size = 1621555, upload-time = "2026-04-25T13:35:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/d9/89/8aa501a8939b8df36ac918e62e3ba11b471cda8d88744656aa01caa5cbc4/eigency-5.0.1.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0b6c3e77ec5cacbe66cc6fb4b858f54f6829594e024d351cddb94ff0fb73331", size = 2581353, upload-time = "2026-04-25T13:35:40.962Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2c/57a89d1dbf1c2b6fae491c50f74397808d71f2fa2a9ed9594fc3553a7d04/eigency-5.0.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba43680b154351e599f06e3dcd43f8c0187109f90fda11f0365047fa07d9b240", size = 2428649, upload-time = "2026-04-25T13:35:42.969Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f3/064e52a16821f54524c161035cbbec3eb537abe6071b3ac8d5cb5ef6f64c/eigency-5.0.1.0-cp311-cp311-win32.whl", hash = "sha256:77eca0e133d968dc8db1e2b12163bfcb70a2041ecfb3e249cc249299c158c57a", size = 1593348, upload-time = "2026-04-25T13:35:45.218Z" }, + { url = "https://files.pythonhosted.org/packages/c8/38/1580086322333cd4079e1dda6fc4156c4df9b1d39fea2d61e5d1361f200b/eigency-5.0.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:710b858d0a308400b0dce1b96d78fb6b41efe8d49ba669446a615333040543dc", size = 1618602, upload-time = "2026-04-25T13:35:47.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/b4/966679cebb2bd8efb1477c47f4889b8d2936661ef252decf73034fe78a79/eigency-5.0.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3b3fa67fa3c8aae8144c8956909a70a12ebd883d28120d01ccac10218edf6fd2", size = 1620211, upload-time = "2026-04-25T13:35:49.146Z" }, + { url = "https://files.pythonhosted.org/packages/c9/db/abef8c2b294c0894ec6f3d957e07d475f75b462b9b262357f1d18e3b66a4/eigency-5.0.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29f1dc3f4466c512b4e435fc70154a948a53d62588460fc38cb7adba4e0ee1cf", size = 2556831, upload-time = "2026-04-25T13:35:50.916Z" }, + { url = "https://files.pythonhosted.org/packages/3d/c4/ba5239b435c21296451c5b4fef8147141c1cd93ed9072b787da3cd946fa8/eigency-5.0.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9a32c5925cdb013d343a15fb6d869d9b8b2670f21a60223511cb730de1e5951", size = 2396834, upload-time = "2026-04-25T13:35:52.667Z" }, + { url = "https://files.pythonhosted.org/packages/dd/59/f2f79fd0896ebc2a3abcba4e3a1b16ee6a0fb4b7d103f32be0aff6b990af/eigency-5.0.1.0-cp312-cp312-win32.whl", hash = "sha256:a2d615b7862865702a86c46f212e2e39def3b02d1aa464318297ed3de27a9373", size = 1585848, upload-time = "2026-04-25T13:35:55.148Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/c0c910a3f31c124ed1cd14f08c3c829b154b59edce2280d4d90640bce3bf/eigency-5.0.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0578dbef42bc09fdbe99048038b13775f9d67b3ae04888dcb853d2105d215282", size = 1608927, upload-time = "2026-04-25T13:35:56.841Z" }, + { url = "https://files.pythonhosted.org/packages/df/70/e42b7121aab50369ba8e999512340a1e2ccc63bb8c7c958fbef42aee3fa6/eigency-5.0.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ca712b400b0e5c8a1b99918c322a7d62686660ffa664deb9c2b557edc363edf", size = 1619432, upload-time = "2026-04-25T13:35:58.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9d/e31f035e4b5357c48ac2e31730373db3e48b606413c8de1a3c8daf3b5bf4/eigency-5.0.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d75639d68cf8fb64b3cf6e59d7892ae1d4164286b58bfb6a0db41219b654dc6a", size = 2544289, upload-time = "2026-04-25T13:36:00.839Z" }, + { url = "https://files.pythonhosted.org/packages/40/f9/4e585a05ff6c81ef23cd64d34f2431dfe6fb29890f7f2f68233cf2001ea9/eigency-5.0.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:546a98be48e616ee59cf5b9a9716cb5001c2425f9606794eaa16c7b09667a301", size = 2382996, upload-time = "2026-04-25T13:36:02.813Z" }, + { url = "https://files.pythonhosted.org/packages/de/18/a0e25637ece20c123f438c5180523b5b29bb8ef63518b39bf0b4cf2fc1f7/eigency-5.0.1.0-cp313-cp313-win32.whl", hash = "sha256:4301ca88a7426a4a65ff827ad9c00e20808115ec9f18497f76cbb5662df246b7", size = 1585843, upload-time = "2026-04-25T13:36:04.434Z" }, + { url = "https://files.pythonhosted.org/packages/3e/3d/80aa31bd9f1e1f50562893af3880643b975d8c97b125909b2218a4f70dac/eigency-5.0.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:be05dd2c0ad9e1ab534e25e03d7fb8b53c75e38aaa743f986dff4872312d04ff", size = 1609927, upload-time = "2026-04-25T13:36:05.969Z" }, ] [[package]] @@ -721,11 +721,11 @@ wheels = [ [[package]] name = "filelock" -version = "3.25.2" +version = "3.29.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/fe/997687a931ab51049acce6fa1f23e8f01216374ea81374ddee763c493db5/filelock-3.29.0.tar.gz", hash = "sha256:69974355e960702e789734cb4871f884ea6fe50bd8404051a3530bc07809cf90", size = 57571, upload-time = 
"2026-04-19T15:39:10.068Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" }, + { url = "https://files.pythonhosted.org/packages/81/47/dd9a212ef6e343a6857485ffe25bba537304f1913bdbed446a23f7f592e1/filelock-3.29.0-py3-none-any.whl", hash = "sha256:96f5f6344709aa1572bbf631c640e4ebeeb519e08da902c39a001882f30ac258", size = 39812, upload-time = "2026-04-19T15:39:08.752Z" }, ] [[package]] @@ -892,34 +892,40 @@ wheels = [ [[package]] name = "greenlet" -version = "3.4.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/86/94/a5935717b307d7c71fe877b52b884c6af707d2d2090db118a03fbd799369/greenlet-3.4.0.tar.gz", hash = "sha256:f50a96b64dafd6169e595a5c56c9146ef80333e67d4476a65a9c55f400fc22ff", size = 195913, upload-time = "2026-04-08T17:08:00.863Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/c6/dba32cab7e3a625b011aa5647486e2d28423a48845a2998c126dd69c85e1/greenlet-3.4.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:805bebb4945094acbab757d34d6e1098be6de8966009ab9ca54f06ff492def58", size = 285504, upload-time = "2026-04-08T15:52:14.071Z" }, - { url = "https://files.pythonhosted.org/packages/54/f4/7cb5c2b1feb9a1f50e038be79980dfa969aa91979e5e3a18fdbcfad2c517/greenlet-3.4.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:439fc2f12b9b512d9dfa681c5afe5f6b3232c708d13e6f02c845e0d9f4c2d8c6", size = 605476, upload-time = "2026-04-08T16:24:37.064Z" }, - { url = "https://files.pythonhosted.org/packages/d6/af/b66ab0b2f9a4c5a867c136bf66d9599f34f21a1bcca26a2884a29c450bd9/greenlet-3.4.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a70ed1cb0295bee1df57b63bf7f46b4e56a5c93709eea769c1fec1bb23a95875", size = 618336, upload-time = "2026-04-08T16:30:56.59Z" }, - { url = "https://files.pythonhosted.org/packages/e5/5c/8c5633ece6ba611d64bf2770219a98dd439921d6424e4e8cf16b0ac74ea5/greenlet-3.4.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c660bce1940a1acae5f51f0a064f1bc785d07ea16efcb4bc708090afc4d69e83", size = 613515, upload-time = "2026-04-08T15:56:32.478Z" }, - { url = "https://files.pythonhosted.org/packages/a9/df/950d15bca0d90a0e7395eb777903060504cdb509b7b705631e8fb69ff415/greenlet-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ee407d4d1ca9dc632265aee1c8732c4a2d60adff848057cdebfe5fe94eb2c8a2", size = 1574623, upload-time = "2026-04-08T16:26:18.596Z" }, - { url = "https://files.pythonhosted.org/packages/1a/e7/0839afab829fcb7333c9ff6d80c040949510055d2d4d63251f0d1c7c804e/greenlet-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:956215d5e355fffa7c021d168728321fd4d31fd730ac609b1653b450f6a4bc71", size = 1639579, upload-time = "2026-04-08T15:57:29.231Z" }, - { url = "https://files.pythonhosted.org/packages/d9/2b/b4482401e9bcaf9f5c97f67ead38db89c19520ff6d0d6699979c6efcc200/greenlet-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:5cb614ace7c27571270354e9c9f696554d073f8aa9319079dcba466bbdead711", size = 238233, upload-time = "2026-04-08T17:02:54.286Z" }, - { url = "https://files.pythonhosted.org/packages/0c/4d/d8123a4e0bcd583d5cfc8ddae0bbe29c67aab96711be331a7cc935a35966/greenlet-3.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:04403ac74fe295a361f650818de93be11b5038a78f49ccfb64d3b1be8fbf1267", size = 235045, upload-time = "2026-04-08T17:04:05.072Z" }, - { url = "https://files.pythonhosted.org/packages/65/8b/3669ad3b3f247a791b2b4aceb3aa5a31f5f6817bf547e4e1ff712338145a/greenlet-3.4.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:1a54a921561dd9518d31d2d3db4d7f80e589083063ab4d3e2e950756ef809e1a", size = 286902, upload-time = 
"2026-04-08T15:52:12.138Z" }, - { url = "https://files.pythonhosted.org/packages/38/3e/3c0e19b82900873e2d8469b590a6c4b3dfd2b316d0591f1c26b38a4879a5/greenlet-3.4.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16dec271460a9a2b154e3b1c2fa1050ce6280878430320e85e08c166772e3f97", size = 606099, upload-time = "2026-04-08T16:24:38.408Z" }, - { url = "https://files.pythonhosted.org/packages/b5/33/99fef65e7754fc76a4ed14794074c38c9ed3394a5bd129d7f61b705f3168/greenlet-3.4.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:90036ce224ed6fe75508c1907a77e4540176dcf0744473627785dd519c6f9996", size = 618837, upload-time = "2026-04-08T16:30:58.298Z" }, - { url = "https://files.pythonhosted.org/packages/36/f7/229f3aed6948faa20e0616a0b8568da22e365ede6a54d7d369058b128afd/greenlet-3.4.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1c4f6b453006efb8310affb2d132832e9bbb4fc01ce6df6b70d810d38f1f6dc", size = 615062, upload-time = "2026-04-08T15:56:33.766Z" }, - { url = "https://files.pythonhosted.org/packages/08/97/d988180011aa40135c46cd0d0cf01dd97f7162bae14139b4a3ef54889ba5/greenlet-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9b2d9a138ffa0e306d0e2b72976d2fb10b97e690d40ab36a472acaab0838e2de", size = 1573511, upload-time = "2026-04-08T16:26:20.058Z" }, - { url = "https://files.pythonhosted.org/packages/d4/0f/a5a26fe152fb3d12e6a474181f6e9848283504d0afd095f353d85726374b/greenlet-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8424683caf46eb0eb6f626cb95e008e8cc30d0cb675bdfa48200925c79b38a08", size = 1640396, upload-time = "2026-04-08T15:57:30.88Z" }, - { url = "https://files.pythonhosted.org/packages/42/cf/bb2c32d9a100e36ee9f6e38fad6b1e082b8184010cb06259b49e1266ca01/greenlet-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0a53fb071531d003b075c444014ff8f8b1a9898d36bb88abd9ac7b3524648a2", size = 238892, upload-time = "2026-04-08T17:03:10.094Z" }, - { url = 
"https://files.pythonhosted.org/packages/b7/47/6c41314bac56e71436ce551c7fbe3cc830ed857e6aa9708dbb9c65142eb6/greenlet-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f38b81880ba28f232f1f675893a39cf7b6db25b31cc0a09bb50787ecf957e85e", size = 235599, upload-time = "2026-04-08T15:52:54.3Z" }, - { url = "https://files.pythonhosted.org/packages/7a/75/7e9cd1126a1e1f0cd67b0eda02e5221b28488d352684704a78ed505bd719/greenlet-3.4.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:43748988b097f9c6f09364f260741aa73c80747f63389824435c7a50bfdfd5c1", size = 285856, upload-time = "2026-04-08T15:52:45.82Z" }, - { url = "https://files.pythonhosted.org/packages/9d/c4/3e2df392e5cb199527c4d9dbcaa75c14edcc394b45040f0189f649631e3c/greenlet-3.4.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5566e4e2cd7a880e8c27618e3eab20f3494452d12fd5129edef7b2f7aa9a36d1", size = 610208, upload-time = "2026-04-08T16:24:39.674Z" }, - { url = "https://files.pythonhosted.org/packages/da/af/750cdfda1d1bd30a6c28080245be8d0346e669a98fdbae7f4102aa95fff3/greenlet-3.4.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1054c5a3c78e2ab599d452f23f7adafef55062a783a8e241d24f3b633ba6ff82", size = 621269, upload-time = "2026-04-08T16:30:59.767Z" }, - { url = "https://files.pythonhosted.org/packages/54/78/0cbc693622cd54ebe25207efbb3a0eb07c2639cb8594f6e3aaaa0bb077a8/greenlet-3.4.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f82cb6cddc27dd81c96b1506f4aa7def15070c3b2a67d4e46fd19016aacce6cf", size = 617549, upload-time = "2026-04-08T15:56:34.893Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c0/8966767de01343c1ff47e8b855dc78e7d1a8ed2b7b9c83576a57e289f81d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:227a46251ecba4ff46ae742bc5ce95c91d5aceb4b02f885487aff269c127a729", size = 1575310, upload-time = "2026-04-08T16:26:21.671Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/38/bcdc71ba05e9a5fda87f63ffc2abcd1f15693b659346df994a48c968003d/greenlet-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5b99e87be7eba788dd5b75ba1cde5639edffdec5f91fe0d734a249535ec3408c", size = 1640435, upload-time = "2026-04-08T15:57:32.572Z" }, - { url = "https://files.pythonhosted.org/packages/a1/c2/19b664b7173b9e4ef5f77e8cef9f14c20ec7fce7920dc1ccd7afd955d093/greenlet-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:849f8bc17acd6295fcb5de8e46d55cc0e52381c56eaf50a2afd258e97bc65940", size = 238760, upload-time = "2026-04-08T17:04:03.878Z" }, - { url = "https://files.pythonhosted.org/packages/9b/96/795619651d39c7fbd809a522f881aa6f0ead504cc8201c3a5b789dfaef99/greenlet-3.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:9390ad88b652b1903814eaabd629ca184db15e0eeb6fe8a390bbf8b9106ae15a", size = 235498, upload-time = "2026-04-08T17:05:00.584Z" }, +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/3f/dbf99fb14bfeb88c28f16729215478c0e265cacd6dc22270c8f31bb6892f/greenlet-3.5.0.tar.gz", hash = "sha256:d419647372241bc68e957bf38d5c1f98852155e4146bd1e4121adea81f4f01e4", size = 196995, upload-time = "2026-04-27T13:37:15.544Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/0f/a91f143f356523ff682309732b175765a9bc2836fd7c081c2c67fedc1ad4/greenlet-3.5.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8f1cc966c126639cd152fdaa52624d2655f492faa79e013fea161de3e6dda082", size = 284726, upload-time = "2026-04-27T12:20:51.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/82/800646c7ffc5dbabd75ddd2f6b519bb898c0c9c969e5d0473bfe5d20bcce/greenlet-3.5.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:362624e6a8e5bca3b8233e45eef33903a100e9539a2b995c364d595dbc4018b3", size = 604264, upload-time = "2026-04-27T12:52:39.494Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/ac/354867c0bba812fc33b15bc55aedafedd0aee3c7dd91dfca22444157dc0c/greenlet-3.5.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5ecd83806b0f4c2f53b1018e0005cd82269ea01d42befc0368730028d850ed1c", size = 616099, upload-time = "2026-04-27T12:59:39.623Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/192090c4a5b30df148c22bf4b8895457d739a7c7c5a7b9c41e5dd7f537f2/greenlet-3.5.0-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fa94cb2288681e3a11645958f1871d48ee9211bd2f66628fdace505927d6e564", size = 623976, upload-time = "2026-04-27T13:02:37.363Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b0/815bece7399e01cadb69014219eebd0042339875c59a59b0820a46ece356/greenlet-3.5.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0ff251e9a0279522e62f6176412869395a64ddf2b5c5f782ff609a8216a4e662", size = 615198, upload-time = "2026-04-27T12:25:25.928Z" }, + { url = "https://files.pythonhosted.org/packages/24/11/05eb2b9b188c6df7d68a89c99134d644a7af616a40b9808e8e6ced315d5d/greenlet-3.5.0-cp311-cp311-manylinux_2_39_riscv64.whl", hash = "sha256:64d6ac45f7271f48e45f67c95b54ef73534c52ec041fcda8edf520c6d811f4bc", size = 418379, upload-time = "2026-04-27T13:05:12.755Z" }, + { url = "https://files.pythonhosted.org/packages/10/80/3b2c0a895d6698f6ddb31b07942ebfa982f3e30888bc5546a5b5990de8b2/greenlet-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6d874e79afd41a96e11ff4c5d0bc90a80973e476fda1c2c64985667397df432b", size = 1574927, upload-time = "2026-04-27T12:53:25.81Z" }, + { url = "https://files.pythonhosted.org/packages/44/0e/f354af514a4c61454dbc68e44d47544a5a4d6317e30b77ddfa3a09f4c5f3/greenlet-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0ed006e4b86c59de7467eb2601cd1b77b5a7d657d1ee55e30fe30d76451edba4", size = 1642683, upload-time = "2026-04-27T12:25:23.9Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/6a/87f38255201e993a1915265ebb80cd7c2c78b04a45744995abbf6b259fd8/greenlet-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:703cb211b820dbffbbc55a16bfc6e4583a6e6e990f33a119d2cc8b83211119c8", size = 238115, upload-time = "2026-04-27T12:21:48.845Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f8/450fe3c5938fa737ea4d22699772e6e34e8e24431a47bf4e8a1ceed4a98e/greenlet-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:6c18dfb59c70f5a94acd271c72e90128c3c776e41e5f07767908c8c1b74ad339", size = 235017, upload-time = "2026-04-27T12:22:26.768Z" }, + { url = "https://files.pythonhosted.org/packages/ef/32/f2ce6d4cac3e55bc6173f92dbe627e782e1850f89d986c3606feb63aafa7/greenlet-3.5.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:db2910d3c809444e0a20147361f343fe2798e106af8d9d8506f5305302655a9f", size = 286228, upload-time = "2026-04-27T12:20:34.421Z" }, + { url = "https://files.pythonhosted.org/packages/b7/aa/caed9e5adf742315fc7be2a84196373aab4816e540e38ba0d76cb7584d68/greenlet-3.5.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ec9ea74e7268ace7f9aab1b1a4e730193fc661b39a993cd91c606c32d4a3628", size = 601775, upload-time = "2026-04-27T12:52:41.045Z" }, + { url = "https://files.pythonhosted.org/packages/c7/af/90ae08497400a941595d12774447f752d3dfe0fbb012e35b76bc5c0ff37e/greenlet-3.5.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54d243512da35485fc7a6bf3c178fdda6327a9d6506fcdd62b1abd1e41b2927b", size = 614436, upload-time = "2026-04-27T12:59:41.595Z" }, + { url = "https://files.pythonhosted.org/packages/3f/e9/4eeadf8cb3403ac274245ba75f07844abc7fa5f6787583fc9156ba741e0f/greenlet-3.5.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:41353ec2ecedf7aa8f682753a41919f8718031a6edac46b8d3dc7ed9e1ceb136", size = 620610, upload-time = "2026-04-27T13:02:39.194Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/e0/2e13df68f367e2f9960616927d60857dd7e56aaadd59a47c644216b2f920/greenlet-3.5.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d280a7f5c331622c69f97eb167f33577ff2d1df282c41cd15907fc0a3ca198c", size = 611388, upload-time = "2026-04-27T12:25:28.008Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ef/f913b3c0eb7d26d86a2401c5e1546c9d46b657efee724b06f6f4ac5d8824/greenlet-3.5.0-cp312-cp312-manylinux_2_39_riscv64.whl", hash = "sha256:58c1c374fe2b3d852f9b6b11a7dff4c85404e51b9a596fd9e89cf904eb09866d", size = 422775, upload-time = "2026-04-27T13:05:14.261Z" }, + { url = "https://files.pythonhosted.org/packages/82/f7/393c64055132ac0d488ef6be549253b7e6274194863967ddc0bc8f5b87b8/greenlet-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1eb67d5adefb5bd2e182d42678a328979a209e4e82eb93575708185d31d1f588", size = 1570768, upload-time = "2026-04-27T12:53:28.099Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4b/eaf7735253522cf56d1b74d672a58f54fc114702ceaf05def59aae72f6e1/greenlet-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2628d6c86f6cb0cb45e0c3c54058bbec559f57eaae699447748cb3928150577e", size = 1635983, upload-time = "2026-04-27T12:25:26.903Z" }, + { url = "https://files.pythonhosted.org/packages/4c/fe/4fb3a0805bd5165da5ebf858da7cc01cce8061674106d2cf5bdab32cbfde/greenlet-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:d4d9f0624c775f2dfc56ba54d515a8c771044346852a918b405914f6b19d7fd8", size = 238840, upload-time = "2026-04-27T12:23:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cb/baa584cb00532126ffe12d9787db0a60c5a4f55c27bfe2666df5d4c30a32/greenlet-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:83ed9f27f1680b50e89f40f6df348a290ea234b249a4003d366663a12eab94f2", size = 235615, upload-time = "2026-04-27T12:21:38.57Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/58/fc576f99037ce19c5aa16628e4c3226b6d1419f72a62c79f5f40576e6eb3/greenlet-3.5.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:5a5ed18de6a0f6cc7087f1563f6bd93fc7df1c19165ca01e9bde5a5dc281d106", size = 285066, upload-time = "2026-04-27T12:23:05.033Z" }, + { url = "https://files.pythonhosted.org/packages/4a/ba/b28ddbe6bfad6a8ac196ef0e8cff37bc65b79735995b9e410923fffeeb70/greenlet-3.5.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a717fbc46d8a354fa675f7c1e813485b6ba3885f9bef0cd56e5ba27d758ff5b", size = 604414, upload-time = "2026-04-27T12:52:42.358Z" }, + { url = "https://files.pythonhosted.org/packages/09/06/4b69f8f0b67603a8be2790e55107a190b376f2627fe0eaf5695d85ffb3cd/greenlet-3.5.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ddc090c5c1792b10246a78e8c2163ebbe04cf877f9d785c230a7b27b39ad038e", size = 617349, upload-time = "2026-04-27T12:59:43.32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/15/a643b4ecd09969e30b8a150d5919960caae0abe4f5af75ab040b1ab85e78/greenlet-3.5.0-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4964101b8585c144cbda5532b1aa644255126c08a265dae90c16e7a0e63aaa9d", size = 623234, upload-time = "2026-04-27T13:02:40.611Z" }, + { url = "https://files.pythonhosted.org/packages/8a/17/a3918541fd0ddefe024a69de6d16aa7b46d36ac19562adaa63c7fa180eff/greenlet-3.5.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2094acd54b272cb6eae8c03dd87b3fa1820a4cef18d6889c378d503500a1dc13", size = 613927, upload-time = "2026-04-27T12:25:30.28Z" }, + { url = "https://files.pythonhosted.org/packages/77/18/3b13d5ef1275b0ffaf933b05efa21408ac4ca95823c7411d79682e4fdcff/greenlet-3.5.0-cp313-cp313-manylinux_2_39_riscv64.whl", hash = "sha256:7022615368890680e67b9965d33f5773aade330d5343bbe25560135aaa849eae", size = 425243, upload-time = "2026-04-27T13:05:15.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/e1/bd0af6213c7dd33175d8a462d4c1fe1175124ebed4855bc1475a5b5242c2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5e05ba267789ea87b5a155cf0e810b1ab88bf18e9e8740813945ceb8ee4350ba", size = 1570893, upload-time = "2026-04-27T12:53:29.483Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2a/0789702f864f5382cb476b93d7a9c823c10472658102ccd65f415747d2e2/greenlet-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0ecec963079cd58cbd14723582384f11f166fd58883c15dcbfb342e0bc9b5846", size = 1636060, upload-time = "2026-04-27T12:25:28.845Z" }, + { url = "https://files.pythonhosted.org/packages/b2/8f/22bf9df92bbff0eb07842b60f7e63bf7675a9742df628437a9f02d09137f/greenlet-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:728d9667d8f2f586644b748dbd9bb67e50d6a9381767d1357714ea6825bb3bf5", size = 238740, upload-time = "2026-04-27T12:24:01.341Z" }, + { url = "https://files.pythonhosted.org/packages/b6/b7/9c5c3d653bd4ff614277c049ac676422e2c557db47b4fe43e6313fc005dc/greenlet-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:47422135b1d308c14b2c6e758beedb1acd33bb91679f5670edf77bf46244722b", size = 235525, upload-time = "2026-04-27T12:23:12.308Z" }, ] [[package]] @@ -1061,7 +1067,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.10.2" +version = "1.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1074,27 +1080,27 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/4d/00734890c7fcfe2c7ff04f1c1a167186c42b19e370a2dd8cfd8c34fc92c4/huggingface_hub-1.10.2.tar.gz", hash = "sha256:4b276f820483b709dc86a53bcb8183ea496b8d8447c9f7f88a115a12b498a95f", size = 758428, upload-time = "2026-04-14T10:42:28.498Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/52/1b54cb569509c725a32c1315261ac9fd0e6b91bbbf74d86fca10d3376164/huggingface_hub-1.12.0.tar.gz", hash = 
"sha256:7c3fe85e24b652334e5d456d7a812cd9a071e75630fac4365d9165ab5e4a34b6", size = 763091, upload-time = "2026-04-24T13:32:08.674Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/c9/4c1e1216b24bcab140c83acdf8bc89a846ea17cd8a06cd18e3fd308a297f/huggingface_hub-1.10.2-py3-none-any.whl", hash = "sha256:c26c908767cc711493978dc0b4f5747ba7841602997cc98bfd628450a28cf9bc", size = 642581, upload-time = "2026-04-14T10:42:26.563Z" }, + { url = "https://files.pythonhosted.org/packages/7e/2b/ef03ddb96bd1123503c2bd6932001020292deea649e9bf4caa2cb65a85bf/huggingface_hub-1.12.0-py3-none-any.whl", hash = "sha256:d74939969585ee35748bd66de09baf84099d461bda7287cd9043bfb99b0e424d", size = 646806, upload-time = "2026-04-24T13:32:06.717Z" }, ] [[package]] name = "identify" -version = "2.6.18" +version = "2.6.19" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/46/c4/7fb4db12296cdb11893d61c92048fe617ee853f8523b9b296ac03b43757e/identify-2.6.18.tar.gz", hash = "sha256:873ac56a5e3fd63e7438a7ecbc4d91aca692eb3fefa4534db2b7913f3fc352fd", size = 99580, upload-time = "2026-03-15T18:39:50.319Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/63/51723b5f116cc04b061cb6f5a561790abf249d25931d515cd375e063e0f4/identify-2.6.19.tar.gz", hash = "sha256:6be5020c38fcb07da56c53733538a3081ea5aa70d36a156f83044bfbf9173842", size = 99567, upload-time = "2026-04-17T18:39:50.265Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/33/92ef41c6fad0233e41d3d84ba8e8ad18d1780f1e5d99b3c683e6d7f98b63/identify-2.6.18-py2.py3-none-any.whl", hash = "sha256:8db9d3c8ea9079db92cafb0ebf97abdc09d52e97f4dcf773a2e694048b7cd737", size = 99394, upload-time = "2026-03-15T18:39:48.915Z" }, + { url = "https://files.pythonhosted.org/packages/94/84/d9273cd09688070a6523c4aee4663a8538721b2b755c4962aafae0011e72/identify-2.6.19-py2.py3-none-any.whl", hash = "sha256:20e6a87f786f768c092a721ad107fc9df0eb89347be9396cadf3f4abbd1fb78a", size = 
99397, upload-time = "2026-04-17T18:39:49.221Z" }, ] [[package]] name = "idna" -version = "3.11" +version = "3.13" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/cc/762dfb036166873f0059f3b7de4565e1b5bc3d6f28a414c13da27e442f99/idna-3.13.tar.gz", hash = "sha256:585ea8fe5d69b9181ec1afba340451fba6ba764af97026f92a91d4eef164a242", size = 194210, upload-time = "2026-04-22T16:42:42.314Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/5d/13/ad7d7ca3808a898b4612b6fe93cde56b53f3034dcde235acb1f0e1df24c6/idna-3.13-py3-none-any.whl", hash = "sha256:892ea0cde124a99ce773decba204c5552b69c3c67ffd5f232eb7696135bc8bb3", size = 68629, upload-time = "2026-04-22T16:42:40.909Z" }, ] [[package]] @@ -1135,8 +1141,7 @@ dependencies = [ { name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "comm" }, { name = "debugpy" }, - { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, + { name = "ipython" }, { name = "jupyter-client" }, { name = "jupyter-core" }, { name = "matplotlib-inline" }, @@ -1154,55 +1159,25 @@ wheels = [ [[package]] name = "ipython" -version = "9.10.1" +version = "9.13.0" source = { registry = 
"https://pypi.org/simple" } -resolution-markers = [ - "python_full_version < '3.12' and sys_platform == 'win32'", - "python_full_version < '3.12' and sys_platform == 'emscripten'", - "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'", -] dependencies = [ - { name = "colorama", marker = "python_full_version < '3.12' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version < '3.12'" }, - { name = "ipython-pygments-lexers", marker = "python_full_version < '3.12'" }, - { name = "jedi", marker = "python_full_version < '3.12'" }, - { name = "matplotlib-inline", marker = "python_full_version < '3.12'" }, - { name = "pexpect", marker = "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version < '3.12'" }, - { name = "pygments", marker = "python_full_version < '3.12'" }, - { name = "stack-data", marker = "python_full_version < '3.12'" }, - { name = "traitlets", marker = "python_full_version < '3.12'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "decorator" }, + { name = "ipython-pygments-lexers" }, + { name = "jedi" }, + { name = "matplotlib-inline" }, + { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" }, + { name = "prompt-toolkit" }, + { name = "psutil" }, + { name = "pygments" }, + { name = "stack-data" }, + { name = "traitlets" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c5/25/daae0e764047b0a2480c7bbb25d48f4f509b5818636562eeac145d06dfee/ipython-9.10.1.tar.gz", hash = "sha256:e170e9b2a44312484415bdb750492699bf329233b03f2557a9692cce6466ada4", size = 4426663, upload-time = "2026-03-27T09:53:26.244Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/c4/87cda5842cf5c31837c06ddb588e11c3c35d8ece89b7a0108c06b8c9b00a/ipython-9.13.0.tar.gz", hash 
= "sha256:7e834b6afc99f020e3f05966ced34792f40267d64cb1ea9043886dab0dde5967", size = 4430549, upload-time = "2026-04-24T12:24:55.221Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/09/ba70f8d662d5671687da55ad2cc0064cf795b15e1eea70907532202e7c97/ipython-9.10.1-py3-none-any.whl", hash = "sha256:82d18ae9fb9164ded080c71ef92a182ee35ee7db2395f67616034bebb020a232", size = 622827, upload-time = "2026-03-27T09:53:24.566Z" }, -] - -[[package]] -name = "ipython" -version = "9.12.0" -source = { registry = "https://pypi.org/simple" } -resolution-markers = [ - "python_full_version >= '3.12' and sys_platform == 'win32'", - "python_full_version >= '3.12' and sys_platform == 'emscripten'", - "python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'", -] -dependencies = [ - { name = "colorama", marker = "python_full_version >= '3.12' and sys_platform == 'win32'" }, - { name = "decorator", marker = "python_full_version >= '3.12'" }, - { name = "ipython-pygments-lexers", marker = "python_full_version >= '3.12'" }, - { name = "jedi", marker = "python_full_version >= '3.12'" }, - { name = "matplotlib-inline", marker = "python_full_version >= '3.12'" }, - { name = "pexpect", marker = "python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'" }, - { name = "prompt-toolkit", marker = "python_full_version >= '3.12'" }, - { name = "pygments", marker = "python_full_version >= '3.12'" }, - { name = "stack-data", marker = "python_full_version >= '3.12'" }, - { name = "traitlets", marker = "python_full_version >= '3.12'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3a/73/7114f80a8f9cabdb13c27732dce24af945b2923dcab80723602f7c8bc2d8/ipython-9.12.0.tar.gz", hash = "sha256:01daa83f504b693ba523b5a407246cabde4eb4513285a3c6acaff11a66735ee4", size = 4428879, upload-time = "2026-03-27T09:42:45.312Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/59/22/906c8108974c673ebef6356c506cebb6870d48cedea3c41e949e2dd556bb/ipython-9.12.0-py3-none-any.whl", hash = "sha256:0f2701e8ee86e117e37f50563205d36feaa259d2e08d4a6bc6b6d74b18ce128d", size = 625661, upload-time = "2026-03-27T09:42:42.831Z" }, + { url = "https://files.pythonhosted.org/packages/b9/86/3060e8029b7cc505cce9a0137431dda81d0a3fde93a8f0f50ee0bf37a795/ipython-9.13.0-py3-none-any.whl", hash = "sha256:57f9d4639e20818d328d287c7b549af3d05f12486ea8f2e7f73e52a36ec4d201", size = 627274, upload-time = "2026-04-24T12:24:53.038Z" }, ] [[package]] @@ -1428,14 +1403,14 @@ wheels = [ [[package]] name = "mako" -version = "1.3.10" +version = "1.3.12" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/00/62/791b31e69ae182791ec67f04850f2f062716bbd205483d63a215f3e062d3/mako-1.3.12.tar.gz", hash = "sha256:9f778e93289bd410bb35daadeb4fc66d95a746f0b75777b942088b7fd7af550a", size = 400219, upload-time = "2026-04-28T19:01:08.512Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = "https://files.pythonhosted.org/packages/bc/b1/a0ec7a5a9db730a08daef1fdfb8090435b82465abbf758a596f0ea88727e/mako-1.3.12-py3-none-any.whl", hash = "sha256:8f61569480282dbf557145ce441e4ba888be453c30989f879f0d652e39f53ea9", size = 78521, upload-time = "2026-04-28T19:01:10.393Z" }, ] [[package]] @@ -1504,7 +1479,7 @@ wheels = [ 
[[package]] name = "matplotlib" -version = "3.10.8" +version = "3.10.9" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "contourpy" }, @@ -1517,39 +1492,39 @@ dependencies = [ { name = "pyparsing" }, { name = "python-dateutil" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8a/76/d3c6e3a13fe484ebe7718d14e269c9569c4eb0020a968a327acb3b9a8fe6/matplotlib-3.10.8.tar.gz", hash = "sha256:2299372c19d56bcd35cf05a2738308758d32b9eaed2371898d8f5bd33f084aa3", size = 34806269, upload-time = "2025-12-10T22:56:51.155Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/86/de7e3a1cdcfc941483af70609edc06b83e7c8a0e0dc9ac325200a3f4d220/matplotlib-3.10.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6be43b667360fef5c754dda5d25a32e6307a03c204f3c0fc5468b78fa87b4160", size = 8251215, upload-time = "2025-12-10T22:55:16.175Z" }, - { url = "https://files.pythonhosted.org/packages/fd/14/baad3222f424b19ce6ad243c71de1ad9ec6b2e4eb1e458a48fdc6d120401/matplotlib-3.10.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2b336e2d91a3d7006864e0990c83b216fcdca64b5a6484912902cef87313d78", size = 8139625, upload-time = "2025-12-10T22:55:17.712Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a0/7024215e95d456de5883e6732e708d8187d9753a21d32f8ddb3befc0c445/matplotlib-3.10.8-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:efb30e3baaea72ce5928e32bab719ab4770099079d66726a62b11b1ef7273be4", size = 8712614, upload-time = "2025-12-10T22:55:20.8Z" }, - { url = "https://files.pythonhosted.org/packages/5a/f4/b8347351da9a5b3f41e26cf547252d861f685c6867d179a7c9d60ad50189/matplotlib-3.10.8-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d56a1efd5bfd61486c8bc968fa18734464556f0fb8e51690f4ac25d85cbbbbc2", size = 9540997, upload-time = "2025-12-10T22:55:23.258Z" }, - { url = 
"https://files.pythonhosted.org/packages/9e/c0/c7b914e297efe0bc36917bf216b2acb91044b91e930e878ae12981e461e5/matplotlib-3.10.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:238b7ce5717600615c895050239ec955d91f321c209dd110db988500558e70d6", size = 9596825, upload-time = "2025-12-10T22:55:25.217Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d3/a4bbc01c237ab710a1f22b4da72f4ff6d77eb4c7735ea9811a94ae239067/matplotlib-3.10.8-cp311-cp311-win_amd64.whl", hash = "sha256:18821ace09c763ec93aef5eeff087ee493a24051936d7b9ebcad9662f66501f9", size = 8135090, upload-time = "2025-12-10T22:55:27.162Z" }, - { url = "https://files.pythonhosted.org/packages/89/dd/a0b6588f102beab33ca6f5218b31725216577b2a24172f327eaf6417d5c9/matplotlib-3.10.8-cp311-cp311-win_arm64.whl", hash = "sha256:bab485bcf8b1c7d2060b4fcb6fc368a9e6f4cd754c9c2fea281f4be21df394a2", size = 8012377, upload-time = "2025-12-10T22:55:29.185Z" }, - { url = "https://files.pythonhosted.org/packages/9e/67/f997cdcbb514012eb0d10cd2b4b332667997fb5ebe26b8d41d04962fa0e6/matplotlib-3.10.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:64fcc24778ca0404ce0cb7b6b77ae1f4c7231cdd60e6778f999ee05cbd581b9a", size = 8260453, upload-time = "2025-12-10T22:55:30.709Z" }, - { url = "https://files.pythonhosted.org/packages/7e/65/07d5f5c7f7c994f12c768708bd2e17a4f01a2b0f44a1c9eccad872433e2e/matplotlib-3.10.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b9a5ca4ac220a0cdd1ba6bcba3608547117d30468fefce49bb26f55c1a3d5c58", size = 8148321, upload-time = "2025-12-10T22:55:33.265Z" }, - { url = "https://files.pythonhosted.org/packages/3e/f3/c5195b1ae57ef85339fd7285dfb603b22c8b4e79114bae5f4f0fcf688677/matplotlib-3.10.8-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3ab4aabc72de4ff77b3ec33a6d78a68227bf1123465887f9905ba79184a1cc04", size = 8716944, upload-time = "2025-12-10T22:55:34.922Z" }, - { url = 
"https://files.pythonhosted.org/packages/00/f9/7638f5cc82ec8a7aa005de48622eecc3ed7c9854b96ba15bd76b7fd27574/matplotlib-3.10.8-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:24d50994d8c5816ddc35411e50a86ab05f575e2530c02752e02538122613371f", size = 9550099, upload-time = "2025-12-10T22:55:36.789Z" }, - { url = "https://files.pythonhosted.org/packages/57/61/78cd5920d35b29fd2a0fe894de8adf672ff52939d2e9b43cb83cd5ce1bc7/matplotlib-3.10.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:99eefd13c0dc3b3c1b4d561c1169e65fe47aab7b8158754d7c084088e2329466", size = 9613040, upload-time = "2025-12-10T22:55:38.715Z" }, - { url = "https://files.pythonhosted.org/packages/30/4e/c10f171b6e2f44d9e3a2b96efa38b1677439d79c99357600a62cc1e9594e/matplotlib-3.10.8-cp312-cp312-win_amd64.whl", hash = "sha256:dd80ecb295460a5d9d260df63c43f4afbdd832d725a531f008dad1664f458adf", size = 8142717, upload-time = "2025-12-10T22:55:41.103Z" }, - { url = "https://files.pythonhosted.org/packages/f1/76/934db220026b5fef85f45d51a738b91dea7d70207581063cd9bd8fafcf74/matplotlib-3.10.8-cp312-cp312-win_arm64.whl", hash = "sha256:3c624e43ed56313651bc18a47f838b60d7b8032ed348911c54906b130b20071b", size = 8012751, upload-time = "2025-12-10T22:55:42.684Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b9/15fd5541ef4f5b9a17eefd379356cf12175fe577424e7b1d80676516031a/matplotlib-3.10.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3f2e409836d7f5ac2f1c013110a4d50b9f7edc26328c108915f9075d7d7a91b6", size = 8261076, upload-time = "2025-12-10T22:55:44.648Z" }, - { url = "https://files.pythonhosted.org/packages/8d/a0/2ba3473c1b66b9c74dc7107c67e9008cb1782edbe896d4c899d39ae9cf78/matplotlib-3.10.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56271f3dac49a88d7fca5060f004d9d22b865f743a12a23b1e937a0be4818ee1", size = 8148794, upload-time = "2025-12-10T22:55:46.252Z" }, - { url = 
"https://files.pythonhosted.org/packages/75/97/a471f1c3eb1fd6f6c24a31a5858f443891d5127e63a7788678d14e249aea/matplotlib-3.10.8-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a0a7f52498f72f13d4a25ea70f35f4cb60642b466cbb0a9be951b5bc3f45a486", size = 8718474, upload-time = "2025-12-10T22:55:47.864Z" }, - { url = "https://files.pythonhosted.org/packages/01/be/cd478f4b66f48256f42927d0acbcd63a26a893136456cd079c0cc24fbabf/matplotlib-3.10.8-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:646d95230efb9ca614a7a594d4fcacde0ac61d25e37dd51710b36477594963ce", size = 9549637, upload-time = "2025-12-10T22:55:50.048Z" }, - { url = "https://files.pythonhosted.org/packages/5d/7c/8dc289776eae5109e268c4fb92baf870678dc048a25d4ac903683b86d5bf/matplotlib-3.10.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f89c151aab2e2e23cb3fe0acad1e8b82841fd265379c4cecd0f3fcb34c15e0f6", size = 9613678, upload-time = "2025-12-10T22:55:52.21Z" }, - { url = "https://files.pythonhosted.org/packages/64/40/37612487cc8a437d4dd261b32ca21fe2d79510fe74af74e1f42becb1bdb8/matplotlib-3.10.8-cp313-cp313-win_amd64.whl", hash = "sha256:e8ea3e2d4066083e264e75c829078f9e149fa119d27e19acd503de65e0b13149", size = 8142686, upload-time = "2025-12-10T22:55:54.253Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/8d8a8730e968185514680c2a6625943f70269509c3dcfc0dcf7d75928cb8/matplotlib-3.10.8-cp313-cp313-win_arm64.whl", hash = "sha256:c108a1d6fa78a50646029cb6d49808ff0fc1330fda87fa6f6250c6b5369b6645", size = 8012917, upload-time = "2025-12-10T22:55:56.268Z" }, - { url = "https://files.pythonhosted.org/packages/b5/27/51fe26e1062f298af5ef66343d8ef460e090a27fea73036c76c35821df04/matplotlib-3.10.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ad3d9833a64cf48cc4300f2b406c3d0f4f4724a91c0bd5640678a6ba7c102077", size = 8305679, upload-time = "2025-12-10T22:55:57.856Z" }, - { url = 
"https://files.pythonhosted.org/packages/2c/1e/4de865bc591ac8e3062e835f42dd7fe7a93168d519557837f0e37513f629/matplotlib-3.10.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:eb3823f11823deade26ce3b9f40dcb4a213da7a670013929f31d5f5ed1055b22", size = 8198336, upload-time = "2025-12-10T22:55:59.371Z" }, - { url = "https://files.pythonhosted.org/packages/c6/cb/2f7b6e75fb4dce87ef91f60cac4f6e34f4c145ab036a22318ec837971300/matplotlib-3.10.8-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d9050fee89a89ed57b4fb2c1bfac9a3d0c57a0d55aed95949eedbc42070fea39", size = 8731653, upload-time = "2025-12-10T22:56:01.032Z" }, - { url = "https://files.pythonhosted.org/packages/46/b3/bd9c57d6ba670a37ab31fb87ec3e8691b947134b201f881665b28cc039ff/matplotlib-3.10.8-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b44d07310e404ba95f8c25aa5536f154c0a8ec473303535949e52eb71d0a1565", size = 9561356, upload-time = "2025-12-10T22:56:02.95Z" }, - { url = "https://files.pythonhosted.org/packages/c0/3d/8b94a481456dfc9dfe6e39e93b5ab376e50998cddfd23f4ae3b431708f16/matplotlib-3.10.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0a33deb84c15ede243aead39f77e990469fff93ad1521163305095b77b72ce4a", size = 9614000, upload-time = "2025-12-10T22:56:05.411Z" }, - { url = "https://files.pythonhosted.org/packages/bd/cd/bc06149fe5585ba800b189a6a654a75f1f127e8aab02fd2be10df7fa500c/matplotlib-3.10.8-cp313-cp313t-win_amd64.whl", hash = "sha256:3a48a78d2786784cc2413e57397981fb45c79e968d99656706018d6e62e57958", size = 8220043, upload-time = "2025-12-10T22:56:07.551Z" }, - { url = "https://files.pythonhosted.org/packages/e3/de/b22cf255abec916562cc04eef457c13e58a1990048de0c0c3604d082355e/matplotlib-3.10.8-cp313-cp313t-win_arm64.whl", hash = "sha256:15d30132718972c2c074cd14638c7f4592bd98719e2308bccea40e0538bc0cb5", size = 8062075, upload-time = "2025-12-10T22:56:09.178Z" }, - { url = 
"https://files.pythonhosted.org/packages/04/30/3afaa31c757f34b7725ab9d2ba8b48b5e89c2019c003e7d0ead143aabc5a/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6da7c2ce169267d0d066adcf63758f0604aa6c3eebf67458930f9d9b79ad1db1", size = 8249198, upload-time = "2025-12-10T22:56:45.584Z" }, - { url = "https://files.pythonhosted.org/packages/48/2f/6334aec331f57485a642a7c8be03cb286f29111ae71c46c38b363230063c/matplotlib-3.10.8-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:9153c3292705be9f9c64498a8872118540c3f4123d1a1c840172edf262c8be4a", size = 8136817, upload-time = "2025-12-10T22:56:47.339Z" }, - { url = "https://files.pythonhosted.org/packages/73/e4/6d6f14b2a759c622f191b2d67e9075a3f56aaccb3be4bb9bb6890030d0a0/matplotlib-3.10.8-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1ae029229a57cd1e8fe542485f27e7ca7b23aa9e8944ddb4985d0bc444f1eca2", size = 8713867, upload-time = "2025-12-10T22:56:48.954Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/63/1b/4be5be87d43d327a0cf4de1a56e86f7f84c89312452406cf122efe2839e6/matplotlib-3.10.9.tar.gz", hash = "sha256:fd66508e8c6877d98e586654b608a0456db8d7e8a546eb1e2600efd957302358", size = 34811233, upload-time = "2026-04-24T00:14:13.539Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/8c/290f021104741fea63769c31494f5324c0cd249bf536a65a4350767b1f22/matplotlib-3.10.9-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:68cfdcede415f7c8f5577b03303dd94526cdb6d11036cecdc205e08733b2d2bb", size = 8306860, upload-time = "2026-04-24T00:12:01.207Z" }, + { url = "https://files.pythonhosted.org/packages/51/18/325cd32ece1120d1da51cc4e4294c6580190699490183fc2fe8cb6d61ec5/matplotlib-3.10.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfca0129678bd56379db26c52b5d77ed7de314c047492fbdc763aa7501710cfb", size = 8199254, upload-time = "2026-04-24T00:12:04.239Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/db/e28c1b83e3680740aa78925f5fb2ae4d16207207419ad75ea9fe604f8676/matplotlib-3.10.9-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8e436d155fa8a3399dc62683f8f5d0e2e50d25d0144a73edd73f82eec8f4abfb", size = 8777092, upload-time = "2026-04-24T00:12:06.793Z" }, + { url = "https://files.pythonhosted.org/packages/55/fa/3ce7adfe9ba101748f465211660d9c6374c876b671bdb8c2bb6d347e8b94/matplotlib-3.10.9-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56fc0bd271b00025c6edfdc7c2dcd247372c8e1544971d62e1dc7c17367e8bf9", size = 9595691, upload-time = "2026-04-24T00:12:09.706Z" }, + { url = "https://files.pythonhosted.org/packages/36/c4/6960a76686ed668f2c60f84e9799ba4c0d56abdb36b1577b60c1d061d1ec/matplotlib-3.10.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a5a6104ed666402ba5106d7f36e0e0cdca4e8d7fa4d39708ca88019e2835a2eb", size = 9659771, upload-time = "2026-04-24T00:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0d/271aace3342157c64700c9ff4c59c7b392f3dbab393692e8db6fbe7ab96c/matplotlib-3.10.9-cp311-cp311-win_amd64.whl", hash = "sha256:d730e984eddf56974c3e72b6129c7ca462ac38dc624338f4b0b23eb23ecba00f", size = 8205112, upload-time = "2026-04-24T00:12:15.773Z" }, + { url = "https://files.pythonhosted.org/packages/e2/ee/cb57ad4754f3e7b9174ce6ce66d9205fb827067e48a9f58ac09d7e7d6b77/matplotlib-3.10.9-cp311-cp311-win_arm64.whl", hash = "sha256:51bf0ddbdc598e060d46c16b5590708f81a1624cefbaaf62f6a81bf9285b8c80", size = 8132310, upload-time = "2026-04-24T00:12:18.645Z" }, + { url = "https://files.pythonhosted.org/packages/35/c6/5581e26c72233ebb2a2a6fed2d24fb7c66b4700120b813f51b0555acf0b6/matplotlib-3.10.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f0c3c28d9fbcc1fe7a03be236d73430cf6409c41fb2383a7ac52fe932b072cb1", size = 8319908, upload-time = "2026-04-24T00:12:21.323Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/18/4880dd762e40cd360c1bf06e890c5a97b997e91cb324602b1a19950ad5ce/matplotlib-3.10.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cb28c2bd769aa3e98322c6ab09854cbcc52ab69d2759d681bba3e327b2b320", size = 8216016, upload-time = "2026-04-24T00:12:23.4Z" }, + { url = "https://files.pythonhosted.org/packages/32/91/d024616abdba99e83120e07a20658976f6a343646710760c4a51df126029/matplotlib-3.10.9-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ae20801130378b82d647ff5047c07316295b68dc054ca6b3c13519d0ea624285", size = 8789336, upload-time = "2026-04-24T00:12:26.096Z" }, + { url = "https://files.pythonhosted.org/packages/5c/04/030a2f61ef2158f5e4c259487a92ac877732499fb33d871585d89e03c42d/matplotlib-3.10.9-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c63ebcd8b4b169eb2f5c200552ae6b8be8999a005b6b507ed76fb8d7d674fe2", size = 9604602, upload-time = "2026-04-24T00:12:29.052Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c2/541e4d09d87bb6b5830fc28b4c887a9a8cf4e1c6cee698a8c05552ae2003/matplotlib-3.10.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d75d11c949914165976c621b2324f9ef162af7ebf4b057ddf95dd1dba7e5edcf", size = 9670966, upload-time = "2026-04-24T00:12:32.131Z" }, + { url = "https://files.pythonhosted.org/packages/04/a1/4571fc46e7702de8d0c2dc54ad1b2f8e29328dea3ee90831181f7353d93c/matplotlib-3.10.9-cp312-cp312-win_amd64.whl", hash = "sha256:d091f9d758b34aaaaa6331d13574bf01891d903b3dec59bfff458ef7551de5d6", size = 8217462, upload-time = "2026-04-24T00:12:35.226Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d0/2269edb12aa30c13c8bcc9382892e39943ce1d28aab4ec296e0381798e81/matplotlib-3.10.9-cp312-cp312-win_arm64.whl", hash = "sha256:10cc5ce06d10231c36f40e875f3c7e8050362a4ee8f0ee5d29a6b3277d57bb42", size = 8136688, upload-time = "2026-04-24T00:12:37.442Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/d3/8d4f6afbecb49fc04e060a57c0fce39ea51cc163a6bd87303ccd698e4fa6/matplotlib-3.10.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b580440f1ff81a0e34122051a3dfabb7e4b7f9e380629929bde0eff9af72165f", size = 8320331, upload-time = "2026-04-24T00:12:39.688Z" }, + { url = "https://files.pythonhosted.org/packages/63/d9/9e14bc7564bf92d5ffa801ae5fac819ce74b925dfb55e3ebde61a3bbad3e/matplotlib-3.10.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b1b745c489cd1a77a0dc1120a05dc87af9798faebc913601feb8c73d89bf2d1e", size = 8216461, upload-time = "2026-04-24T00:12:42.494Z" }, + { url = "https://files.pythonhosted.org/packages/8a/17/4402d0d14ccf1dfc70932600b68097fbbf9c898a4871d2cbbe79c7801a32/matplotlib-3.10.9-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8f3bcac1ca5ed000a6f4337d47ba67dfddf37ed6a46c15fd7f014997f7bf865f", size = 8790091, upload-time = "2026-04-24T00:12:44.789Z" }, + { url = "https://files.pythonhosted.org/packages/3e/0b/322aeec06dd9b91411f92028b37d447342770a24392aa4813e317064dad5/matplotlib-3.10.9-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a8d66a55def891c33147ba3ba9bfcabf0b526a43764c818acbb4525e5ed0838", size = 9605027, upload-time = "2026-04-24T00:12:47.583Z" }, + { url = "https://files.pythonhosted.org/packages/74/88/5f13482f55e7b00bcfc09838b093c2456e1379978d2a146844aae05350ad/matplotlib-3.10.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d843374407c4017a6403b59c6c81606773d136f3259d5b6da3131bc814542cc2", size = 9671269, upload-time = "2026-04-24T00:12:50.878Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/0840fd2f93da988ec660b8ad1984abe9f25d2aed22a5e394ff1c68c88307/matplotlib-3.10.9-cp313-cp313-win_amd64.whl", hash = "sha256:f4399f64b3e94cd500195490972ae1ee81170df1636fa15364d157d5bdd7b921", size = 8217588, upload-time = "2026-04-24T00:12:53.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/b9/d706d06dd605c49b9f83a2aed8c13e3e5db70697d7a80b7e3d7915de6b17/matplotlib-3.10.9-cp313-cp313-win_arm64.whl", hash = "sha256:ba7b3b8ef09eab7df0e86e9ae086faa433efbfbdb46afcb3aa16aabf779469a8", size = 8136913, upload-time = "2026-04-24T00:12:56.501Z" }, + { url = "https://files.pythonhosted.org/packages/9b/45/6e32d96978264c8ca8c4b1010adb955a1a49cfaf314e212bbc8908f04a61/matplotlib-3.10.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:09218df8a93712bd6ea133e83a153c755448cf7868316c531cffcc43f69d1cc9", size = 8368019, upload-time = "2026-04-24T00:12:58.896Z" }, + { url = "https://files.pythonhosted.org/packages/86/0a/c8e3d3bba245f0f7fc424937f8ff7ef77291a36af3edb97ccd78aa93d84f/matplotlib-3.10.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:82368699727bfb7b0182e1aa13082e3c08e092fa1a25d3e1fd92405bff96f6d4", size = 8264645, upload-time = "2026-04-24T00:13:01.406Z" }, + { url = "https://files.pythonhosted.org/packages/3d/aa/5bf5a14fe4fed73a4209a155606f8096ff797aad89c6c35179026571133e/matplotlib-3.10.9-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3225f4e1edcb8c86c884ddf79ebe20ecd0a67d30188f279897554ccd8fded4dc", size = 8802194, upload-time = "2026-04-24T00:13:03.702Z" }, + { url = "https://files.pythonhosted.org/packages/dd/5e/b4be852d6bba6fd15893fadf91ff26ae49cb91aac789e95dde9d342e664f/matplotlib-3.10.9-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de2445a0c6690d21b7eb6ce071cebad6d40a2e9bdf10d039074a96ba19797b99", size = 9622684, upload-time = "2026-04-24T00:13:06.647Z" }, + { url = "https://files.pythonhosted.org/packages/4c/3d/ed428c971139112ef730f62770654d609467346d09d4b62617e1afd68a5a/matplotlib-3.10.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:b2b9516251cb89ff618d757daec0e2ed1bf21248013844a853d87ef85ab3081d", size = 9680790, upload-time = "2026-04-24T00:13:10.009Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/09/052e884aaf2b985c63cb79f715f1d5b6a3eaa7de78f6a52b9dbc077d5b53/matplotlib-3.10.9-cp313-cp313t-win_amd64.whl", hash = "sha256:e9fae004b941b23ff2edcf1567a857ed77bafc8086ffa258190462328434faf8", size = 8287571, upload-time = "2026-04-24T00:13:13.087Z" }, + { url = "https://files.pythonhosted.org/packages/f4/38/ae27288e788c35a4250491422f3db7750366fc8c97d6f36fbdecfc1f5518/matplotlib-3.10.9-cp313-cp313t-win_arm64.whl", hash = "sha256:6b63d9c7c769b88ab81e10dc86e4e0607cf56817b9f9e6cf24b2a5f1693b8e38", size = 8188292, upload-time = "2026-04-24T00:13:15.546Z" }, + { url = "https://files.pythonhosted.org/packages/63/e2/9f66ca6a651a52abfe0d4964ce01439ed34f3f1e119de10ff3a07f403043/matplotlib-3.10.9-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:42fb814efabe95c06c1994d8ab5a8385f43a249e23badd3ba931d4308e5bca20", size = 8304420, upload-time = "2026-04-24T00:14:04.57Z" }, + { url = "https://files.pythonhosted.org/packages/e8/e8/467c03568218792906aa87b5e7bb379b605e056ed0c74fe00c051786d925/matplotlib-3.10.9-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f76e640a5268850bfda54b5131b1b1941cc685e42c5fa98ed9f2d64038308cba", size = 8197981, upload-time = "2026-04-24T00:14:07.233Z" }, + { url = "https://files.pythonhosted.org/packages/6f/87/afead29192170917537934c6aff4b008c805fff7b1ccea0c79120d96beda/matplotlib-3.10.9-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3fc0364dfbe1d07f6d15c5ebd0c5bf89e126916e5a8667dd4a7a6e84c36653d4", size = 8774002, upload-time = "2026-04-24T00:14:09.816Z" }, ] [[package]] @@ -1747,8 +1722,7 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "importlib-metadata" }, { name = "ipykernel" }, - { name = "ipython", version = "9.10.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, - { name = "ipython", version = "9.12.0", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version >= '3.12'" }, + { name = "ipython" }, { name = "jupyter-cache" }, { name = "myst-parser" }, { name = "nbclient" }, @@ -1973,11 +1947,11 @@ wheels = [ [[package]] name = "packaging" -version = "26.0" +version = "26.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/f1/e7a6dd94a8d4a5626c03e4e99c87f241ba9e350cd9e6d75123f992427270/packaging-26.2.tar.gz", hash = "sha256:ff452ff5a3e828ce110190feff1178bb1f2ea2281fa2075aadb987c2fb221661", size = 228134, upload-time = "2026-04-24T20:15:23.917Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, + { url = "https://files.pythonhosted.org/packages/df/b2/87e62e8c3e2f4b32e5fe99e0b86d576da1312593b39f47d8ceef365e95ed/packaging-26.2-py3-none-any.whl", hash = "sha256:5fc45236b9446107ff2415ce77c807cee2862cb6fac22b8a73826d0693b0980e", size = 100195, upload-time = "2026-04-24T20:15:22.081Z" }, ] [[package]] @@ -2127,7 +2101,7 @@ wheels = [ [[package]] name = "pre-commit" -version = "4.5.1" +version = "4.6.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cfgv" }, @@ -2136,9 +2110,9 @@ dependencies = [ { name = "pyyaml" }, { name = "virtualenv" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, 
upload-time = "2025-12-16T21:14:33.552Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/22/2de9408ac81acbb8a7d05d4cc064a152ccf33b3d480ebe0cd292153db239/pre_commit-4.6.0.tar.gz", hash = "sha256:718d2208cef53fdc38206e40524a6d4d9576d103eb16f0fec11c875e7716e9d9", size = 198525, upload-time = "2026-04-21T20:31:41.613Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" }, + { url = "https://files.pythonhosted.org/packages/80/6e/4b28b62ecb6aae56769c34a8ff1d661473ec1e9519e2d5f8b2c150086b26/pre_commit-4.6.0-py2.py3-none-any.whl", hash = "sha256:e2cf246f7299edcabcf15f9b0571fdce06058527f0a06535068a86d38089f29b", size = 226472, upload-time = "2026-04-21T20:31:40.092Z" }, ] [[package]] @@ -2264,38 +2238,38 @@ wheels = [ [[package]] name = "pyarrow" -version = "23.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/88/22/134986a4cc224d593c1afde5494d18ff629393d74cc2eddb176669f234a4/pyarrow-23.0.1.tar.gz", hash = "sha256:b8c5873e33440b2bc2f4a79d2b47017a89c5a24116c055625e6f2ee50523f019", size = 1167336, upload-time = "2026-02-16T10:14:12.39Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/41/8e6b6ef7e225d4ceead8459427a52afdc23379768f54dd3566014d7618c1/pyarrow-23.0.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6f0147ee9e0386f519c952cc670eb4a8b05caa594eeffe01af0e25f699e4e9bb", size = 34302230, upload-time = "2026-02-16T10:09:03.859Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4a/1472c00392f521fea03ae93408bf445cc7bfa1ab81683faf9bc188e36629/pyarrow-23.0.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:0ae6e17c828455b6265d590100c295193f93cc5675eb0af59e49dbd00d2de350", size = 35850050, upload-time = 
"2026-02-16T10:09:11.877Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b2/bd1f2f05ded56af7f54d702c8364c9c43cd6abb91b0e9933f3d77b4f4132/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:fed7020203e9ef273360b9e45be52a2a47d3103caf156a30ace5247ffb51bdbd", size = 44491918, upload-time = "2026-02-16T10:09:18.144Z" }, - { url = "https://files.pythonhosted.org/packages/0b/62/96459ef5b67957eac38a90f541d1c28833d1b367f014a482cb63f3b7cd2d/pyarrow-23.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:26d50dee49d741ac0e82185033488d28d35be4d763ae6f321f97d1140eb7a0e9", size = 47562811, upload-time = "2026-02-16T10:09:25.792Z" }, - { url = "https://files.pythonhosted.org/packages/7d/94/1170e235add1f5f45a954e26cd0e906e7e74e23392dcb560de471f7366ec/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3c30143b17161310f151f4a2bcfe41b5ff744238c1039338779424e38579d701", size = 48183766, upload-time = "2026-02-16T10:09:34.645Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/39a42af4570377b99774cdb47f63ee6c7da7616bd55b3d5001aa18edfe4f/pyarrow-23.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db2190fa79c80a23fdd29fef4b8992893f024ae7c17d2f5f4db7171fa30c2c78", size = 50607669, upload-time = "2026-02-16T10:09:44.153Z" }, - { url = "https://files.pythonhosted.org/packages/00/ca/db94101c187f3df742133ac837e93b1f269ebdac49427f8310ee40b6a58f/pyarrow-23.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:f00f993a8179e0e1c9713bcc0baf6d6c01326a406a9c23495ec1ba9c9ebf2919", size = 27527698, upload-time = "2026-02-16T10:09:50.263Z" }, - { url = "https://files.pythonhosted.org/packages/9a/4b/4166bb5abbfe6f750fc60ad337c43ecf61340fa52ab386da6e8dbf9e63c4/pyarrow-23.0.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:f4b0dbfa124c0bb161f8b5ebb40f1a680b70279aa0c9901d44a2b5a20806039f", size = 34214575, upload-time = "2026-02-16T10:09:56.225Z" }, - { url = 
"https://files.pythonhosted.org/packages/e1/da/3f941e3734ac8088ea588b53e860baeddac8323ea40ce22e3d0baa865cc9/pyarrow-23.0.1-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:7707d2b6673f7de054e2e83d59f9e805939038eebe1763fe811ee8fa5c0cd1a7", size = 35832540, upload-time = "2026-02-16T10:10:03.428Z" }, - { url = "https://files.pythonhosted.org/packages/88/7c/3d841c366620e906d54430817531b877ba646310296df42ef697308c2705/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:86ff03fb9f1a320266e0de855dee4b17da6794c595d207f89bba40d16b5c78b9", size = 44470940, upload-time = "2026-02-16T10:10:10.704Z" }, - { url = "https://files.pythonhosted.org/packages/2c/a5/da83046273d990f256cb79796a190bbf7ec999269705ddc609403f8c6b06/pyarrow-23.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:813d99f31275919c383aab17f0f455a04f5a429c261cc411b1e9a8f5e4aaaa05", size = 47586063, upload-time = "2026-02-16T10:10:17.95Z" }, - { url = "https://files.pythonhosted.org/packages/5b/3c/b7d2ebcff47a514f47f9da1e74b7949138c58cfeb108cdd4ee62f43f0cf3/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bf5842f960cddd2ef757d486041d57c96483efc295a8c4a0e20e704cbbf39c67", size = 48173045, upload-time = "2026-02-16T10:10:25.363Z" }, - { url = "https://files.pythonhosted.org/packages/43/b2/b40961262213beaba6acfc88698eb773dfce32ecdf34d19291db94c2bd73/pyarrow-23.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564baf97c858ecc03ec01a41062e8f4698abc3e6e2acd79c01c2e97880a19730", size = 50621741, upload-time = "2026-02-16T10:10:33.477Z" }, - { url = "https://files.pythonhosted.org/packages/f6/70/1fdda42d65b28b078e93d75d371b2185a61da89dda4def8ba6ba41ebdeb4/pyarrow-23.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:07deae7783782ac7250989a7b2ecde9b3c343a643f82e8a4df03d93b633006f0", size = 27620678, upload-time = "2026-02-16T10:10:39.31Z" }, - { url = 
"https://files.pythonhosted.org/packages/47/10/2cbe4c6f0fb83d2de37249567373d64327a5e4d8db72f486db42875b08f6/pyarrow-23.0.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:6b8fda694640b00e8af3c824f99f789e836720aa8c9379fb435d4c4953a756b8", size = 34210066, upload-time = "2026-02-16T10:10:45.487Z" }, - { url = "https://files.pythonhosted.org/packages/cb/4f/679fa7e84dadbaca7a65f7cdba8d6c83febbd93ca12fa4adf40ba3b6362b/pyarrow-23.0.1-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:8ff51b1addc469b9444b7c6f3548e19dc931b172ab234e995a60aea9f6e6025f", size = 35825526, upload-time = "2026-02-16T10:10:52.266Z" }, - { url = "https://files.pythonhosted.org/packages/f9/63/d2747d930882c9d661e9398eefc54f15696547b8983aaaf11d4a2e8b5426/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:71c5be5cbf1e1cb6169d2a0980850bccb558ddc9b747b6206435313c47c37677", size = 44473279, upload-time = "2026-02-16T10:11:01.557Z" }, - { url = "https://files.pythonhosted.org/packages/b3/93/10a48b5e238de6d562a411af6467e71e7aedbc9b87f8d3a35f1560ae30fb/pyarrow-23.0.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:9b6f4f17b43bc39d56fec96e53fe89d94bac3eb134137964371b45352d40d0c2", size = 47585798, upload-time = "2026-02-16T10:11:09.401Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/476943001c54ef078dbf9542280e22741219a184a0632862bca4feccd666/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9fc13fc6c403d1337acab46a2c4346ca6c9dec5780c3c697cf8abfd5e19b6b37", size = 48179446, upload-time = "2026-02-16T10:11:17.781Z" }, - { url = "https://files.pythonhosted.org/packages/4b/b6/5dd0c47b335fcd8edba9bfab78ad961bd0fd55ebe53468cc393f45e0be60/pyarrow-23.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c16ed4f53247fa3ffb12a14d236de4213a4415d127fe9cebed33d51671113e2", size = 50623972, upload-time = "2026-02-16T10:11:26.185Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/09/a532297c9591a727d67760e2e756b83905dd89adb365a7f6e9c72578bcc1/pyarrow-23.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:cecfb12ef629cf6be0b1887f9f86463b0dd3dc3195ae6224e74006be4736035a", size = 27540749, upload-time = "2026-02-16T10:12:23.297Z" }, - { url = "https://files.pythonhosted.org/packages/a5/8e/38749c4b1303e6ae76b3c80618f84861ae0c55dd3c2273842ea6f8258233/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:29f7f7419a0e30264ea261fdc0e5fe63ce5a6095003db2945d7cd78df391a7e1", size = 34471544, upload-time = "2026-02-16T10:11:32.535Z" }, - { url = "https://files.pythonhosted.org/packages/a3/73/f237b2bc8c669212f842bcfd842b04fc8d936bfc9d471630569132dc920d/pyarrow-23.0.1-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:33d648dc25b51fd8055c19e4261e813dfc4d2427f068bcecc8b53d01b81b0500", size = 35949911, upload-time = "2026-02-16T10:11:39.813Z" }, - { url = "https://files.pythonhosted.org/packages/0c/86/b912195eee0903b5611bf596833def7d146ab2d301afeb4b722c57ffc966/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:cd395abf8f91c673dd3589cadc8cc1ee4e8674fa61b2e923c8dd215d9c7d1f41", size = 44520337, upload-time = "2026-02-16T10:11:47.764Z" }, - { url = "https://files.pythonhosted.org/packages/69/c2/f2a717fb824f62d0be952ea724b4f6f9372a17eed6f704b5c9526f12f2f1/pyarrow-23.0.1-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:00be9576d970c31defb5c32eb72ef585bf600ef6d0a82d5eccaae96639cf9d07", size = 47548944, upload-time = "2026-02-16T10:11:56.607Z" }, - { url = "https://files.pythonhosted.org/packages/84/a7/90007d476b9f0dc308e3bc57b832d004f848fd6c0da601375d20d92d1519/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c2139549494445609f35a5cda4eb94e2c9e4d704ce60a095b342f82460c73a83", size = 48236269, upload-time = "2026-02-16T10:12:04.47Z" }, - { url = 
"https://files.pythonhosted.org/packages/b0/3f/b16fab3e77709856eb6ac328ce35f57a6d4a18462c7ca5186ef31b45e0e0/pyarrow-23.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7044b442f184d84e2351e5084600f0d7343d6117aabcbc1ac78eb1ae11eb4125", size = 50604794, upload-time = "2026-02-16T10:12:11.797Z" }, - { url = "https://files.pythonhosted.org/packages/e9/a1/22df0620a9fac31d68397a75465c344e83c3dfe521f7612aea33e27ab6c0/pyarrow-23.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a35581e856a2fafa12f3f54fce4331862b1cfb0bef5758347a858a4aa9d6bae8", size = 27660642, upload-time = "2026-02-16T10:12:17.746Z" }, +version = "24.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/91/13/13e1069b351bdc3881266e11147ffccf687505dbb0ea74036237f5d454a5/pyarrow-24.0.0.tar.gz", hash = "sha256:85fe721a14dd823aca09127acbb06c3ca723efbd436c004f16bca601b04dcc83", size = 1180261, upload-time = "2026-04-21T10:51:25.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/c9/a47ab7ece0d86cbe6678418a0fbd1ac4bb493b9184a3891dfa0e7f287ae0/pyarrow-24.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:b0e131f880cda8d04e076cee175a46fc0e8bc8b65c99c6c09dff6669335fde74", size = 35068898, upload-time = "2026-04-21T10:46:36.599Z" }, + { url = "https://files.pythonhosted.org/packages/d1/bc/8db86617a9a58008acf8913d6fed68ea2a46acb6de928db28d724c891a68/pyarrow-24.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:1b2fe7f9a5566401a0ef2571f197eb92358925c1f0c8dba305d6e43ea0871bb3", size = 36679915, upload-time = "2026-04-21T10:46:42.602Z" }, + { url = "https://files.pythonhosted.org/packages/eb/8e/fb178720400ef69db251eb4a9c3ccf4af269bc1feb5055529b8fc87170d1/pyarrow-24.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:0b3537c00fb8d384f15ac1e79b6eb6db04a16514c8c1d22e59a9b95c8ba42868", size = 45697931, upload-time = "2026-04-21T10:46:48.403Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/27/99c42abe8e21b44f4917f62631f3aa31404882a2c41d8a4cd5c110e13d52/pyarrow-24.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:14e31a3c9e35f1ab6356c6378f6f72830e6d2d5f1791df3774a7b097d18a6a1e", size = 48837449, upload-time = "2026-04-21T10:46:55.329Z" }, + { url = "https://files.pythonhosted.org/packages/36/b6/333749e2666e9032891125bf9c691146e92901bece62030ac1430e2e7c88/pyarrow-24.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7d9a514e73bc42711e6a35aaccf3587c520024fe0a25d830a1a8a27c15f4f57", size = 49395949, upload-time = "2026-04-21T10:47:01.869Z" }, + { url = "https://files.pythonhosted.org/packages/17/25/c5201706a2dd374e8ba6ee3fd7a8c89fb7ffc16eed5217a91fd2bd7f7626/pyarrow-24.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b196eb3f931862af3fa84c2a253514d859c08e0d8fe020e07be12e75a5a9780c", size = 51912986, upload-time = "2026-04-21T10:47:09.872Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d2/4d1bbba65320b21a49678d6fbdc6ff7c649251359fdcfc03568c4136231d/pyarrow-24.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:35405aecb474e683fb36af650618fd5340ee5471fc65a21b36076a18bbc6c981", size = 27255371, upload-time = "2026-04-21T10:47:15.943Z" }, + { url = "https://files.pythonhosted.org/packages/b4/a9/9686d9f07837f91f775e8932659192e02c74f9d8920524b480b85212cc68/pyarrow-24.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:6233c9ed9ab9d1db47de57d9753256d9dcffbf42db341576099f0fd9f6bf4810", size = 34981559, upload-time = "2026-04-21T10:47:22.17Z" }, + { url = "https://files.pythonhosted.org/packages/80/b6/0ddf0e9b6ead3474ab087ae598c76b031fc45532bf6a63f3a553440fb258/pyarrow-24.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f7616236ec1bc2b15bfdec22a71ab38851c86f8f05ff64f379e1278cf20c634a", size = 36663654, upload-time = "2026-04-21T10:47:28.315Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/3b/926382efe8ce27ba729071d3566ade6dfb86bdf112f366000196b2f5780a/pyarrow-24.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:1617043b99bd33e5318ae18eb2919af09c71322ef1ca46566cdafc6e6712fb66", size = 45679394, upload-time = "2026-04-21T10:47:34.821Z" }, + { url = "https://files.pythonhosted.org/packages/b3/7a/829f7d9dfd37c207206081d6dad474d81dde29952401f07f2ba507814818/pyarrow-24.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6165461f55ef6314f026de6638d661188e3455d3ec49834556a0ebbdbace18bb", size = 48863122, upload-time = "2026-04-21T10:47:42.056Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e8/f88ce625fe8babaae64e8db2d417c7653adb3019b08aae85c5ed787dc816/pyarrow-24.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3b13dedfe76a0ad2d1d859b0811b53827a4e9d93a0bcb05cf59333ab4980cc7e", size = 49376032, upload-time = "2026-04-21T10:47:48.967Z" }, + { url = "https://files.pythonhosted.org/packages/36/7a/82c363caa145fff88fb475da50d3bf52bb024f61917be5424c3392eaf878/pyarrow-24.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:25ea65d868eb04015cd18e6df2fbe98f07e5bda2abefabcb88fce39a947716f6", size = 51929490, upload-time = "2026-04-21T10:47:55.981Z" }, + { url = "https://files.pythonhosted.org/packages/66/1c/e3e72c8014ad2743ca64a701652c733cc5cbcee15c0463a32a8c55518d9e/pyarrow-24.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:295f0a7f2e242dabd513737cf076007dc5b2d59237e3eca37b05c0c6446f3826", size = 27355660, upload-time = "2026-04-21T10:48:01.718Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d3/a1abf004482026ddc17f4503db227787fa3cfe41ec5091ff20e4fea55e57/pyarrow-24.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:02b001b3ed4723caa44f6cd1af2d5c86aa2cf9971dacc2ffa55b21237713dfba", size = 34976759, upload-time = "2026-04-21T10:48:07.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/4f/4a/34f0a36d28a2dd32225301b79daad44e243dc1a2bb77d43b60749be255c4/pyarrow-24.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:04920d6a71aabd08a0417709efce97d45ea8e6fb733d9ca9ecffb13c67839f68", size = 36658471, upload-time = "2026-04-21T10:48:13.347Z" }, + { url = "https://files.pythonhosted.org/packages/1f/78/543b94712ae8bb1a6023bcc1acf1a740fbff8286747c289cd9468fced2a5/pyarrow-24.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:a964266397740257f16f7bb2e4f08a0c81454004beab8ff59dd531b73610e9f2", size = 45675981, upload-time = "2026-04-21T10:48:20.201Z" }, + { url = "https://files.pythonhosted.org/packages/84/9f/8fb7c222b100d314137fa40ec050de56cd8c6d957d1cfff685ce72f15b17/pyarrow-24.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6f066b179d68c413374294bc1735f68475457c933258df594443bb9d88ddc2a0", size = 48859172, upload-time = "2026-04-21T10:48:27.541Z" }, + { url = "https://files.pythonhosted.org/packages/a7/d3/1ea72538e6c8b3b475ed78d1049a2c518e655761ea50fe1171fc855fcab7/pyarrow-24.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1183baeb14c5f587b1ec52831e665718ce632caab84b7cd6b85fd44f96114495", size = 49385733, upload-time = "2026-04-21T10:48:34.7Z" }, + { url = "https://files.pythonhosted.org/packages/c3/be/c3d8b06a1ba35f2260f8e1f771abbee7d5e345c0937aab90675706b1690a/pyarrow-24.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:806f24b4085453c197a5078218d1ee08783ebbba271badd153d1ae22a3ee804f", size = 51934335, upload-time = "2026-04-21T10:48:42.099Z" }, + { url = "https://files.pythonhosted.org/packages/9c/62/89e07a1e7329d2cde3e3c6994ba0839a24977a2beda8be6005ea3d860b99/pyarrow-24.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:e4505fc6583f7b05ab854934896bcac8253b04ac1171a77dfb73efef92076d91", size = 27271748, upload-time = "2026-04-21T10:49:42.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/17/1a/cff3a59f80b5b1658549d46611b67163f65e0664431c076ad728bf9d5af4/pyarrow-24.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:1a4e45017efbf115032e4475ee876d525e0e36c742214fbe405332480ecd6275", size = 35238554, upload-time = "2026-04-21T10:48:48.526Z" }, + { url = "https://files.pythonhosted.org/packages/a8/99/cce0f42a327bfef2c420fb6078a3eb834826e5d6697bf3009fe11d2ad051/pyarrow-24.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:7986f1fa71cee060ad00758bcc79d3a93bab8559bf978fab9e53472a2e25a17b", size = 36782301, upload-time = "2026-04-21T10:48:55.181Z" }, + { url = "https://files.pythonhosted.org/packages/2a/66/8e560d5ff6793ca29aca213c53eec0dd482dd46cb93b2819e5aab52e4252/pyarrow-24.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:d3e0b61e8efb24ed38898e5cdc5fffa9124be480008d401a1f8071500494ae42", size = 45721929, upload-time = "2026-04-21T10:49:03.676Z" }, + { url = "https://files.pythonhosted.org/packages/27/0c/a26e25505d030716e078d9f16eb74973cbf0b33b672884e9f9da1c83b871/pyarrow-24.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:55a3bc1e3df3b5567b7d27ef551b2283f0c68a5e86f1cd56abc569da4f31335b", size = 48825365, upload-time = "2026-04-21T10:49:11.714Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/771f9ecb0c65e73fe9dccdd1717901b9594f08c4515d000c7c62df573811/pyarrow-24.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:641f795b361874ac9da5294f8f443dfdbee355cf2bd9e3b8d97aaac2306b9b37", size = 49451819, upload-time = "2026-04-21T10:49:21.474Z" }, + { url = "https://files.pythonhosted.org/packages/48/da/61ae89a88732f5a785646f3ec6125dbb640fa98a540eb2b9889caa561403/pyarrow-24.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8adc8e6ce5fccf5dc707046ae4914fd537def529709cc0d285d37a7f9cd442ca", size = 51909252, upload-time = "2026-04-21T10:49:31.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1a/8dd5cafab7b66573fa91c03d06d213356ad4edd71813aa75e08ce2b3a844/pyarrow-24.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:9b18371ad2f44044b81a8d23bc2d8a9b6a6226dca775e8e16cfee640473d6c5d", size = 27388127, upload-time = "2026-04-21T10:49:37.334Z" }, ] [[package]] @@ -2349,7 +2323,7 @@ wheels = [ [[package]] name = "pydantic" -version = "2.13.0" +version = "2.13.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, @@ -2357,81 +2331,81 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/84/6b/69fd5c7194b21ebde0f8637e2a4ddc766ada29d472bfa6a5ca533d79549a/pydantic-2.13.0.tar.gz", hash = "sha256:b89b575b6e670ebf6e7448c01b41b244f471edd276cd0b6fe02e7e7aca320070", size = 843468, upload-time = "2026-04-13T10:51:35.571Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/e4/40d09941a2cebcb20609b86a559817d5b9291c49dd6f8c87e5feffbe703a/pydantic-2.13.3.tar.gz", hash = "sha256:af09e9d1d09f4e7fe37145c1f577e1d61ceb9a41924bf0094a36506285d0a84d", size = 844068, upload-time = "2026-04-20T14:46:43.632Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/d7/c3a52c61f5b7be648e919005820fbac33028c6149994cd64453f49951c17/pydantic-2.13.0-py3-none-any.whl", hash = "sha256:ab0078b90da5f3e2fd2e71e3d9b457ddcb35d0350854fbda93b451e28d56baaf", size = 471872, upload-time = "2026-04-13T10:51:33.343Z" }, + { url = "https://files.pythonhosted.org/packages/f3/0a/fd7d723f8f8153418fb40cf9c940e82004fce7e987026b08a68a36dd3fe7/pydantic-2.13.3-py3-none-any.whl", hash = "sha256:6db14ac8dfc9a1e57f87ea2c0de670c251240f43cb0c30a5130e9720dc612927", size = 471981, upload-time = "2026-04-20T14:46:41.402Z" }, ] [[package]] name = "pydantic-core" -version = "2.46.0" +version = "2.46.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/6f/0a/9414cddf82eda3976b14048cc0fa8f5b5d1aecb0b22e1dcd2dbfe0e139b1/pydantic_core-2.46.0.tar.gz", hash = "sha256:82d2498c96be47b47e903e1378d1d0f770097ec56ea953322f39936a7cf34977", size = 471441, upload-time = "2026-04-13T09:06:33.813Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/43/9bc38d43a6a48794209e4eb6d61e9c68395f69b7949f66842854b0cd1344/pydantic_core-2.46.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:0027da787ae711f7fbd5a76cb0bb8df526acba6c10c1e44581de1b838db10b7b", size = 2121004, upload-time = "2026-04-13T09:05:17.531Z" }, - { url = "https://files.pythonhosted.org/packages/8c/1d/f43342b7107939b305b5e4efeef7d54e267a5ef51515570a5c1d77726efb/pydantic_core-2.46.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:63e288fc18d7eaeef5f16c73e65c4fd0ad95b25e7e21d8a5da144977b35eb997", size = 1947505, upload-time = "2026-04-13T09:04:48.975Z" }, - { url = "https://files.pythonhosted.org/packages/4a/cd/ccf48cbbcaf0d99ba65969459ebfbf7037600b2cfdcca3062084dd83a008/pydantic_core-2.46.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:080a3bdc6807089a1fe1fbc076519cea287f1a964725731d80b49d8ecffaa217", size = 1973301, upload-time = "2026-04-13T09:05:42.149Z" }, - { url = "https://files.pythonhosted.org/packages/c2/ff/a7bb1e7a762fb1f40ad5ef4e6a92c012864a017b7b1fdfb71cf91faa8b73/pydantic_core-2.46.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c065f1c3e54c3e79d909927a8cb48ccbc17b68733552161eba3e0628c38e5d19", size = 2042208, upload-time = "2026-04-13T09:05:32.591Z" }, - { url = "https://files.pythonhosted.org/packages/ea/64/d3f11c6f6ace71526f3b03646df95eaab3f21edd13e00daae3f20f4e5a09/pydantic_core-2.46.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7e2db58ab46cfe602d4255381cce515585998c3b6699d5b1f909f519bc44a5aa", size = 2229046, upload-time = "2026-04-13T09:04:18.59Z" }, - { url = 
"https://files.pythonhosted.org/packages/d0/64/93db9a63cce71630c58b376d63de498aa93cb341c72cd5f189b5c08f5c28/pydantic_core-2.46.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c660974890ec1e4c65cff93f5670a5f451039f65463e9f9c03ad49746b49fc78", size = 2292138, upload-time = "2026-04-13T09:04:13.816Z" }, - { url = "https://files.pythonhosted.org/packages/e9/96/936fccce22f1f2ae8b2b694de651c2c929847be5f701c927a0bb3b1eb679/pydantic_core-2.46.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3be91482a8db77377c902cca87697388a4fb68addeb3e943ac74f425201a099", size = 2093333, upload-time = "2026-04-13T09:05:15.729Z" }, - { url = "https://files.pythonhosted.org/packages/75/76/c325e7fda69d589e26e772272044fe704c7e525c47d0d32a74f8345ac657/pydantic_core-2.46.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:1c72de82115233112d70d07f26a48cf6996eb86f7e143423ec1a182148455a9d", size = 2138802, upload-time = "2026-04-13T09:03:51.142Z" }, - { url = "https://files.pythonhosted.org/packages/c0/6f/ccaa2ff7d53a017b66841e2d38edd1f38d19ae1a2d0c5efee17f2d432229/pydantic_core-2.46.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7904e58768cd79304b992868d7710bfc85dc6c7ed6163f0f68dbc1dcd72dc231", size = 2181358, upload-time = "2026-04-13T09:04:30.737Z" }, - { url = "https://files.pythonhosted.org/packages/6c/71/0c4b6303e92d63edcb81f5301695cdf70bb351775b4733eea65acdac8384/pydantic_core-2.46.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1af8d88718005f57bb4768f92f4ff16bf31a747d39dfc919b22211b84e72c053", size = 2183985, upload-time = "2026-04-13T09:04:06.792Z" }, - { url = "https://files.pythonhosted.org/packages/71/eb/f6bf255de38a4393aaa10bff224e882b630576bc26ebfb401e42bb965092/pydantic_core-2.46.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:a5b891301b02770a5852253f4b97f8bd192e5710067bc129e20d43db5403ede2", size = 2328559, upload-time = "2026-04-13T09:06:14.143Z" }, - { url = 
"https://files.pythonhosted.org/packages/f2/71/93895a1545f50823a24b21d7761c2bd1b1afea7a6ddc019787caec237361/pydantic_core-2.46.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48b671fe59031fd9754c7384ac05b3ed47a0cccb7d4db0ec56121f0e6a541b90", size = 2367466, upload-time = "2026-04-13T09:05:59.613Z" }, - { url = "https://files.pythonhosted.org/packages/78/39/62331b3e71f41fb13d486621e2aec49900ba56567fb3a0ae5999fded0005/pydantic_core-2.46.0-cp311-cp311-win32.whl", hash = "sha256:0a52b7262b6cc67033823e9549a41bb77580ac299dc964baae4e9c182b2e335c", size = 1981367, upload-time = "2026-04-13T09:07:37.563Z" }, - { url = "https://files.pythonhosted.org/packages/9f/51/caac70958420e2d6115962f550676df59647c11f96a44c2fcb61662fcd16/pydantic_core-2.46.0-cp311-cp311-win_amd64.whl", hash = "sha256:4103fea1beeef6b3a9fed8515f27d4fa30c929a1973655adf8f454dc49ee0662", size = 2065942, upload-time = "2026-04-13T09:06:37.873Z" }, - { url = "https://files.pythonhosted.org/packages/b2/cf/576b2a4eb5500a1a5da485613b1ea8bc0d7279b27e0426801574b284ae65/pydantic_core-2.46.0-cp311-cp311-win_arm64.whl", hash = "sha256:3137cd88938adb8e567c5e938e486adc7e518ffc96b4ae1ec268e6a4275704d7", size = 2052532, upload-time = "2026-04-13T09:06:03.697Z" }, - { url = "https://files.pythonhosted.org/packages/a7/d2/206c72ad47071559142a35f71efc29eb16448a4a5ae9487230ab8e4e292b/pydantic_core-2.46.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:66ccedb02c934622612448489824955838a221b3a35875458970521ef17b2f9c", size = 2117060, upload-time = "2026-04-13T09:04:47.443Z" }, - { url = "https://files.pythonhosted.org/packages/17/2c/7a53b33f91c8b77e696b1a6aa3bed609bf9374bdc0f8dcda681bc7d922b8/pydantic_core-2.46.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a44f27f4d2788ef9876ec47a43739b118c5904d74f418f53398f6ced3bbcacf2", size = 1951802, upload-time = "2026-04-13T09:05:34.591Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/20/90e548c1f6d38800ef11c915881525770ce270d8e5e887563ff046a08674/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26a1032bcce6ca4b4670eb3f7d8195bd0a8b8f255f1307823e217ca3cfa7c27", size = 1976621, upload-time = "2026-04-13T09:04:03.909Z" }, - { url = "https://files.pythonhosted.org/packages/20/3c/9c5810ca70b60c623488cdd80f7e9ee1a0812df81e97098b64788719860f/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1b8d1412f725060527e56675904b17a2d421dddcf861eecf7c75b9dda47921a4", size = 2056721, upload-time = "2026-04-13T09:04:40.992Z" }, - { url = "https://files.pythonhosted.org/packages/1a/a3/d6e5f4cdec84278431c75540f90838c9d0a4dfe9402a8f3902073660ff28/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc3d1569edd859cabaa476cabce9eecd05049a7966af7b4a33b541bfd4ca1104", size = 2239634, upload-time = "2026-04-13T09:03:52.478Z" }, - { url = "https://files.pythonhosted.org/packages/46/42/ef58aacf330d8de6e309d62469aa1f80e945eaf665929b4037ac1bfcebc1/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38108976f2d8afaa8f5067fd1390a8c9f5cc580175407cda636e76bc76e88054", size = 2315739, upload-time = "2026-04-13T09:05:04.971Z" }, - { url = "https://files.pythonhosted.org/packages/8b/86/c63b12fafa2d86a515bfd1840b39c23a49302f02b653161bf9c3a0566c50/pydantic_core-2.46.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5a06d8ed01dad5575056b5187e5959b336793c6047920a3441ee5b03533836", size = 2098169, upload-time = "2026-04-13T09:07:27.151Z" }, - { url = "https://files.pythonhosted.org/packages/76/19/b5b33a2f6be4755b21a20434293c4364be255f4c1a108f125d101d4cc4ee/pydantic_core-2.46.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:04017ace142da9ce27cafd423a480872571b5c7e80382aec22f7d715ca8eb870", size = 2170830, upload-time = 
"2026-04-13T09:04:39.448Z" }, - { url = "https://files.pythonhosted.org/packages/99/ae/7559f99a29b7d440012ddb4da897359304988a881efaca912fd2f655652e/pydantic_core-2.46.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2629ad992ed1b1c012e6067f5ffafd3336fcb9b54569449fabb85621f1444ed3", size = 2203901, upload-time = "2026-04-13T09:04:01.048Z" }, - { url = "https://files.pythonhosted.org/packages/dd/0e/b0ef945a39aeb4ac58da316813e1106b7fbdfbf20ac141c1c27904355ac5/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3068b1e7bd986aebc88f6859f8353e72072538dcf92a7fb9cf511a0f61c5e729", size = 2191789, upload-time = "2026-04-13T09:06:39.915Z" }, - { url = "https://files.pythonhosted.org/packages/90/f4/830484e07188c1236b013995818888ab93bab8fd88aa9689b1d8fd22220d/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:1e366916ff69ff700aa9326601634e688581bc24c5b6b4f8738d809ec7d72611", size = 2344423, upload-time = "2026-04-13T09:05:12.252Z" }, - { url = "https://files.pythonhosted.org/packages/fd/ba/e455c18cbdc333177af754e740be4fe9d1de173d65bbe534daf88da02ac0/pydantic_core-2.46.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:485a23e8f4618a1b8e23ac744180acde283fffe617f96923d25507d5cade62ec", size = 2384037, upload-time = "2026-04-13T09:06:24.503Z" }, - { url = "https://files.pythonhosted.org/packages/78/1f/b35d20d73144a41e78de0ae398e60fdd8bed91667daa1a5a92ab958551ba/pydantic_core-2.46.0-cp312-cp312-win32.whl", hash = "sha256:520940e1b702fe3b33525d0351777f25e9924f1818ca7956447dabacf2d339fd", size = 1967068, upload-time = "2026-04-13T09:05:23.374Z" }, - { url = "https://files.pythonhosted.org/packages/d1/84/4b6252e9606e8295647b848233cc4137ee0a04ebba8f0f9fb2977655b38c/pydantic_core-2.46.0-cp312-cp312-win_amd64.whl", hash = "sha256:90d2048e0339fa365e5a66aefe760ddd3b3d0a45501e088bc5bc7f4ed9ff9571", size = 2071008, upload-time = "2026-04-13T09:05:21.392Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/95/d08eb508d4d5560ccbd226ee5971e5ef9b749aba9b413c0c4ed6e406d4f6/pydantic_core-2.46.0-cp312-cp312-win_arm64.whl", hash = "sha256:a70247649b7dffe36648e8f34be5ce8c5fa0a27ff07b071ea780c20a738c05ce", size = 2036634, upload-time = "2026-04-13T09:05:48.299Z" }, - { url = "https://files.pythonhosted.org/packages/df/05/ab3b0742bad1d51822f1af0c4232208408902bdcfc47601f3b812e09e6c2/pydantic_core-2.46.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:a05900c37264c070c683c650cbca8f83d7cbb549719e645fcd81a24592eac788", size = 2116814, upload-time = "2026-04-13T09:04:12.41Z" }, - { url = "https://files.pythonhosted.org/packages/98/08/30b43d9569d69094a0899a199711c43aa58fce6ce80f6a8f7693673eb995/pydantic_core-2.46.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8de8e482fd4f1e3f36c50c6aac46d044462615d8f12cfafc6bebeaa0909eea22", size = 1951867, upload-time = "2026-04-13T09:04:02.364Z" }, - { url = "https://files.pythonhosted.org/packages/db/a0/bf9a1ba34537c2ed3872a48195291138fdec8fe26c4009776f00d63cf0c8/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c525ecf8a4cdf198327b65030a7d081867ad8e60acb01a7214fff95cf9832d47", size = 1977040, upload-time = "2026-04-13T09:06:16.088Z" }, - { url = "https://files.pythonhosted.org/packages/71/70/0ba03c20e1e118219fc18c5417b008b7e880f0e3fb38560ec4465984d471/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f14581aeb12e61542ce73b9bfef2bca5439d65d9ab3efe1a4d8e346b61838f9b", size = 2055284, upload-time = "2026-04-13T09:05:25.125Z" }, - { url = "https://files.pythonhosted.org/packages/58/cf/1e320acefbde7fb7158a9e5def55e0adf9a4634636098ce28dc6b978e0d3/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c108067f2f7e190d0dbd81247d789ec41f9ea50ccd9265a3a46710796ac60530", size = 2238896, upload-time = "2026-04-13T09:05:01.345Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/f5/ea8ba209756abe9eba891bb0ef3772b4c59a894eb9ad86cd5bd0dd4e3e52/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ac10967e9a7bb1b96697374513f9a1a90a59e2fb41566b5e00ee45392beac59", size = 2314353, upload-time = "2026-04-13T09:06:07.942Z" }, - { url = "https://files.pythonhosted.org/packages/e8/f8/5885350203b72e96438eee7f94de0d8f0442f4627237ca8ef75de34db1cd/pydantic_core-2.46.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7897078fe8a13b73623c0955dfb2b3d2c9acb7177aac25144758c9e5a5265aaa", size = 2098522, upload-time = "2026-04-13T09:04:23.239Z" }, - { url = "https://files.pythonhosted.org/packages/bf/88/5930b0e828e371db5a556dd3189565417ddc3d8316bb001058168aadcf5f/pydantic_core-2.46.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:e69ce405510a419a082a78faed65bb4249cfb51232293cc675645c12f7379bf7", size = 2168757, upload-time = "2026-04-13T09:07:12.46Z" }, - { url = "https://files.pythonhosted.org/packages/da/75/63d563d3035a0548e721c38b5b69fd5626fdd51da0f09ff4467503915b82/pydantic_core-2.46.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd28d13eea0d8cf351dc1fe274b5070cc8e1cca2644381dee5f99de629e77cf3", size = 2202518, upload-time = "2026-04-13T09:05:44.418Z" }, - { url = "https://files.pythonhosted.org/packages/a7/53/1958eacbfddc41aadf5ae86dd85041bf054b675f34a2fa76385935f96070/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:ee1547a6b8243e73dd10f585555e5a263395e55ce6dea618a078570a1e889aef", size = 2190148, upload-time = "2026-04-13T09:06:56.151Z" }, - { url = "https://files.pythonhosted.org/packages/c7/17/098cc6d3595e4623186f2bc6604a6195eb182e126702a90517236391e9ce/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c3dc68dcf62db22a18ddfc3ad4960038f72b75908edc48ae014d7ac8b391d57a", size = 2342925, upload-time = "2026-04-13T09:04:17.286Z" }, - { url = 
"https://files.pythonhosted.org/packages/71/a7/abdb924620b1ac535c690b36ad5b8871f376104090f8842c08625cecf1d3/pydantic_core-2.46.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:004a2081c881abfcc6854a4623da6a09090a0d7c1398a6ae7133ca1256cee70b", size = 2383167, upload-time = "2026-04-13T09:04:52.643Z" }, - { url = "https://files.pythonhosted.org/packages/d7/c9/2ddd10f50e4b7350d2574629a0f53d8d4eb6573f9c19a6b43e6b1487a31d/pydantic_core-2.46.0-cp313-cp313-win32.whl", hash = "sha256:59d24ec8d5eaabad93097525a69d0f00f2667cb353eb6cda578b1cfff203ceef", size = 1965660, upload-time = "2026-04-13T09:06:05.877Z" }, - { url = "https://files.pythonhosted.org/packages/b5/e7/1efc38ed6f2680c032bcefa0e3ebd496a8c77e92dfdb86b07d0f2fc632b1/pydantic_core-2.46.0-cp313-cp313-win_amd64.whl", hash = "sha256:71186dad5ac325c64d68fe0e654e15fd79802e7cc42bc6f0ff822d5ad8b1ab25", size = 2069563, upload-time = "2026-04-13T09:07:14.738Z" }, - { url = "https://files.pythonhosted.org/packages/c3/1e/a325b4989e742bf7e72ed35fa124bc611fd76539c9f8cd2a9a7854473533/pydantic_core-2.46.0-cp313-cp313-win_arm64.whl", hash = "sha256:8e4503f3213f723842c9a3b53955c88a9cfbd0b288cbd1c1ae933aebeec4a1b4", size = 2034966, upload-time = "2026-04-13T09:04:21.629Z" }, - { url = "https://files.pythonhosted.org/packages/2d/f1/6731c2d6caf03efe822101edb4783eb3f212f34b7b005a34f039f67e76e1/pydantic_core-2.46.0-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:ce2e38e27de73ff6a0312a9e3304c398577c418d90bbde97f0ba1ee3ab7ac39f", size = 2121259, upload-time = "2026-04-13T09:07:34.845Z" }, - { url = "https://files.pythonhosted.org/packages/72/fd/ac34d4c92e739e37a040be9e7ea84d116afec5f983a7db856c27135fba77/pydantic_core-2.46.0-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:f0d34ba062396de0be7421e6e69c9a6821bf6dc73a0ab9959a48a5a6a1e24754", size = 1945798, upload-time = "2026-04-13T09:04:24.729Z" }, - { url = 
"https://files.pythonhosted.org/packages/b6/a4/f413a522c4047c46b109be6805a3095d35e5a4882fd5b4fdc0909693dfc0/pydantic_core-2.46.0-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4c0a12147b4026dd68789fb9f22f1a8769e457f9562783c181880848bbd6412", size = 1986062, upload-time = "2026-04-13T09:05:57.177Z" }, - { url = "https://files.pythonhosted.org/packages/91/2e/9760025ea8b0f49903c0ceebdfc2d8ef839da872426f2b03cae9de036a7c/pydantic_core-2.46.0-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a99896d9db56df901ab4a63cd6a36348a569cff8e05f049db35f4016a817a3d9", size = 2145344, upload-time = "2026-04-13T09:03:56.924Z" }, - { url = "https://files.pythonhosted.org/packages/74/0c/106ed5cc50393d90523f09adcc50d05e42e748eb107dc06aea971137f02d/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:bc0e2fefe384152d7da85b5c2fe8ce2bf24752f68a58e3f3ea42e28a29dfdeb2", size = 2104968, upload-time = "2026-04-13T09:06:26.967Z" }, - { url = "https://files.pythonhosted.org/packages/f5/71/b494cef3165e3413ee9bbbb5a9eedc9af0ea7b88d8638beef6c2061b110e/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:a2ab0e785548be1b4362a62c4004f9217598b7ee465f1f420fc2123e2a5b5b02", size = 1940442, upload-time = "2026-04-13T09:06:29.332Z" }, - { url = "https://files.pythonhosted.org/packages/7e/3e/a4d578c8216c443e26a1124f8c1e07c0654264ce5651143d3883d85ff140/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16d45aecb18b8cba1c68eeb17c2bb2d38627ceed04c5b30b882fc9134e01f187", size = 1999672, upload-time = "2026-04-13T09:04:42.798Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c1/9114560468685525a21770138382fd0cb849aaf351ff2c7b97f760d121e0/pydantic_core-2.46.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5078f6c377b002428e984259ac327ef8902aacae6c14b7de740dd4869a491501", size = 2154533, upload-time = "2026-04-13T09:04:50.868Z" }, - { url = "https://files.pythonhosted.org/packages/09/ed/fbd8127e4a19c4fdbb2f4983cf72c7b3534086df640c813c5c0ec4218177/pydantic_core-2.46.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:be3e04979ba4d68183f247202c7f4f483f35df57690b3f875c06340a1579b47c", size = 2119951, upload-time = "2026-04-13T09:04:35.923Z" }, - { url = "https://files.pythonhosted.org/packages/ec/77/df8711ebb45910412f90d75198430fa1120f5618336b71fa00303601c5a4/pydantic_core-2.46.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:b1eae8d7d9b8c2a90b34d3d9014804dca534f7f40180197062634499412ea14e", size = 1953812, upload-time = "2026-04-13T09:05:40.293Z" }, - { url = "https://files.pythonhosted.org/packages/12/fe/14b35df69112bd812d6818a395eeab22eeaa2befc6f85bc54ed648430186/pydantic_core-2.46.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a95a2773680dd4b6b999d4eccdd1b577fd71c31739fb4849f6ada47eabb9c56", size = 2139585, upload-time = "2026-04-13T09:06:46.94Z" }, - { url = "https://files.pythonhosted.org/packages/1f/f0/4fea4c14ebbdeb87e5f6edd2620735fcbd384865f06707fe229c021ce041/pydantic_core-2.46.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25988c3159bb097e06abfdf7b21b1fcaf90f187c74ca6c7bb842c1f72ce74fa8", size = 2179154, upload-time = "2026-04-13T09:04:15.639Z" }, - { url = "https://files.pythonhosted.org/packages/5c/36/6329aa79ba32b73560e6e453164fb29702b115fd3b2b650e796e1dc27862/pydantic_core-2.46.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:747d89bd691854c719a3381ba46b6124ef916ae85364c79e11db9c84995d8d03", size = 2182917, upload-time = "2026-04-13T09:07:24.483Z" }, - { url = "https://files.pythonhosted.org/packages/92/61/edbf7aea71052d410347846a2ea43394f74651bf6822b8fad8703ca00575/pydantic_core-2.46.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:909a7327b83ca93b372f7d48df0ebc7a975a5191eb0b6e024f503f4902c24124", size = 2327716, upload-time = "2026-04-13T09:06:31.681Z" }, - { url = "https://files.pythonhosted.org/packages/a4/11/aa5089b941e85294b1d5d526840b18f0d4464f842d43d8999ce50ef881c1/pydantic_core-2.46.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:2f7e6a3752378a69fadf3f5ee8bc5fa082f623703eec0f4e854b12c548322de0", size = 2365925, upload-time = "2026-04-13T09:05:38.338Z" }, - { url = "https://files.pythonhosted.org/packages/0c/75/e187b0ea247f71f2009d156df88b7d8449c52a38810c9a1bd55dd4871206/pydantic_core-2.46.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:ef47ee0a3ac4c2bb25a083b3acafb171f65be4a0ac1e84edef79dd0016e25eaa", size = 2193856, upload-time = "2026-04-13T09:05:03.114Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/2a/ef/f7abb56c49382a246fd2ce9c799691e3c3e7175ec74b14d99e798bcddb1a/pydantic_core-2.46.3.tar.gz", hash = "sha256:41c178f65b8c29807239d47e6050262eb6bf84eb695e41101e62e38df4a5bc2c", size = 471412, upload-time = "2026-04-20T14:40:56.672Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/22/a2/1ba90a83e85a3f94c796b184f3efde9c72f2830dcda493eea8d59ba78e6d/pydantic_core-2.46.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ab124d49d0459b2373ecf54118a45c28a1e6d4192a533fbc915e70f556feb8e5", size = 2106740, upload-time = "2026-04-20T14:41:20.932Z" }, + { url = "https://files.pythonhosted.org/packages/b6/f6/99ae893c89a0b9d3daec9f95487aa676709aa83f67643b3f0abaf4ab628a/pydantic_core-2.46.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cca67d52a5c7a16aed2b3999e719c4bcf644074eac304a5d3d62dd70ae7d4b2c", size = 1948293, upload-time = "2026-04-20T14:43:42.115Z" }, + { url = "https://files.pythonhosted.org/packages/3e/b8/2e8e636dc9e3f16c2e16bf0849e24be82c5ee82c603c65fc0326666328fc/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5c024e08c0ba23e6fd68c771a521e9d6a792f2ebb0fa734296b36394dc30390e", size = 1973222, upload-time = "2026-04-20T14:41:57.841Z" }, + { url = "https://files.pythonhosted.org/packages/34/36/0e730beec4d83c5306f417afbd82ff237d9a21e83c5edf675f31ed84c1fe/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6645ce7eec4928e29a1e3b3d5c946621d105d3e79f0c9cddf07c2a9770949287", size = 2053852, upload-time = "2026-04-20T14:40:43.077Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f0/3071131f47e39136a17814576e0fada9168569f7f8c0e6ac4d1ede6a4958/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a712c7118e6c5ea96562f7b488435172abb94a3c53c22c9efc1412264a45cbbe", size = 2221134, upload-time = "2026-04-20T14:43:03.349Z" }, + { url = "https://files.pythonhosted.org/packages/2f/a9/a2dc023eec5aa4b02a467874bad32e2446957d2adcab14e107eab502e978/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69a868ef3ff206343579021c40faf3b1edc64b1cc508ff243a28b0a514ccb050", size = 2279785, upload-time = "2026-04-20T14:41:19.285Z" }, + { url = "https://files.pythonhosted.org/packages/0a/44/93f489d16fb63fbd41c670441536541f6e8cfa1e5a69f40bc9c5d30d8c90/pydantic_core-2.46.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc7e8c32db809aa0f6ea1d6869ebc8518a65d5150fdfad8bcae6a49ae32a22e2", size = 2089404, upload-time = "2026-04-20T14:43:10.108Z" }, + { url = "https://files.pythonhosted.org/packages/2a/78/8692e3aa72b2d004f7a5d937f1dfdc8552ba26caf0bec75f342c40f00dec/pydantic_core-2.46.3-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:3481bd1341dc85779ee506bc8e1196a277ace359d89d28588a9468c3ecbe63fa", size = 2114898, upload-time = "2026-04-20T14:44:51.475Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/62/e83133f2e7832532060175cebf1f13748f4c7e7e7165cdd1f611f174494b/pydantic_core-2.46.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8690eba565c6d68ffd3a8655525cbdd5246510b44a637ee2c6c03a7ebfe64d3c", size = 2157856, upload-time = "2026-04-20T14:43:46.64Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ec/6a500e3ad7718ee50583fae79c8651f5d37e3abce1fa9ae177ae65842c53/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4de88889d7e88d50d40ee5b39d5dac0bcaef9ba91f7e536ac064e6b2834ecccf", size = 2180168, upload-time = "2026-04-20T14:42:00.302Z" }, + { url = "https://files.pythonhosted.org/packages/d8/53/8267811054b1aa7fc1dc7ded93812372ef79a839f5e23558136a6afbfde1/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:e480080975c1ef7f780b8f99ed72337e7cc5efea2e518a20a692e8e7b278eb8b", size = 2322885, upload-time = "2026-04-20T14:41:05.253Z" }, + { url = "https://files.pythonhosted.org/packages/c8/c1/1c0acdb3aa0856ddc4ecc55214578f896f2de16f400cf51627eb3c26c1c4/pydantic_core-2.46.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:de3a5c376f8cd94da9a1b8fd3dd1c16c7a7b216ed31dc8ce9fd7a22bf13b836e", size = 2360328, upload-time = "2026-04-20T14:41:43.991Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d0/ef39cd0f4a926814f360e71c1adeab48ad214d9727e4deb48eedfb5bce1a/pydantic_core-2.46.3-cp311-cp311-win32.whl", hash = "sha256:fc331a5314ffddd5385b9ee9d0d2fee0b13c27e0e02dad71b1ae5d6561f51eeb", size = 1979464, upload-time = "2026-04-20T14:43:12.215Z" }, + { url = "https://files.pythonhosted.org/packages/18/9c/f41951b0d858e343f1cf09398b2a7b3014013799744f2c4a8ad6a3eec4f2/pydantic_core-2.46.3-cp311-cp311-win_amd64.whl", hash = "sha256:b5b9c6cf08a8a5e502698f5e153056d12c34b8fb30317e0c5fd06f45162a6346", size = 2070837, upload-time = "2026-04-20T14:41:47.707Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/1e/264a17cd582f6ed50950d4d03dd5fefd84e570e238afe1cb3e25cf238769/pydantic_core-2.46.3-cp311-cp311-win_arm64.whl", hash = "sha256:5dfd51cf457482f04ec49491811a2b8fd5b843b64b11eecd2d7a1ee596ea78a6", size = 2053647, upload-time = "2026-04-20T14:42:27.535Z" }, + { url = "https://files.pythonhosted.org/packages/4b/cb/5b47425556ecc1f3fe18ed2a0083188aa46e1dd812b06e406475b3a5d536/pydantic_core-2.46.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:b11b59b3eee90a80a36701ddb4576d9ae31f93f05cb9e277ceaa09e6bf074a67", size = 2101946, upload-time = "2026-04-20T14:40:52.581Z" }, + { url = "https://files.pythonhosted.org/packages/a1/4f/2fb62c2267cae99b815bbf4a7b9283812c88ca3153ef29f7707200f1d4e5/pydantic_core-2.46.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:af8653713055ea18a3abc1537fe2ebc42f5b0bbb768d1eb79fd74eb47c0ac089", size = 1951612, upload-time = "2026-04-20T14:42:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/50/6e/b7348fd30d6556d132cddd5bd79f37f96f2601fe0608afac4f5fb01ec0b3/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75a519dab6d63c514f3a81053e5266c549679e4aa88f6ec57f2b7b854aceb1b0", size = 1977027, upload-time = "2026-04-20T14:42:02.001Z" }, + { url = "https://files.pythonhosted.org/packages/82/11/31d60ee2b45540d3fb0b29302a393dbc01cd771c473f5b5147bcd353e593/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6cd87cb1575b1ad05ba98894c5b5c96411ef678fa2f6ed2576607095b8d9789", size = 2063008, upload-time = "2026-04-20T14:44:17.952Z" }, + { url = "https://files.pythonhosted.org/packages/8a/db/3a9d1957181b59258f44a2300ab0f0be9d1e12d662a4f57bb31250455c52/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f80a55484b8d843c8ada81ebf70a682f3f00a3d40e378c06cf17ecb44d280d7d", size = 2233082, upload-time = "2026-04-20T14:40:57.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/e1/3277c38792aeb5cfb18c2f0c5785a221d9ff4e149abbe1184d53d5f72273/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3861f1731b90c50a3266316b9044f5c9b405eecb8e299b0a7120596334e4fe9c", size = 2304615, upload-time = "2026-04-20T14:42:12.584Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d5/e3d9717c9eba10855325650afd2a9cba8e607321697f18953af9d562da2f/pydantic_core-2.46.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb528e295ed31570ac3dcc9bfdd6e0150bc11ce6168ac87a8082055cf1a67395", size = 2094380, upload-time = "2026-04-20T14:43:05.522Z" }, + { url = "https://files.pythonhosted.org/packages/a1/20/abac35dedcbfd66c6f0b03e4e3564511771d6c9b7ede10a362d03e110d9b/pydantic_core-2.46.3-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:367508faa4973b992b271ba1494acaab36eb7e8739d1e47be5035fb1ea225396", size = 2135429, upload-time = "2026-04-20T14:41:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/41bfd1df69afad71b5cf0535055bccc73022715ad362edbc124bc1e021d7/pydantic_core-2.46.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ad3c826fe523e4becf4fe39baa44286cff85ef137c729a2c5e269afbfd0905d", size = 2174582, upload-time = "2026-04-20T14:41:45.96Z" }, + { url = "https://files.pythonhosted.org/packages/79/65/38d86ea056b29b2b10734eb23329b7a7672ca604df4f2b6e9c02d4ee22fe/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ec638c5d194ef8af27db69f16c954a09797c0dc25015ad6123eb2c73a4d271ca", size = 2187533, upload-time = "2026-04-20T14:40:55.367Z" }, + { url = "https://files.pythonhosted.org/packages/b6/55/a1129141678a2026badc539ad1dee0a71d06f54c2f06a4bd68c030ac781b/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:28ed528c45446062ee66edb1d33df5d88828ae167de76e773a3c7f64bd14e976", size = 2332985, upload-time = "2026-04-20T14:44:13.05Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/60/cb26f4077719f709e54819f4e8e1d43f4091f94e285eb6bd21e1190a7b7c/pydantic_core-2.46.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aed19d0c783886d5bd86d80ae5030006b45e28464218747dcf83dabfdd092c7b", size = 2373670, upload-time = "2026-04-20T14:41:53.421Z" }, + { url = "https://files.pythonhosted.org/packages/6b/7e/c3f21882bdf1d8d086876f81b5e296206c69c6082551d776895de7801fa0/pydantic_core-2.46.3-cp312-cp312-win32.whl", hash = "sha256:06d5d8820cbbdb4147578c1fe7ffcd5b83f34508cb9f9ab76e807be7db6ff0a4", size = 1966722, upload-time = "2026-04-20T14:44:30.588Z" }, + { url = "https://files.pythonhosted.org/packages/57/be/6b5e757b859013ebfbd7adba02f23b428f37c86dcbf78b5bb0b4ffd36e99/pydantic_core-2.46.3-cp312-cp312-win_amd64.whl", hash = "sha256:c3212fda0ee959c1dd04c60b601ec31097aaa893573a3a1abd0a47bcac2968c1", size = 2072970, upload-time = "2026-04-20T14:42:54.248Z" }, + { url = "https://files.pythonhosted.org/packages/bf/f8/a989b21cc75e9a32d24192ef700eea606521221a89faa40c919ce884f2b1/pydantic_core-2.46.3-cp312-cp312-win_arm64.whl", hash = "sha256:f1f8338dd7a7f31761f1f1a3c47503a9a3b34eea3c8b01fa6ee96408affb5e72", size = 2035963, upload-time = "2026-04-20T14:44:20.4Z" }, + { url = "https://files.pythonhosted.org/packages/9b/3c/9b5e8eb9821936d065439c3b0fb1490ffa64163bfe7e1595985a47896073/pydantic_core-2.46.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:12bc98de041458b80c86c56b24df1d23832f3e166cbaff011f25d187f5c62c37", size = 2102109, upload-time = "2026-04-20T14:41:24.219Z" }, + { url = "https://files.pythonhosted.org/packages/91/97/1c41d1f5a19f241d8069f1e249853bcce378cdb76eec8ab636d7bc426280/pydantic_core-2.46.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:85348b8f89d2c3508b65b16c3c33a4da22b8215138d8b996912bb1532868885f", size = 1951820, upload-time = "2026-04-20T14:42:14.236Z" }, + { url = 
"https://files.pythonhosted.org/packages/30/b4/d03a7ae14571bc2b6b3c7b122441154720619afe9a336fa3a95434df5e2f/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1105677a6df914b1fb71a81b96c8cce7726857e1717d86001f29be06a25ee6f8", size = 1977785, upload-time = "2026-04-20T14:42:31.648Z" }, + { url = "https://files.pythonhosted.org/packages/ae/0c/4086f808834b59e3c8f1aa26df8f4b6d998cdcf354a143d18ef41529d1fe/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87082cd65669a33adeba5470769e9704c7cf026cc30afb9cc77fd865578ebaad", size = 2062761, upload-time = "2026-04-20T14:40:37.093Z" }, + { url = "https://files.pythonhosted.org/packages/fa/71/a649be5a5064c2df0db06e0a512c2281134ed2fcc981f52a657936a7527c/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60e5f66e12c4f5212d08522963380eaaeac5ebd795826cfd19b2dfb0c7a52b9c", size = 2232989, upload-time = "2026-04-20T14:42:59.254Z" }, + { url = "https://files.pythonhosted.org/packages/a2/84/7756e75763e810b3a710f4724441d1ecc5883b94aacb07ca71c5fb5cfb69/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6cdf19bf84128d5e7c37e8a73a0c5c10d51103a650ac585d42dd6ae233f2b7f", size = 2303975, upload-time = "2026-04-20T14:41:32.287Z" }, + { url = "https://files.pythonhosted.org/packages/6c/35/68a762e0c1e31f35fa0dac733cbd9f5b118042853698de9509c8e5bf128b/pydantic_core-2.46.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:031bb17f4885a43773c8c763089499f242aee2ea85cf17154168775dccdecf35", size = 2095325, upload-time = "2026-04-20T14:42:47.685Z" }, + { url = "https://files.pythonhosted.org/packages/77/bf/1bf8c9a8e91836c926eae5e3e51dce009bf495a60ca56060689d3df3f340/pydantic_core-2.46.3-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:bcf2a8b2982a6673693eae7348ef3d8cf3979c1d63b54fca7c397a635cc68687", size = 2133368, upload-time = 
"2026-04-20T14:41:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/e5/50/87d818d6bab915984995157ceb2380f5aac4e563dddbed6b56f0ed057aba/pydantic_core-2.46.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28e8cf2f52d72ced402a137145923a762cbb5081e48b34312f7a0c8f55928ec3", size = 2173908, upload-time = "2026-04-20T14:42:52.044Z" }, + { url = "https://files.pythonhosted.org/packages/91/88/a311fb306d0bd6185db41fa14ae888fb81d0baf648a761ae760d30819d33/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:17eaface65d9fc5abb940003020309c1bf7a211f5f608d7870297c367e6f9022", size = 2186422, upload-time = "2026-04-20T14:43:29.55Z" }, + { url = "https://files.pythonhosted.org/packages/8f/79/28fd0d81508525ab2054fef7c77a638c8b5b0afcbbaeee493cf7c3fef7e1/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:93fd339f23408a07e98950a89644f92c54d8729719a40b30c0a30bb9ebc55d23", size = 2332709, upload-time = "2026-04-20T14:42:16.134Z" }, + { url = "https://files.pythonhosted.org/packages/b3/21/795bf5fe5c0f379308b8ef19c50dedab2e7711dbc8d0c2acf08f1c7daa05/pydantic_core-2.46.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:23cbdb3aaa74dfe0837975dbf69b469753bbde8eacace524519ffdb6b6e89eb7", size = 2372428, upload-time = "2026-04-20T14:41:10.974Z" }, + { url = "https://files.pythonhosted.org/packages/45/b3/ed14c659cbe7605e3ef063077680a64680aec81eb1a04763a05190d49b7f/pydantic_core-2.46.3-cp313-cp313-win32.whl", hash = "sha256:610eda2e3838f401105e6326ca304f5da1e15393ae25dacae5c5c63f2c275b13", size = 1965601, upload-time = "2026-04-20T14:41:42.128Z" }, + { url = "https://files.pythonhosted.org/packages/ef/bb/adb70d9a762ddd002d723fbf1bd492244d37da41e3af7b74ad212609027e/pydantic_core-2.46.3-cp313-cp313-win_amd64.whl", hash = "sha256:68cc7866ed863db34351294187f9b729964c371ba33e31c26f478471c52e1ed0", size = 2071517, upload-time = "2026-04-20T14:43:36.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/eb/66faefabebfe68bd7788339c9c9127231e680b11906368c67ce112fdb47f/pydantic_core-2.46.3-cp313-cp313-win_arm64.whl", hash = "sha256:f64b5537ac62b231572879cd08ec05600308636a5d63bcbdb15063a466977bec", size = 2035802, upload-time = "2026-04-20T14:43:38.507Z" }, + { url = "https://files.pythonhosted.org/packages/66/7f/03dbad45cd3aa9083fbc93c210ae8b005af67e4136a14186950a747c6874/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:9715525891ed524a0a1eb6d053c74d4d4ad5017677fb00af0b7c2644a31bae46", size = 2105683, upload-time = "2026-04-20T14:42:19.779Z" }, + { url = "https://files.pythonhosted.org/packages/26/22/4dc186ac8ea6b257e9855031f51b62a9637beac4d68ac06bee02f046f836/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:9d2f400712a99a013aff420ef1eb9be077f8189a36c1e3ef87660b4e1088a874", size = 1940052, upload-time = "2026-04-20T14:43:59.274Z" }, + { url = "https://files.pythonhosted.org/packages/0d/ca/d376391a5aff1f2e8188960d7873543608130a870961c2b6b5236627c116/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd2aab0e2e9dc2daf36bd2686c982535d5e7b1d930a1344a7bb6e82baab42a76", size = 1988172, upload-time = "2026-04-20T14:41:17.469Z" }, + { url = "https://files.pythonhosted.org/packages/0e/6b/523b9f85c23788755d6ab949329de692a2e3a584bc6beb67fef5e035aa9d/pydantic_core-2.46.3-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e9d76736da5f362fabfeea6a69b13b7f2be405c6d6966f06b2f6bfff7e64531", size = 2128596, upload-time = "2026-04-20T14:40:41.707Z" }, + { url = "https://files.pythonhosted.org/packages/34/42/f426db557e8ab2791bc7562052299944a118655496fbff99914e564c0a94/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b12dd51f1187c2eb489af8e20f880362db98e954b54ab792fa5d92e8bcc6b803", size = 
2091877, upload-time = "2026-04-20T14:43:27.091Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4f/86a832a9d14df58e663bfdf4627dc00d3317c2bd583c4fb23390b0f04b8e/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f00a0961b125f1a47af7bcc17f00782e12f4cd056f83416006b30111d941dfa3", size = 1932428, upload-time = "2026-04-20T14:40:45.781Z" }, + { url = "https://files.pythonhosted.org/packages/11/1a/fe857968954d93fb78e0d4b6df5c988c74c4aaa67181c60be7cfe327c0ca/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57697d7c056aca4bbb680200f96563e841a6386ac1129370a0102592f4dddff5", size = 1997550, upload-time = "2026-04-20T14:44:02.425Z" }, + { url = "https://files.pythonhosted.org/packages/17/eb/9d89ad2d9b0ba8cd65393d434471621b98912abb10fbe1df08e480ba57b5/pydantic_core-2.46.3-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd35aa21299def8db7ef4fe5c4ff862941a9a158ca7b63d61e66fe67d30416b4", size = 2137657, upload-time = "2026-04-20T14:42:45.149Z" }, + { url = "https://files.pythonhosted.org/packages/1f/da/99d40830684f81dec901cac521b5b91c095394cc1084b9433393cde1c2df/pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:13afdd885f3d71280cf286b13b310ee0f7ccfefd1dbbb661514a474b726e2f25", size = 2107973, upload-time = "2026-04-20T14:42:06.175Z" }, + { url = "https://files.pythonhosted.org/packages/99/a5/87024121818d75bbb2a98ddbaf638e40e7a18b5e0f5492c9ca4b1b316107/pydantic_core-2.46.3-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f91c0aff3e3ee0928edd1232c57f643a7a003e6edf1860bc3afcdc749cb513f3", size = 1947191, upload-time = "2026-04-20T14:43:14.319Z" }, + { url = "https://files.pythonhosted.org/packages/60/62/0c1acfe10945b83a6a59d19fbaa92f48825381509e5701b855c08f13db76/pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6529d1d128321a58d30afcc97b49e98836542f68dd41b33c2e972bb9e5290536", size = 2123791, upload-time = "2026-04-20T14:43:22.766Z" }, + { url = "https://files.pythonhosted.org/packages/75/3e/3b2393b4c8f44285561dc30b00cf307a56a2eff7c483a824db3b8221ca51/pydantic_core-2.46.3-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:975c267cff4f7e7272eacbe50f6cc03ca9a3da4c4fbd66fffd89c94c1e311aa1", size = 2153197, upload-time = "2026-04-20T14:44:27.932Z" }, + { url = "https://files.pythonhosted.org/packages/ba/75/5af02fb35505051eee727c061f2881c555ab4f8ddb2d42da715a42c9731b/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2b8e4f2bbdf71415c544b4b1138b8060db7b6611bc927e8064c769f64bed651c", size = 2181073, upload-time = "2026-04-20T14:43:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/10/92/7e0e1bd9ca3c68305db037560ca2876f89b2647deb2f8b6319005de37505/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e61ea8e9fff9606d09178f577ff8ccdd7206ff73d6552bcec18e1033c4254b85", size = 2315886, upload-time = "2026-04-20T14:44:04.826Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d8/101655f27eaf3e44558ead736b2795d12500598beed4683f279396fa186e/pydantic_core-2.46.3-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b504bda01bafc69b6d3c7a0c7f039dcf60f47fab70e06fe23f57b5c75bdc82b8", size = 2360528, upload-time = "2026-04-20T14:40:47.431Z" }, + { url = "https://files.pythonhosted.org/packages/07/0f/1c34a74c8d07136f0d729ffe5e1fdab04fbdaa7684f61a92f92511a84a15/pydantic_core-2.46.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:b00b76f7142fc60c762ce579bd29c8fa44aaa56592dd3c54fab3928d0d4ca6ff", size = 2184144, upload-time = "2026-04-20T14:42:57Z" }, ] [[package]] @@ -2807,27 +2781,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.10" -source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/e7/d9/aa3f7d59a10ef6b14fe3431706f854dbf03c5976be614a9796d36326810c/ruff-0.15.10.tar.gz", hash = "sha256:d1f86e67ebfdef88e00faefa1552b5e510e1d35f3be7d423dc7e84e63788c94e", size = 4631728, upload-time = "2026-04-09T14:06:09.884Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/eb/00/a1c2fdc9939b2c03691edbda290afcd297f1f389196172826b03d6b6a595/ruff-0.15.10-py3-none-linux_armv6l.whl", hash = "sha256:0744e31482f8f7d0d10a11fcbf897af272fefdfcb10f5af907b18c2813ff4d5f", size = 10563362, upload-time = "2026-04-09T14:06:21.189Z" }, - { url = "https://files.pythonhosted.org/packages/5c/15/006990029aea0bebe9d33c73c3e28c80c391ebdba408d1b08496f00d422d/ruff-0.15.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b1e7c16ea0ff5a53b7c2df52d947e685973049be1cdfe2b59a9c43601897b22e", size = 10951122, upload-time = "2026-04-09T14:06:02.236Z" }, - { url = "https://files.pythonhosted.org/packages/f2/c0/4ac978fe874d0618c7da647862afe697b281c2806f13ce904ad652fa87e4/ruff-0.15.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93cc06a19e5155b4441dd72808fdf84290d84ad8a39ca3b0f994363ade4cebb1", size = 10314005, upload-time = "2026-04-09T14:06:00.026Z" }, - { url = "https://files.pythonhosted.org/packages/da/73/c209138a5c98c0d321266372fc4e33ad43d506d7e5dd817dd89b60a8548f/ruff-0.15.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83e1dd04312997c99ea6965df66a14fb4f03ba978564574ffc68b0d61fd3989e", size = 10643450, upload-time = "2026-04-09T14:05:42.137Z" }, - { url = "https://files.pythonhosted.org/packages/ec/76/0deec355d8ec10709653635b1f90856735302cb8e149acfdf6f82a5feb70/ruff-0.15.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8154d43684e4333360fedd11aaa40b1b08a4e37d8ffa9d95fee6fa5b37b6fab1", size = 10379597, upload-time = "2026-04-09T14:05:49.984Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/be/86bba8fc8798c081e28a4b3bb6d143ccad3fd5f6f024f02002b8f08a9fa3/ruff-0.15.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ab88715f3a6deb6bde6c227f3a123410bec7b855c3ae331b4c006189e895cef", size = 11146645, upload-time = "2026-04-09T14:06:12.246Z" }, - { url = "https://files.pythonhosted.org/packages/a8/89/140025e65911b281c57be1d385ba1d932c2366ca88ae6663685aed8d4881/ruff-0.15.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a768ff5969b4f44c349d48edf4ab4f91eddb27fd9d77799598e130fb628aa158", size = 12030289, upload-time = "2026-04-09T14:06:04.776Z" }, - { url = "https://files.pythonhosted.org/packages/88/de/ddacca9545a5e01332567db01d44bd8cf725f2db3b3d61a80550b48308ea/ruff-0.15.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ee3ef42dab7078bda5ff6a1bcba8539e9857deb447132ad5566a038674540d0", size = 11496266, upload-time = "2026-04-09T14:05:55.485Z" }, - { url = "https://files.pythonhosted.org/packages/bc/bb/7ddb00a83760ff4a83c4e2fc231fd63937cc7317c10c82f583302e0f6586/ruff-0.15.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51cb8cc943e891ba99989dd92d61e29b1d231e14811db9be6440ecf25d5c1609", size = 11256418, upload-time = "2026-04-09T14:05:57.69Z" }, - { url = "https://files.pythonhosted.org/packages/dc/8d/55de0d35aacf6cd50b6ee91ee0f291672080021896543776f4170fc5c454/ruff-0.15.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:e59c9bdc056a320fb9ea1700a8d591718b8faf78af065484e801258d3a76bc3f", size = 11288416, upload-time = "2026-04-09T14:05:44.695Z" }, - { url = "https://files.pythonhosted.org/packages/68/cf/9438b1a27426ec46a80e0a718093c7f958ef72f43eb3111862949ead3cc1/ruff-0.15.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:136c00ca2f47b0018b073f28cb5c1506642a830ea941a60354b0e8bc8076b151", size = 10621053, upload-time = "2026-04-09T14:05:52.782Z" }, - { url = 
"https://files.pythonhosted.org/packages/4c/50/e29be6e2c135e9cd4cb15fbade49d6a2717e009dff3766dd080fcb82e251/ruff-0.15.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8b80a2f3c9c8a950d6237f2ca12b206bccff626139be9fa005f14feb881a1ae8", size = 10378302, upload-time = "2026-04-09T14:06:14.361Z" }, - { url = "https://files.pythonhosted.org/packages/18/2f/e0b36a6f99c51bb89f3a30239bc7bf97e87a37ae80aa2d6542d6e5150364/ruff-0.15.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e3e53c588164dc025b671c9df2462429d60357ea91af7e92e9d56c565a9f1b07", size = 10850074, upload-time = "2026-04-09T14:06:16.581Z" }, - { url = "https://files.pythonhosted.org/packages/11/08/874da392558ce087a0f9b709dc6ec0d60cbc694c1c772dab8d5f31efe8cb/ruff-0.15.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b0c52744cf9f143a393e284125d2576140b68264a93c6716464e129a3e9adb48", size = 11358051, upload-time = "2026-04-09T14:06:18.948Z" }, - { url = "https://files.pythonhosted.org/packages/e4/46/602938f030adfa043e67112b73821024dc79f3ab4df5474c25fa4c1d2d14/ruff-0.15.10-py3-none-win32.whl", hash = "sha256:d4272e87e801e9a27a2e8df7b21011c909d9ddd82f4f3281d269b6ba19789ca5", size = 10588964, upload-time = "2026-04-09T14:06:07.14Z" }, - { url = "https://files.pythonhosted.org/packages/25/b6/261225b875d7a13b33a6d02508c39c28450b2041bb01d0f7f1a83d569512/ruff-0.15.10-py3-none-win_amd64.whl", hash = "sha256:28cb32d53203242d403d819fd6983152489b12e4a3ae44993543d6fe62ab42ed", size = 11745044, upload-time = "2026-04-09T14:05:39.473Z" }, - { url = "https://files.pythonhosted.org/packages/58/ed/dea90a65b7d9e69888890fb14c90d7f51bf0c1e82ad800aeb0160e4bacfd/ruff-0.15.10-py3-none-win_arm64.whl", hash = "sha256:601d1610a9e1f1c2165a4f561eeaa2e2ea1e97f3287c5aa258d3dab8b57c6188", size = 11035607, upload-time = "2026-04-09T14:05:47.593Z" }, +version = "0.15.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/99/43/3291f1cc9106f4c63bdce7a8d0df5047fe8422a75b091c16b5e9355e0b11/ruff-0.15.12.tar.gz", hash = "sha256:ecea26adb26b4232c0c2ca19ccbc0083a68344180bba2a600605538ce51a40a6", size = 4643852, upload-time = "2026-04-24T18:17:14.305Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/6e/e78ffb61d4686f3d96ba3df2c801161843746dcbcbb17a1e927d4829312b/ruff-0.15.12-py3-none-linux_armv6l.whl", hash = "sha256:f86f176e188e94d6bdbc09f09bfd9dc729059ad93d0e7390b5a73efe19f8861c", size = 10640713, upload-time = "2026-04-24T18:17:22.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/08/a317bc231fb9e7b93e4ef3089501e51922ff88d6936ce5cf870c4fe55419/ruff-0.15.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3bcd123364c3770b8e1b7baaf343cc99a35f197c5c6e8af79015c666c423a6c", size = 11069267, upload-time = "2026-04-24T18:17:30.105Z" }, + { url = "https://files.pythonhosted.org/packages/aa/a4/f828e9718d3dce1f5f11c39c4f65afd32783c8b2aebb2e3d259e492c47bd/ruff-0.15.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fe87510d000220aa1ed530d4448a7c696a0cae1213e5ec30e5874287b66557b5", size = 10397182, upload-time = "2026-04-24T18:17:07.177Z" }, + { url = "https://files.pythonhosted.org/packages/71/e0/3310fc6d1b5e1fdea22bf3b1b807c7e187b581021b0d7d4514cccdb5fb71/ruff-0.15.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84a1630093121375a3e2a95b4a6dc7b59e2b4ee76216e32d81aae550a832d002", size = 10758012, upload-time = "2026-04-24T18:16:55.759Z" }, + { url = "https://files.pythonhosted.org/packages/11/c1/a606911aee04c324ddaa883ae418f3569792fd3c4a10c50e0dd0a2311e1e/ruff-0.15.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb129f40f114f089ebe0ca56c0d251cf2061b17651d464bb6478dc01e69f11f5", size = 10447479, upload-time = "2026-04-24T18:16:51.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/68/4201e8444f0894f21ab4aeeaee68aa4f10b51613514a20d80bd628d57e88/ruff-0.15.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0c862b172d695db7598426b8af465e7e9ac00a3ea2a3630ee67eb82e366aaa6", size = 11234040, upload-time = "2026-04-24T18:17:16.529Z" }, + { url = "https://files.pythonhosted.org/packages/34/ff/8a6d6cf4ccc23fd67060874e832c18919d1557a0611ebef03fdb01fff11e/ruff-0.15.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2849ea9f3484c3aca43a82f484210370319e7170df4dfe4843395ddf6c57bc33", size = 12087377, upload-time = "2026-04-24T18:17:04.944Z" }, + { url = "https://files.pythonhosted.org/packages/85/f6/c669cf73f5152f623d34e69866a46d5e6185816b19fcd5b6dd8a2d299922/ruff-0.15.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e77c7e51c07fe396826d5969a5b846d9cd4c402535835fb6e21ce8b28fef847", size = 11367784, upload-time = "2026-04-24T18:17:25.409Z" }, + { url = "https://files.pythonhosted.org/packages/e8/39/c61d193b8a1daaa8977f7dea9e8d8ba866e02ea7b65d32f6861693aa4c12/ruff-0.15.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83b2f4f2f3b1026b5fb449b467d9264bf22067b600f7b6f41fc5958909f449d0", size = 11344088, upload-time = "2026-04-24T18:17:12.258Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8d/49afab3645e31e12c590acb6d3b5b69d7aab5b81926dbaf7461f9441f37a/ruff-0.15.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9ba3b8f1afd7e2e43d8943e55f249e13f9682fde09711644a6e7290eb4f3e339", size = 11271770, upload-time = "2026-04-24T18:17:02.457Z" }, + { url = "https://files.pythonhosted.org/packages/46/06/33f41fe94403e2b755481cdfb9b7ef3e4e0ed031c4581124658d935d52b4/ruff-0.15.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:e852ba9fdc890655e1d78f2df1499efbe0e54126bd405362154a75e2bde159c5", size = 10719355, upload-time = "2026-04-24T18:17:27.648Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/59/18aa4e014debbf559670e4048e39260a85c7fcee84acfd761ac01e7b8d35/ruff-0.15.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:dd8aed930da53780d22fc70bdf84452c843cf64f8cb4eb38984319c24c5cd5fd", size = 10462758, upload-time = "2026-04-24T18:17:32.347Z" }, + { url = "https://files.pythonhosted.org/packages/25/e7/cc9f16fd0f3b5fddcbd7ec3d6ae30c8f3fde1047f32a4093a98d633c6570/ruff-0.15.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:01da3988d225628b709493d7dc67c3b9b12c0210016b08690ef9bd27970b262b", size = 10953498, upload-time = "2026-04-24T18:17:20.674Z" }, + { url = "https://files.pythonhosted.org/packages/72/7a/a9ba7f98c7a575978698f4230c5e8cc54bbc761af34f560818f933dafa0c/ruff-0.15.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9cae0f92bd5700d1213188b31cd3bdd2b315361296d10b96b8e2337d3d11f53e", size = 11447765, upload-time = "2026-04-24T18:17:09.755Z" }, + { url = "https://files.pythonhosted.org/packages/ea/f9/0ae446942c846b8266059ad8a30702a35afae55f5cdc54c5adf8d7afdc27/ruff-0.15.12-py3-none-win32.whl", hash = "sha256:d0185894e038d7043ba8fd6aee7499ece6462dc0ea9f1e260c7451807c714c20", size = 10657277, upload-time = "2026-04-24T18:17:18.591Z" }, + { url = "https://files.pythonhosted.org/packages/33/f1/9614e03e1cdcbf9437570b5400ced8a720b5db22b28d8e0f1bda429f660d/ruff-0.15.12-py3-none-win_amd64.whl", hash = "sha256:c87a162d61ab3adca47c03f7f717c68672edec7d1b5499e652331780fe74950d", size = 11837758, upload-time = "2026-04-24T18:17:00.113Z" }, + { url = "https://files.pythonhosted.org/packages/c0/98/6beb4b351e472e5f4c4613f7c35a5290b8be2497e183825310c4c3a3984b/ruff-0.15.12-py3-none-win_arm64.whl", hash = "sha256:a538f7a82d061cee7be55542aca1d86d1393d55d81d4fcc314370f4340930d4f", size = 11120821, upload-time = "2026-04-24T18:16:57.979Z" }, ] [[package]] @@ -3331,7 +3305,7 @@ wheels = [ [[package]] name = "typer" -version = "0.24.1" +version = "0.25.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name 
= "annotated-doc" }, @@ -3339,9 +3313,9 @@ dependencies = [ { name = "rich" }, { name = "shellingham" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = "sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/27/ede8cec7596e0041ba7e7b80b47d132562f56ff454313a16f6084e555c9f/typer-0.25.0.tar.gz", hash = "sha256:123eaf9f19bb40fd268310e12a542c0c6b4fab9c98d9d23342a01ff95e3ce930", size = 120150, upload-time = "2026-04-26T08:46:14.767Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" }, + { url = "https://files.pythonhosted.org/packages/9a/72/193d4e586ec5a4db834a36bbeb47641a62f951f114ffd0fe5b1b46e8d56f/typer-0.25.0-py3-none-any.whl", hash = "sha256:ac01b48823d3db9a83c9e164338057eadbb1c9957a2a6b4eeb486669c560b5dc", size = 55993, upload-time = "2026-04-26T08:46:15.889Z" }, ] [[package]] @@ -3367,11 +3341,11 @@ wheels = [ [[package]] name = "tzdata" -version = "2026.1" +version = "2026.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/19/f5/cd531b2d15a671a40c0f66cf06bc3570a12cd56eef98960068ebbad1bf5a/tzdata-2026.1.tar.gz", hash = "sha256:67658a1903c75917309e753fdc349ac0efd8c27db7a0cb406a25be4840f87f98", size = 197639, upload-time = "2026-04-03T11:25:22.002Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/19/1b9b0e29f30c6d35cb345486df41110984ea67ae69dddbc0e8a100999493/tzdata-2026.2.tar.gz", hash = "sha256:9173fde7d80d9018e02a662e168e5a2d04f87c41ea174b139fbef642eda62d10", size = 198254, upload-time = 
"2026-04-24T15:22:08.651Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/70/d460bd685a170790ec89317e9bd33047988e4bce507b831f5db771e142de/tzdata-2026.1-py2.py3-none-any.whl", hash = "sha256:4b1d2be7ac37ceafd7327b961aa3a54e467efbdb563a23655fbfe0d39cfc42a9", size = 348952, upload-time = "2026-04-03T11:25:20.313Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e4/dccd7f47c4b64213ac01ef921a1337ee6e30e8c6466046018326977efd95/tzdata-2026.2-py2.py3-none-any.whl", hash = "sha256:bbe9af844f658da81a5f95019480da3a89415801f6cc966806612cc7169bffe7", size = 349321, upload-time = "2026-04-24T15:22:05.876Z" }, ] [[package]] @@ -3385,7 +3359,7 @@ wheels = [ [[package]] name = "virtualenv" -version = "21.2.3" +version = "21.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, @@ -3393,9 +3367,9 @@ dependencies = [ { name = "platformdirs" }, { name = "python-discovery" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/8c/bdd9f89f89e4a787ac61bb2da4d884bc45e0c287ec694dfa3170dddd5cfe/virtualenv-21.2.3.tar.gz", hash = "sha256:9bb6d1414ab55ca624371e30c7719c32f183ef44da544ef8aa44a456de7ac191", size = 5844776, upload-time = "2026-04-14T01:10:36.692Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/8b/6331f7a7fe70131c301106ec1e7cf23e2501bf7d4ca3636805801ca191bb/virtualenv-21.3.0.tar.gz", hash = "sha256:733750db978ec95c2d8eb4feadaa57091002bce404cb39ba69899cf7bd28944e", size = 7614069, upload-time = "2026-04-27T17:05:58.927Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/19/bc7c4e05f42532863cf2ae7e7e847beab25835934e0410160b47eeff1e35/virtualenv-21.2.3-py3-none-any.whl", hash = "sha256:486652347ea8526d91e9807c0274583cb7ba31dd4942ff10fb5621402f0fe0d8", size = 5828329, upload-time = "2026-04-14T01:10:34.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/eb/03bfb1299d4c4510329e470f13f9a4ce793df7fcb5a2fd3510f911066f61/virtualenv-21.3.0-py3-none-any.whl", hash = 
"sha256:4d28ee41f6d9ec8f1f00cd472b9ffbcedda1b3d3b9a575b5c94a2d004fd51bd7", size = 7594690, upload-time = "2026-04-27T17:05:55.468Z" }, ] [[package]] @@ -3409,75 +3383,97 @@ wheels = [ [[package]] name = "xxhash" -version = "3.6.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/17/d4/cc2f0400e9154df4b9964249da78ebd72f318e35ccc425e9f403c392f22a/xxhash-3.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b47bbd8cf2d72797f3c2772eaaac0ded3d3af26481a26d7d7d41dc2d3c46b04a", size = 32844, upload-time = "2025-10-02T14:34:14.037Z" }, - { url = "https://files.pythonhosted.org/packages/5e/ec/1cc11cd13e26ea8bc3cb4af4eaadd8d46d5014aebb67be3f71fb0b68802a/xxhash-3.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2b6821e94346f96db75abaa6e255706fb06ebd530899ed76d32cd99f20dc52fa", size = 30809, upload-time = "2025-10-02T14:34:15.484Z" }, - { url = "https://files.pythonhosted.org/packages/04/5f/19fe357ea348d98ca22f456f75a30ac0916b51c753e1f8b2e0e6fb884cce/xxhash-3.6.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d0a9751f71a1a65ce3584e9cae4467651c7e70c9d31017fa57574583a4540248", size = 194665, upload-time = "2025-10-02T14:34:16.541Z" }, - { url = "https://files.pythonhosted.org/packages/90/3b/d1f1a8f5442a5fd8beedae110c5af7604dc37349a8e16519c13c19a9a2de/xxhash-3.6.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b29ee68625ab37b04c0b40c3fafdf24d2f75ccd778333cfb698f65f6c463f62", size = 213550, upload-time = "2025-10-02T14:34:17.878Z" }, - { url = 
"https://files.pythonhosted.org/packages/c4/ef/3a9b05eb527457d5db13a135a2ae1a26c80fecd624d20f3e8dcc4cb170f3/xxhash-3.6.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6812c25fe0d6c36a46ccb002f40f27ac903bf18af9f6dd8f9669cb4d176ab18f", size = 212384, upload-time = "2025-10-02T14:34:19.182Z" }, - { url = "https://files.pythonhosted.org/packages/0f/18/ccc194ee698c6c623acbf0f8c2969811a8a4b6185af5e824cd27b9e4fd3e/xxhash-3.6.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4ccbff013972390b51a18ef1255ef5ac125c92dc9143b2d1909f59abc765540e", size = 445749, upload-time = "2025-10-02T14:34:20.659Z" }, - { url = "https://files.pythonhosted.org/packages/a5/86/cf2c0321dc3940a7aa73076f4fd677a0fb3e405cb297ead7d864fd90847e/xxhash-3.6.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:297b7fbf86c82c550e12e8fb71968b3f033d27b874276ba3624ea868c11165a8", size = 193880, upload-time = "2025-10-02T14:34:22.431Z" }, - { url = "https://files.pythonhosted.org/packages/82/fb/96213c8560e6f948a1ecc9a7613f8032b19ee45f747f4fca4eb31bb6d6ed/xxhash-3.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dea26ae1eb293db089798d3973a5fc928a18fdd97cc8801226fae705b02b14b0", size = 210912, upload-time = "2025-10-02T14:34:23.937Z" }, - { url = "https://files.pythonhosted.org/packages/40/aa/4395e669b0606a096d6788f40dbdf2b819d6773aa290c19e6e83cbfc312f/xxhash-3.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7a0b169aafb98f4284f73635a8e93f0735f9cbde17bd5ec332480484241aaa77", size = 198654, upload-time = "2025-10-02T14:34:25.644Z" }, - { url = "https://files.pythonhosted.org/packages/67/74/b044fcd6b3d89e9b1b665924d85d3f400636c23590226feb1eb09e1176ce/xxhash-3.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:08d45aef063a4531b785cd72de4887766d01dc8f362a515693df349fdb825e0c", size = 210867, upload-time = "2025-10-02T14:34:27.203Z" }, - { url = 
"https://files.pythonhosted.org/packages/bc/fd/3ce73bf753b08cb19daee1eb14aa0d7fe331f8da9c02dd95316ddfe5275e/xxhash-3.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:929142361a48ee07f09121fe9e96a84950e8d4df3bb298ca5d88061969f34d7b", size = 414012, upload-time = "2025-10-02T14:34:28.409Z" }, - { url = "https://files.pythonhosted.org/packages/ba/b3/5a4241309217c5c876f156b10778f3ab3af7ba7e3259e6d5f5c7d0129eb2/xxhash-3.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:51312c768403d8540487dbbfb557454cfc55589bbde6424456951f7fcd4facb3", size = 191409, upload-time = "2025-10-02T14:34:29.696Z" }, - { url = "https://files.pythonhosted.org/packages/c0/01/99bfbc15fb9abb9a72b088c1d95219fc4782b7d01fc835bd5744d66dd0b8/xxhash-3.6.0-cp311-cp311-win32.whl", hash = "sha256:d1927a69feddc24c987b337ce81ac15c4720955b667fe9b588e02254b80446fd", size = 30574, upload-time = "2025-10-02T14:34:31.028Z" }, - { url = "https://files.pythonhosted.org/packages/65/79/9d24d7f53819fe301b231044ea362ce64e86c74f6e8c8e51320de248b3e5/xxhash-3.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:26734cdc2d4ffe449b41d186bbeac416f704a482ed835d375a5c0cb02bc63fef", size = 31481, upload-time = "2025-10-02T14:34:32.062Z" }, - { url = "https://files.pythonhosted.org/packages/30/4e/15cd0e3e8772071344eab2961ce83f6e485111fed8beb491a3f1ce100270/xxhash-3.6.0-cp311-cp311-win_arm64.whl", hash = "sha256:d72f67ef8bf36e05f5b6c65e8524f265bd61071471cd4cf1d36743ebeeeb06b7", size = 27861, upload-time = "2025-10-02T14:34:33.555Z" }, - { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, - { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, upload-time = "2025-10-02T14:34:36.043Z" }, - { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, - { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, - { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, - { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, - { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, - { url = 
"https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, - { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, - { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, - { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, - { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, - { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, - { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", 
hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, - { url = "https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, - { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, - { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, - { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, - { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, - { url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, - { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, - { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, - { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, - { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, - { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, - { url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, - { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, - { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, - { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, - { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, - { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, - { url = 
"https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, - { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, - { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, - { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, - { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, - { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, - { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, - { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, - { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, - { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, - { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, - { url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, - { url = "https://files.pythonhosted.org/packages/93/1e/8aec23647a34a249f62e2398c42955acd9b4c6ed5cf08cbea94dc46f78d2/xxhash-3.6.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0f7b7e2ec26c1666ad5fc9dbfa426a6a3367ceaf79db5dd76264659d509d73b0", size = 30662, upload-time = "2025-10-02T14:37:01.743Z" }, - { url = "https://files.pythonhosted.org/packages/b8/0b/b14510b38ba91caf43006209db846a696ceea6a847a0c9ba0a5b1adc53d6/xxhash-3.6.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5dc1e14d14fa0f5789ec29a7062004b5933964bb9b02aae6622b8f530dc40296", size = 41056, upload-time = "2025-10-02T14:37:02.879Z" }, - { url = "https://files.pythonhosted.org/packages/50/55/15a7b8a56590e66ccd374bbfa3f9ffc45b810886c8c3b614e3f90bd2367c/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:881b47fc47e051b37d94d13e7455131054b56749b91b508b0907eb07900d1c13", size = 36251, upload-time = "2025-10-02T14:37:04.44Z" }, - { url = "https://files.pythonhosted.org/packages/62/b2/5ac99a041a29e58e95f907876b04f7067a0242cb85b5f39e726153981503/xxhash-3.6.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6dc31591899f5e5666f04cc2e529e69b4072827085c1ef15294d91a004bc1bd", size = 32481, upload-time = "2025-10-02T14:37:05.869Z" }, - { url = "https://files.pythonhosted.org/packages/7b/d9/8d95e906764a386a3d3b596f3c68bb63687dfca806373509f51ce8eea81f/xxhash-3.6.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:15e0dac10eb9309508bfc41f7f9deaa7755c69e35af835db9cb10751adebc35d", size = 31565, upload-time = "2025-10-02T14:37:06.966Z" }, +version = 
"3.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/2f/e183a1b407002f5af81822bee18b61cdb94b8670208ef34734d8d2b8ebe9/xxhash-3.7.0.tar.gz", hash = "sha256:6cc4eefbb542a5d6ffd6d70ea9c502957c925e800f998c5630ecc809d6702bae", size = 82022, upload-time = "2026-04-25T11:10:32.553Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/f4/7bd35089ff1f8e2c96baa2dce05775a122aacd2e3830a73165e27a4d0848/xxhash-3.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fdc7d06929ae28dda98297a18eef7b0fd38991a3b405d8d7b55c9ef24c296958", size = 33423, upload-time = "2026-04-25T11:05:47.628Z" }, + { url = "https://files.pythonhosted.org/packages/a3/26/4e00c88a6a2c8a759cfb77d2a9a405f901e8aa66e60ef1fd0aeb35edda48/xxhash-3.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea6daa712f4e094a30830cf01e9b47d03b24d05cc9dab8609f0d9a9db8454712", size = 30857, upload-time = "2026-04-25T11:05:49.189Z" }, + { url = "https://files.pythonhosted.org/packages/82/2f/eeb942c17a5a761a8f01cb9180a0b76bfb62a2c39e6f46b1f9001899027a/xxhash-3.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9e6c0d843f1daf85ea23aeb053579135552bde575b7b98af20bfc667b6e4548d", size = 194702, upload-time = "2026-04-25T11:05:50.457Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/96f132c08b1e5951c68691d3b9ec351ec2edc028f6a01fcd294f46b9d9f0/xxhash-3.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:363c139bf15e1ac5f136b981d3c077eb551299b1effede7f12faa010b8590a60", size = 213613, upload-time = "2026-04-25T11:05:52.571Z" }, + { url = "https://files.pythonhosted.org/packages/82/89/d4e92b796c5ed052d29ed324dbfc1dc1188e0c4bf64bebbf0f8fc20698df/xxhash-3.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a778b25874cb0f862eaab5986bff4ca49ffb0def7c0a34c237b948b3c6c775b2", size = 236726, upload-time = 
"2026-04-25T11:05:54.395Z" }, + { url = "https://files.pythonhosted.org/packages/40/f1/81fc4361921dc6e557a9c60cb3712f36d244d06eeeb71cd2f4252ac42678/xxhash-3.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e1860f1e43d40e9d904cf22d93e587ea42e010ebce4160877e46bcab4bc232a", size = 212443, upload-time = "2026-04-25T11:05:56.334Z" }, + { url = "https://files.pythonhosted.org/packages/6a/d0/afeddd4cff50a332f50d4b8a2e8857673153ab0564ef472fcdeb0b5430df/xxhash-3.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9122ad6f867c4a0f5e655f5c3bdf89103852009dbb442a3d23e688b9e699e800", size = 445793, upload-time = "2026-04-25T11:05:58.953Z" }, + { url = "https://files.pythonhosted.org/packages/f7/d0/3c91e4e6a05ca4d7df8e39ec3a75b713609258ec84705ab34be6430826a1/xxhash-3.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7d9110d0c3fb02679972837a033251fd186c529aa62f19c132fc909c74052b8", size = 193937, upload-time = "2026-04-25T11:06:00.546Z" }, + { url = "https://files.pythonhosted.org/packages/4e/3a/a6b0772d9801dd4bea4ca4fd34734d6e9b51a711c8a611a24a79de26a878/xxhash-3.7.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:347a93f2b4ce67ce61959665e32a7447c380f8347e55e100daa23766baacf0e5", size = 285188, upload-time = "2026-04-25T11:06:01.96Z" }, + { url = "https://files.pythonhosted.org/packages/6c/f8/cf8e31fd7282230fe7367cd501a2e75b4b67b222bfc7eacccfc20d2652cb/xxhash-3.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:acbb48679ddf3852c45280c10ff10d52ca2cd1da2e552fb81db1ff786c75d0e4", size = 210966, upload-time = "2026-04-25T11:06:03.453Z" }, + { url = "https://files.pythonhosted.org/packages/cc/f0/fd36cc4a81bf52ee5633275daae2b93dd958aace67fd4f5d466ec83b5f35/xxhash-3.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:fe14c356f8b23ad811dc026077a6d4abccdaa7bce5ca98579605550657b6fcfb", size 
= 241994, upload-time = "2026-04-25T11:06:05.264Z" }, + { url = "https://files.pythonhosted.org/packages/08/e1/67f5d9c9369be42eaf99ba02c01bf14c5ecd67087b02567960bfcee43b63/xxhash-3.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f420ad3d41e38194353a498bbc9561fd5a9973a27b536ce46d8583479cf44335", size = 198707, upload-time = "2026-04-25T11:06:07.044Z" }, + { url = "https://files.pythonhosted.org/packages/50/17/a4c865ca22d2da6b1bc7d739bf88cab209533cf52ba06ca9da27c3039bee/xxhash-3.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:693d02c6dc7d1aa0a45921d54cd8c1ff629e09dfdc2238471507af1f7a1c6f04", size = 210917, upload-time = "2026-04-25T11:06:08.853Z" }, + { url = "https://files.pythonhosted.org/packages/49/8b/453b35810d697abac3c96bde3528bece685869227da274eb80a4a4d4a119/xxhash-3.7.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:14bf7a54e43825ec131ee7fe3c60e142e7c2c1e676ad0f93fc893432d15414af", size = 275772, upload-time = "2026-04-25T11:06:10.645Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ad/4eed7eab07fd3ee6678f416190f0413d097ab5d7c1278906bf1e9549d789/xxhash-3.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ae3a39a4d96bdb6f8d154fd7f490c4ad06f0532fcd2bb656052a9a7762cf5d31", size = 414068, upload-time = "2026-04-25T11:06:12.511Z" }, + { url = "https://files.pythonhosted.org/packages/d3/4e/fd6f8a680ba248fdb83054fa71a8bfa3891225200de1708b888ef2c49829/xxhash-3.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1cc07c639e3a77ef1d32987464d3e408565b8a3be57b545d3542b191054d9923", size = 191459, upload-time = "2026-04-25T11:06:14.07Z" }, + { url = "https://files.pythonhosted.org/packages/50/7c/8cb34b3bed4f44ca6827a534d50833f9bc6c006e83b0eb410ac9fa0793bd/xxhash-3.7.0-cp311-cp311-win32.whl", hash = "sha256:3281ba1d1e60ee7a382a7b958513ba03c2c0d5fcbd9a6f7517c0a81251a23422", size = 30628, upload-time = "2026-04-25T11:06:15.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/47/a49767bd7b40782bedae9ff0721bfe1d7e4dd9dc1585dea684e57ba67c20/xxhash-3.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:a7f25baec4c5d851d40718d6fae52285b31683093d4ff5207e63ab306ccf14a5", size = 31461, upload-time = "2026-04-25T11:06:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/7c/c6/3957bfacfb706bd687be246dfa8dd60f8df97c44186d229f7fd6e26c4b7e/xxhash-3.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:4c2454448ce847c72635827bb75c15c5a3434b03ee1afd28cb6dc6fb2597d830", size = 27746, upload-time = "2026-04-25T11:06:18.716Z" }, + { url = "https://files.pythonhosted.org/packages/f2/8a/51a14cdef4728c6c2337db8a7d8704422cc65676d9199d77215464c880af/xxhash-3.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:082c87bfdd2b9f457606c7a4a53457f4c4b48b0cdc48de0277f4349d79bb3d7a", size = 33357, upload-time = "2026-04-25T11:06:20.44Z" }, + { url = "https://files.pythonhosted.org/packages/b9/1b/0c2c933809421ffd9bf42b59315552c143c755db5d9a816b2f1ae273e884/xxhash-3.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5e7ce913b61f35b0c1c839a49ac9c8e75dd8d860150688aed353b0ce1bf409d8", size = 30869, upload-time = "2026-04-25T11:06:21.989Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/89d5fdd6ee12d70ba99451de46dd0e8010167468dcd913ec855653f4dd50/xxhash-3.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3beb1de3b1e9694fcdd853e570ee64c631c7062435d2f8c69c1adf809bc086f0", size = 194100, upload-time = "2026-04-25T11:06:23.586Z" }, + { url = "https://files.pythonhosted.org/packages/87/ee/2f9f2ed993e77206d1e66991290a1ebe22e843351ca3ebec8e49e01ba186/xxhash-3.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3e7b689c3bce16699efcf736066f5c6cc4472c3840fe4b22bd8279daf4abdac", size = 212977, upload-time = "2026-04-25T11:06:25.019Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/60/5a91644615a9e9d4e42c2e9925f1908e3a24e4e691d9de7340d565bea024/xxhash-3.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a6545e6b409e3d5cbafc850fb84c55a1ca26ed15a6b11e3bf07a0e0cd84517c8", size = 236373, upload-time = "2026-04-25T11:06:26.482Z" }, + { url = "https://files.pythonhosted.org/packages/22/c0/f3a9384eaaed9d14d4d062a5d953aa0da489bfe9747877aa994caa87cd0b/xxhash-3.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:31ab1461c77a11461d703c88eb949e132a1c6515933cf675d97ec680f4bd18de", size = 212229, upload-time = "2026-04-25T11:06:28.065Z" }, + { url = "https://files.pythonhosted.org/packages/2e/67/02f07a9fd79726804190f2172c4894c3ed9a4ebccaca05653c84beb58025/xxhash-3.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7c4d596b7676f811172687ec567cbafb9e4dea2f9be1bbb4f622410cb7f40f40", size = 445462, upload-time = "2026-04-25T11:06:30.048Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/558f5a90c0672fc9b4402dc25d87ac5b7406616e8969430c9ca4e52ee74d/xxhash-3.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:13805f0461cba0a857924e70ff91ae6d52d2598f79a884e788db80532614a4a1", size = 193932, upload-time = "2026-04-25T11:06:31.857Z" }, + { url = "https://files.pythonhosted.org/packages/d5/90/aaa09cd58661d32044dbbad7df55bbe22a623032b810e7ed3b8c569a2a6f/xxhash-3.7.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d398f372496152f1c6933a33566373f8d1b37b98b8c9d608fa6edc0976f23b2", size = 284807, upload-time = "2026-04-25T11:06:33.697Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f3/53df3719ab127a02c174f0c1c74924fcd110866e89c966bc7909cfa8fa84/xxhash-3.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d610aa62cdb7d4d497740741772a24a794903bf3e79eaa51d2e800082abe11e5", 
size = 210445, upload-time = "2026-04-25T11:06:35.488Z" }, + { url = "https://files.pythonhosted.org/packages/72/33/d219975c0e8b6fa2eb9ccd486fe47e21bf1847985b878dd2fbc3126e0d5c/xxhash-3.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:073c23900a9fbf3d26616c17c830db28af9803677cd5b33aea3224d824111514", size = 241273, upload-time = "2026-04-25T11:06:37.24Z" }, + { url = "https://files.pythonhosted.org/packages/3e/50/49b1afe610eb3964cedcb90a4d4c3d46a261ee8669cbd4f060652619ae3c/xxhash-3.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:418a463c3e6a590c0cdc890f8be19adb44a8c8acd175ca5b2a6de77e61d0b386", size = 197950, upload-time = "2026-04-25T11:06:39.148Z" }, + { url = "https://files.pythonhosted.org/packages/c6/75/5f42a1a4c78717d906a4b6a140c6dbf837ab1f547a54d23c4e2903310936/xxhash-3.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:03f8ff4474ee61c845758ce00711d7087a770d77efb36f7e74a6e867301000b8", size = 210709, upload-time = "2026-04-25T11:06:40.958Z" }, + { url = "https://files.pythonhosted.org/packages/8a/85/237e446c25abced71e9c53d269f2cef5bab8a82b3f88a12e00c5368e7368/xxhash-3.7.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:44fba4a5f1d179b7ddc7b3dc40f56f9209046421679b57025d4d8821b376fd8d", size = 275345, upload-time = "2026-04-25T11:06:42.525Z" }, + { url = "https://files.pythonhosted.org/packages/62/34/c2c26c0a6a9cc739bc2a5f0ae03ba8b87deb12b8bce35f7ac495e790dc6d/xxhash-3.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31e3516a0f829d06ded4a2c0f3c7c5561993256bfa1c493975fb9dc7bfa828a1", size = 414056, upload-time = "2026-04-25T11:06:44.343Z" }, + { url = "https://files.pythonhosted.org/packages/a0/aa/5c58e9bc8071b8afd8dcf297ff362f723c4892168faba149f19904132bf4/xxhash-3.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b59ee2ac81de57771a09ecad09191e840a1d2fae1ef684208320591055768f83", size = 191485, upload-time = "2026-04-25T11:06:46.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/69/a929cf9d1e2e65a48b818cdce72cb6b69eab2e6877f21436d0a1942aff43/xxhash-3.7.0-cp312-cp312-win32.whl", hash = "sha256:74bbd92f8c7fcc397ba0a11bfdc106bc72ad7f11e3a60277753f87e7532b4d81", size = 30671, upload-time = "2026-04-25T11:06:48.039Z" }, + { url = "https://files.pythonhosted.org/packages/b9/1b/104b41a8947f4e1d4a66ce1e628eea752f37d1890bfd7453559ca7a3d950/xxhash-3.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:7bd7bc82dd4f185f28f35193c2e968ef46131628e3cac62f639dadf321cba4d1", size = 31514, upload-time = "2026-04-25T11:06:49.279Z" }, + { url = "https://files.pythonhosted.org/packages/98/a0/1fd0ea1f1b886d9e7c73f0397571e22333a7d79e31da6d7127c2a4a71d75/xxhash-3.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:7d7148180ec99ba36585b42c8c5de25e9b40191613bc4be68909b4d25a77a852", size = 27761, upload-time = "2026-04-25T11:06:50.448Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ca/d5174b4c36d10f64d4ca7050563138c5a599efb01a765858ddefc9c1202a/xxhash-3.7.0-cp313-cp313-android_21_arm64_v8a.whl", hash = "sha256:4b6d6b33f141158692bd4eafbb96edbc5aa0dabdb593a962db01a91983d4f8fa", size = 36813, upload-time = "2026-04-25T11:06:51.73Z" }, + { url = "https://files.pythonhosted.org/packages/41/d0/abc6c9d347ba1f1e1e1d98125d0881a0452c7f9a76a9dd03a7b5d2197f23/xxhash-3.7.0-cp313-cp313-android_21_x86_64.whl", hash = "sha256:845d347df254d6c619f616afa921331bada8614b8d373d58725c663ba97c3605", size = 35121, upload-time = "2026-04-25T11:06:53.048Z" }, + { url = "https://files.pythonhosted.org/packages/bf/11/4cc834eb3d79f2f2b3a6ef7324195208bcdfbdcf7534d2b17267aa5f3a8f/xxhash-3.7.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:fddbbb69a6fff4f421e7a0d1fa28f894b20112e9e3fab306af451e2dfd0e459b", size = 29624, upload-time = "2026-04-25T11:06:54.311Z" }, + { url = "https://files.pythonhosted.org/packages/23/83/e97d3e7b635fe73a1dfb1e91f805324dd6d930bb42041cbf18f183bc0b6d/xxhash-3.7.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = 
"sha256:54876a4e45101cec2bf8f31a973cda073a23e2e108538dad224ba07f85f22487", size = 30638, upload-time = "2026-04-25T11:06:55.864Z" }, + { url = "https://files.pythonhosted.org/packages/f4/40/d84951d80c35db1f4c40a29a64a8520eea5d56e764c603906b4fe763580f/xxhash-3.7.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:0c72fe9c7e3d6dfd7f1e21e224a877917fa09c465694ba4e06464b9511b65544", size = 33323, upload-time = "2026-04-25T11:06:57.336Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/c7dc6558d97e9ab023f663d69ab28b340ed9bf4d2d94f2c259cf896bb354/xxhash-3.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a6d73a830b17ef49bc04e00182bd839164c1b3c59c127cd7c54fcb10c7ed8ee8", size = 33362, upload-time = "2026-04-25T11:06:58.656Z" }, + { url = "https://files.pythonhosted.org/packages/2a/6e/46b84017b1301d54091430353d4ad5901654a3e0871649877a416f7f1644/xxhash-3.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:91c3b07cf3362086d8f126c6aecd8e5e9396ad8b2f2219ea7e49a8250c318acd", size = 30874, upload-time = "2026-04-25T11:06:59.834Z" }, + { url = "https://files.pythonhosted.org/packages/df/5e/8f9158e3ab906ad3fec51e09b5ea0093e769f12207bfa42a368ca204e7ab/xxhash-3.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:50e879ebbac351c81565ca108db766d7832f5b8b6a5b14b8c0151f7190028e3d", size = 194185, upload-time = "2026-04-25T11:07:01.658Z" }, + { url = "https://files.pythonhosted.org/packages/f3/29/a804ded9f5d3d3758292678d23e7528b08fda7b7e750688d08b052322475/xxhash-3.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:921c14e93817842dd0dd9f372890a0f0c72e534650b6ab13c5be5cd0db11d47e", size = 213033, upload-time = "2026-04-25T11:07:03.606Z" }, + { url = "https://files.pythonhosted.org/packages/8b/91/1ce5a7d2fdc975267320e2c78fc1cecfe7ab735ccbcf6993ec5dd541cb2c/xxhash-3.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:e64a7c9d7dfca3e0fafcbc5e455519090706a3e36e95d655cec3e04e79f95aaa", size = 236140, upload-time = "2026-04-25T11:07:05.396Z" }, + { url = "https://files.pythonhosted.org/packages/34/04/fd595a4fd8617b05fa27bd9b684ecb4985bfed27917848eea85d54036d06/xxhash-3.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2220af08163baf5fa36c2b8af079dc2cbe6e66ae061385267f9472362dfd53c6", size = 212291, upload-time = "2026-04-25T11:07:06.966Z" }, + { url = "https://files.pythonhosted.org/packages/03/fb/f1a379cbc372ae5b9f4ab36154c48a849ca6ebe3ac477067a57865bf3bc6/xxhash-3.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f14bb8b22a4a91325813e3d553b8963c10cf8c756cff65ee50c194431296c655", size = 445532, upload-time = "2026-04-25T11:07:08.525Z" }, + { url = "https://files.pythonhosted.org/packages/65/59/172424b79f8cfd4b6d8a122b2193e6b8ad4b11f7159bb3b6f9b3191329bb/xxhash-3.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:496736f86a9bedaf64b0dc70e3539d0766df01c71ea22032698e88f3f04a1ce9", size = 193990, upload-time = "2026-04-25T11:07:10.315Z" }, + { url = "https://files.pythonhosted.org/packages/b9/19/aeac22161d953f139f07ba5586cb4a17c5b7b6dff985122803bb12933500/xxhash-3.7.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0ff71596bd79816975b3de7130ab1ff4541410285a3c084584eeb1c8239996fd", size = 284876, upload-time = "2026-04-25T11:07:12.15Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/4fd0b59e7a02242953da05ff679fbb961b0a4368eac97a217e11dae110c1/xxhash-3.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1ad86695c19b1d46fe106925db3c7a37f16be37669dcf58dcc70a9dd6e324676", size = 210495, upload-time = "2026-04-25T11:07:13.952Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/fb/976a3165c728c7faf74aa1b5ab3cf6a85e6d731612894741840524c7d28c/xxhash-3.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:970f9f8c50961d639cbd0d988c96f80ddf66006de93641719282c4fe7a87c5e6", size = 241331, upload-time = "2026-04-25T11:07:15.557Z" }, + { url = "https://files.pythonhosted.org/packages/4a/2c/6763d5901d53ac9e6ba296e5717ae599025c9d268396e8faa8b4b0a8e0ac/xxhash-3.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5886ad85e9e347911783760a1d16cb6b393e8f9e3b52c982568226cb56927bdc", size = 198037, upload-time = "2026-04-25T11:07:17.563Z" }, + { url = "https://files.pythonhosted.org/packages/61/2b/876e722d533833f5f9a83473e6ba993e48745701096944e77bbecf29b2c3/xxhash-3.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6e934bbae1e0ec74e27d5f0d7f37ef547ce5ff9f0a7e63fb39e559fc99526734", size = 210744, upload-time = "2026-04-25T11:07:19.055Z" }, + { url = "https://files.pythonhosted.org/packages/21/e6/d7e7baef7ce24166b4668d3c48557bb35a23b92ecadcac7e7718d099ab69/xxhash-3.7.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:3b6b3d28228af044ebcded71c4a3dd86e1dbd7e2f4645bf40f7b5da65bb5fb5a", size = 275406, upload-time = "2026-04-25T11:07:20.908Z" }, + { url = "https://files.pythonhosted.org/packages/92/fe/198b3763b2e01ca908f2154969a2352ec99bda892b574a11a9a151c5ede4/xxhash-3.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:6be4d70d9ab76c9f324ead9c01af6ff52c324745ea0c3731682a0cf99720f1fe", size = 414125, upload-time = "2026-04-25T11:07:23.037Z" }, + { url = "https://files.pythonhosted.org/packages/3a/6d/019a11affd5a5499137cacca53808659964785439855b5aa40dfd3412916/xxhash-3.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:151d7520838d4465461a0b7f4ae488b3b00de16183dd3214c1a6b14bf89d7fb6", size = 191555, upload-time = "2026-04-25T11:07:24.991Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/21/b96d58568df2d01533244c3e0e5cbdd0c8b2b25c4bec4d72f19259a292d7/xxhash-3.7.0-cp313-cp313-win32.whl", hash = "sha256:d798c1e291bffb8e37b5bbe0dda77fc767cd19e89cadaf66e6ed5d0ff88c9fe6", size = 30668, upload-time = "2026-04-25T11:07:26.665Z" }, + { url = "https://files.pythonhosted.org/packages/99/57/d849a8d3afa1f8f4bc6a831cd89f49f9706fbbad94d2975d6140a171988c/xxhash-3.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:875811ba23c543b1a1c3143c926e43996eb27ebb8f52d3500744aa608c275aed", size = 31524, upload-time = "2026-04-25T11:07:27.92Z" }, + { url = "https://files.pythonhosted.org/packages/81/52/bacc753e92dee78b058af8dcef0a50815f5f860986c664a92d75f965b6a5/xxhash-3.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:54a675cb300dda83d71daae2a599389d22db8021a0f8db0dd659e14626eb3ecc", size = 27768, upload-time = "2026-04-25T11:07:29.113Z" }, + { url = "https://files.pythonhosted.org/packages/1c/47/ddbd683b7fc7e592c1a8d9d65f73ce9ab513f082b3967eee2baf549b8fc6/xxhash-3.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a3b19a42111c4057c1547a4a1396a53961dca576a0f6b82bfa88a2d1561764b2", size = 33576, upload-time = "2026-04-25T11:07:30.469Z" }, + { url = "https://files.pythonhosted.org/packages/07/f2/36d3310161db7f72efb4562aadde0ed429f1d0531782dd6345b12d2da527/xxhash-3.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8f4608a06e4d61b7a3425665a46d00e0579122e1a2fae97a0c52953a3aad9aa3", size = 31123, upload-time = "2026-04-25T11:07:31.989Z" }, + { url = "https://files.pythonhosted.org/packages/0d/3f/75937a5c69556ed213021e43cbedd84c8e0279d0d74e7d41a255d84ba4b1/xxhash-3.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ad37c7792479e49cf96c1ab25517d7003fe0d93687a772ba19a097d235bbe41e", size = 196491, upload-time = "2026-04-25T11:07:33.358Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/29/f10d7ff8c7a733d4403a43b9de18c8fabc005f98cec054644f04418659ee/xxhash-3.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc026e3b89d98e30a8288c95cb696e77d150b3f0fb7a51f73dcd49ee6b5577fa", size = 215793, upload-time = "2026-04-25T11:07:34.919Z" }, + { url = "https://files.pythonhosted.org/packages/8b/fd/778f60aa295f58907938f030a8b514611f391405614a525cccd2ffc00eb5/xxhash-3.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c9b31ab1f28b078a6a1ac1a54eb35e7d5390deddd56870d0be3a0a733d1c321c", size = 237993, upload-time = "2026-04-25T11:07:36.638Z" }, + { url = "https://files.pythonhosted.org/packages/70/f5/736db5de387b4a540e37a05b84b40dc58a1ce974bfd2b4e5754ce29b68c3/xxhash-3.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3bb5fd680c038fd5229e44e9c493782f90df9bef632fd0499d442374688ff70b", size = 214887, upload-time = "2026-04-25T11:07:38.564Z" }, + { url = "https://files.pythonhosted.org/packages/4d/aa/09a095f22fdb9a27fbb716841fbff52119721f9ca4261952d07a912f7839/xxhash-3.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:030c0fd688fce3569fbb49a2feefd4110cbb0b650186fb4610759ecfac677548", size = 448407, upload-time = "2026-04-25T11:07:40.552Z" }, + { url = "https://files.pythonhosted.org/packages/74/8a/b745efeeca9e34a91c26fdc97ad8514c43d5a81ac78565cba80a1353870a/xxhash-3.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5b1bde10324f4c31812ae0d0502e92d916ae8917cad7209353f122b8b8f610c3", size = 196119, upload-time = "2026-04-25T11:07:42.101Z" }, + { url = "https://files.pythonhosted.org/packages/8a/5c/0cfceb024af90c191f665c7933b1f318ee234f4797858383bebd1881d52f/xxhash-3.7.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:503722d52a615f2604f5e7611de7d43878df010dc0053094ef91cb9a9ac3d987", size = 286751, upload-time = "2026-04-25T11:07:43.568Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0a/0793e405dc3cf8f4ebe2c1acec1e4e4608cd9e7e50ea691dabbc2a95ccbb/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c72500a3b6d6c30ebfc135035bcace9eb5884f2dc220804efcaaba43e9f611dd", size = 212961, upload-time = "2026-04-25T11:07:45.388Z" }, + { url = "https://files.pythonhosted.org/packages/0c/7e/721118ffc63bfff94aa565bcf2555a820f9f4bdb0f001e0d609bdfad70de/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:43475925a766d01ca8cd9a857fd87f3d50406983c8506a4c07c4df12adcc867f", size = 243703, upload-time = "2026-04-25T11:07:47.053Z" }, + { url = "https://files.pythonhosted.org/packages/6e/18/16f6267160488b8276fd3d449d425712512add292ba545c1b6946bfdb7dd/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8d09dfd2ab135b985daf868b594315ebe11ad86cd9fea46e6c69f19b28f7d25a", size = 200894, upload-time = "2026-04-25T11:07:48.657Z" }, + { url = "https://files.pythonhosted.org/packages/2d/94/80ba841287fd97e3e9cac1d228788c8ef623746f570404961eec748ecb5c/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c50269d0055ac1faecfd559886d2cbe4b730de236585aba0e873f9d9dadbe585", size = 213357, upload-time = "2026-04-25T11:07:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7e/106d4067130c59f1e18a55ffadcd876d8c68534883a1e02685b29d3d8153/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:1910df4756a5ab58cfad8744fc2d0f23926e3efcc346ee76e87b974abab922f4", size = 277600, upload-time = "2026-04-25T11:07:51.745Z" }, + { url = "https://files.pythonhosted.org/packages/c5/86/a081dd30da71d720b2612a792bfd55e45fa9a07ac76a0507f60487473c25/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d006faf3b491957efcb433489be3c149efe4787b7063d5cddb8ddaefdc60e0c1", size = 416980, upload-time = 
"2026-04-25T11:07:53.504Z" }, + { url = "https://files.pythonhosted.org/packages/35/29/1a95221a029a3c1293773869e1ab47b07cbbdd82444a42809e8c60156626/xxhash-3.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:abb65b4e947e958f7b3b0d71db3ce447d1bc5f37f5eab871ce7223bda8768a04", size = 193840, upload-time = "2026-04-25T11:07:55.103Z" }, + { url = "https://files.pythonhosted.org/packages/c5/e0/db909dd0823285de2286f67e10ee4d81e96ad35d7d8e964ecb07fccd8af9/xxhash-3.7.0-cp313-cp313t-win32.whl", hash = "sha256:178959906cb1716a1ce08e0d69c82886c70a15a6f2790fc084fdd146ca30cd49", size = 30966, upload-time = "2026-04-25T11:07:56.524Z" }, + { url = "https://files.pythonhosted.org/packages/7b/ff/d705b15b22f21ee106adce239cb65d35067a158c630b240270f09b17c2e6/xxhash-3.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2524a1e20d4c231d13b50f7cf39e44265b055669a64a7a4b9a2a44faa03f19b6", size = 31784, upload-time = "2026-04-25T11:07:57.758Z" }, + { url = "https://files.pythonhosted.org/packages/a2/1f/b2cf83c3638fd0588e0b17f22e5a9400bdfb1a3e3755324ac0aee2250b88/xxhash-3.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:37d994d0ffe81ef087bb330d392caa809bb5853c77e22ea3f71db024a0543dba", size = 27932, upload-time = "2026-04-25T11:07:59.109Z" }, + { url = "https://files.pythonhosted.org/packages/54/c1/e57ac7317b1f58a92bab692da6d497e2a7ce44735b224e296347a7ecc754/xxhash-3.7.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ad3aa71e12ee634f22b39a0ff439357583706e50765f17f05550f92dbf128a23", size = 31232, upload-time = "2026-04-25T11:10:21.51Z" }, + { url = "https://files.pythonhosted.org/packages/4f/4e/075559bd712bc62e84915ea46bbee859f935d285659082c129bdbff679dd/xxhash-3.7.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:5de686e73690cdaf72b96d4fa083c230ec9020bcc2627ce6316138e2cf2fe2d1", size = 28553, upload-time = "2026-04-25T11:10:23.1Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/ca/a9c78cb384d4b033b0c58196bd5c8509873cabe76389e195127b0302a741/xxhash-3.7.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7fbec49f5341bbdea0c471f7d1e2fb41ae8925af9b6f28025c28defd8eb94274", size = 41109, upload-time = "2026-04-25T11:10:25.022Z" }, + { url = "https://files.pythonhosted.org/packages/bd/b1/dfe2629f7c77eb2fa234c72ff537cdd64939763df704e256446ed364a16d/xxhash-3.7.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:48b542c347c2089f43dc5a6db31d2a6f3cdb04ee33505ec6e9f653834dbb0bde", size = 36307, upload-time = "2026-04-25T11:10:26.949Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f7/5a484afce0f48dd8083208b42e4911f290a82c7b52458ef2927e4d421a45/xxhash-3.7.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a169a036bed0995e090d1493b283cc2cc8a6f5046821086b843abefff80643bc", size = 32534, upload-time = "2026-04-25T11:10:29.01Z" }, + { url = "https://files.pythonhosted.org/packages/0f/5f/4acfcd490db9780cf36c58534d828003c564cde5350220a1c783c4d10776/xxhash-3.7.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:ec101643395d7f21405b640f728f6f627e6986557027d740f2f9b220955edafe", size = 31552, upload-time = "2026-04-25T11:10:30.727Z" }, ] [[package]] From a2f20d09453d7c289039a89592b5c1117360bd2b Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 19:50:44 +0200 Subject: [PATCH 02/17] init --- docs/source/core_concepts.rst | 1 + docs/source/core_concepts/viewer.md | 178 ++ pyproject.toml | 9 + src/plaid/viewer/__init__.py | 11 + src/plaid/viewer/cache.py | 168 ++ src/plaid/viewer/cli.py | 152 ++ src/plaid/viewer/config.py | 43 + src/plaid/viewer/models.py | 178 ++ src/plaid/viewer/preferences.py | 87 + src/plaid/viewer/services/__init__.py | 13 + .../services/paraview_artifact_service.py | 278 +++ 
.../viewer/services/plaid_dataset_service.py | 1157 ++++++++++ src/plaid/viewer/trame_app/__init__.py | 5 + src/plaid/viewer/trame_app/server.py | 1982 +++++++++++++++++ tests/viewer/__init__.py | 0 tests/viewer/conftest.py | 28 + tests/viewer/test_cache.py | 52 + tests/viewer/test_models.py | 31 + .../viewer/test_paraview_artifact_service.py | 112 + tests/viewer/test_plaid_dataset_service.py | 824 +++++++ tests/viewer/test_trame_server.py | 169 ++ uv.lock | 174 +- 22 files changed, 5649 insertions(+), 3 deletions(-) create mode 100644 docs/source/core_concepts/viewer.md create mode 100644 src/plaid/viewer/__init__.py create mode 100644 src/plaid/viewer/cache.py create mode 100644 src/plaid/viewer/cli.py create mode 100644 src/plaid/viewer/config.py create mode 100644 src/plaid/viewer/models.py create mode 100644 src/plaid/viewer/preferences.py create mode 100644 src/plaid/viewer/services/__init__.py create mode 100644 src/plaid/viewer/services/paraview_artifact_service.py create mode 100644 src/plaid/viewer/services/plaid_dataset_service.py create mode 100644 src/plaid/viewer/trame_app/__init__.py create mode 100644 src/plaid/viewer/trame_app/server.py create mode 100644 tests/viewer/__init__.py create mode 100644 tests/viewer/conftest.py create mode 100644 tests/viewer/test_cache.py create mode 100644 tests/viewer/test_models.py create mode 100644 tests/viewer/test_paraview_artifact_service.py create mode 100644 tests/viewer/test_plaid_dataset_service.py create mode 100644 tests/viewer/test_trame_server.py diff --git a/docs/source/core_concepts.rst b/docs/source/core_concepts.rst index 720a6c77..6ac29ba6 100644 --- a/docs/source/core_concepts.rst +++ b/docs/source/core_concepts.rst @@ -20,3 +20,4 @@ For more details and examples, see the :doc:`core_concepts` and :doc:`examples_t core_concepts/defaults core_concepts/disk_format core_concepts/interoperability + core_concepts/viewer diff --git a/docs/source/core_concepts/viewer.md 
b/docs/source/core_concepts/viewer.md new file mode 100644 index 00000000..b89508cc --- /dev/null +++ b/docs/source/core_concepts/viewer.md @@ -0,0 +1,178 @@ +# Dataset viewer + +The dataset viewer is a small trame/VTK web application that lets +you browse PLAID datasets stored on disk and inspect their samples in 3D. +It ships as the `plaid-viewer` console script. + +## Architecture + +The viewer runs as a single trame server process: + +- `plaid.viewer.services.PlaidDatasetService` discovers datasets and + loads `plaid.Sample` instances. It uses + `plaid.storage.init_from_disk` to obtain `(dataset_dict, + converter_dict)` and materialises a sample on demand with + `converter.to_plaid(dataset, index)`, so every PLAID backend + (`hf_datasets`, `cgns`, `zarr`, ...) is supported uniformly. + Hugging Face Hub datasets are also supported: when a dataset id is + registered as a repo id, the service dispatches to + `plaid.storage.init_streaming_from_hub` instead, so samples are + streamed lazily without a full local copy. +- `plaid.viewer.services.ParaviewArtifactService` writes each selected + sample to a CGNS file (or `.cgns.series` sidecar for time-dependent + samples) in a per-process cache directory. +- `plaid.viewer.trame_app.server.build_server` assembles the UI + (Vuetify side drawer with dataset/split/sample selectors and display + options) and a VTK pipeline: `vtkCGNSReader` → optional cut plane → + optional threshold → composite-data geometry → mapper/actor. + +There is no separate FastAPI backend and no second port: dataset +discovery, CGNS export and the 3D view are all served by trame. 
+ +## Launching the viewer + +```bash +uv run plaid-viewer --datasets-root /path/to/datasets +``` + +Useful options: + +| Option | Default | Description | +| ----------------- | ----------- | ------------------------------------------------------------------------------------------------ | +| `--datasets-root` | *required* | Directory containing one sub-directory per PLAID dataset. A single-dataset directory also works. | +| `--cache-dir` | `None` | Persistent artifact cache. When omitted, an ephemeral temp dir is used and cleaned at shutdown. | +| `--host` | `127.0.0.1` | Bind address for the trame HTTP server. | +| `--port` | `8080` | Port exposed by the trame HTTP server. | +| `--backend-id` | `disk` | PLAID backend identifier embedded in sample references and the cache key. | +| `--hub-repo` | `None` | Hugging Face Hub repo id (`namespace/name`) streamed via `init_streaming_from_hub`. Repeat the flag to pre-register multiple repos. | + +Open `http://<host>:<port>/` in your browser. + +### Streaming from the Hugging Face Hub + +Hub datasets can be added at launch time with `--hub-repo` or from the +running UI through the **Hub** tab in the side drawer (the drawer now +groups the local datasets root and the Hugging Face repo input under a +`Local / Hub` tab selector, hidden when `--disable-root-change` is set). +Each registered repo shows up as a removable chip and as a new entry in +the **Dataset** dropdown. Samples are loaded on demand through +`plaid.storage.init_streaming_from_hub`, so only the selected sample's +shards are fetched. + +```bash +# Start with one or more hub datasets pre-registered. +uv run plaid-viewer --hub-repo PLAID-lib/VKI-LS59 --hub-repo PLAID-lib/Rotor37 +``` + +Streaming splits returned by PLAID are forward-only +`datasets.IterableDataset` objects without `__len__`. The viewer adapts +accordingly: + +- A `streaming` chip appears in the toolbar to advertise the mode.
+- The **Sample** slider starts at a single reachable step and grows by + one every time the user moves it to the right; each right-arrow press + consumes the next element from the iterator. +- Revisiting an already-fetched index simply re-renders the cached + sample; the slider cannot be rewound because the underlying iterator + cannot. +- Switching split or dataset rebuilds a fresh iterator from the Hub. +- When the stream is exhausted the slider caps at the last consumed + index and the counter label shows `(end of stream)`. + + +## Using the UI + +The side drawer provides, from top to bottom: + +1. **Dataset / Split** - two `VSelect` controls that pick the active + dataset and split. +2. **Sample** - a `VSlider` over the integer sample index of the current + split; the selected `sample_id` (and the total count) is shown under + the slider. +3. **Base** - a `VBtnToggle` with exclusive, mandatory selection: exactly + one renderable CGNS base exposed by `vtkCGNSReader.GetBaseSelection()` + is active at any time. Bases that contain + no `Zone_t` children (for example, a `Global` base storing only + reference scalars or free-standing tensors) are not rendered but are + summarised in the **Non-visual bases** accordion further down the + drawer: each `DataArray_t` is listed with its name, dtype, shape and a + short value preview. +4. **Field / Colormap / Show edges** - colour the geometry by any point + or cell array (all point and cell arrays are enabled on the reader + by default so every field shows up in the dropdown), pick from a set + of built-in colormaps and optionally overlay wireframe edges. +5. **Cut plane** - toggle a `vtkCutter` and interactively adjust its + normal and signed offset along that normal (the plane origin is the + current dataset's bounding-box centre). +6. **Threshold** - toggle a `vtkThreshold` filter on the currently + selected field and set the `[min, max]` range. Defaults are populated + from the field's data range. +7. 
**Select features** - an expandable panel listing the field paths + available for the current dataset (retrieved from the PLAID metadata + schema). Toggling checkboxes and clicking **Apply** filters the loaded + samples down to the selected fields: + - For disk-backed datasets the selection is forwarded to + `converter.to_plaid(dataset, index, features=...)`. PLAID expands + the list internally with + `plaid.utils.cgns_helper.update_features_for_CGNS_compatibility` + to preserve the CGNS conventions (coordinates, zones, grid + locations, etc. that make the kept fields renderable). The + user-facing selection is first intersected with the active split's + own feature catalogue, so paths that only live in another split + (for example a field present in `train` but not in `test`) do not + trigger a `Missing features` error. + - For streaming (Hugging Face Hub) datasets the expansion must be + done ahead of `init_streaming_from_hub`. The viewer calls + `update_features_for_CGNS_compatibility` itself and hands the + expanded list to the streaming loader, then invalidates the + current iterator so the next sample is materialised with the new + filter. + The **Clear** / **Select all** buttons in the panel header provide + shortcuts; an empty selection loads only the geometric support + (mesh + zones + metadata). +8. **Reset camera** - re-frames the current actor. + +The 3D view is a server-side `VtkRemoteView` (images are rendered on the +server and streamed to the browser). Camera manipulation uses the +ParaView-like trackball style: + +- Left mouse button: rotate. +- Middle mouse button (or Shift + left): pan. +- Mouse wheel (or right button drag): zoom. + +A status line at the bottom of the drawer reports the last action or +error. 
+ +## Cache layout + +Artifacts are written under: + +``` +<cache-root>/datasets/<dataset_id>/<split>/<sample_id>/<cache_key_prefix>/ + meshes/ # one CGNS per timestep (time-dependent) + meshes.cgns.series # ParaView file-series sidecar (time-dependent) + mesh.cgns # single static mesh + metadata.json # cache key, sample ref, export version, ... +``` + +The cache key is a SHA-256 of the sample reference, backend id, PLAID +version and `ViewerConfig.export_version`. Re-running the viewer with +the same inputs reuses existing artifacts; bumping `export_version` +invalidates them. + +## Programmatic usage + +```python +from pathlib import Path +from plaid.viewer.cache import CacheRoot +from plaid.viewer.config import ViewerConfig +from plaid.viewer.services import ParaviewArtifactService, PlaidDatasetService +from plaid.viewer.trame_app.server import build_server + +config = ViewerConfig(datasets_root=Path("/path/to/datasets")) +with CacheRoot(persistent_dir=config.cache_dir) as cache: + datasets = PlaidDatasetService(config) + artifacts = ParaviewArtifactService(datasets, cache.path) + server = build_server(datasets, artifacts) + server.start(host="127.0.0.1", port=8080, open_browser=False) +``` diff --git a/pyproject.toml b/pyproject.toml index 70bdd20e..3974d34f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,9 +85,18 @@ dev = [ "sphinx-tabs>=3.4.7", "sphinxcontrib-bibtex>=2.6.5", ] +viewer = [ + "trame>=3.6,<4.0", + "trame-vtk>=2.8,<3.0", + "trame-vuetify>=2.7,<3.0", + "vtk>=9.6.1", +] [tool.coverage.run] omit = ["src/plaid/examples/*"] [tool.pytest.ini_options] filterwarnings = "ignore::DeprecationWarning" + +[project.scripts] +plaid-viewer = "plaid.viewer.cli:main" diff --git a/src/plaid/viewer/__init__.py b/src/plaid/viewer/__init__.py new file mode 100644 index 00000000..9880bdd2 --- /dev/null +++ b/src/plaid/viewer/__init__.py @@ -0,0 +1,11 @@ +"""Dataset viewer for PLAID. + +This package hosts the raw PLAID dataset viewer: a single self-contained +trame/ParaView visualization server.
PLAID owns the UI shell and +the page; PLAID owns data loading, sample interpretation, and CGNS export; +ParaView/trame owns the scientific visualization. +""" + +from plaid.viewer.models import ParaviewArtifact, SampleRef + +__all__ = ["ParaviewArtifact", "SampleRef"] diff --git a/src/plaid/viewer/cache.py b/src/plaid/viewer/cache.py new file mode 100644 index 00000000..ef5d1a17 --- /dev/null +++ b/src/plaid/viewer/cache.py @@ -0,0 +1,168 @@ +"""Ephemeral-by-default artifact cache for the dataset viewer. + +The cache lives under a per-process temporary directory by default and is +removed at shutdown. Four cleanup layers cover all practical failure modes: + +1. ``atexit.register`` for normal Python exit. +2. Signal handlers for ``SIGINT`` / ``SIGTERM``. +3. A FastAPI lifespan context (provided by callers). +4. An orphan sweep at startup that removes directories left behind by + previously-crashed processes (detected via ``os.kill(pid, 0)``). +""" + +from __future__ import annotations + +import atexit +import errno +import logging +import os +import re +import shutil +import signal +import tempfile +import uuid +from pathlib import Path + +logger = logging.getLogger(__name__) + +# Ephemeral tempdir naming: ``plaid-viewer-{pid}-{uuid4.hex}``. +_EPHEMERAL_PREFIX = "plaid-viewer-" +_EPHEMERAL_PATTERN = re.compile(r"^plaid-viewer-(?P\d+)-(?P[0-9a-f]+)$") + + +def _process_is_alive(pid: int) -> bool: + """Return ``True`` if a process with the given pid is still running.""" + if pid <= 0: + return False + try: + os.kill(pid, 0) + except ProcessLookupError: + return False + except PermissionError: + # The process exists but is owned by someone else. + return True + except OSError as exc: # pragma: no cover - defensive + return exc.errno != errno.ESRCH + return True + + +def sweep_orphans(temp_root: Path | None = None) -> list[Path]: + """Remove viewer tempdirs whose owning process is no longer running. + + Args: + temp_root: Base temp directory to scan. 
Defaults to + :func:`tempfile.gettempdir`. + + Returns: + List of directories that were removed. + """ + root = Path(temp_root) if temp_root is not None else Path(tempfile.gettempdir()) + removed: list[Path] = [] + if not root.is_dir(): + return removed + for entry in root.iterdir(): + if not entry.is_dir(): + continue + match = _EPHEMERAL_PATTERN.match(entry.name) + if match is None: + continue + pid = int(match.group("pid")) + if _process_is_alive(pid): + continue + try: + shutil.rmtree(entry, ignore_errors=True) + removed.append(entry) + logger.info("Removed orphan viewer cache: %s", entry) + except OSError as exc: # pragma: no cover - defensive + logger.warning("Could not remove orphan viewer cache %s: %s", entry, exc) + return removed + + +class CacheRoot: + """Context-manager-friendly artifact cache directory. + + When ``persistent_dir`` is ``None`` (the default), a new ephemeral tempdir + named ``plaid-viewer-{pid}-{token}`` is created. The directory is + removed at process exit (``atexit``), on ``SIGINT`` / ``SIGTERM``, and + when the context manager is closed. + + When ``persistent_dir`` is provided, that directory is used as-is and is + **not** removed. Callers wanting persistence pass this. 
+ """ + + def __init__( + self, + persistent_dir: Path | None = None, + *, + install_signal_handlers: bool = True, + run_orphan_sweep: bool = True, + ) -> None: + self._ephemeral = persistent_dir is None + if self._ephemeral: + if run_orphan_sweep: + sweep_orphans() + token = uuid.uuid4().hex[:12] + base = Path(tempfile.gettempdir()) + self._path = base / f"{_EPHEMERAL_PREFIX}{os.getpid()}-{token}" + self._path.mkdir(parents=True, exist_ok=False) + atexit.register(self._safe_cleanup) + if install_signal_handlers: + self._install_signal_handlers() + else: + self._path = Path(persistent_dir) + self._path.mkdir(parents=True, exist_ok=True) + self._closed = False + + # ------------------------------------------------------------------ API + + @property + def path(self) -> Path: + """Root directory of the cache.""" + return self._path + + @property + def is_ephemeral(self) -> bool: + """Whether the cache directory is automatically cleaned up.""" + return self._ephemeral + + def close(self) -> None: + """Remove the cache directory if it is ephemeral.""" + if self._closed: + return + self._closed = True + if self._ephemeral: + self._safe_cleanup() + + def __enter__(self) -> "CacheRoot": # noqa: D105 + return self + + def __exit__(self, exc_type, exc, tb) -> None: # noqa: D105 + self.close() + + # -------------------------------------------------------------- Internals + + def _safe_cleanup(self) -> None: + try: + shutil.rmtree(self._path, ignore_errors=True) + except Exception as exc: # pragma: no cover - defensive + logger.warning("Failed to clean viewer cache %s: %s", self._path, exc) + + def _install_signal_handlers(self) -> None: + for sig in (signal.SIGINT, signal.SIGTERM): + try: + previous = signal.getsignal(sig) + except (ValueError, OSError): # pragma: no cover - non-main thread + continue + + def handler(signum, frame, _prev=previous): + self._safe_cleanup() + if callable(_prev) and _prev not in (signal.SIG_DFL, signal.SIG_IGN): + _prev(signum, frame) + # 
Re-raise the default behaviour to keep expected exit codes. + signal.signal(signum, signal.SIG_DFL) + os.kill(os.getpid(), signum) + + try: + signal.signal(sig, handler) + except (ValueError, OSError): # pragma: no cover - non-main thread + pass diff --git a/src/plaid/viewer/cli.py b/src/plaid/viewer/cli.py new file mode 100644 index 00000000..f60bc460 --- /dev/null +++ b/src/plaid/viewer/cli.py @@ -0,0 +1,152 @@ +"""Command-line entry point for the dataset viewer. + +Starts a single self-contained trame server. There is no FastAPI backend +and no separate port: dataset discovery, sample loading, CGNS export and +the 3D view are all served by the same trame process. +""" + +from __future__ import annotations + +import argparse +import logging +from pathlib import Path + +from plaid.viewer.cache import CacheRoot +from plaid.viewer.config import ViewerConfig +from plaid.viewer.preferences import get_last_datasets_root +from plaid.viewer.services import ParaviewArtifactService, PlaidDatasetService + +logger = logging.getLogger(__name__) + + +def _build_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser( + prog="plaid-viewer", + description="Launch the dataset viewer (trame + VTK).", + ) + parser.add_argument( + "--datasets-root", + type=Path, + default=None, + help=( + "Directory containing one subdirectory per PLAID dataset. " + "When omitted, the viewer starts without a root and the user " + "selects one from the UI (unless --disable-root-change is set)." + ), + ) + parser.add_argument( + "--browse-roots", + type=Path, + nargs="+", + default=None, + help=( + "Directories the UI is allowed to expose through the datasets " + "root text field and file browser. Defaults to the user home " + "directory. Any path outside these roots is rejected." + ), + ) + parser.add_argument( + "--disable-root-change", + action="store_true", + help=( + "Hide the 'Datasets root' UI panel; the root stays fixed to " + "--datasets-root for the lifetime of the server. 
Recommended " + "for public deployments (e.g. Hugging Face Spaces)." + ), + ) + + parser.add_argument( + "--cache-dir", + type=Path, + default=None, + help=( + "Persistent artifact cache directory. When omitted, an ephemeral " + "per-process temp directory is used and cleaned up at shutdown." + ), + ) + parser.add_argument("--host", default="127.0.0.1", help="Trame server host.") + parser.add_argument("--port", type=int, default=8080, help="Trame server port.") + parser.add_argument( + "--backend-id", + default="disk", + help="PLAID backend identifier embedded in SampleRefs.", + ) + parser.add_argument( + "--hub-repo", + action="append", + default=None, + metavar="NAMESPACE/NAME", + help=( + "Register a Hugging Face Hub repo id streamed through " + "plaid.storage.init_streaming_from_hub. Repeat the flag to " + "pre-register multiple repos. Additional repos can be added " + "at runtime from the UI (unless --disable-root-change is set)." + ), + ) + return parser + + +def main(argv: list[str] | None = None) -> int: + """Run the viewer until interrupted. + + Args: + argv: Optional override of ``sys.argv[1:]`` for tests. + + Returns: + Process exit code. + """ + args = _build_parser().parse_args(argv) + logging.basicConfig( + level=logging.INFO, format="%(asctime)s %(levelname)s %(name)s: %(message)s" + ) + + # Permanently silence the process's file-descriptor 2 so the HDF5 / + # CGNS C libraries (used by both VTK's ``vtkCGNSReader`` and PLAID's + # pyCGNS loader) cannot pollute the console with messages like + # ``Mismatch in number of children and child IDs read``. Python's + # ``sys.stderr`` is preserved so tracebacks and the logger keep + # working. See ``_reroute_c_stderr`` for the details. 
+ from plaid.viewer.trame_app.server import ( # noqa: PLC0415 + _reroute_c_stderr, + ) + + _reroute_c_stderr() + + # When no explicit ``--datasets-root`` is passed, fall back to the + # last local root the user selected in a previous session (persisted + # under ``$XDG_CONFIG_HOME/plaid/viewer.json``). This makes the + # viewer "remember" the last dataset directory without requiring the + # CLI flag on every launch. + effective_datasets_root = args.datasets_root + if effective_datasets_root is None: + effective_datasets_root = get_last_datasets_root() + if effective_datasets_root is not None: + logger.info("Using persisted datasets root: %s", effective_datasets_root) + browse_roots = tuple(args.browse_roots) if args.browse_roots else () + config = ViewerConfig( + datasets_root=effective_datasets_root, + cache_dir=args.cache_dir, + backend_id=args.backend_id, + browse_roots=browse_roots, + allow_root_change=not args.disable_root_change, + ) + + with CacheRoot(persistent_dir=config.cache_dir) as cache: + dataset_service = PlaidDatasetService(config) + for repo_id in args.hub_repo or []: + try: + dataset_service.add_hub_dataset(repo_id) + except ValueError as exc: + logger.warning("Ignoring --hub-repo %r: %s", repo_id, exc) + artifact_service = ParaviewArtifactService(dataset_service, cache.path) + + # Deferred import so ``--help`` works without trame installed. 
+ from plaid.viewer.trame_app.server import build_server # noqa: PLC0415 + + server = build_server(dataset_service, artifact_service) + server.start(host=args.host, port=args.port, open_browser=False) + return 0 + + +if __name__ == "__main__": # pragma: no cover - CLI entry + raise SystemExit(main()) diff --git a/src/plaid/viewer/config.py b/src/plaid/viewer/config.py new file mode 100644 index 00000000..d94bedb2 --- /dev/null +++ b/src/plaid/viewer/config.py @@ -0,0 +1,43 @@ +"""Runtime configuration for the dataset viewer.""" + +from __future__ import annotations + +from dataclasses import dataclass, field +from pathlib import Path + + +@dataclass(frozen=True) +class ViewerConfig: + """Static configuration for a viewer instance. + + Attributes: + datasets_root: Directory scanned to discover datasets. A dataset is a + subdirectory containing both ``data/`` and ``problem_definitions/`` + (or the root may itself be such a folder). When ``None``, the + viewer starts without a root and the user is expected to pick one + interactively (when ``allow_root_change`` is True). + cache_dir: Root directory for ParaView artifacts. When ``None``, an + ephemeral per-process directory is created under the OS temp root + and cleaned up at shutdown. + backend_id: PLAID backend identifier embedded in :class:`SampleRef` + objects and in the artifact cache key. + export_version: Opaque string mixed into the artifact cache key. Bump + when export logic changes. + extra_cache_key_fields: Extra fields serialised into the cache key. + browse_roots: Directories the viewer is allowed to expose through the + built-in file browser / datasets-root text field. Every candidate + path must be a descendant of at least one of these roots. When + empty, defaults to ``(Path.home(),)`` at the service level. + allow_root_change: When ``True`` (default), the trame UI exposes a + panel to change the datasets root at runtime. Set to ``False`` for + public deployments (e.g. 
Hugging Face Spaces) where the root must + remain fixed to what the operator configured. + """ + + datasets_root: Path | None = None + cache_dir: Path | None = None + backend_id: str = "disk" + export_version: str = "1" + extra_cache_key_fields: dict[str, str] = field(default_factory=dict) + browse_roots: tuple[Path, ...] = () + allow_root_change: bool = True diff --git a/src/plaid/viewer/models.py b/src/plaid/viewer/models.py new file mode 100644 index 00000000..f6026c8e --- /dev/null +++ b/src/plaid/viewer/models.py @@ -0,0 +1,178 @@ +"""Data models for the dataset viewer. + +Contains both immutable dataclasses used by services (`SampleRef`, +`ParaviewArtifact`) and pydantic models used as API response payloads. +""" + +from __future__ import annotations + +from dataclasses import dataclass +from pathlib import Path + +from pydantic import BaseModel, Field + + +@dataclass(frozen=True) +class SampleRef: + """Backend-agnostic reference to a PLAID sample. + + Attributes: + backend_id: Identifier of the PLAID storage backend (e.g. ``"disk"``, + ``"hf_datasets"``, ``"zarr"``). + dataset_id: Identifier of the dataset (typically the dataset directory + name). + split: Optional split name (``"train"``, ``"test"``, ...). ``None`` + when the dataset is not split. + sample_id: Identifier of the sample within the split. For disk-backed + datasets this is the zero-based index rendered as a string.
+ """ + + backend_id: str + dataset_id: str + split: str | None + sample_id: str + + def encode(self) -> str: + """Return a URL-safe string identifier usable as a route parameter.""" + split = self.split if self.split is not None else "_" + return f"{self.backend_id}:{self.dataset_id}:{split}:{self.sample_id}" + + @classmethod + def decode(cls, value: str) -> "SampleRef": + """Parse a string produced by :meth:`encode`.""" + parts = value.split(":") + if len(parts) != 4: + raise ValueError(f"Invalid sample reference: {value!r}") + backend_id, dataset_id, split, sample_id = parts + return cls( + backend_id=backend_id, + dataset_id=dataset_id, + split=None if split == "_" else split, + sample_id=sample_id, + ) + + +@dataclass(frozen=True) +class ParaviewArtifact: + """A ParaView-readable artifact produced from a PLAID sample. + + For time-dependent samples, ``cgns_path`` points to a ``.cgns.series`` + sidecar file that groups multiple CGNS files into a single time sequence. + For single-timestep samples, it points to the single CGNS file directly. + + Attributes: + artifact_id: Stable identifier used in API routes. Derived from the + cache key. + cgns_path: Path to the file ParaView should open. Either a + ``.cgns.series`` sidecar (multi-time) or a ``.cgns`` file. + state_path: Optional ParaView state file (``.pvsm``) providing a + reasonable default scene. + metadata_path: Optional JSON metadata file describing the artifact. + cache_key: Deterministic SHA256 key over the artifact inputs. + created: ``True`` if the artifact was newly created, ``False`` if it + was already present in the cache. 
+ """ + + artifact_id: str + cgns_path: Path + state_path: Path | None + metadata_path: Path | None + cache_key: str + created: bool + + +# --------------------------------------------------------------------------- +# API response models +# --------------------------------------------------------------------------- + + +class DatasetInfo(BaseModel): + """Summary information about an available dataset. + + ``backend_id`` identifies the loading mode: ``"disk"`` for datasets + opened with :func:`plaid.storage.init_from_disk` and ``"hub"`` for + Hugging Face repositories streamed through + :func:`plaid.storage.init_streaming_from_hub`. Streamed datasets do + not always expose a total sample count and may need to be navigated + sequentially through a streaming cursor. + """ + + dataset_id: str + backend_id: str + path: str + has_infos: bool = False + has_problem_definitions: bool = False + + +class DatasetDetail(DatasetInfo): + """Full detail view of a dataset. + + ``splits`` maps each split name to its sample count. The count is + ``None`` for streaming datasets where the total is unknown. 
+ """ + + splits: dict[str, int | None] = Field(default_factory=dict) + infos: dict | None = None + problem_definitions: list[str] = Field(default_factory=list) + + +class SampleRefDTO(BaseModel): + """Serializable form of :class:`SampleRef` used by the API.""" + + backend_id: str + dataset_id: str + split: str | None + sample_id: str + encoded: str + + @classmethod + def from_ref(cls, ref: SampleRef) -> "SampleRefDTO": + """Build the DTO from a :class:`SampleRef`.""" + return cls( + backend_id=ref.backend_id, + dataset_id=ref.dataset_id, + split=ref.split, + sample_id=ref.sample_id, + encoded=ref.encode(), + ) + + +class SampleSummary(BaseModel): + """Minimal metadata describing a PLAID sample.""" + + ref: SampleRefDTO + n_times: int + time_values: list[float] + bases: list[str] + zones_by_base: dict[str, list[str]] = Field(default_factory=dict) + globals: dict[str, str] = Field(default_factory=dict) + fields_by_base: dict[str, list[str]] = Field(default_factory=dict) + + +class ValidationResult(BaseModel): + """Validation outcome for a PLAID sample.""" + + ref: SampleRefDTO + ok: bool + warnings: list[str] = Field(default_factory=list) + errors: list[str] = Field(default_factory=list) + + +class ArtifactInfo(BaseModel): + """Public view of a :class:`ParaviewArtifact`.""" + + artifact_id: str + cache_key: str + created: bool + cgns_path: str + state_path: str | None + metadata_path: str | None + is_time_series: bool + n_files: int + + +class ViewerUrl(BaseModel): + """Response model for the ``viewer-url`` endpoint.""" + + artifact_id: str + url: str diff --git a/src/plaid/viewer/preferences.py b/src/plaid/viewer/preferences.py new file mode 100644 index 00000000..220759c3 --- /dev/null +++ b/src/plaid/viewer/preferences.py @@ -0,0 +1,87 @@ +"""Persistent user preferences for the dataset viewer. 
+ +The viewer stores a tiny JSON document under the OS-standard user config +directory so a handful of settings (currently only the last local +``datasets_root``) survive across sessions. The file is best-effort: +read/write errors are silently swallowed so a broken preferences file +never prevents the viewer from starting. + +Location: ``$XDG_CONFIG_HOME/plaid/viewer.json`` (falling back to +``~/.config/plaid/viewer.json``), overridable by setting +``PLAID_VIEWER_CONFIG_FILE``. +""" + +from __future__ import annotations + +import json +import logging +import os +from pathlib import Path + +logger = logging.getLogger(__name__) + + +def _preferences_path() -> Path: + """Return the path to the persistent preferences file.""" + override = os.environ.get("PLAID_VIEWER_CONFIG_FILE") + if override: + return Path(override).expanduser() + base = os.environ.get("XDG_CONFIG_HOME") + root = Path(base).expanduser() if base else Path.home() / ".config" + return root / "plaid" / "viewer.json" + + +def load_preferences() -> dict[str, object]: + """Return the persisted preferences dict, or an empty dict on failure.""" + path = _preferences_path() + if not path.is_file(): + return {} + try: + return json.loads(path.read_text()) + except (OSError, json.JSONDecodeError) as exc: # noqa: BLE001 + logger.debug("Ignoring unreadable viewer preferences at %s: %s", path, exc) + return {} + + +def save_preferences(data: dict[str, object]) -> None: + """Persist ``data`` to the preferences file, creating parents as needed.""" + path = _preferences_path() + try: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(data, indent=2, sort_keys=True)) + except OSError as exc: # noqa: BLE001 + logger.debug("Failed to persist viewer preferences to %s: %s", path, exc) + + +def update_preferences(**updates: object) -> dict[str, object]: + """Merge ``updates`` into the persisted preferences and return the result. 
+ + Keys whose value is ``None`` are removed from the stored document so + clearing a setting (e.g. the datasets root) does not leave a stale + entry behind. + """ + current = load_preferences() + for key, value in updates.items(): + if value is None: + current.pop(key, None) + else: + current[key] = value + save_preferences(current) + return current + + +def get_last_datasets_root() -> Path | None: + """Return the persisted last-used datasets root, or ``None``.""" + value = load_preferences().get("datasets_root") + if not isinstance(value, str) or not value: + return None + candidate = Path(value).expanduser() + return candidate if candidate.is_dir() else None + + +def set_last_datasets_root(path: Path | str | None) -> None: + """Persist (or clear) the last-used datasets root.""" + if path is None: + update_preferences(datasets_root=None) + return + update_preferences(datasets_root=str(Path(path).expanduser().resolve())) diff --git a/src/plaid/viewer/services/__init__.py b/src/plaid/viewer/services/__init__.py new file mode 100644 index 00000000..c79ceef3 --- /dev/null +++ b/src/plaid/viewer/services/__init__.py @@ -0,0 +1,13 @@ +"""Services for the dataset viewer.""" + +from plaid.viewer.services.paraview_artifact_service import ( + ParaviewArtifactService, + ensure_paraview_artifact, +) +from plaid.viewer.services.plaid_dataset_service import PlaidDatasetService + +__all__ = [ + "ParaviewArtifactService", + "PlaidDatasetService", + "ensure_paraview_artifact", +] diff --git a/src/plaid/viewer/services/paraview_artifact_service.py b/src/plaid/viewer/services/paraview_artifact_service.py new file mode 100644 index 00000000..942d4aba --- /dev/null +++ b/src/plaid/viewer/services/paraview_artifact_service.py @@ -0,0 +1,278 @@ +"""Produce ParaView-readable artifacts from PLAID samples. + +This module is the one place in PLAID that writes CGNS files on disk. 
It +delegates the actual CGNS export to PLAID (``Sample.save_to_dir`` writes one +CGNS per timestep under ``meshes/``), then adds: + +* A ``.cgns.series`` sidecar JSON file that ParaView's ``vtkCGNSReader`` / + ``vtkCGNSFileSeriesReader`` understands for multi-timestep samples. +* A deterministic artifact id derived from a SHA256 cache key so the same + inputs always resolve to the same folder. +* An optional ``scene.pvsm`` placeholder for future preset work. +* A ``metadata.json`` describing the artifact. +""" + +from __future__ import annotations + +import hashlib +import json +import logging +import shutil +from dataclasses import dataclass +from pathlib import Path + +from plaid.viewer.models import ParaviewArtifact, SampleRef +from plaid.viewer.services.plaid_dataset_service import PlaidDatasetService + +logger = logging.getLogger(__name__) + +EXPORT_VERSION = "1" +ARTIFACT_TYPE = "raw" + + +@dataclass(frozen=True) +class _ArtifactLayout: + """Internal paths for a single artifact folder.""" + + root: Path + meshes_dir: Path + series_path: Path + single_cgns_path: Path + metadata_path: Path + state_path: Path + + +def _plaid_version() -> str: + try: + from importlib.metadata import PackageNotFoundError, version + + return version("pyplaid") + except PackageNotFoundError: # pragma: no cover - defensive + return "unknown" + + +def _build_cache_key( + ref: SampleRef, *, export_version: str, extra: dict[str, str] | None = None +) -> str: + """Return a deterministic SHA256 cache key for a sample export.""" + payload = { + "backend_id": ref.backend_id, + "dataset_id": ref.dataset_id, + "split": ref.split, + "sample_id": ref.sample_id, + "export_mode": "default", + "artifact_type": ARTIFACT_TYPE, + "plaid_version": _plaid_version(), + "export_version": export_version, + } + if extra: + payload["extra"] = dict(sorted(extra.items())) + digest = hashlib.sha256( + json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8") + ).hexdigest() + return digest + 
+ +def _artifact_layout( + cache_root: Path, ref: SampleRef, cache_key: str +) -> _ArtifactLayout: + split = ref.split if ref.split is not None else "_default" + root = ( + cache_root + / "datasets" + / ref.dataset_id + / split + / ref.sample_id + / cache_key[:16] + ) + return _ArtifactLayout( + root=root, + meshes_dir=root / "meshes", + series_path=root / "meshes.cgns.series", + single_cgns_path=root / "mesh.cgns", + metadata_path=root / "metadata.json", + state_path=root / "scene.pvsm", + ) + + +def _write_series_sidecar( + series_path: Path, cgns_files: list[tuple[Path, float]] +) -> None: + """Write a ParaView ``.cgns.series`` sidecar for the given file list. + + Each entry's ``name`` is stored as a POSIX-style path relative to the + sidecar file so ``vtkCGNSFileSeriesReader`` can resolve it consistently + across platforms. Notably, time-series CGNS files live in the + ``meshes/`` subdirectory, so we keep that prefix instead of only the + file name. + """ + payload = { + "file-series-version": "1.0", + "files": [ + {"name": Path(path).as_posix(), "time": time} for path, time in cgns_files + ], + } + series_path.write_text(json.dumps(payload, indent=2)) + + +def _collect_time_values(sample) -> list[float]: + data = getattr(sample.features, "data", None) + if not data: + return [] + return sorted(float(t) for t in data.keys()) + + +class ParaviewArtifactService: + """Create and look up ParaView-readable artifacts in a cache directory. + + Args: + dataset_service: Used to load :class:`plaid.Sample` instances. + cache_root: Root of the artifact cache. Usually owned by a + :class:`plaid.viewer.cache.CacheRoot` instance. + export_version: Opaque string included in the cache key. Bump this + whenever the export logic changes in a backwards-incompatible way. + extra_cache_key_fields: Extra fields to mix into the cache key (for + example to invalidate artifacts when a preset template changes). 
+ """ + + def __init__( + self, + dataset_service: PlaidDatasetService, + cache_root: Path, + *, + export_version: str = EXPORT_VERSION, + extra_cache_key_fields: dict[str, str] | None = None, + ) -> None: + self._dataset_service = dataset_service + self._cache_root = Path(cache_root) + self._cache_root.mkdir(parents=True, exist_ok=True) + self._export_version = export_version + self._extra = dict(extra_cache_key_fields or {}) + self._by_id: dict[str, ParaviewArtifact] = {} + + # ------------------------------------------------------------ Public API + + def ensure_artifact( + self, ref: SampleRef, *, force: bool = False + ) -> ParaviewArtifact: + """Return a :class:`ParaviewArtifact` for ``ref``, creating it if needed.""" + cache_key = _build_cache_key( + ref, export_version=self._export_version, extra=self._extra + ) + layout = _artifact_layout(self._cache_root, ref, cache_key) + + if force and layout.root.exists(): + shutil.rmtree(layout.root) + + if layout.metadata_path.is_file() and not force: + artifact = self._load_existing(layout, cache_key) + self._by_id[artifact.artifact_id] = artifact + return artifact + + layout.root.mkdir(parents=True, exist_ok=True) + artifact = self._create(ref, layout, cache_key) + self._by_id[artifact.artifact_id] = artifact + return artifact + + def get(self, artifact_id: str) -> ParaviewArtifact: + """Return a previously-created artifact by id. + + Raises: + KeyError: If no artifact with this id has been created. 
+ """ + if artifact_id not in self._by_id: + raise KeyError(f"Unknown artifact id: {artifact_id}") + return self._by_id[artifact_id] + + # -------------------------------------------------------------- Internals + + def _create( + self, + ref: SampleRef, + layout: _ArtifactLayout, + cache_key: str, + ) -> ParaviewArtifact: + sample = self._dataset_service.load_sample(ref) + times = _collect_time_values(sample) + + layout.meshes_dir.mkdir(exist_ok=True) + # PLAID writes one CGNS per timestep as ``meshes/mesh_{i:09d}.cgns``. + sample.save_to_dir(layout.root, overwrite=True) + + cgns_files = sorted(layout.meshes_dir.glob("mesh_*.cgns")) + if not cgns_files: + raise RuntimeError( + f"PLAID produced no CGNS files for sample {ref.encode()}" + ) + + is_time_series = len(cgns_files) > 1 or len(times) > 1 + if is_time_series: + pairs = [ + (layout.meshes_dir.relative_to(layout.root) / f.name, t) + for f, t in zip( + cgns_files, times or range(len(cgns_files)), strict=False + ) + ] + # Reformat to full-path-relative-to-series-file entries. + _write_series_sidecar( + layout.series_path, + [(Path("meshes") / pair[0].name, float(pair[1])) for pair in pairs], + ) + cgns_path = layout.series_path + else: + # Move the single CGNS file up one level for convenience. 
+ cgns_files[0].replace(layout.single_cgns_path) + cgns_path = layout.single_cgns_path + + metadata = { + "artifact_type": ARTIFACT_TYPE, + "cache_key": cache_key, + "export_version": self._export_version, + "plaid_version": _plaid_version(), + "sample_ref": { + "backend_id": ref.backend_id, + "dataset_id": ref.dataset_id, + "split": ref.split, + "sample_id": ref.sample_id, + }, + "cgns_path": str(cgns_path.relative_to(layout.root)), + "is_time_series": is_time_series, + "n_files": len(cgns_files), + "time_values": list(times), + } + layout.metadata_path.write_text(json.dumps(metadata, indent=2)) + + return ParaviewArtifact( + artifact_id=cache_key[:16], + cgns_path=cgns_path, + state_path=None, + metadata_path=layout.metadata_path, + cache_key=cache_key, + created=True, + ) + + @staticmethod + def _load_existing(layout: _ArtifactLayout, cache_key: str) -> ParaviewArtifact: + metadata = json.loads(layout.metadata_path.read_text()) + cgns_path = layout.root / metadata["cgns_path"] + state_path = layout.state_path if layout.state_path.is_file() else None + return ParaviewArtifact( + artifact_id=cache_key[:16], + cgns_path=cgns_path, + state_path=state_path, + metadata_path=layout.metadata_path, + cache_key=cache_key, + created=False, + ) + + +def ensure_paraview_artifact( + sample_ref: SampleRef, + *, + cache_dir: Path, + dataset_service: PlaidDatasetService, + force: bool = False, +) -> ParaviewArtifact: + """Functional wrapper around :meth:`ParaviewArtifactService.ensure_artifact`.""" + service = ParaviewArtifactService(dataset_service, cache_dir) + return service.ensure_artifact(sample_ref, force=force) diff --git a/src/plaid/viewer/services/plaid_dataset_service.py b/src/plaid/viewer/services/plaid_dataset_service.py new file mode 100644 index 00000000..db3e97f5 --- /dev/null +++ b/src/plaid/viewer/services/plaid_dataset_service.py @@ -0,0 +1,1157 @@ +"""Dataset discovery and sample introspection for the PLAID viewer. 
+ +This service owns all PLAID-facing logic used by the viewer: + +- Discover datasets under a configured root directory. +- Load a split-wise ``(dataset_dict, converter_dict)`` pair through + :func:`plaid.storage.init_from_disk` and cache it for subsequent calls. +- Materialize PLAID :class:`plaid.Sample` instances via + ``converter.to_plaid(dataset, index)``, regardless of the underlying + backend (``hf_datasets``, ``cgns``, ``zarr`` ...). +- Summarize sample contents (bases, zones, fields, times, scalars). +- Report basic validation status via :meth:`Sample.check_completeness`. +""" + +from __future__ import annotations + +import json +import logging +from dataclasses import dataclass, field +from functools import lru_cache +from pathlib import Path +from typing import Any, Iterator + +from plaid.viewer.config import ViewerConfig +from plaid.viewer.models import ( + DatasetDetail, + DatasetInfo, + SampleRef, + SampleRefDTO, + SampleSummary, + ValidationResult, +) + +logger = logging.getLogger(__name__) + + +# Sentinel ``sample_id`` used for streaming datasets, where the only +# addressable sample is "the one currently produced by the iterator". +STREAM_CURSOR_ID = "cursor" + + +@dataclass +class _StreamCursor: + """Forward-only cursor over a streaming (``IterableDataset``) split. + + Streaming datasets returned by + :func:`plaid.storage.init_streaming_from_hub` do not support + indexing or ``len``. This cursor consumes the underlying iterable + one sample at a time and caches the most recently produced raw + record so repeated ``load_sample`` calls (e.g. when the UI loads + summary then full sample) do not advance the stream. + """ + + iterator: Iterator[Any] | None = None + position: int = -1 # -1 means "no sample fetched yet". 
+ current_record: Any | None = None + exhausted: bool = False + extras: dict = field(default_factory=dict) + + +def _safe_list_dir(path: Path) -> list[Path]: + if not path.is_dir(): + return [] + return sorted(p for p in path.iterdir()) + + +def _array_preview(value, *, max_items: int = 6) -> str | None: + """Return a short string preview of a numpy-like array value.""" + if value is None: + return None + try: + import numpy as np # noqa: PLC0415 + except ImportError: # pragma: no cover - numpy is a transitive dep + return None + try: + arr = np.asarray(value) + except Exception: # noqa: BLE001 + return None + if arr.size == 0: + return "[]" + flat = arr.ravel() + if flat.size <= max_items: + return np.array2string(arr, separator=", ", threshold=max_items + 1) + head = np.array2string(flat[:max_items], separator=", ") + return f"{head[:-1]}, ...] (total {flat.size} values)" + + +def _collect_data_arrays(cgns_node) -> list[dict[str, object]]: + """Recursively collect ``DataArray_t`` descriptors under ``cgns_node``. + + Each entry contains the array name, its shape as a list, dtype as a + string, and a short string preview of the values. + """ + try: + from CGNS.PAT import cgnskeywords as CK # noqa: PLC0415 + except ImportError: # pragma: no cover + return [] + + entries: list[dict[str, object]] = [] + + def _walk(node) -> None: + name, value, children, label = node + if label == CK.DataArray_ts: + shape = list(getattr(value, "shape", ())) if value is not None else [] + dtype = str(getattr(value, "dtype", "")) + entries.append( + { + "name": name, + "shape": shape, + "dtype": dtype, + "preview": _array_preview(value), + } + ) + return + for child in children or []: + _walk(child) + + for child in cgns_node[2] or []: + _walk(child) + return entries + + +class PlaidDatasetService: + """High-level access to PLAID datasets stored under a root directory. 
+ + A dataset is a subdirectory of ``config.datasets_root`` that contains a + ``data/`` directory readable by :func:`plaid.storage.init_from_disk`. + The function returns a ``dataset_dict`` and a ``converter_dict`` keyed + by split name; the viewer iterates splits and addresses samples by + integer index in ``range(len(dataset_dict[split]))``. + """ + + def __init__(self, config: ViewerConfig) -> None: + self._config = config + # Datasets root is kept on the service (not on the frozen config) + # so it can be changed at runtime through ``set_datasets_root``. + # ``None`` means no root has been selected yet: discovery methods + # return empty lists and the UI is expected to prompt the user. + self._datasets_root: Path | None = ( + Path(config.datasets_root) if config.datasets_root is not None else None + ) + # Sandbox for interactive root selection. Defaults to the user's + # home directory when no explicit ``browse_roots`` is configured. + # The configured ``datasets_root`` is always implicitly allowed so + # ``list_subdirs`` can start from there. + browse_roots: list[Path] = [Path(p).expanduser() for p in config.browse_roots] + if not browse_roots: + browse_roots = [Path.home()] + if self._datasets_root is not None: + # Make sure the startup root is always reachable even if + # ``browse_roots`` is more restrictive. + browse_roots.append(self._datasets_root) + self._browse_roots: tuple[Path, ...] = tuple( + dict.fromkeys(p.resolve() for p in browse_roots) + ) + # Cache of (dataset_dict, converter_dict) keyed by dataset_id to + # avoid re-parsing large arrow/zarr datasets on every call. + self._store_cache: dict[str, tuple[dict, dict]] = {} + # Registered Hugging Face Hub repositories that should be exposed + # as datasets through :func:`plaid.storage.init_streaming_from_hub`. + # The ``dataset_id`` used throughout the viewer is the raw + # ``repo_id`` string (e.g. 
``"PLAID-lib/VKI-LS59"``), which never + # collides with a local directory name (it always contains a + # forward slash). + self._hub_repos: list[str] = [] + # Per-(dataset_id, split) streaming cursors. Streaming datasets + # are ``datasets.IterableDataset`` instances without ``__len__`` + # so we cannot index them. We maintain a forward-only cursor + # instead: ``_cursors[(dataset_id, split)] = (iterator, position, + # cached_sample)``. ``Next`` consumes the iterator and advances + # ``position``; ``Reset`` discards the iterator so a fresh one is + # built on the next access. + self._cursors: dict[tuple[str, str], _StreamCursor] = {} + # User-selected feature filter per dataset. ``None`` means "no + # filter" (load every feature, current default behaviour). An + # empty list means "all features unselected". + self._features: dict[str, list[str] | None] = {} + # Memoised ``(constant_feature_keys, variable_feature_keys)`` per + # dataset, retrieved through ``load_metadata_from_disk`` or + # ``load_metadata_from_hub``. Used to (a) populate the UI + # checkbox list through :meth:`list_available_features` and (b) + # expand user-selected feature paths with + # :func:`plaid.utils.cgns_helper.update_features_for_CGNS_compatibility` + # before handing them to ``init_streaming_from_hub`` (which, unlike + # :meth:`Converter.to_plaid`, does not expand features by itself). + self._feature_metadata: dict[str, tuple[list[str], list[str]]] = {} + # Memoised per-split feature catalogue for a dataset. Unlike + # ``_feature_metadata`` (which aggregates constants across + # splits so the UI can offer a union of fields), this mapping + # preserves the split boundary so :meth:`load_sample` can + # filter the user's selection down to what a specific split + # actually carries. ``PlaidSampleConverter.to_plaid`` otherwise + # raises ``KeyError('Missing features in dataset/converter: + # ...')`` whenever the request names a path that the split in + # hand does not know about. 
+ self._split_feature_metadata: dict[str, dict[str, set[str]]] = {} + + # ----------------------------------------------------------- Discovery + + @property + def datasets_root(self) -> Path | None: + """Return the currently active datasets root, or ``None``.""" + return self._datasets_root + + @property + def browse_roots(self) -> tuple[Path, ...]: + """Return the sandbox directories for interactive path selection.""" + return self._browse_roots + + def set_datasets_root(self, path: Path | str | None) -> Path | None: + """Change the active datasets root at runtime. + + The new path (when not ``None``) must exist, be a directory, and be + located under one of ``browse_roots``. All per-dataset caches are + invalidated so the next discovery call reflects the new root. + + Args: + path: The new datasets root. ``None`` clears the current root. + + Returns: + The resolved new datasets root, or ``None`` if cleared. + + Raises: + ValueError: If the path does not exist, is not a directory, or + escapes ``browse_roots``. + """ + # Deferred import so the service module stays importable without + # write access to the user config directory (e.g. in read-only + # CI sandboxes that don't touch ``set_datasets_root`` anyway). + from plaid.viewer.preferences import ( # noqa: PLC0415 + set_last_datasets_root, + ) + + if path is None: + self._datasets_root = None + self._store_cache.clear() + set_last_datasets_root(None) + return None + resolved = Path(path).expanduser().resolve() + if not resolved.is_dir(): + raise ValueError(f"Not a directory: {resolved}") + self._ensure_within_browse_roots(resolved) + self._datasets_root = resolved + self._store_cache.clear() + # Persist the new root so the next launch of the viewer picks it + # up automatically when ``--datasets-root`` is not provided. 
+ set_last_datasets_root(resolved) + return resolved + + def list_subdirs(self, path: Path | str | None = None) -> dict[str, object]: + """Return immediate subdirectories of ``path`` for the file browser. + + Each entry is tagged with ``is_plaid_candidate`` (``True`` when it + looks like a PLAID dataset, i.e. contains a ``data/`` subdirectory) + so the UI can highlight it. The returned ``path`` is always an + absolute resolved path inside ``browse_roots``. + + Args: + path: Directory to list. When ``None`` the first browse root is + used (typically ``$HOME``). + + Returns: + A dict ``{"path": str, "parent": str | None, + "entries": [{"name": str, "path": str, + "is_plaid_candidate": bool}, ...]}``. + + Raises: + ValueError: If ``path`` is not a directory or escapes the + sandbox. + """ + if path is None: + target = self._browse_roots[0] + else: + target = Path(path).expanduser().resolve() + if not target.is_dir(): + raise ValueError(f"Not a directory: {target}") + self._ensure_within_browse_roots(target) + entries: list[dict[str, object]] = [] + for entry in sorted(target.iterdir()): + if not entry.is_dir(): + continue + if entry.name.startswith("."): + continue + entries.append( + { + "name": entry.name, + "path": str(entry), + "is_plaid_candidate": (entry / "data").is_dir(), + } + ) + # Rank PLAID candidates first, then alphabetical (stable). 
+ entries.sort(key=lambda e: (not e["is_plaid_candidate"], e["name"].lower())) + parent: str | None = None + if any( + target != root and root in target.parents for root in self._browse_roots + ): + parent = str(target.parent) + elif target.parent != target and any( + target.parent == root or root in target.parent.parents + for root in self._browse_roots + ): + parent = str(target.parent) + return { + "path": str(target), + "parent": parent, + "entries": entries, + } + + def _ensure_within_browse_roots(self, path: Path) -> None: + for root in self._browse_roots: + try: + path.relative_to(root) + except ValueError: + continue + return + roots = ", ".join(str(r) for r in self._browse_roots) + raise ValueError(f"Path {path} is outside the allowed browse roots ({roots}).") + + def list_datasets(self) -> list[DatasetInfo]: + """Return a summary of every dataset available to the viewer. + + Local datasets (subdirectories of ``datasets_root``) and registered + Hugging Face Hub repositories (added via :meth:`add_hub_dataset`) + are both included, in that order. 
+ """ + infos: list[DatasetInfo] = [] + root = self._datasets_root + if root is not None: + for entry in _safe_list_dir(root): + if not entry.is_dir(): + continue + if not (entry / "data").is_dir(): + continue + infos.append( + DatasetInfo( + dataset_id=entry.name, + backend_id="disk", + path=str(entry), + has_infos=(entry / "infos.yaml").exists() + or (entry / "infos.json").exists(), + has_problem_definitions=( + entry / "problem_definitions" + ).is_dir(), + ) + ) + for repo_id in self._hub_repos: + infos.append( + DatasetInfo( + dataset_id=repo_id, + backend_id="hub", + path=f"hf://{repo_id}", + has_infos=False, + has_problem_definitions=False, + ) + ) + + return infos + + @property + def hub_repos(self) -> tuple[str, ...]: + """Return the list of registered Hugging Face Hub repositories.""" + return tuple(self._hub_repos) + + def add_hub_dataset(self, repo_id: str) -> str: + """Register a Hugging Face Hub dataset to stream from. + + The dataset is exposed through :func:`plaid.storage.init_streaming_from_hub` + and appears in :meth:`list_datasets` with ``dataset_id == repo_id``. + + Args: + repo_id: Hugging Face repository identifier, e.g. + ``"PLAID-lib/VKI-LS59"``. Must contain a ``/`` separator. + + Returns: + The normalised ``repo_id``. + + Raises: + ValueError: If ``repo_id`` is empty or does not look like a + ``namespace/name`` pair. + """ + normalised = (repo_id or "").strip() + if not normalised: + raise ValueError("repo_id must be a non-empty string.") + if "/" not in normalised: + raise ValueError( + f"repo_id {normalised!r} must be of the form 'namespace/name'." 
+ ) + if normalised in self._hub_repos: + return normalised + self._hub_repos.append(normalised) + return normalised + + def remove_hub_dataset(self, repo_id: str) -> None: + """Unregister a previously added Hugging Face Hub dataset.""" + if repo_id in self._hub_repos: + self._hub_repos.remove(repo_id) + self._store_cache.pop(repo_id, None) + self._features.pop(repo_id, None) + self._feature_metadata.pop(repo_id, None) + # Drop any streaming cursors owned by the removed dataset. + self._cursors = { + key: cur for key, cur in self._cursors.items() if key[0] != repo_id + } + + # ------------------------------------------------------- Feature filter + + def _load_feature_metadata(self, dataset_id: str) -> tuple[list[str], list[str]]: + """Return ``(constant_feature_keys, variable_feature_keys)`` for a dataset. + + Uses :func:`plaid.storage.common.reader.load_metadata_from_disk` for + local datasets and :func:`plaid.storage.common.reader.load_metadata_from_hub` + for registered Hugging Face Hub repositories. The result is + memoised on the service instance. + + Constant features are aggregated across splits (constant schemas + in PLAID are split-specific), variable features are global. + """ + if dataset_id in self._feature_metadata: + return self._feature_metadata[dataset_id] + # Deferred imports so the module stays importable without PLAID. 
+ from plaid.storage.common.reader import ( # noqa: PLC0415 + load_metadata_from_disk, + load_metadata_from_hub, + ) + + if self._is_hub_dataset(dataset_id): + _flat_cst, variable_schema, constant_schema, _cgns_types = ( + load_metadata_from_hub(dataset_id) + ) + else: + base = self._dataset_dir(dataset_id) + _flat_cst, variable_schema, constant_schema, _cgns_types = ( + load_metadata_from_disk(str(base)) + ) + constant_keys: set[str] = set() + for split_const in (constant_schema or {}).values(): + constant_keys.update(split_const.keys()) + variable_keys = list((variable_schema or {}).keys()) + metadata = (sorted(constant_keys), sorted(variable_keys)) + self._feature_metadata[dataset_id] = metadata + # Build the per-split catalogue in one pass: variable features + # are global so every split shares them, but constant features + # are keyed by split. + per_split: dict[str, set[str]] = { + split: set(variable_keys) | set(split_const.keys()) + for split, split_const in (constant_schema or {}).items() + } + self._split_feature_metadata[dataset_id] = per_split + return metadata + + def _split_feature_keys(self, dataset_id: str, split_key: str) -> set[str]: + """Return the feature catalogue of a single split. + + Ensures the per-split mapping is populated (it is filled as a + side effect of :meth:`_load_feature_metadata`). Falls back to + the dataset-wide union when the split name is not recorded + (typical for streaming datasets that expose a single + ``__default__`` split). + """ + if dataset_id not in self._split_feature_metadata: + self._load_feature_metadata(dataset_id) + per_split = self._split_feature_metadata.get(dataset_id, {}) + if split_key in per_split: + return per_split[split_key] + constant_keys, variable_keys = self._load_feature_metadata(dataset_id) + return set(constant_keys) | set(variable_keys) + + def list_available_features(self, dataset_id: str) -> list[str]: + """Return the feature paths offered to the user for filtering. 
+ + The viewer only exposes paths that are CGNS *fields* (i.e. what + :func:`plaid.containers.utils.get_feature_details_from_path` + classifies as ``type == "field"``). Globals, coordinates, + element connectivities, boundary conditions, etc. are hidden + because they are not what the user means when they want to + "filter the displayed features" in a 3D viewer. + + Paths ending in ``_times`` (time-series bookkeeping duplicates + of a field, e.g. ``Base_.../FlowSolution/Pressure_times``) are + also filtered out: they are artefacts of the temporal storage + layout, not distinct physical quantities the user would want to + toggle. + """ + # Deferred import - the helper lives in PLAID's containers module. + from plaid.containers.utils import ( # noqa: PLC0415 + get_feature_details_from_path, + ) + + constant_keys, variable_keys = self._load_feature_metadata(dataset_id) + candidates = set(constant_keys) | set(variable_keys) + fields: list[str] = [] + for path in candidates: + if path.endswith("_times"): + continue + try: + details = get_feature_details_from_path(path) + except Exception: # noqa: BLE001 - malformed path, skip + continue + # Only expose "genuine" field paths - i.e. those that carry + # a ``name`` entry in ``details``. Some variants returned by + # :func:`get_feature_details_from_path` are typed as + # ``"field"`` but describe a container (e.g. a + # ``FlowSolution_t`` node) rather than a specific data array, + # and therefore have no ``name``. Filtering on ``name`` + # removes those from the UI while keeping every real scalar + # / vector field the user can actually plot. + # ``GridLocation`` nodes are CGNS metadata (they describe + # *where* a field lives, e.g. ``Vertex`` vs ``CellCenter``) + # rather than a plottable field, so they must not appear in + # the viewer's feature selection. 
+ name = details.get("name") + if details.get("type") == "field" and name and name != "GridLocation": + fields.append(path) + return sorted(fields) + + def get_features(self, dataset_id: str) -> list[str] | None: + """Return the active feature filter for ``dataset_id``. + + ``None`` means "no filter": every feature is loaded (default + behaviour). An explicit empty list means "no feature selected". + """ + return self._features.get(dataset_id) + + def set_features( + self, dataset_id: str, features: list[str] | None + ) -> list[str] | None: + """Set (or clear) the active feature filter for ``dataset_id``. + + Only the *user-visible* field paths (those returned by + :meth:`list_available_features`) are stored. Geometric supports + (coordinates, element connectivities, boundary conditions, + ``GridLocation`` metadata, ``_times`` bookkeeping paths, ...) + required to render the selected fields are handled transparently + by :meth:`Converter.to_plaid`, which runs + :func:`~plaid.utils.cgns_helper.update_features_for_CGNS_compatibility` + internally against its *own* per-split + ``constant_features`` / ``variable_features`` catalogues. We + therefore never pre-expand the selection here - doing so would + use the dataset-wide (union) catalogue and, on splits whose + data does not contain the selected fields, would hand PLAID a + list of coordinates *without the fields that justify them* and + trigger ``Missing features in dataset/converter`` in the CGNS + expander. + + For disk-backed datasets the filter is applied on every call to + :meth:`Converter.to_plaid` during :meth:`load_sample`. For + streaming (Hugging Face Hub) datasets it is injected into + :func:`plaid.storage.init_streaming_from_hub` *before* any + sample is consumed; we therefore invalidate the cached + ``(datasetdict, converterdict)`` and any open streaming cursors + so the next :meth:`_open` call rebuilds them with the new + feature list. + + Args: + dataset_id: Target dataset identifier. 
+ features: Field paths to keep (subset of + :meth:`list_available_features`), or ``None`` to clear + the filter and load every feature. + + Returns: + The normalised, deduplicated feature list (``None`` when no + filter is active). + + Raises: + ValueError: If ``features`` contains paths not declared in + the dataset metadata. + """ + if features is None: + normalised: list[str] | None = None + else: + normalised = sorted(dict.fromkeys(str(f) for f in features)) + all_keys = set(self._load_feature_metadata(dataset_id)[0]) | set( + self._load_feature_metadata(dataset_id)[1] + ) + unknown = [f for f in normalised if f not in all_keys] + if unknown: + raise ValueError( + f"Unknown features for dataset {dataset_id!r}: {unknown}" + ) + self._features[dataset_id] = normalised + # Invalidate store cache so streaming datasets rebuild their + # IterableDataset with the new feature list. For disk datasets + # this is not strictly required (features are applied on each + # ``to_plaid`` call) but keeping a single invalidation policy is + # simpler and does not hurt performance measurably. + self._store_cache.pop(dataset_id, None) + self._cursors = { + key: cur for key, cur in self._cursors.items() if key[0] != dataset_id + } + return normalised + + def is_streaming(self, dataset_id: str) -> bool: + """Return ``True`` when ``dataset_id`` is a Hugging Face Hub stream. + + Streaming datasets have no ``__len__`` on their splits and must be + navigated forward-only through :meth:`advance_stream_cursor` / + :meth:`reset_stream_cursor` rather than indexed. 
+ """ + if not self._is_hub_dataset(dataset_id): + return False + try: + datasetdict, _ = self._open(dataset_id) + except Exception: # noqa: BLE001 + return True + return not all(hasattr(ds, "__len__") for ds in datasetdict.values()) + + def get_dataset(self, dataset_id: str) -> DatasetDetail: + """Return detailed information about a single dataset.""" + if self._is_hub_dataset(dataset_id): + splits = self._splits_with_counts(dataset_id) + return DatasetDetail( + dataset_id=dataset_id, + backend_id="hub", + path=f"hf://{dataset_id}", + has_infos=False, + has_problem_definitions=False, + splits=splits, + infos=None, + problem_definitions=[], + ) + base = self._dataset_dir(dataset_id) + splits = self._splits_with_counts(dataset_id) + pb_defs_dir = base / "problem_definitions" + pb_defs = ( + [ + p.stem + for p in _safe_list_dir(pb_defs_dir) + if p.suffix in {".yaml", ".yml"} + ] + if pb_defs_dir.is_dir() + else [] + ) + return DatasetDetail( + dataset_id=dataset_id, + backend_id="disk", + path=str(base), + has_infos=(base / "infos.yaml").exists() or (base / "infos.json").exists(), + has_problem_definitions=bool(pb_defs), + splits=splits, + infos=self._load_infos(base), + problem_definitions=pb_defs, + ) + + def list_samples(self, dataset_id: str) -> list[SampleRefDTO]: + """Return every sample reference available in a dataset. + + For disk-backed datasets, sample ids are the zero-based integer + indices used with ``converter.to_plaid(dataset, index)``. For + streaming datasets (Hugging Face Hub), each split contributes a + single reference whose ``sample_id`` is the + :data:`STREAM_CURSOR_ID` sentinel; the actual sample is obtained + by advancing the per-split cursor with + :meth:`advance_stream_cursor`. 
+ """ + datasetdict, _ = self._open(dataset_id) + streaming = self.is_streaming(dataset_id) + backend_id = "hub" if self._is_hub_dataset(dataset_id) else "disk" + + refs: list[SampleRef] = [] + for split, ds in datasetdict.items(): + split_key = None if split == "__default__" else split + if streaming: + refs.append( + SampleRef( + backend_id=backend_id, + dataset_id=dataset_id, + split=split_key, + sample_id=STREAM_CURSOR_ID, + ) + ) + continue + for index in range(len(ds)): + refs.append( + SampleRef( + backend_id=backend_id, + dataset_id=dataset_id, + split=split_key, + sample_id=str(index), + ) + ) + return [SampleRefDTO.from_ref(ref) for ref in refs] + + # --------------------------------------------------- Streaming cursors + + def stream_cursor_position(self, dataset_id: str, split: str | None) -> int: + """Return the current forward position of a streaming cursor. + + Returns ``-1`` before the first call to :meth:`advance_stream_cursor`. + """ + cursor = self._cursors.get(self._cursor_key(dataset_id, split)) + return cursor.position if cursor is not None else -1 + + def advance_stream_cursor(self, dataset_id: str, split: str | None) -> SampleRef: + """Consume the next record from the stream and return its ref. + + The returned :class:`SampleRef` always carries the + :data:`STREAM_CURSOR_ID` sentinel in its ``sample_id``; the + underlying record is cached on the service so a subsequent + :meth:`load_sample` call returns the freshly fetched sample. + + Raises: + StopIteration: If the underlying stream is exhausted. 
+ """ + key = self._cursor_key(dataset_id, split) + cursor = self._cursors.get(key) + if cursor is None or cursor.iterator is None: + cursor = self._build_cursor(dataset_id, split) + self._cursors[key] = cursor + try: + record = next(cursor.iterator) + except StopIteration: + cursor.exhausted = True + raise + cursor.current_record = record + cursor.position += 1 + return SampleRef( + backend_id="hub", + dataset_id=dataset_id, + split=split, + sample_id=STREAM_CURSOR_ID, + ) + + def reset_stream_cursor(self, dataset_id: str, split: str | None) -> None: + """Rebuild a fresh iterator for ``(dataset_id, split)``. + + The cached record is discarded and the position reset to ``-1`` + so the next :meth:`advance_stream_cursor` call yields the first + sample again. + """ + key = self._cursor_key(dataset_id, split) + self._cursors[key] = self._build_cursor(dataset_id, split) + + @staticmethod + def _cursor_key(dataset_id: str, split: str | None) -> tuple[str, str]: + return dataset_id, split if split is not None else "__default__" + + def _build_cursor(self, dataset_id: str, split: str | None) -> _StreamCursor: + datasetdict, _ = self._open(dataset_id) + split_key = split if split is not None else "__default__" + if split_key not in datasetdict and len(datasetdict) == 1: + split_key = next(iter(datasetdict)) + if split_key not in datasetdict: + raise KeyError( + f"Split {split!r} not found in dataset {dataset_id!r}; " + f"available splits: {sorted(datasetdict.keys())}" + ) + return _StreamCursor(iterator=iter(datasetdict[split_key])) + + # -------------------------------------------------------------- Samples + + def load_sample(self, ref: SampleRef): + """Return a PLAID :class:`plaid.Sample` for the given reference. + + Uses ``converter.to_plaid(dataset, index)`` to rebuild the sample + from whatever backend store (hf_datasets, cgns, zarr) is in use. 
+ """ + datasetdict, converterdict = self._open(ref.dataset_id) + split_key = ref.split if ref.split is not None else "__default__" + if split_key not in datasetdict: + # Fallback: some converters return a single unnamed split. + if len(datasetdict) == 1: + split_key = next(iter(datasetdict)) + else: + raise KeyError( + f"Split {ref.split!r} not found in dataset {ref.dataset_id!r}; " + f"available splits: {sorted(datasetdict.keys())}" + ) + dataset = datasetdict[split_key] + converter = converterdict[split_key] + # Streaming datasets expose a forward-only cursor rather than + # random access. The viewer drives the cursor explicitly via + # ``advance_stream_cursor`` and then calls ``load_sample`` with + # ``sample_id == STREAM_CURSOR_ID`` to materialise the PLAID + # sample from the most recently consumed raw record. + if ref.sample_id == STREAM_CURSOR_ID: + cursor = self._cursors.get(self._cursor_key(ref.dataset_id, ref.split)) + if cursor is None or cursor.current_record is None: + # Auto-advance once so a fresh selection behaves like + # "show me the first sample". + self.advance_stream_cursor(ref.dataset_id, ref.split) + cursor = self._cursors[self._cursor_key(ref.dataset_id, ref.split)] + # Streaming converters use ``sample_to_plaid`` (single record) + # rather than ``to_plaid(dataset, index)`` (random access). + return converter.sample_to_plaid(cursor.current_record) + + try: + index = int(ref.sample_id) + except ValueError as exc: + raise ValueError( + f"Invalid sample id {ref.sample_id!r}; expected an integer index." + ) from exc + features = self._features.get(ref.dataset_id) + if features is None: + # No filter active: load every feature. + return converter.to_plaid(dataset, index) + # ``features`` is a (possibly empty) list: the filter IS active. + # We must not fall through to the unfiltered branch, otherwise + # an empty selection would load every feature instead of none. 
+ # + # Feature schemas are split-specific in PLAID: the UI dropdown + # aggregates every split's catalogue, so a user-selected field + # may be absent from the current split. ``Converter.to_plaid`` + # runs :func:`~plaid.utils.cgns_helper.update_features_for_CGNS_compatibility` + # internally against its own per-split ``constant_features`` / + # ``variable_features`` and raises + # ``KeyError('Missing features in dataset/converter: ...')`` + # for any unknown path. We therefore intersect the user's + # field selection with the split's catalogue first. Geometric + # supports required to render the kept fields are added by the + # converter itself on the ``to_plaid`` call. + split_constant = set(getattr(converter, "constant_features", set())) + split_variable = set(getattr(converter, "variable_features", set())) + split_keys = split_constant | split_variable + selected = [f for f in features if f in split_keys] + # The split's feature catalogue contains more than the fields + # the user can toggle in the UI: it also carries CGNS + # bookkeeping paths (coordinates, element connectivities, + # ``GridLocation`` metadata, ``_times`` series, ...) and the + # paths backing the sample's globals / scalars. Those entries + # must always be loaded, otherwise the rendered sample would + # lose its mesh and the "Globals" panel would be empty. + # + # We therefore compute the set of "user-controllable" field + # paths (the same set the UI exposes through + # :meth:`list_available_features`) and re-inject *only* the + # remaining split paths. Filtering by + # ``set(user_visible) - set(selected)`` is not enough: we have + # to build the complement inside the current split so that + # constant fields the user deselected are genuinely dropped. + user_visible = set(self.list_available_features(ref.dataset_id)) + # ``_times`` bookkeeping paths are hidden from the UI but + # semantically follow their companion field: toggling ``sdf`` on + # or off must also toggle ``sdf_times``. 
Treat them as linked + # to their base path so deselecting a field genuinely drops + # both entries (and re-selecting a field adds both back). + user_visible_linked = user_visible | {f"{path}_times" for path in user_visible} + selected_linked = set(selected) | { + f"{path}_times" for path in selected if f"{path}_times" in split_keys + } + always_keep = split_keys - user_visible_linked + augmented = sorted(selected_linked | always_keep) + if not augmented: + # Split has no bookkeeping paths AND user-selected fields + # were all absent from this split: nothing sensible to + # filter with. Fall back to the unfiltered load so the user + # still sees *something* (the raw sample). + return converter.to_plaid(dataset, index) + try: + return converter.to_plaid(dataset, index, features=augmented) + except KeyError: + # ``augmented`` can itself contain paths that the CGNS + # expander or the HF bridge reject (bookkeeping entries not + # materialised as columns in the backend store). A + # ``KeyError("Missing features in …")`` from that code path + # should not be user-facing: degrade gracefully to an + # unfiltered load. + return converter.to_plaid(dataset, index) + + def get_sample_summary(self, ref: SampleRef) -> SampleSummary: + """Return a minimal summary of the PLAID sample.""" + sample = self.load_sample(ref) + times = self._time_keys(sample) + bases, zones_by_base, fields_by_base = self._describe_tree(sample, times) + globals_dict = { + name: str(sample.get_scalar(name)) for name in sample.get_scalar_names() + } + return SampleSummary( + ref=SampleRefDTO.from_ref(ref), + n_times=len(times), + time_values=list(times), + bases=bases, + zones_by_base=zones_by_base, + fields_by_base=fields_by_base, + globals=globals_dict, + ) + + def list_time_values(self, ref: SampleRef) -> list[float]: + """Return the sorted list of time values available for a sample. 
+ + Thin wrapper around :meth:`plaid.Sample.features.get_all_time_values` + that always returns a ``list[float]`` (it may be empty for static + samples). + """ + sample = self.load_sample(ref) + try: + times = sample.features.get_all_time_values() + except Exception: # noqa: BLE001 - defensive, PLAID shouldn't raise + return [] + return sorted(float(t) for t in times) + + def describe_globals( + self, ref: SampleRef, *, time: float | None = None + ) -> list[dict[str, object]]: + """Return PLAID global scalars/tensors reported by the sample. + + Uses :meth:`plaid.Sample.get_global_names` to enumerate globals + and :meth:`plaid.Sample.get_global` to fetch each value, so only + the "real" globals exposed by PLAID's API are reported. The CGNS + bookkeeping arrays ``IterationValues`` and ``TimeValues`` (which + describe time steps, not physical scalars) are filtered out. + + Args: + ref: The sample to inspect. + time: Optional time value; when ``None`` the sample's first + available time (or the static value) is used. + + Returns: + A list of ``{"name": str, "shape": list[int], "dtype": str, + "preview": str | None}`` descriptors, one per global. 
+ """ + sample = self.load_sample(ref) + kwargs = {"time": time} if time is not None else {} + try: + names = sample.get_global_names(**kwargs) + except TypeError: + names = sample.get_global_names() + entries: list[dict[str, object]] = [] + for name in names: + if name in {"IterationValues", "TimeValues"}: + continue + try: + value = sample.get_global(name, **kwargs) + except TypeError: + value = sample.get_global(name) + except Exception: # noqa: BLE001 - skip unreadable globals + continue + shape = list(getattr(value, "shape", ())) if value is not None else [] + dtype = str(getattr(value, "dtype", type(value).__name__)) + entries.append( + { + "name": name, + "shape": shape, + "dtype": dtype, + "preview": _array_preview(value), + } + ) + return entries + + def describe_non_visual_bases( + self, ref: SampleRef + ) -> dict[str, list[dict[str, object]]]: + """Return data arrays of CGNS bases that carry no zones. + + Some datasets store auxiliary tensors (constants, global reference + values, look-up tables, ...) inside a CGNS base that has no + ``Zone_t`` children, so VTK cannot render them as geometry. This + method returns, for each zone-less base, a list of descriptors + ``{"name": str, "shape": list[int], "dtype": str, + "preview": str | None}`` suitable for display in the viewer. + + Args: + ref: The sample to inspect. + + Returns: + A mapping from base name to a list of data-array descriptors. + Bases that do contain zones are omitted. 
+ """ + sample = self.load_sample(ref) + times = self._time_keys(sample) + if not times: + return {} + try: + from CGNS.PAT import cgnskeywords as CK # noqa: PLC0415 + from CGNS.PAT import cgnsutils as CU # noqa: PLC0415 + except ImportError: # pragma: no cover - defensive + return {} + tree = sample.features.data[times[0]] + summary: dict[str, list[dict[str, object]]] = {} + for base_node in CU.hasChildType(tree, CK.CGNSBase_ts) or []: + if CU.hasChildType(base_node, CK.Zone_ts): + continue + summary[base_node[0]] = _collect_data_arrays(base_node) + return summary + + def get_sample_validation(self, ref: SampleRef) -> ValidationResult: + """Check basic sample completeness using PLAID's built-in validator.""" + warnings: list[str] = [] + errors: list[str] = [] + try: + sample = self.load_sample(ref) + except Exception as exc: # noqa: BLE001 - surface error to API caller + return ValidationResult( + ref=SampleRefDTO.from_ref(ref), + ok=False, + errors=[f"Failed to load sample: {exc}"], + ) + try: + report = sample.check_completeness() + except Exception as exc: # noqa: BLE001 + return ValidationResult( + ref=SampleRefDTO.from_ref(ref), + ok=False, + errors=[f"Completeness check failed: {exc}"], + ) + ok = isinstance(report, str) and "error" not in report.lower() + if report and not ok: + errors.append(report) + elif report: + warnings.append(report) + return ValidationResult( + ref=SampleRefDTO.from_ref(ref), + ok=ok, + warnings=warnings, + errors=errors, + ) + + # -------------------------------------------------------------- Helpers + + def _dataset_dir(self, dataset_id: str) -> Path: + if self._datasets_root is None: + raise FileNotFoundError( + "No datasets root selected; call set_datasets_root first." 
+ ) + base = self._datasets_root / dataset_id + if not base.is_dir(): + raise FileNotFoundError(f"Dataset not found: {dataset_id}") + return base + + def _is_hub_dataset(self, dataset_id: str) -> bool: + """Return ``True`` when ``dataset_id`` refers to a registered HF repo.""" + return dataset_id in self._hub_repos + + def _open(self, dataset_id: str) -> tuple[dict, dict]: + """Load (and cache) ``(dataset_dict, converter_dict)`` for a dataset. + + Dispatches between :func:`plaid.storage.init_from_disk` for local + datasets and :func:`plaid.storage.init_streaming_from_hub` for + registered Hugging Face Hub repositories. + """ + if dataset_id in self._store_cache: + return self._store_cache[dataset_id] + if self._is_hub_dataset(dataset_id): + # Deferred import so the module can be loaded without PLAID present. + from plaid.storage import init_streaming_from_hub # noqa: PLC0415 + from plaid.utils.cgns_helper import ( # noqa: PLC0415 + update_features_for_CGNS_compatibility, + ) + + features = self._features.get(dataset_id) + # ``features is None`` means "no filter active" - let PLAID + # materialise every feature, as before. An *empty* list is + # a deliberate user choice ("show me only the geometry"): + # we hand PLAID the union of every constant feature path + # (so ``init_streaming_from_hub`` keeps the mesh and zone + # metadata) and nothing else. Passing ``features=[]`` + # directly is not an option because PLAID's ``if features`` + # gate treats empty lists as "unfiltered". 
+ if features is None: + datasetdict, converterdict = init_streaming_from_hub(dataset_id) + else: + constant_keys, variable_keys = self._load_feature_metadata(dataset_id) + base_features = list(features) if features else list(constant_keys) + expanded_features = update_features_for_CGNS_compatibility( + base_features, constant_keys, variable_keys + ) + try: + datasetdict, converterdict = init_streaming_from_hub( + dataset_id, features=expanded_features + ) + except KeyError: + # ``expanded_features`` is derived from the + # dataset-wide metadata union and can therefore name + # paths that are not materialised as columns in a + # given split's HF table. The HF bridge then raises + # ``KeyError("Missing features in hf_dataset: …")``. + # Degrade gracefully to an unfiltered stream so the + # user still sees the geometry instead of a hard + # failure. + datasetdict, converterdict = init_streaming_from_hub(dataset_id) + else: + # Deferred import so the module can be loaded without PLAID present. + from plaid.storage import init_from_disk # noqa: PLC0415 + + base = self._dataset_dir(dataset_id) + datasetdict, converterdict = init_from_disk(str(base)) + # Normalise split-less case to a stable "__default__" key. 
+ if not datasetdict: + raise RuntimeError(f"Dataset {dataset_id!r} is empty.") + self._store_cache[dataset_id] = (datasetdict, converterdict) + return datasetdict, converterdict + + def _splits_with_counts(self, dataset_id: str) -> dict[str, int | None]: + """Return ``{split: len(ds)}``; ``None`` for streaming splits.""" + datasetdict, _ = self._open(dataset_id) + counts: dict[str, int | None] = {} + for split, ds in datasetdict.items(): + try: + counts[split] = len(ds) + except TypeError: + counts[split] = None + return counts + + @staticmethod + def _load_infos(base: Path) -> dict | None: + for candidate in (base / "infos.json", base / "infos.yaml", base / "infos.yml"): + if not candidate.is_file(): + continue + try: + text = candidate.read_text() + except OSError: + return None + if candidate.suffix == ".json": + try: + return json.loads(text) + except json.JSONDecodeError: + return None + try: + import yaml # type: ignore # noqa: PLC0415 + except ImportError: # pragma: no cover - pyyaml is transitive + return None + try: + return yaml.safe_load(text) + except yaml.YAMLError: # type: ignore[attr-defined] + return None + return None + + @staticmethod + def _time_keys(sample) -> list[float]: + data = getattr(sample.features, "data", None) + if not data: + return [] + return sorted(float(t) for t in data.keys()) + + @staticmethod + def _describe_tree(sample, times: list[float]): + """Walk the CGNS tree of the first timestep and return bases, zones, fields.""" + bases: list[str] = [] + zones_by_base: dict[str, list[str]] = {} + fields_by_base: dict[str, list[str]] = {} + if not times: + return bases, zones_by_base, fields_by_base + tree = sample.features.data[times[0]] + # Deferred import - CGNS helpers live inside pyCGNS. 
+ try: + from CGNS.PAT import cgnskeywords as CK # noqa: PLC0415 + from CGNS.PAT import cgnsutils as CU # noqa: PLC0415 + except ImportError: # pragma: no cover - defensive + return bases, zones_by_base, fields_by_base + for base_node in CU.hasChildType(tree, CK.CGNSBase_ts) or []: + base_name = base_node[0] + bases.append(base_name) + zones_by_base[base_name] = [] + field_names: set[str] = set() + for zone_node in CU.hasChildType(base_node, CK.Zone_ts) or []: + zones_by_base[base_name].append(zone_node[0]) + for sol_node in CU.hasChildType(zone_node, CK.FlowSolution_ts) or []: + for da in CU.hasChildType(sol_node, CK.DataArray_ts) or []: + field_names.add(da[0]) + fields_by_base[base_name] = sorted(field_names) + return bases, zones_by_base, fields_by_base + + +@lru_cache(maxsize=8) +def _cached_service(root: str, backend_id: str) -> PlaidDatasetService: + return PlaidDatasetService( + ViewerConfig(datasets_root=Path(root), backend_id=backend_id) + ) diff --git a/src/plaid/viewer/trame_app/__init__.py b/src/plaid/viewer/trame_app/__init__.py new file mode 100644 index 00000000..97c16f8f --- /dev/null +++ b/src/plaid/viewer/trame_app/__init__.py @@ -0,0 +1,5 @@ +"""Trame/ParaView visualization server for the dataset viewer.""" + +from plaid.viewer.trame_app.server import build_server + +__all__ = ["build_server"] diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py new file mode 100644 index 00000000..d2aca65b --- /dev/null +++ b/src/plaid/viewer/trame_app/server.py @@ -0,0 +1,1982 @@ +"""Trame server for the dataset viewer. + +This module builds a self-contained trame application that lets users +browse PLAID datasets and visualize their samples. All UI is exposed as +trame/Vuetify widgets in a side drawer; the 3D view is a VTK *remote* +view (server-side rendering, streamed as images) driven by a lightweight +VTK pipeline (reader -> geometry -> mapper). 
Remote rendering avoids the +rare vtk.js rendering artefacts observed when geometry with several +disjoint 1D connected components (e.g. VKI-LS59 ``Base_1_2`` with two +airfoil profiles) is streamed to the browser. + + + +Architecture: + +- A :class:`PlaidDatasetService` is used to discover datasets and load + samples. +- A :class:`ParaviewArtifactService` converts a sample to a single CGNS + file (or ``.cgns.series`` sidecar for time-dependent samples). +- ``vtkCGNSReader`` (optionally wrapped in ``vtkCGNSFileSeriesReader``) feeds + the VTK pipeline. +- The user can colour the geometry by any point or cell field and + choose a colormap preset. + + +The server is started by :mod:`plaid.viewer.cli` but can also be used +as a library. +""" + +from __future__ import annotations + +import asyncio +import contextlib +import json +import logging +import os +from pathlib import Path + +from plaid.viewer.models import SampleRef +from plaid.viewer.services import ParaviewArtifactService, PlaidDatasetService +from plaid.viewer.services.plaid_dataset_service import STREAM_CURSOR_ID + +logger = logging.getLogger(__name__) + +_COLORMAPS = ["viridis", "plasma", "inferno", "magma", "coolwarm", "turbo", "jet"] + +_VTK_LOG_ROUTER_INSTALLED = False +_C_STDERR_REROUTED = False + + +def _reroute_c_stderr() -> None: + """Permanently redirect the process's stderr file descriptor to /dev/null. + + VTK's CGNS reader and the underlying HDF5 library emit informational + messages such as ``Mismatch in number of children and child IDs read`` + directly via ``fprintf(stderr, ...)``. Those are not routed through + ``vtkOutputWindow`` and cannot be captured by a Python logger without + hijacking file descriptor 2. + + To keep Python's ``sys.stderr`` functional (pytest, tracebacks, etc.) we + save the current fd 2, reopen ``sys.stderr`` on top of the saved fd, and + only *then* redirect fd 2 itself to ``/dev/null``. 
C libraries that + write directly to ``stderr`` are silenced while Python ``print(..., + file=sys.stderr)`` and logging handlers keep working. + + Installed once per process. + """ + global _C_STDERR_REROUTED + if _C_STDERR_REROUTED: + return + import sys # noqa: PLC0415 + + try: + saved_fd = os.dup(2) + except OSError: # pragma: no cover - no fd 2 + return + try: + sys.stderr.flush() + except Exception: # noqa: BLE001 + pass + try: + sys.stderr = os.fdopen(saved_fd, "w", buffering=1) + except OSError: # pragma: no cover - defensive + os.close(saved_fd) + return + devnull_fd = os.open(os.devnull, os.O_WRONLY) + os.dup2(devnull_fd, 2) + os.close(devnull_fd) + _C_STDERR_REROUTED = True + + +def _install_vtk_log_router() -> None: + """Route VTK / HDF5 warnings to the Python ``logger`` at DEBUG level. + + ``vtkCGNSReader`` (through HDF5) emits chatty but harmless warnings such + as ``Mismatch in number of children and child IDs read`` when opening + CGNS files that contain bases without zones (e.g. ``Global``). By default + VTK writes those to ``stderr`` through a ``vtkOutputWindow``, which + pollutes the trame server console. We redirect all VTK messages to the + Python logger so users can opt in with ``PLAID_VIEWER_LOG=DEBUG`` + without any noise at INFO level. + + Installed once per process. + """ + global _VTK_LOG_ROUTER_INSTALLED + if _VTK_LOG_ROUTER_INSTALLED: + return + try: + import vtk # noqa: PLC0415 + except ImportError: # pragma: no cover - VTK is required in practice + return + + # ``vtkPythonStdStreamCaptureHelper`` is not available in every VTK wheel, + # so we subclass ``vtkOutputWindow`` in Python and forward all messages. 
+ class _LoggingOutputWindow(vtk.vtkOutputWindow): # type: ignore[misc] + def DisplayText(self, text: str) -> None: # noqa: N802 - VTK API + logger.debug("vtk: %s", text.rstrip()) + + def DisplayErrorText(self, text: str) -> None: # noqa: N802 - VTK API + logger.debug("vtk error: %s", text.rstrip()) + + def DisplayWarningText(self, text: str) -> None: # noqa: N802 - VTK API + logger.debug("vtk warning: %s", text.rstrip()) + + def DisplayGenericWarningText( # noqa: N802 - VTK API + self, text: str + ) -> None: + logger.debug("vtk warning: %s", text.rstrip()) + + def DisplayDebugText(self, text: str) -> None: # noqa: N802 - VTK API + logger.debug("vtk debug: %s", text.rstrip()) + + vtk.vtkOutputWindow.SetInstance(_LoggingOutputWindow()) + # Also silence VTK's own warning channel entirely; the logger now owns it. + vtk.vtkObject.GlobalWarningDisplayOff() + # VTK 9 routes most reader warnings (e.g. CGNS ``Mismatch in number of + # children and child IDs read``) through loguru via ``vtkLogger``, which + # writes to stderr independently from ``vtkOutputWindow``. Silence that + # channel as well so the server console stays clean. + if hasattr(vtk, "vtkLogger"): + try: + vtk.vtkLogger.SetStderrVerbosity(vtk.vtkLogger.VERBOSITY_OFF) + except AttributeError: # pragma: no cover - very old VTK + pass + _VTK_LOG_ROUTER_INSTALLED = True + + +@contextlib.contextmanager +def _silence_stderr(): + """Temporarily redirect file descriptor 2 to ``/dev/null``. + + Needed around ``vtkCGNSReader`` updates because the CGNS C library + writes messages such as ``Mismatch in number of children and child IDs + read`` directly to ``stderr`` (via ``fprintf``), bypassing VTK's + ``vtkOutputWindow`` and therefore our Python logger override. 
+ """ + try: + saved = os.dup(2) + except OSError: # pragma: no cover - no fd 2 (unlikely) + yield + return + devnull_fd = os.open(os.devnull, os.O_WRONLY) + try: + os.dup2(devnull_fd, 2) + yield + finally: + os.dup2(saved, 2) + os.close(saved) + os.close(devnull_fd) + + +# --------------------------------------------------------------------------- +# VTK helpers +# --------------------------------------------------------------------------- + + +def _enable_all_selections(cgns_reader) -> None: + """Enable every base / point / cell array known to a ``vtkCGNSReader``. + + ``vtkCGNSReader`` selections are OFF by default for arrays (and for + any base beyond the first one) so the VTK output would otherwise miss + half of the data. We enable everything after ``UpdateInformation`` so + the UI can expose it to the user. + """ + cgns_reader.UpdateInformation() + cgns_reader.EnableAllBases() + cgns_reader.EnableAllPointArrays() + cgns_reader.EnableAllCellArrays() + + +def _disable_bases_on_reader(reader, base_names: list[str]) -> None: + """Disable the given bases on the reader's base selection. + + Keeps every other base enabled. Useful to hide zone-less CGNS bases + from ``vtkCGNSReader`` which otherwise logs ``No zones in base ...`` + warnings on every update. + """ + cgns = _cgns_reader_of(reader) + selection = cgns.GetBaseSelection() + for name in base_names: + if selection.ArrayExists(name): + selection.DisableArray(name) + cgns.Modified() + + +def _load_reader(cgns_path: Path): + """Return a ready-to-use VTK reader for ``cgns_path``. + + For a ``.cgns.series`` sidecar, the reader is wrapped in + ``vtkCGNSFileSeriesReader`` so ParaView's time controls work out of the + box. (Note: the generic ``vtkFileSeriesReader`` is not exposed by the + ``vtk`` PyPI wheel, only the CGNS-specialised series reader is.) + + All bases, point arrays and cell arrays are enabled by default; the + side drawer lets the user narrow the selection later. 
+ """ + import vtk # noqa: PLC0415 + + if cgns_path.suffix == ".series": + payload = json.loads(cgns_path.read_text()) + entries = sorted( + payload.get("files", []), + key=lambda entry: float(entry.get("time", 0.0)), + ) + base_dir = cgns_path.parent + inner = vtk.vtkCGNSReader() + series = vtk.vtkCGNSFileSeriesReader() + series.SetReader(inner) + for entry in entries: + series.AddFileName(str((base_dir / entry["name"]).resolve())) + # ``vtkCGNSFileSeriesReader`` does not expose per-entry time setters: + # the timestep values are read from each CGNS file itself when the + # series reader pulls information from the underlying reader. + series.UpdateInformation() + inner.EnableAllBases() + inner.EnableAllPointArrays() + inner.EnableAllCellArrays() + # Do not call Update() here: the caller disables zone-less bases + # first (see ``_refresh_sample_view``) to avoid ``vtkCGNSReader`` + # logging ``No zones in base ...`` warnings. The pipeline's + # ``_apply_base_selection`` triggers the first Update(). 
+ return series + + reader = vtk.vtkCGNSReader() + reader.SetFileName(str(cgns_path)) + _enable_all_selections(reader) + return reader + + +def _cgns_reader_of(reader): + """Return the underlying ``vtkCGNSReader`` for a plain or series reader.""" + if hasattr(reader, "GetReader"): + return reader.GetReader() + return reader + + +def _selection_names(selection) -> list[str]: + """Return the array names exposed by a ``vtkDataArraySelection``.""" + return [selection.GetArrayName(i) for i in range(selection.GetNumberOfArrays())] + + +def _reader_bases_and_fields(reader) -> tuple[list[str], list[str], list[str]]: + """Return ``(bases, point_fields, cell_fields)`` exposed by the reader.""" + cgns = _cgns_reader_of(reader) + bases = _selection_names(cgns.GetBaseSelection()) + point_fields = _selection_names(cgns.GetPointDataArraySelection()) + cell_fields = _selection_names(cgns.GetCellDataArraySelection()) + return bases, point_fields, cell_fields + + +def _advance_reader_time(reader, time_value: float) -> None: + """Ask a VTK reader to update to the given time value. + + Works both on a plain ``vtkCGNSReader`` (static sample, no-op on the + reader itself) and on a ``vtkCGNSFileSeriesReader`` wrapping it. We call + ``UpdateTimeStep`` when available and otherwise fall back to the + executive's ``SetUpdateTimeStep`` API. Any failure is logged but does + not propagate to the UI. 
+ """ + try: + with _silence_stderr(): + update_time_step = getattr(reader, "UpdateTimeStep", None) + if callable(update_time_step): + update_time_step(time_value) + else: + executive = reader.GetExecutive() + executive.SetUpdateTimeStep(0, time_value) + reader.Update() + except Exception as exc: # noqa: BLE001 - defensive, VTK may be strict + logger.warning("Failed to advance reader to time %s: %s", time_value, exc) + + +def _apply_base_selection(reader, active_bases: list[str]) -> None: + """Enable exactly ``active_bases`` on the reader's base selection.""" + cgns = _cgns_reader_of(reader) + selection = cgns.GetBaseSelection() + selection.DisableAllArrays() + for name in active_bases: + selection.EnableArray(name) + cgns.Modified() + with _silence_stderr(): + reader.Update() + + +def _list_point_and_cell_fields(dataset) -> tuple[list[str], list[str]]: + """Return the point and cell field names available on ``dataset``.""" + point_fields: set[str] = set() + cell_fields: set[str] = set() + + def _visit(obj): + if obj is None: + return + if hasattr(obj, "GetNumberOfBlocks"): + for i in range(obj.GetNumberOfBlocks()): + _visit(obj.GetBlock(i)) + return + pd = obj.GetPointData() if hasattr(obj, "GetPointData") else None + cd = obj.GetCellData() if hasattr(obj, "GetCellData") else None + if pd is not None: + for i in range(pd.GetNumberOfArrays()): + point_fields.add(pd.GetArrayName(i)) + if cd is not None: + for i in range(cd.GetNumberOfArrays()): + cell_fields.add(cd.GetArrayName(i)) + + _visit(dataset) + return sorted(point_fields), sorted(cell_fields) + + +def _compute_field_range( + dataset, field_name: str, association: str +) -> tuple[float, float]: + """Return the (min, max) range of ``field_name`` across ``dataset``.""" + lo = float("inf") + hi = float("-inf") + + def _visit(obj): + nonlocal lo, hi + if obj is None: + return + if hasattr(obj, "GetNumberOfBlocks"): + for i in range(obj.GetNumberOfBlocks()): + _visit(obj.GetBlock(i)) + return + data = 
obj.GetPointData() if association == "point" else obj.GetCellData() + if data is None: + return + arr = data.GetArray(field_name) + if arr is None: + return + r = arr.GetRange(-1) + lo = min(lo, r[0]) + hi = max(hi, r[1]) + + _visit(dataset) + if lo == float("inf"): + return 0.0, 1.0 + return lo, hi + + +# --------------------------------------------------------------------------- +# Pipeline +# --------------------------------------------------------------------------- + + +class _VtkPipeline: + """Minimal reader -> (cut) -> (threshold) -> geometry -> actor pipeline.""" + + def __init__(self) -> None: + import vtk # noqa: PLC0415 + + self.render_window = vtk.vtkRenderWindow() + # Off-screen rendering is required on headless servers (no X + # display). It does not prevent the interactor from receiving + # events forwarded from the browser by ``VtkRemoteView``: the + # events are dispatched to the interactor style, which mutates + # the server-side camera before the next frame is streamed. + self.render_window.OffScreenRenderingOn() + self.renderer = vtk.vtkRenderer() + self.renderer.SetBackground(0.12, 0.12, 0.14) + self.render_window.AddRenderer(self.renderer) + self.interactor = vtk.vtkRenderWindowInteractor() + self.interactor.SetRenderWindow(self.render_window) + # Without an explicit interactor style, ``vtkRenderWindowInteractor`` + # does not translate mouse events into camera manipulation, so the + # remote view appears frozen in the browser even though events are + # correctly forwarded. ``vtkInteractorStyleTrackballCamera`` is the + # standard ParaView-like style (LMB rotate, MMB pan, wheel zoom). 
+ interactor_style = vtk.vtkInteractorStyleTrackballCamera() + self.interactor.SetInteractorStyle(interactor_style) + self.interactor.Initialize() + self._interactor_style = interactor_style # keep a reference alive + + self.reader = None + self.actor = vtk.vtkActor() + # Gouraud shading (per-vertex normals interpolated across the + # triangle) looks noticeably smoother than flat shading on curved + # surfaces. Combined with a ``vtkPolyDataNormals`` step below, it + # gives a nice continuous lighting on CFD meshes without changing + # the geometry. + self.actor.GetProperty().SetInterpolationToGouraud() + self.mapper = vtk.vtkCompositePolyDataMapper() + self.actor.SetMapper(self.mapper) + self.renderer.AddActor(self.actor) + + self.lut = vtk.vtkLookupTable() + self.lut.SetHueRange(0.667, 0.0) # blue -> red + self.lut.Build() + + self._current_dataset = None + + def load(self, cgns_path: Path) -> None: + """Load a new CGNS/series file and reset the pipeline.""" + self.reader = _load_reader(cgns_path) + self._rebuild() + + def update( + self, + *, + field: str | None, + association: str, + cmap: str, + show_edges: bool, + ) -> None: + """Rebuild the downstream pipeline with the current options.""" + if self.reader is None: + return + import vtk # noqa: PLC0415 + + pipeline_output = self.reader.GetOutputPort() + + geom = vtk.vtkCompositeDataGeometryFilter() + geom.SetInputConnection(pipeline_output) + geom.Update() + self._current_dataset = geom.GetOutput() + self.mapper.SetInputConnection(geom.GetOutputPort()) + + if field is not None: + self.mapper.SelectColorArray(field) + + if association == "point": + self.mapper.SetScalarModeToUsePointFieldData() + else: + self.mapper.SetScalarModeToUseCellFieldData() + self.mapper.SetColorModeToMapScalars() + self.mapper.ScalarVisibilityOn() + lo, hi = _compute_field_range(self.reader.GetOutput(), field, association) + self.lut = _build_lut(cmap, lo, hi) + self.mapper.SetLookupTable(self.lut) + self.mapper.SetScalarRange(lo, hi) 
+ else: + self.mapper.ScalarVisibilityOff() + + self.actor.GetProperty().SetEdgeVisibility(bool(show_edges)) + self.actor.GetProperty().SetLineWidth(1.0) + + def reset_camera(self) -> None: + """Reset the camera to the default view orientation and framing. + + ``vtkRenderer.ResetCamera()`` only adjusts the camera *distance* + so the current actor fits in the viewport; it leaves the camera + orientation (position direction, view up) untouched. To match the + first-load behaviour after the user has rotated the scene, we + also reset the orientation to the VTK defaults (looking down + ``-Z`` with ``+Y`` up) before reframing. + """ + camera = self.renderer.GetActiveCamera() + camera.SetPosition(0.0, 0.0, 1.0) + camera.SetFocalPoint(0.0, 0.0, 0.0) + camera.SetViewUp(0.0, 1.0, 0.0) + camera.SetViewAngle(30.0) + self.renderer.ResetCamera() + + def _rebuild(self) -> None: + self.renderer.ResetCamera() + + +def _build_lut(cmap: str, lo: float, hi: float): + """Build a simple ``vtkLookupTable`` approximating a matplotlib colormap.""" + import vtk # noqa: PLC0415 + + # Minimal built-in approximations - use HueRange for the common cases. + lut = vtk.vtkLookupTable() + lut.SetTableRange(lo, hi) + lut.SetNumberOfColors(256) + presets = { + "viridis": (0.75, 0.0), + "plasma": (0.8, 0.05), + "inferno": (0.0, 0.15), + "magma": (0.85, 0.0), + "coolwarm": (0.667, 0.0), + "turbo": (0.7, 0.0), + "jet": (0.667, 0.0), + } + h0, h1 = presets.get(cmap, (0.667, 0.0)) + lut.SetHueRange(h0, h1) + lut.SetSaturationRange(1.0, 1.0) + lut.SetValueRange(1.0, 1.0) + lut.Build() + return lut + + +# --------------------------------------------------------------------------- +# Trame server +# --------------------------------------------------------------------------- + + +def build_server( + dataset_service: PlaidDatasetService, + artifact_service: ParaviewArtifactService, +): + """Create a configured trame :class:`Server` instance. 
+ + Args: + dataset_service: Discovers datasets and loads PLAID samples. + artifact_service: Converts a :class:`SampleRef` to a ParaView-readable + artifact on disk. + + Returns: + The configured ``trame.app.Server``. Call ``.start(host=..., port=...)`` + to run it. + """ + from trame.app import ( + asynchronous, # noqa: PLC0415 + get_server, # noqa: PLC0415 + ) + from trame.ui.vuetify3 import SinglePageWithDrawerLayout # noqa: PLC0415 + from trame.widgets import html # noqa: PLC0415 + from trame.widgets import vtk as vtk_widgets # noqa: PLC0415 + from trame.widgets import vuetify3 as v3 # noqa: PLC0415 + + _install_vtk_log_router() + + server = get_server(client_type="vue3") + state, ctrl = server.state, server.controller + + pipeline = _VtkPipeline() + # Background task handle for the time-series playback loop (see + # ``_on_playing`` below). Kept here so successive toggles cancel the + # previous task instead of spawning duplicates. + play_task: dict[str, object] = {"task": None} + # One-shot flag raised by ``_apply_features`` so the next + # ``_refresh_sample_view_impl`` call rebuilds the ParaView artifact + # from scratch (its on-disk cache key does not include the feature + # filter, so without this force-refresh the renderer would keep + # showing the pre-filter CGNS file). + force_artifact_refresh: dict[str, bool] = {"pending": False} + + with _silence_stderr(): + datasets = dataset_service.list_datasets() + # Dataset ids are kept in two disjoint lists driven by the + # Local / Hub tabs so the dropdown always matches the active source + # (``init_from_disk`` vs ``init_streaming_from_hub``). The UI reads + # the right list via a ternary expression on ``source_tab``. 
+ hub_ids_set = set(dataset_service.hub_repos) + local_dataset_ids = [ + d.dataset_id for d in datasets if d.dataset_id not in hub_ids_set + ] + hub_dataset_ids = [d.dataset_id for d in datasets if d.dataset_id in hub_ids_set] + dataset_ids = local_dataset_ids + hub_dataset_ids + + # --- Default state ---------------------------------------------------- + # Datasets root panel. ``allow_root_change`` gates the UI on the + # client: when False, the panel is hidden so a public deployment can + # pin the root from the CLI (``--datasets-root /data + # --disable-root-change``). + state.setdefault( + "datasets_root_text", + str(dataset_service.datasets_root) if dataset_service.datasets_root else "", + ) + state.setdefault("allow_root_change", dataset_service._config.allow_root_change) + state.setdefault("browse_dialog", False) + state.setdefault("browse_cwd", "") + state.setdefault("browse_parent", None) + state.setdefault("browse_entries", []) + + # Hugging Face Hub streaming. ``hub_repos`` mirrors the service state + # and ``hub_repo_input`` is the text field bound to the "Add hub + # dataset" panel. Hub datasets are exposed alongside local ones in + # ``dataset_ids``; the service dispatches to + # ``plaid.storage.init_streaming_from_hub`` when the selected dataset + # is a registered repo id. + state.setdefault("hub_repos", list(dataset_service.hub_repos)) + state.setdefault("hub_repo_input", "") + # Active side-panel tab: "local" drives ``datasets_root_text`` and + # directory browsing, "hub" drives the Hugging Face repo input. The + # selection only gates which form is rendered; registered datasets + # from either source always land in ``dataset_ids`` together. + state.setdefault("source_tab", "local") + + # Initial ``dataset_id`` follows the default ``source_tab`` ("local"): + # pick the first local dataset when any is available, otherwise fall + # back to the first hub dataset (so a viewer launched with only + # ``--hub-repo`` still has something selected). 
+ initial_dataset_id = ( + local_dataset_ids[0] + if local_dataset_ids + else (hub_dataset_ids[0] if hub_dataset_ids else None) + ) + state.setdefault("dataset_id", initial_dataset_id) + # Separate lists per source so the dropdown only shows datasets that + # match the active tab. ``dataset_ids`` is kept for backwards + # compatibility (e.g. tests that inspect the full list) but the UI + # reads from ``local_dataset_ids`` / ``hub_dataset_ids`` directly. + state.setdefault("local_dataset_ids", local_dataset_ids) + state.setdefault("hub_dataset_ids", hub_dataset_ids) + state.setdefault("dataset_ids", dataset_ids) + + state.setdefault("splits", []) + state.setdefault("split", None) + state.setdefault("sample_ids", []) + state.setdefault("sample_id", None) + state.setdefault("sample_index", 0) + state.setdefault("sample_count", 0) + # Streaming (Hugging Face Hub) navigation. Hub datasets expose + # ``IterableDataset`` splits without a ``__len__``, so the slider is + # driven by a forward-only cursor rather than a random-access index + # list. ``stream_position`` mirrors the service cursor (-1 before any + # fetch), ``stream_exhausted`` is set when the iterator raises + # ``StopIteration`` so the slider caps at the last consumed index. + state.setdefault("is_streaming", False) + state.setdefault("stream_position", -1) + state.setdefault("stream_exhausted", False) + + # Feature filtering state. ``available_features`` is the full list of + # feature paths declared in the dataset metadata (populated whenever + # ``dataset_id`` changes), ``selected_features`` is the subset the + # user kept through the checkbox panel. An empty ``selected_features`` + # means "no filter": every feature is loaded (default behaviour). + state.setdefault("available_features", []) + state.setdefault("selected_features", []) + + state.setdefault("base_options", []) + # Single active base (exclusive selection). 
Kept as a list internally + # so `_apply_base_selection` has a uniform interface, but the UI + # exposes it as a ``VBtnToggle`` with ``multiple=False``. + state.setdefault("active_base", None) + # PLAID globals (``sample.get_global_names`` / ``sample.get_global``) + + # for the current sample, minus the ``IterationValues`` / ``TimeValues`` + # bookkeeping arrays which describe time steps rather than physical + # scalars. + state.setdefault("sample_globals", []) + # Time axis. ``time_values`` mirrors ``sample.features.get_all_time_values()`` + # and ``time_index`` is the index of the currently displayed step. + state.setdefault("time_values", []) + state.setdefault("time_index", 0) + state.setdefault("time_count", 0) + state.setdefault("current_time", None) + state.setdefault("field_options", []) + state.setdefault("field", None) # "point:name" or "cell:name" + state.setdefault("cmap", "viridis") + state.setdefault("cmaps", _COLORMAPS) + state.setdefault("show_edges", False) + state.setdefault("field_range", [0.0, 1.0]) + state.setdefault("status", "Select a dataset to start.") + # Loading indicator: True while the VTK reader is opening a new sample + # or advancing to a new time step. Consumed by a ``VProgressLinear`` in + # the header and an overlay on top of the 3D view. + state.setdefault("loading", False) + # Time-series playback controls. + state.setdefault("playing", False) + state.setdefault("play_fps", 5) + state.setdefault("play_loop", True) + + # --- Helpers ---------------------------------------------------------- + + def _refresh_splits() -> None: + if not state.dataset_id: + state.splits = [] + state.split = None + # Propagate "no dataset" to sample list + 3D scene so the + # view does not linger on the last local sample when the + # user switches to the Hub tab without any registered repo. 
+ _refresh_samples() + return + + try: + with _silence_stderr(): + detail = dataset_service.get_dataset(state.dataset_id) + splits = list(detail.splits.keys()) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to load dataset: {exc}" + splits = [] + state.splits = splits + new_split = splits[0] if splits else None + # When the new dataset exposes the same first split name as the + # previous one (e.g. both default to ``train``), ``state.split`` + # does not change and the ``@state.change("split")`` listener is + # skipped: the sample list would keep pointing at the old dataset. + # Force a refresh in that case. + same_split = state.split == new_split + state.split = new_split + if same_split: + _refresh_samples() + + def _clear_scene(status: str | None = None) -> None: + """Empty the VTK view and all sample-related panels. + + Used whenever no sample should be displayed (no dataset + selected, streaming dataset waiting for the first ``Next`` + click, ...). Keeping this in a single place ensures the 3D + view never lingers on a stale frame from a previous selection. + """ + pipeline.reader = None + pipeline.mapper.RemoveAllInputConnections(0) + pipeline.mapper.ScalarVisibilityOff() + state.base_options = [] + state.active_base = None + state.field_options = [] + state.field = None + state.sample_globals = [] + state.time_values = [] + state.time_count = 0 + state.time_index = 0 + state.current_time = None + state.sample_ids = [] + state.sample_id = None + state.sample_count = 0 + state.sample_index = 0 + if status is not None: + state.status = status + ctrl.view_update() + + def _refresh_samples() -> None: + if not state.dataset_id: + # No dataset selected: clear everything, including the 3D + # scene. This matters when the user switches to the Hub tab + # without any registered repo - otherwise the view would + # keep showing the last local sample. 
+ state.is_streaming = False + _clear_scene(status="Select a dataset to start.") + return + + split_key = state.split + if split_key == "__default__": + split_key = None + # Streaming datasets (HF Hub) are not random-access. The service + # returns a single synthetic ``SampleRef`` with the + # ``STREAM_CURSOR_ID`` sentinel per split, and we advance the + # cursor forward through ``advance_stream_cursor`` as the user + # moves the slider to the right. The slider exposes indices + # ``[0 .. cursor_position + 1]`` so the user can still revisit + # already-fetched samples via the converter cache but never + # rewind the underlying iterator (which is by construction + # forward-only). + try: + streaming = dataset_service.is_streaming(state.dataset_id) + except Exception: # noqa: BLE001 + streaming = False + state.is_streaming = streaming + if streaming: + # Reset the cursor so each (dataset, split) selection starts + # at the first available sample regardless of previous state. + try: + dataset_service.reset_stream_cursor(state.dataset_id, split_key) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to reset stream cursor: {exc}" + return + state.stream_position = -1 + state.stream_exhausted = False + state.sample_ids = [] + state.sample_count = 0 + state.sample_index = 0 + # No sample has been fetched yet: the status bar invites the + # user to click "Next" to consume the first element of the + # stream. ``sample_id`` stays ``None`` so ``_refresh_sample_view`` + # short-circuits until the cursor has actually advanced. + state.sample_id = None + # Clear the VTK scene so the 3D view is empty while waiting + # for the first ``Next`` click. Without this, switching back + # to the Hub tab would still show the mesh of the previously + # loaded local dataset (or the previous streaming sample), + # which is confusing since no hub sample has been fetched yet. 
+ pipeline.reader = None + pipeline.mapper.RemoveAllInputConnections(0) + pipeline.mapper.ScalarVisibilityOff() + state.base_options = [] + state.active_base = None + state.field_options = [] + state.field = None + state.sample_globals = [] + state.time_values = [] + state.time_count = 0 + state.time_index = 0 + state.current_time = None + ctrl.view_update() + state.status = "Streaming: click Next to fetch the first sample." + return + + try: + with _silence_stderr(): + refs = dataset_service.list_samples(state.dataset_id) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to list samples: {exc}" + refs = [] + ids = [r.sample_id for r in refs if r.split == split_key] + state.sample_ids = ids + state.sample_count = len(ids) + state.sample_index = 0 + new_sample_id = ids[0] if ids else None + # Switching dataset/split may leave ``state.sample_id`` unchanged + # (e.g. both new and old first sample are "0"); in that case the + # ``@state.change("sample_id")`` hook would not fire and the 3D + # view would keep the previous sample. Force a refresh whenever + # the sample id is the same but the dataset/split context changed. + same_id = state.sample_id == new_sample_id + state.sample_id = new_sample_id + if same_id and new_sample_id is not None: + _refresh_sample_view() + + def _refresh_field_options() -> None: + """Restrict the field dropdown to arrays present in the active base. + + ``_list_point_and_cell_fields`` walks the reader's current output, + which reflects the currently enabled base selection, so fields + belonging to unselected bases are hidden. + """ + if pipeline.reader is None: + state.field_options = [] + state.field = None + return + points, cells = _list_point_and_cell_fields(pipeline.reader.GetOutput()) + options = [f"point:{n}" for n in points] + [f"cell:{n}" for n in cells] + state.field_options = options + # Preserve the previously selected field if it is still available. 
+ if state.field not in options: + state.field = options[0] if options else None + + def _refresh_sample_view() -> None: + """Reload the current sample and refresh the full UI state. + + The call is intentionally synchronous: trame schedules state + broadcasts after the callback returns, so we rely on the + ``VProgressLinear`` shown while ``state.loading`` is True to + indicate activity. A previous async variant that ran the VTK work + in an executor caused the viewer to appear frozen, so we keep the + simple blocking flow and just expose ``state.loading`` for visual + feedback. + """ + if not (state.dataset_id and state.sample_id is not None): + return + state.loading = True + try: + _refresh_sample_view_impl() + finally: + state.loading = False + + def _refresh_sample_view_impl() -> None: + split = state.split if state.split != "__default__" else None + # Streaming datasets expose a "hub" backend regardless of the + # CLI-default backend id, so ``SampleRef`` carries the correct + # loader hint and the paraview artifact cache remains coherent + # across local/streaming switches. + backend_id = "hub" if state.is_streaming else dataset_service._config.backend_id + ref = SampleRef( + backend_id=backend_id, + dataset_id=state.dataset_id, + split=split, + sample_id=str(state.sample_id), + ) + + # Refresh time axis + globals panel (independent of VTK rendering). + # PLAID's CGNS loading (pyCGNS / CHLone) writes low-level HDF5 + # warnings such as "Mismatch in number of children and child IDs + # read" directly to stderr. Wrap every call that can trigger a + # CGNS read with ``_silence_stderr`` so the server console stays + # clean. 
+ try: + with _silence_stderr(): + times = dataset_service.list_time_values(ref) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed to list time values: %s", exc) + times = [] + state.time_values = times + state.time_count = len(times) + state.time_index = 0 + state.current_time = times[0] if times else None + try: + with _silence_stderr(): + state.sample_globals = dataset_service.describe_globals( + ref, time=state.current_time + ) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed to describe globals: %s", exc) + state.sample_globals = [] + try: + # Streaming samples all share the same ``SampleRef`` (the + # ``STREAM_CURSOR_ID`` sentinel) and would therefore hit the + # paraview artifact cache on every Next click, returning the + # first consumed sample forever. ``force=True`` tells + # ``ensure_artifact`` to rebuild the on-disk CGNS from the + # freshly advanced stream cursor instead. + # + # Disk datasets additionally set ``force_artifact_refresh`` + # after the user applies a new feature filter: the artifact + # cache key is derived from ``SampleRef`` alone (no feature + # list), so without forcing a rebuild the renderer would + # keep displaying the pre-filter CGNS file. + force = state.is_streaming or force_artifact_refresh["pending"] + force_artifact_refresh["pending"] = False + with _silence_stderr(): + artifact = artifact_service.ensure_artifact(ref, force=force) + pipeline.load(artifact.cgns_path) + # Disable zone-less bases *before* the reader's first Update() + # so ``vtkCGNSReader`` does not log ``No zones in base ...`` + # warnings for auxiliary bases like ``Global``. 
+ try: + with _silence_stderr(): + non_visual_names = list( + dataset_service.describe_non_visual_bases(ref).keys() + ) + except Exception: # noqa: BLE001 + non_visual_names = [] + if non_visual_names: + _disable_bases_on_reader(pipeline.reader, non_visual_names) + with _silence_stderr(): + pipeline.reader.Update() + bases, _points, _cells = _reader_bases_and_fields(pipeline.reader) + non_visual_set = set(non_visual_names) + # The ``Global`` CGNS base is a PLAID bookkeeping base used to + # store sample-level metadata (scalar inputs/outputs, time + # values, ...). It is surfaced separately in the "Globals" + # panel of the drawer and should never appear alongside the + # ``Base__`` rendering bases in the base + # toggle: selecting it would hide every ``Base_x_y`` base and + # leave the 3D view empty. + visual_bases = [ + name + for name in bases + if name not in non_visual_set and name != "Global" + ] + state.base_options = visual_bases + + # Preserve the user's base selection across samples when the + # same base still exists; otherwise fall back to the first + # renderable base. + previous = state.active_base + if previous in visual_bases: + state.active_base = previous + else: + state.active_base = visual_bases[0] if visual_bases else None + if state.active_base is not None: + _apply_base_selection(pipeline.reader, [state.active_base]) + _refresh_field_options() + # For streaming datasets the sentinel ``cursor`` sample id + # would look like ``hub:repo:split:cursor``; replace it with + # a 0-based step counter that is meaningful to the user. 
+ if state.is_streaming: + state.status = ( + f"Loaded streaming sample #{state.stream_position} " + f"from {state.dataset_id}" + + (f" / {state.split}" if state.split else "") + ) + else: + state.status = f"Loaded sample {ref.encode()}" + _apply_pipeline(reset_camera=True) + except Exception as exc: # noqa: BLE001 + # "Missing features" errors bubble up from the PLAID converter + # when a feature path selected by the user does not exist in + # the current split's schema (constant/variable features are + # declared per-split). The raw exception dumps the full list + # of missing paths, which is both noisy and unactionable in + # the viewer. We shorten it to a hint that the user should + # check the split-specific availability of the filter. + message = str(exc) + if "Missing features" in message: + state.status = ( + "Failed to load sample: Missing features in dataset, check split" + ) + else: + state.status = f"Failed to load sample: {exc}" + + def _apply_pipeline(*, reset_camera: bool = False) -> None: + """Rebuild the VTK pipeline and push the result to the client. + + With ``VtkRemoteView`` the VTK camera lives on the server, so + resetting it server-side and calling ``ctrl.view_update`` is + sufficient: the next rendered frame sent to the browser already + reflects the default orientation and reframed bounds. 
+ """ + if pipeline.reader is None: + return + association = "point" + name: str | None = None + if state.field: + association, name = state.field.split(":", 1) + if name is not None: + lo, hi = _compute_field_range( + pipeline.reader.GetOutput(), name, association + ) + state.field_range = [float(lo), float(hi)] + pipeline.update( + field=name, + association=association, + cmap=state.cmap, + show_edges=bool(state.show_edges), + ) + if reset_camera: + pipeline.reset_camera() + ctrl.view_update() + + # --- State change handlers ------------------------------------------- + + def _refresh_available_features() -> None: + """Populate ``available_features`` and ``selected_features`` from PLAID. + + Called whenever the active ``dataset_id`` changes so the feature + checkbox panel in the drawer reflects what the current dataset + actually exposes. Errors during metadata loading (missing + ``variable_schema.yaml`` on non-PLAID directories, network + failures for Hub datasets, ...) are caught and logged: the panel + is simply emptied in that case. + """ + if not state.dataset_id: + state.available_features = [] + state.selected_features = [] + return + try: + with _silence_stderr(): + available = dataset_service.list_available_features(state.dataset_id) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed to list features: %s", exc) + state.available_features = [] + state.selected_features = [] + return + state.available_features = available + current = dataset_service.get_features(state.dataset_id) + state.selected_features = list(current) if current else [] + + @ctrl.set("apply_features") + def _apply_features() -> None: + """Push ``selected_features`` to the service and reload the sample. + + The selection is forwarded verbatim to + :meth:`PlaidDatasetService.set_features`. 
In particular an + empty list is kept empty (not converted to ``None``): the user + then sees a sample that only contains the auto-injected non-field + paths (globals, mesh coordinates, ...), which removes every + coloured array from the 3D view. To restore the full dataset + the user can click the "Load all" shortcut or re-check every + feature manually. + """ + if not state.dataset_id: + return + features = list(state.selected_features or []) + try: + with _silence_stderr(): + # Pass the list unconditionally: ``None`` means "no + # filter at all" and is reserved for the initial state / + # explicit reset via :meth:`PlaidDatasetService.set_features`. + dataset_service.set_features(state.dataset_id, features) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to set features: {exc}" + return + # Changing the feature filter invalidates the in-memory store + # cache (for streaming datasets, the iterator is rebuilt) and + # any cached paraview artifact for this dataset. The simplest + # way to propagate the change to the view is to run the full + # split/sample refresh cascade. + state.status = ( + f"Applied feature filter ({len(features)} selected)." + if features + else "Feature filter cleared (no field loaded)." + ) + # Force the next ``ensure_artifact`` call to rebuild the CGNS + # file; otherwise the cache would still return the pre-filter + # artifact and the renderer's field list would not change. + force_artifact_refresh["pending"] = True + _refresh_samples() + + @ctrl.set("clear_features") + def _clear_features() -> None: + """Clear the feature selection. + + After calling this, the sample contains only the auto-injected + non-field paths (globals, coordinates, connectivities) so the + 3D view shows the mesh with no coloured field. Use the + top-level "Load all" shortcut to restore every feature. 
+ """ + state.selected_features = [] + _apply_features() + + @ctrl.set("select_all_features") + def _select_all_features() -> None: + """Select every available feature and apply the filter. + + Used by the top-level "Load all" shortcut button so the user + can restore the full-dataset view in a single click without + having to open the checkbox panel. Internally this is + equivalent to clearing the filter (an empty / full selection + both load every feature once non-field paths are re-injected + by :meth:`PlaidDatasetService.set_features`), but reflecting + the selection in the checkboxes gives clearer visual feedback. + """ + state.selected_features = list(state.available_features or []) + _apply_features() + + @state.change("dataset_id") + def _on_dataset(**_: object) -> None: + _refresh_available_features() + _refresh_splits() + + @state.change("source_tab") + def _on_source_tab(**_: object) -> None: + """Switch ``dataset_id`` to the first entry of the active source. + + The dropdown's ``items`` binding filters by ``source_tab`` on the + client, but the currently selected ``dataset_id`` may belong to + the other source and would then display as a stale selection. We + proactively pick the first id from the active list (or ``None`` + when empty) so the dropdown always reflects the active tab. + """ + active_ids = ( + list(state.hub_dataset_ids or []) + if state.source_tab == "hub" + else list(state.local_dataset_ids or []) + ) + new_id = active_ids[0] if active_ids else None + if state.dataset_id == new_id: + # ``@state.change('dataset_id')`` would not fire; refresh + # splits explicitly so the split dropdown and sample list + # stay coherent with the active tab. 
+ _refresh_splits() + else: + state.dataset_id = new_id + + @state.change("split") + def _on_split(**_: object) -> None: + # Clear the active feature selection on every split switch so + # the user starts from a predictable, lightweight state: only + # the geometric supports (mesh coordinates, connectivities, + # globals, ...) associated with the split's available features + # are loaded, and no field is coloured in the 3D view. This + # avoids "Missing features in dataset, check split" errors when + # the previously-selected fields do not exist in the new split, + # and lets the user opt-in to specific fields through the + # checkbox panel. ``_apply_features`` triggers ``_refresh_samples`` + # under the hood, so we do not need to call it again here. + # + # Streaming (Hugging Face Hub) datasets are handled differently: + # they typically expose a single default split, so the multi- + # split "Missing features" issue does not apply. Pushing an + # empty feature filter through ``set_features`` would invalidate + # the store cache and force :meth:`_open` to re-instantiate the + # streaming iterator with an ``update_features_for_CGNS_compatibility`` + # expansion derived from the dataset-wide metadata union, which + # may not match the hub split's actual schema and ends up + # loading the wrong feature catalogue. We therefore skip the + # auto-clear for streaming datasets and let the user apply + # filters explicitly through the checkbox panel. + if not state.dataset_id: + _refresh_samples() + return + try: + streaming = dataset_service.is_streaming(state.dataset_id) + except Exception: # noqa: BLE001 + streaming = False + if streaming: + _refresh_samples() + return + state.selected_features = [] + _apply_features() + + @state.change("sample_index") + def _on_sample_index(**_: object) -> None: + try: + idx = int(state.sample_index) + except (TypeError, ValueError): + idx = 0 + # Streaming datasets: drive the forward-only cursor. 
The slider's + # maximum (``sample_count - 1``) always matches the most recent + # position the user has reached, so a right-arrow press grows the + # cursor by exactly one step; when the stream is exhausted the + # index is clamped back to the last valid position. + if state.is_streaming: + if state.dataset_id is None: + return + split = state.split if state.split != "__default__" else None + position = int(state.stream_position) + if idx <= position: + # Already-visited step: a streaming iterator cannot be + # rewound, so the view keeps the most recently fetched + # sample. We simply update the slider label. + state.sample_index = max(0, position) + return + # Advance the cursor step-by-step until it matches ``idx`` + # (the slider can only advance by one in normal use, but we + # stay robust to multi-step jumps). + while int(state.stream_position) < idx: + try: + dataset_service.advance_stream_cursor(state.dataset_id, split) + except StopIteration: + state.stream_exhausted = True + # Clamp back to the last fetched position. + state.sample_index = max(0, int(state.stream_position)) + state.status = "Stream exhausted." + return + state.stream_position = int(state.stream_position) + 1 + # Grow the slider's reachable range by one so the user can + # fetch the next sample on the next right-arrow press. + state.sample_count = int(state.stream_position) + 2 + state.sample_id = "cursor" + # ``sample_id`` did not actually change ("cursor" both times), + # so the ``@state.change("sample_id")`` listener is skipped. + # Force a refresh explicitly. + _refresh_sample_view() + return + ids = list(state.sample_ids or []) + if not ids: + state.sample_id = None + return + idx = max(0, min(idx, len(ids) - 1)) + state.sample_id = ids[idx] + + @state.change("sample_id") + def _on_sample(**_: object) -> None: + _refresh_sample_view() + + def _apply_time_step_impl() -> None: + """Synchronous work behind a time-axis update. 
+ + Pushes the selected time step into the VTK pipeline and refreshes + the globals panel for the new time. Both are safe to call at + playback rates now that ``_on_time_index`` short-circuits during + playback, so the loop only performs one VTK update and one + globals read per frame. + """ + if pipeline.reader is not None and state.current_time is not None: + _advance_reader_time(pipeline.reader, float(state.current_time)) + _apply_pipeline() + if state.dataset_id and state.sample_id is not None: + split = state.split if state.split != "__default__" else None + ref = SampleRef( + backend_id=dataset_service._config.backend_id, + dataset_id=state.dataset_id, + split=split, + sample_id=str(state.sample_id), + ) + try: + with _silence_stderr(): + state.sample_globals = dataset_service.describe_globals( + ref, time=state.current_time + ) + except Exception as exc: # noqa: BLE001 + logger.warning("Failed to describe globals: %s", exc) + + @state.change("time_index") + def _on_time_index(**_: object) -> None: + times = list(state.time_values or []) + if not times: + state.current_time = None + return + try: + idx = int(state.time_index) + except (TypeError, ValueError): + idx = 0 + idx = max(0, min(idx, len(times) - 1)) + state.current_time = times[idx] + # During playback the loop (``_play_loop``) already advances the + # time step itself; without this short-circuit the listener + # would run a second ``_apply_time_step_impl`` per frame (double + # VTK update + double PLAID read), which saturates the trame + # WebSocket and stalls playback. + if state.playing: + return + state.loading = True + try: + _apply_time_step_impl() + finally: + state.loading = False + + async def _play_loop() -> None: + """Advance ``time_index`` at ``play_fps`` while ``playing`` is True. + + The loop directly updates ``time_index``, ``current_time`` and + runs the VTK time-step update synchronously (the VTK calls are + fast enough for typical CFD meshes). 
Relying on the + ``@state.change("time_index")`` listener was unreliable because + trame dispatches it asynchronously, so the playback could end + before the last frame was actually rendered. + + When the end of the time axis is reached, the loop either wraps + around (``play_loop=True``) or stops playback + (``play_loop=False``). The loop exits cleanly on + :class:`asyncio.CancelledError` so the Stop button can cancel the + task immediately. + """ + try: + while state.playing: + count = int(state.time_count or 0) + if count <= 1: + with state: + state.playing = False + break + nxt = int(state.time_index or 0) + 1 + if nxt >= count: + if state.play_loop: + nxt = 0 + else: + with state: + state.playing = False + break + times = list(state.time_values or []) + # Trame state mutations inside an asyncio task must be + # wrapped in ``with state:`` for the ``@state.change`` + # handlers to actually fire and for the client to receive + # the broadcast. Without this block, the slider / time + # label on the client do not update during playback. + with state: + state.time_index = nxt + state.current_time = times[nxt] if nxt < len(times) else None + _apply_time_step_impl() + fps = max(1, int(state.play_fps or 1)) + await asyncio.sleep(1.0 / fps) + except asyncio.CancelledError: # pragma: no cover - cooperative cancel + pass + + @state.change("playing") + def _on_playing(**_: object) -> None: + existing = play_task.get("task") + if existing is not None and not existing.done(): # type: ignore[union-attr] + existing.cancel() # type: ignore[union-attr] + play_task["task"] = None + if state.playing and int(state.time_count or 0) > 1: + play_task["task"] = asynchronous.create_task(_play_loop()) + + @ctrl.set("toggle_play") + def _toggle_play() -> None: + state.playing = not bool(state.playing) + + @ctrl.set("stop_playback") + def _stop_playback() -> None: + """Stop playback and reset the time axis back to the first step. 
+ + Using a controller callback is more robust than the inline + ``click="playing = false; time_index = 0"`` expression: if the + slider is already at index 0 the client-side assignment is a + no-op and no ``@state.change("time_index")`` listener runs, so + the VTK view would keep showing the last-played frame. Here we + always force a refresh by calling ``_apply_time_step_impl``. + """ + state.playing = False + times = list(state.time_values or []) + state.time_index = 0 + state.current_time = times[0] if times else None + state.loading = True + try: + _apply_time_step_impl() + finally: + state.loading = False + + @state.change("active_base") + def _on_base(**_: object) -> None: + if pipeline.reader is None: + return + active = [state.active_base] if state.active_base else [] + try: + _apply_base_selection(pipeline.reader, active) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to update base: {exc}" + return + # Narrow the field dropdown to arrays that actually exist on the + # newly-selected base. 
+ _refresh_field_options() + _apply_pipeline(reset_camera=True) + + @state.change("field", "cmap", "show_edges") + def _on_view_params(**_: object) -> None: + _apply_pipeline() + + # --- Datasets root management ---------------------------------------- + + def _reload_dataset_list() -> None: + """Re-discover datasets under the (possibly new) datasets root.""" + try: + with _silence_stderr(): + new_datasets = dataset_service.list_datasets() + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to list datasets: {exc}" + new_datasets = [] + hub_set = set(dataset_service.hub_repos) + local_ids = [d.dataset_id for d in new_datasets if d.dataset_id not in hub_set] + hub_ids = [d.dataset_id for d in new_datasets if d.dataset_id in hub_set] + new_ids = local_ids + hub_ids + state.local_dataset_ids = local_ids + state.hub_dataset_ids = hub_ids + state.dataset_ids = new_ids + # Force ``dataset_id`` to change so ``@state.change('dataset_id')`` + # fires and cascades through splits / samples / view refresh. + # Pick from the list that matches the active source tab. + active_ids = hub_ids if state.source_tab == "hub" else local_ids + state.dataset_id = active_ids[0] if active_ids else None + + if not new_ids: + state.splits = [] + state.split = None + state.sample_ids = [] + state.sample_id = None + state.sample_count = 0 + state.base_options = [] + state.active_base = None + state.field_options = [] + state.field = None + state.sample_globals = [] + state.status = "No dataset found under the configured root." + + @ctrl.set("apply_datasets_root") + def _apply_datasets_root() -> None: + """Change the datasets root from the text field.""" + if not state.allow_root_change: + return + raw = (state.datasets_root_text or "").strip() + if not raw: + try: + dataset_service.set_datasets_root(None) + except Exception as exc: # noqa: BLE001 + state.status = f"Failed to clear datasets root: {exc}" + return + _reload_dataset_list() + state.status = "Datasets root cleared." 
+ return + try: + resolved = dataset_service.set_datasets_root(raw) + except Exception as exc: # noqa: BLE001 + state.status = f"Invalid datasets root: {exc}" + return + state.datasets_root_text = str(resolved) if resolved else "" + _reload_dataset_list() + state.status = f"Datasets root set to {resolved}" + + def _load_browse_view(path: str | None) -> None: + try: + listing = dataset_service.list_subdirs(path) + except Exception as exc: # noqa: BLE001 + state.status = f"Cannot browse: {exc}" + return + state.browse_cwd = listing["path"] + state.browse_parent = listing["parent"] + state.browse_entries = listing["entries"] + + @ctrl.set("open_browse_dialog") + def _open_browse_dialog() -> None: + if not state.allow_root_change: + return + start = (state.datasets_root_text or "").strip() or None + try: + _load_browse_view(start) + except Exception: # noqa: BLE001 + _load_browse_view(None) + state.browse_dialog = True + + @ctrl.set("browse_cd") + def _browse_cd(path: str) -> None: + _load_browse_view(path) + + @ctrl.set("browse_up") + def _browse_up() -> None: + if state.browse_parent: + _load_browse_view(state.browse_parent) + + @ctrl.set("browse_select") + def _browse_select() -> None: + """Use ``browse_cwd`` as the new datasets root.""" + state.datasets_root_text = state.browse_cwd + state.browse_dialog = False + _apply_datasets_root() + + @ctrl.set("add_hub_repo") + def _add_hub_repo() -> None: + """Register the repo id from the text field for streaming. + + Calls :meth:`PlaidDatasetService.add_hub_dataset`, then rebuilds + the dataset list so the new entry is immediately selectable from + the dropdown. + """ + if not state.allow_root_change: + return + raw = (state.hub_repo_input or "").strip() + if not raw: + state.status = "Enter a Hugging Face repo id (e.g. namespace/name)." 
+ return + try: + normalised = dataset_service.add_hub_dataset(raw) + except Exception as exc: # noqa: BLE001 + state.status = f"Invalid repo id: {exc}" + return + state.hub_repos = list(dataset_service.hub_repos) + state.hub_repo_input = "" + _reload_dataset_list() + # Select the newly added hub dataset to give immediate feedback. + if normalised in (state.dataset_ids or []): + state.dataset_id = normalised + state.status = f"Streaming from {normalised}" + + @ctrl.set("remove_hub_repo") + def _remove_hub_repo(repo_id: str) -> None: + """Unregister a previously added hub repo.""" + if not state.allow_root_change: + return + dataset_service.remove_hub_dataset(repo_id) + state.hub_repos = list(dataset_service.hub_repos) + _reload_dataset_list() + state.status = f"Removed hub dataset {repo_id}" + + @ctrl.set("stream_next") + def _stream_next() -> None: + """Advance the streaming cursor and load the next sample. + + Handler behind the "Next" button shown (instead of the sample + slider) when the active dataset is a Hugging Face Hub stream. + The cursor is advanced one step on the service-side + ``_StreamCursor``; ``sample_id`` is then set to the new 0-based + step number so the existing ``@state.change("sample_id")`` + plumbing fires and pushes the fresh sample through the VTK + pipeline. + """ + if not state.is_streaming or state.dataset_id is None: + return + if state.stream_exhausted: + return + split = state.split if state.split != "__default__" else None + try: + dataset_service.advance_stream_cursor(state.dataset_id, split) + except StopIteration: + state.stream_exhausted = True + state.status = "Stream exhausted." + return + # Advance the UI counters. ``sample_id`` stays at the + # ``STREAM_CURSOR_ID`` sentinel ("cursor") because + # :meth:`PlaidDatasetService.load_sample` needs that sentinel to + # route through ``converter.sample_to_plaid`` (IterableDatasets + # have no ``to_plaid(dataset, index)`` random-access path). 
+ # Instead of mutating ``sample_id`` we refresh the view + # directly; the service-side cursor has already moved one step + # forward so ``load_sample`` will pick up the new record. + new_position = int(state.stream_position) + 1 + state.stream_position = new_position + state.sample_count = new_position + 1 + state.sample_index = new_position + state.sample_id = STREAM_CURSOR_ID + # ``sample_id`` did not actually change (both times the sentinel + # ``STREAM_CURSOR_ID``), so the ``@state.change("sample_id")`` + # listener is skipped. Refresh the view directly instead. The + # status bar text is set inside ``_refresh_sample_view_impl`` as + # a 0-based step label for streaming mode. + _refresh_sample_view() + + @ctrl.set("reset_camera") + def _reset_camera() -> None: + + # With VtkRemoteView the camera lives on the server, so resetting + # it server-side in ``pipeline.reset_camera`` and pushing a new + # frame via ``ctrl.view_update`` is enough: the browser only + # renders the images we send it. + _apply_pipeline(reset_camera=True) + + # --- UI --------------------------------------------------------------- + + with SinglePageWithDrawerLayout(server) as layout: + layout.title.set_text("Dataset Viewer") + + with layout.drawer as drawer: + # Wider drawer to accommodate long CGNS feature paths such as + # ``Base_2_2/Zone/FlowSolution/Pressure`` without wrapping. + drawer.width = 460 + with v3.VContainer(classes="pa-2"): + # Source-selection tabs: pick between a local datasets + # root (``init_from_disk``) and Hugging Face Hub streaming + # (``init_streaming_from_hub``). The tabs only drive which + # form is rendered; registered datasets from either + # source always land in ``dataset_ids`` together. Hidden + # when ``--disable-root-change`` was passed on the CLI so + # a public deployment can pin the root for good. 
+ with html.Div(v_if=("allow_root_change",), classes="mb-2"): + with v3.VTabs( + v_model=("source_tab",), + density="compact", + grow=True, + classes="mb-2", + ): + v3.VTab("Local", value="local") + v3.VTab("Hub", value="hub") + # Local datasets root form. + with html.Div(v_if=("source_tab === 'local'",)): + html.Div("Datasets root", classes="text-caption") + with html.Div(classes="d-flex align-center"): + v3.VTextField( + v_model=("datasets_root_text",), + density="compact", + hide_details=True, + placeholder="/absolute/path/to/datasets", + classes="mr-2", + clearable=True, + __events=[("keyup_enter", "keyup.enter")], + keyup_enter=ctrl.apply_datasets_root, + ) + v3.VBtn( + icon="mdi-folder-open", + click=ctrl.open_browse_dialog, + density="compact", + variant="tonal", + classes="mr-1", + ) + v3.VBtn( + icon="mdi-check", + click=ctrl.apply_datasets_root, + density="compact", + variant="tonal", + color="primary", + ) + # Hugging Face Hub streaming form. + with html.Div(v_if=("source_tab === 'hub'",)): + html.Div( + "Hugging Face Hub dataset", + classes="text-caption", + ) + with html.Div(classes="d-flex align-center"): + v3.VTextField( + v_model=("hub_repo_input",), + density="compact", + hide_details=True, + placeholder="namespace/name", + prepend_inner_icon="mdi-cloud-download", + classes="mr-2", + clearable=True, + __events=[("keyup_enter", "keyup.enter")], + keyup_enter=ctrl.add_hub_repo, + ) + v3.VBtn( + icon="mdi-plus", + click=ctrl.add_hub_repo, + density="compact", + variant="tonal", + color="primary", + ) + # Chip list of registered repos with a remove button. 
+ with html.Div( + v_if=("(hub_repos || []).length > 0",), + classes="mt-1 d-flex flex-wrap", + ): + v3.VChip( + "{{ repo }}", + v_for="repo in hub_repos", + key="repo", + closable=True, + size="small", + classes="mr-1 mb-1", + click_close=(ctrl.remove_hub_repo, "[repo]"), + ) + v3.VDivider(classes="my-2") + + # The dropdown ``items`` are filtered by ``source_tab``: + # Local tab -> ``local_dataset_ids`` (``init_from_disk`` + # datasets), Hub tab -> ``hub_dataset_ids`` + # (``init_streaming_from_hub`` datasets). The user never + # sees ids from the inactive source in the same menu. + v3.VSelect( + label="Dataset", + v_model=("dataset_id",), + items=( + "source_tab === 'hub' ? hub_dataset_ids : local_dataset_ids", + ), + density="compact", + ) + + v3.VSelect( + label="Split", + v_model=("split",), + items=("splits",), + density="compact", + ) + # Sample picker. Two mutually-exclusive widgets: + # - Local datasets expose a random-access slider over + # the integer sample indices. + # - Hub streaming datasets have no ``__len__`` and can + # only be consumed forward, so we expose a "Next" + # button that advances the ``_StreamCursor`` by one + # step via ``ctrl.stream_next``. + html.Div("Sample", classes="text-caption mt-2") + v3.VSlider( + v_if=("!is_streaming",), + v_model_number=("sample_index",), + min=0, + max=("sample_count > 0 ? sample_count - 1 : 0",), + step=1, + thumb_label=True, + hide_details=True, + disabled=("sample_count === 0",), + ) + with html.Div( + v_if=("is_streaming",), + classes="d-flex align-center mb-1", + ): + v3.VBtn( + "Next", + prepend_icon="mdi-arrow-right", + click=ctrl.stream_next, + disabled=("stream_exhausted",), + color="primary", + variant="tonal", + density="compact", + classes="mr-2", + ) + # Sample counter: for local datasets the slider exposes + # all ids up-front; for streaming datasets we report the + # step number (the total is unknown until the iterator + # is exhausted, at which point "end of stream" appears). 
+ html.Div( + "{{ is_streaming" + " ? ('step ' + (stream_position + 1) + (stream_exhausted" + " ? ' (end of stream)' : ' (streaming)'))" + " : ((sample_id ?? '-') + ' / ' + sample_count + ' samples') }}", + classes="text-caption text-medium-emphasis mb-2", + ) + + # Time axis slider, only shown when the sample actually + # exposes a time axis (time-dependent samples). + with html.Div(v_if=("time_count > 1",), classes="mb-2"): + html.Div("Time", classes="text-caption mt-2") + v3.VSlider( + v_model_number=("time_index",), + min=0, + max=("time_count > 0 ? time_count - 1 : 0",), + step=1, + thumb_label=True, + hide_details=True, + ) + html.Div( + "t = {{ current_time }} " + "" + "({{ time_index + 1 }} / {{ time_count }})", + classes="text-caption text-medium-emphasis", + ) + # Playback controls: Play/Pause + FPS slider + loop. + with html.Div(classes="d-flex align-center mt-2"): + v3.VBtn( + icon=("playing ? 'mdi-pause' : 'mdi-play'",), + click="playing = !playing", + density="compact", + variant="tonal", + classes="mr-2", + ) + v3.VBtn( + icon="mdi-stop", + click=ctrl.stop_playback, + density="compact", + variant="tonal", + classes="mr-2", + ) + v3.VBtn( + icon=("play_loop ? 
'mdi-repeat' : 'mdi-repeat-off'",), + click="play_loop = !play_loop", + density="compact", + variant="tonal", + ) + html.Div("FPS: {{ play_fps }}", classes="text-caption mt-1") + v3.VSlider( + v_model_number=("play_fps",), + min=1, + max=30, + step=1, + hide_details=True, + density="compact", + ) + v3.VDivider(classes="my-2") + html.Div("Base", classes="text-caption") + + with v3.VBtnToggle( + v_model=("active_base",), + mandatory=True, + density="compact", + divided=True, + classes="flex-wrap mb-2", + ): + v3.VBtn( + "{{ base }}", + v_for="base in base_options", + key="base", + value=("base",), + size="small", + ) + v3.VSelect( + label="Field", + v_model=("field",), + items=("field_options",), + density="compact", + ) + v3.VSelect( + label="Colormap", + v_model=("cmap",), + items=("cmaps",), + density="compact", + ) + v3.VSwitch( + label="Show edges", + v_model=("show_edges",), + density="compact", + hide_details=True, + ) + v3.VDivider(classes="my-2") + v3.VBtn("Reset camera", click=ctrl.reset_camera, block=True) + + # Feature filter panel. Only rendered when the active + # dataset exposes any feature path (otherwise the panel + # would be empty and misleading). Driven by the + # ``available_features`` / ``selected_features`` state + # vectors populated by ``_refresh_available_features``; + # the Apply button forwards the selection to + # :meth:`PlaidDatasetService.set_features`, which in turn + # invalidates the store cache and (for streaming + # datasets) rebuilds the iterator with an + # ``update_features_for_CGNS_compatibility`` expansion of + # the user selection. + # Feature filter panel. The expansion panel starts + # collapsed: most users only need the "Load all" shortcut + # button exposed above it, and the full checkbox list is + # only expanded when they actually want to subset the + # dataset. The top-level "Load all" button clears the + # current selection and forces a reload without the user + # having to open the panel at all. 
+ # Hidden for streaming (Hugging Face Hub) datasets: + # feature filtering goes through ``init_streaming_from_hub`` + # which rebuilds the iterator from the dataset-wide + # metadata union, a workflow that does not fit the + # per-split viewer model and led to confusing "Missing + # features" errors. Streaming users therefore always see + # the full feature payload; local disk datasets keep the + # complete feature selection UI unchanged. + with html.Div( + v_if=("!is_streaming && (available_features || []).length > 0",), + classes="mt-3", + ): + v3.VDivider(classes="my-2") + with html.Div(classes="d-flex align-center mb-1"): + html.Div("Features", classes="text-subtitle-2 flex-grow-1") + v3.VBtn( + "Load all", + click=ctrl.select_all_features, + size="x-small", + color="primary", + variant="tonal", + ) + with v3.VExpansionPanels(variant="accordion", multiple=True): + with v3.VExpansionPanel(): + v3.VExpansionPanelTitle( + "Select features ({{ (selected_features" + " || []).length }} / {{ (available_features" + " || []).length }})" + ) + with v3.VExpansionPanelText(): + html.Div( + "Empty selection loads every feature.", + classes="text-caption text-medium-emphasis mb-1", + ) + with html.Div(classes="d-flex mb-1"): + v3.VBtn( + "Clear", + click="selected_features = []", + size="x-small", + variant="text", + classes="mr-1", + ) + v3.VBtn( + "Apply", + click=ctrl.apply_features, + size="x-small", + color="primary", + variant="tonal", + ) + with html.Div( + style="max-height: 240px; overflow: auto;", + classes="pa-1", + ): + v3.VCheckbox( + v_for="feat in available_features", + key="feat", + v_model=("selected_features",), + value=("feat",), + label=("feat",), + density="compact", + hide_details=True, + multiple=True, + ) + + html.Div("{{ status }}", classes="text-caption mt-2") + + # PLAID globals for the current sample (filtered out of + # ``IterationValues`` / ``TimeValues`` bookkeeping arrays). 
+ with html.Div( + v_if=("(sample_globals || []).length > 0",), + classes="mt-3", + ): + html.Div("Globals", classes="text-subtitle-2 mb-1") + with v3.VList(density="compact"): + with v3.VListItem(v_for="g in sample_globals", key="g.name"): + v3.VListItemTitle( + "{{ g.name }} " + "" + "({{ g.dtype }}, shape={{ g.shape }})" + "" + ) + v3.VListItemSubtitle( + "{{ g.preview }}", classes="text-caption" + ) + + # File-system browser dialog for the datasets root. Scoped to the + # server's ``browse_roots`` sandbox so the user can only reach + # directories explicitly allowed by the operator. + with v3.VDialog(v_model=("browse_dialog",), max_width="640"): + with v3.VCard(): + v3.VCardTitle("Select datasets root") + v3.VCardSubtitle( + "{{ browse_cwd }}", classes="text-caption text-medium-emphasis" + ) + with v3.VCardText(style="max-height: 50vh; overflow: auto;"): + with v3.VList(density="compact"): + v3.VListItem( + prepend_icon="mdi-arrow-up", + title="..", + click=ctrl.browse_up, + v_if=("browse_parent",), + ) + with v3.VListItem( + v_for="e in browse_entries", + key="e.path", + click=(ctrl.browse_cd, "[e.path]"), + ): + v3.VListItemTitle("{{ e.name }}") + v3.VListItemSubtitle( + "PLAID dataset", + v_if=("e.is_plaid_candidate",), + classes="text-success", + ) + with v3.VCardActions(): + v3.VSpacer() + v3.VBtn( + "Cancel", + click="browse_dialog = false", + variant="text", + ) + v3.VBtn( + "Use this directory", + click=ctrl.browse_select, + color="primary", + variant="tonal", + ) + + # Indeterminate progress bar shown under the app bar while a sample + # or time step is being loaded on the server. + with layout.toolbar: + # Small chip in the toolbar that advertises whether the + # current dataset is streamed from the Hugging Face Hub (the + # sample slider is then forward-only) or browsed from a + # local PLAID directory (random access). 
+ v3.VChip( + "streaming", + v_if=("is_streaming",), + size="small", + color="secondary", + prepend_icon="mdi-cloud-download", + classes="mr-2", + ) + v3.VProgressLinear( + indeterminate=True, + absolute=True, + location="bottom", + color="primary", + v_if=("loading",), + ) + + with layout.content: + with v3.VContainer(fluid=True, classes="fill-height pa-0 ma-0"): + view = vtk_widgets.VtkRemoteView(pipeline.render_window, ref="view") + + ctrl.view_update = view.update + ctrl.view_reset_camera = view.reset_camera + + # Trigger initial population. + _refresh_splits() + + return server diff --git a/tests/viewer/__init__.py b/tests/viewer/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/viewer/conftest.py b/tests/viewer/conftest.py new file mode 100644 index 00000000..b3b8c59a --- /dev/null +++ b/tests/viewer/conftest.py @@ -0,0 +1,28 @@ +"""Shared fixtures for viewer tests. + +The viewer persists user preferences (currently the last-used datasets +root) to ``$XDG_CONFIG_HOME/plaid/viewer.json``. Tests that exercise +:meth:`PlaidDatasetService.set_datasets_root` would otherwise mutate the +real user preferences file, polluting interactive sessions with a path +from ``tmp_path``. We redirect preference persistence to a temporary +location for every viewer test through the +``PLAID_VIEWER_CONFIG_FILE`` environment variable honoured by +:mod:`plaid.viewer.preferences`. 
+""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + + +@pytest.fixture(autouse=True) +def _isolated_viewer_preferences( + tmp_path_factory: pytest.TempPathFactory, + monkeypatch: pytest.MonkeyPatch, +) -> Path: + """Redirect viewer preference persistence to a unique temporary file.""" + prefs_file = tmp_path_factory.mktemp("viewer_prefs") / "viewer.json" + monkeypatch.setenv("PLAID_VIEWER_CONFIG_FILE", str(prefs_file)) + return prefs_file diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py new file mode 100644 index 00000000..6187a565 --- /dev/null +++ b/tests/viewer/test_cache.py @@ -0,0 +1,52 @@ +"""Tests for the viewer artifact cache.""" + +from __future__ import annotations + +from pathlib import Path + +from plaid.viewer.cache import CacheRoot, sweep_orphans + + +def test_ephemeral_cache_is_cleaned_up_on_close(tmp_path: Path, monkeypatch) -> None: + monkeypatch.setenv("TMPDIR", str(tmp_path)) + cache = CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) + path = cache.path + assert path.exists() + assert cache.is_ephemeral is True + cache.close() + assert not path.exists() + + +def test_persistent_cache_is_preserved(tmp_path: Path) -> None: + target = tmp_path / "persistent" + cache = CacheRoot(persistent_dir=target, install_signal_handlers=False) + assert cache.path == target + assert cache.is_ephemeral is False + cache.close() + assert target.exists() + + +def test_context_manager_removes_ephemeral_dir(tmp_path: Path, monkeypatch) -> None: + monkeypatch.setenv("TMPDIR", str(tmp_path)) + with CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: + path = cache.path + assert path.exists() + assert not path.exists() + + +def test_sweep_orphans_removes_dead_pid_dir(tmp_path: Path) -> None: + victim = tmp_path / "plaid-viewer-999999-deadbeefcafe" + victim.mkdir() + removed = sweep_orphans(tmp_path) + assert victim in removed + assert not victim.exists() + + +def 
def test_sample_ref_roundtrip_without_split() -> None:
    """A split-less ref encodes with the sentinel and round-trips."""
    original = SampleRef(backend_id="disk", dataset_id="ds", split=None, sample_id="42")
    token = original.encode()
    assert "_" in token  # sentinel for missing split
    assert SampleRef.decode(token) == original
+""" + +from __future__ import annotations + +import json +from pathlib import Path + +import pytest + +from plaid.viewer.models import SampleRef +from plaid.viewer.services.paraview_artifact_service import ( + ParaviewArtifactService, + _build_cache_key, +) + + +class _FakeSample: + def __init__(self, meshes_dir: Path, n_times: int) -> None: + self._meshes_dir = meshes_dir + self.features = type( + "F", (), {"data": {float(i): None for i in range(n_times)}} + )() + + def save_to_dir( + self, + path: Path, + overwrite: bool = False, # noqa: ARG002 + memory_safe: bool = False, # noqa: ARG002 + ) -> None: + meshes = Path(path) / "meshes" + meshes.mkdir(parents=True, exist_ok=True) + for i in range(len(self.features.data)): + (meshes / f"mesh_{i:09d}.cgns").write_bytes(b"CGNS_FAKE") + + +class _FakeDatasetService: + def __init__(self, n_times: int = 1) -> None: + self._n_times = n_times + + def load_sample(self, ref: SampleRef): # noqa: ARG002 - interface match + return _FakeSample(Path("."), self._n_times) + + +@pytest.fixture +def ref() -> SampleRef: + return SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + + +def test_ensure_artifact_single_timestep_creates_single_cgns( + tmp_path: Path, ref: SampleRef +) -> None: + service = ParaviewArtifactService(_FakeDatasetService(n_times=1), tmp_path) + artifact = service.ensure_artifact(ref) + assert artifact.created is True + assert artifact.cgns_path.suffix == ".cgns" + assert artifact.cgns_path.exists() + + +def test_ensure_artifact_time_series_writes_series_sidecar( + tmp_path: Path, ref: SampleRef +) -> None: + service = ParaviewArtifactService(_FakeDatasetService(n_times=3), tmp_path) + artifact = service.ensure_artifact(ref) + assert artifact.cgns_path.name.endswith(".cgns.series") + payload = json.loads(artifact.cgns_path.read_text()) + assert payload["file-series-version"] == "1.0" + assert len(payload["files"]) == 3 + assert payload["files"][0]["time"] == 0.0 + # Each entry must 
def test_ensure_artifact_is_idempotent(tmp_path: Path, ref: SampleRef) -> None:
    """A second ensure call reuses the cached artifact instead of rebuilding."""
    service = ParaviewArtifactService(_FakeDatasetService(), tmp_path)
    initial = service.ensure_artifact(ref)
    repeat = service.ensure_artifact(ref)
    assert initial.created is True
    assert repeat.created is False
    assert repeat.artifact_id == initial.artifact_id
To keep these tests +lightweight and free from real CGNS/arrow fixtures, we monkey-patch that +function to return small in-memory stand-ins for ``dataset_dict`` and +``converter_dict``. +""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from plaid.viewer.config import ViewerConfig +from plaid.viewer.models import SampleRef +from plaid.viewer.services import PlaidDatasetService + + +class _FakeDataset(list): + """Minimal list-like stand-in for ``datasets.Dataset``.""" + + +class _FakeConverter: + def __init__(self, samples_by_index: dict[int, object]) -> None: + self._samples = samples_by_index + + def to_plaid(self, dataset, index: int): # noqa: ARG002 - interface match + return self._samples[index] + + +def _make_dataset_dir(root: Path, name: str) -> Path: + base = root / name + (base / "data").mkdir(parents=True, exist_ok=True) + return base + + +def _install_fake_init_from_disk( + monkeypatch: pytest.MonkeyPatch, + payload: dict[str, tuple[dict, dict]], +) -> None: + """Patch ``plaid.storage.init_from_disk`` to return per-directory fixtures.""" + + def _fake(path: str): + base_name = Path(path).name + return payload[base_name] + + import plaid.storage as storage # noqa: PLC0415 + + monkeypatch.setattr(storage, "init_from_disk", _fake) + + +def test_list_datasets_returns_all_subdirectories_with_data(tmp_path: Path) -> None: + _make_dataset_dir(tmp_path, "ds_a") + _make_dataset_dir(tmp_path, "ds_b") + (tmp_path / "not_a_dataset").mkdir() # missing data/ subfolder + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ids = {d.dataset_id for d in service.list_datasets()} + assert ids == {"ds_a", "ds_b"} + + +def test_list_samples_uses_converter_to_plaid_indices( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + dataset_dict = { + "train": _FakeDataset(range(2)), + "test": _FakeDataset(range(1)), + } + converter_dict = { + "train": _FakeConverter({0: object(), 
def test_load_sample_calls_converter_to_plaid_with_integer_index(
    tmp_path: Path, monkeypatch: pytest.MonkeyPatch
) -> None:
    """``load_sample`` resolves the string sample id into an int index."""
    _make_dataset_dir(tmp_path, "ds")
    sentinel = object()
    dataset_dict = {"train": _FakeDataset(range(3))}
    converter_dict = {"train": _FakeConverter({2: sentinel})}
    _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)})

    service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path))
    ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="2")
    assert service.load_sample(ref) is sentinel
UserDefinedData_t node). + pressure = np.array([1.5], dtype=np.float32) + visual_base = ["Geom", None, [["Zone1", None, [], CK.Zone_ts]], CK.CGNSBase_ts] + aux_base = [ + "Constants", + None, + [ + [ + "UD", + None, + [["Pressure", pressure, [], CK.DataArray_ts]], + "UserDefinedData_t", + ], + ], + CK.CGNSBase_ts, + ] + tree = ["CGNSTree", None, [visual_base, aux_base], "CGNSTree_t"] + + features = types.SimpleNamespace(data={0.0: tree}) + sample = types.SimpleNamespace(features=features) + + dataset_dict = {"train": _FakeDataset(range(1))} + converter_dict = {"train": _FakeConverter({0: sample})} + _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + summary = service.describe_non_visual_bases(ref) + + assert list(summary.keys()) == ["Constants"] + entries = summary["Constants"] + assert len(entries) == 1 + entry = entries[0] + assert entry["name"] == "Pressure" + assert entry["shape"] == [1] + assert "float32" in entry["dtype"] + assert "1.5" in entry["preview"] + + +def test_load_sample_rejects_non_integer_sample_id( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + dataset_dict = {"train": _FakeDataset(range(1))} + converter_dict = {"train": _FakeConverter({0: object()})} + _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef( + backend_id="disk", dataset_id="ds", split="train", sample_id="not-an-int" + ) + with pytest.raises(ValueError): + service.load_sample(ref) + + +def test_set_datasets_root_rejects_outside_sandbox(tmp_path: Path) -> None: + sandbox = tmp_path / "sandbox" + sandbox.mkdir() + outside = tmp_path / "outside" + outside.mkdir() + service = PlaidDatasetService( + 
ViewerConfig(datasets_root=sandbox, browse_roots=(sandbox,)) + ) + with pytest.raises(Exception): + service.set_datasets_root(outside) + + +def test_set_datasets_root_updates_config(tmp_path: Path) -> None: + sandbox = tmp_path / "sandbox" + sandbox.mkdir() + sub = sandbox / "sub" + sub.mkdir() + service = PlaidDatasetService( + ViewerConfig(datasets_root=sandbox, browse_roots=(sandbox,)) + ) + resolved = service.set_datasets_root(sub) + assert resolved == sub.resolve() + assert service.datasets_root == sub.resolve() + + +def test_list_subdirs_returns_entries(tmp_path: Path) -> None: + sandbox = tmp_path / "sandbox" + sandbox.mkdir() + (sandbox / "a").mkdir() + (sandbox / "b").mkdir() + (sandbox / "b" / "data").mkdir() + (sandbox / "b" / "problem_definitions").mkdir() + service = PlaidDatasetService( + ViewerConfig(datasets_root=sandbox, browse_roots=(sandbox,)) + ) + listing = service.list_subdirs(sandbox) + names = {e["name"] for e in listing["entries"]} + assert names == {"a", "b"} + plaid_entry = next(e for e in listing["entries"] if e["name"] == "b") + assert plaid_entry["is_plaid_candidate"] is True + + +def test_list_subdirs_rejects_outside_sandbox(tmp_path: Path) -> None: + sandbox = tmp_path / "sandbox" + sandbox.mkdir() + outside = tmp_path / "outside" + outside.mkdir() + service = PlaidDatasetService( + ViewerConfig(datasets_root=sandbox, browse_roots=(sandbox,)) + ) + with pytest.raises(Exception): + service.list_subdirs(outside) + + +# --------------------------------------------------------------------------- +# Hugging Face Hub streaming +# --------------------------------------------------------------------------- + + +def _install_fake_init_streaming_from_hub( + monkeypatch: pytest.MonkeyPatch, + payload: dict[str, tuple[dict, dict]], +) -> None: + """Patch ``plaid.storage.init_streaming_from_hub`` to return fixtures.""" + + def _fake(repo_id: str): + return payload[repo_id] + + import plaid.storage as storage # noqa: PLC0415 + + 
monkeypatch.setattr(storage, "init_streaming_from_hub", _fake, raising=False) + + +def test_add_hub_dataset_rejects_invalid_repo_id(tmp_path: Path) -> None: + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + with pytest.raises(ValueError): + service.add_hub_dataset("") + with pytest.raises(ValueError): + service.add_hub_dataset("missing-slash") + + +def test_add_hub_dataset_is_listed_alongside_local(tmp_path: Path) -> None: + _make_dataset_dir(tmp_path, "local_ds") + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset("PLAID-lib/VKI-LS59") + entries = service.list_datasets() + ids = {d.dataset_id: d.backend_id for d in entries} + assert ids == {"local_ds": "disk", "PLAID-lib/VKI-LS59": "hub"} + # Idempotent add + service.add_hub_dataset("PLAID-lib/VKI-LS59") + assert len(service.list_datasets()) == 2 + + +def test_list_samples_streams_from_hub( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "PLAID-lib/VKI-LS59" + dataset_dict = { + "train": _FakeDataset(range(2)), + } + converter_dict = { + "train": _FakeConverter({0: object(), 1: object()}), + } + _install_fake_init_streaming_from_hub( + monkeypatch, {repo_id: (dataset_dict, converter_dict)} + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + refs = service.list_samples(repo_id) + assert {(r.backend_id, r.split, r.sample_id) for r in refs} == { + ("hub", "train", "0"), + ("hub", "train", "1"), + } + + +def test_remove_hub_dataset_clears_cache( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/ds" + _install_fake_init_streaming_from_hub( + monkeypatch, + { + repo_id: ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: object()})}, + ) + }, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + service.list_samples(repo_id) # populates cache + assert repo_id in 
service._store_cache # noqa: SLF001 + service.remove_hub_dataset(repo_id) + assert repo_id not in service._store_cache # noqa: SLF001 + assert repo_id not in [d.dataset_id for d in service.list_datasets()] + + +# --------------------------------------------------------------------------- +# Streaming cursor behaviour (IterableDataset without __len__) +# --------------------------------------------------------------------------- + + +class _FakeIterableDataset: + """Stand-in for ``datasets.IterableDataset`` - no ``__len__``.""" + + def __init__(self, records: list[object]) -> None: + self._records = records + + def __iter__(self): + return iter(self._records) + + +class _FakeStreamingConverter: + """Converter exposing ``sample_to_plaid`` (streaming API).""" + + def __init__(self, mapping: dict[int, object]) -> None: + # Maps the raw record itself to a PLAID sample, using id() lookup + # so we can assert the correct record was forwarded. + self._mapping = mapping + + def sample_to_plaid(self, record): + return self._mapping[record] + + # Intentionally no ``to_plaid`` method: streaming paths must not use it. 
+ + +def _install_fake_streaming_dataset( + monkeypatch: pytest.MonkeyPatch, repo_id: str +) -> tuple[list[object], dict[int, object]]: + """Register a 3-record streaming dataset and return (records, mapping).""" + records = [object(), object(), object()] + mapping = {rec: object() for rec in records} + dataset_dict = {"train": _FakeIterableDataset(records)} + converter_dict = {"train": _FakeStreamingConverter(mapping)} + _install_fake_init_streaming_from_hub( + monkeypatch, {repo_id: (dataset_dict, converter_dict)} + ) + return records, mapping + + +def test_streaming_dataset_is_detected_as_streaming( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + assert service.is_streaming(repo_id) is True + # Splits without __len__ report a ``None`` count in the detail view. + detail = service.get_dataset(repo_id) + assert detail.splits == {"train": None} + + +def test_list_samples_emits_single_cursor_ref_for_streaming( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + refs = service.list_samples(repo_id) + # Streaming splits surface a single synthetic reference using the + # sentinel sample id, regardless of how many records the stream holds. 
+ assert len(refs) == 1 + assert refs[0].backend_id == "hub" + assert refs[0].sample_id == "cursor" + assert refs[0].split == "train" + + +def test_advance_stream_cursor_walks_records_forward( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + records, mapping = _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + + # No sample fetched yet. + assert service.stream_cursor_position(repo_id, "train") == -1 + + ref0 = service.advance_stream_cursor(repo_id, "train") + assert service.stream_cursor_position(repo_id, "train") == 0 + # ``load_sample`` must materialise the record that the cursor just + # consumed, going through ``converter.sample_to_plaid``. + sample0 = service.load_sample(ref0) + assert sample0 is mapping[records[0]] + + # Advancing again moves forward and does not re-consume the first + # record. + ref1 = service.advance_stream_cursor(repo_id, "train") + assert service.stream_cursor_position(repo_id, "train") == 1 + assert service.load_sample(ref1) is mapping[records[1]] + + +def test_advance_stream_cursor_raises_when_exhausted( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + # Three records in the fake stream; the fourth advance must stop. 
+ for _ in range(3): + service.advance_stream_cursor(repo_id, "train") + with pytest.raises(StopIteration): + service.advance_stream_cursor(repo_id, "train") + + +def test_reset_stream_cursor_rewinds_to_first_record( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + records, mapping = _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + service.advance_stream_cursor(repo_id, "train") + service.advance_stream_cursor(repo_id, "train") + assert service.stream_cursor_position(repo_id, "train") == 1 + + service.reset_stream_cursor(repo_id, "train") + assert service.stream_cursor_position(repo_id, "train") == -1 + ref = service.advance_stream_cursor(repo_id, "train") + assert service.load_sample(ref) is mapping[records[0]] + + +# --------------------------------------------------------------------------- +# Feature filtering +# --------------------------------------------------------------------------- + + +def _install_fake_metadata( + monkeypatch: pytest.MonkeyPatch, + *, + variable_schema: dict[str, object], + constant_schema: dict[str, dict[str, object]], +) -> None: + """Patch ``load_metadata_from_disk`` / ``load_metadata_from_hub``.""" + from plaid.storage.common import reader as reader_mod # noqa: PLC0415 + + def _fake(*_args, **_kwargs): + return ({}, variable_schema, constant_schema, {}) + + monkeypatch.setattr(reader_mod, "load_metadata_from_disk", _fake, raising=False) + monkeypatch.setattr(reader_mod, "load_metadata_from_hub", _fake, raising=False) + + +class _FeatureAwareConverter: + """Converter recording the feature list handed to ``to_plaid``.""" + + def __init__( + self, + samples_by_index: dict[int, object], + *, + constant_features: set[str] | None = None, + variable_features: set[str] | None = None, + ) -> None: + self._samples = samples_by_index + self.constant_features = constant_features or set() + 
self.variable_features = variable_features or set() + self.last_features: list[str] | None = None + + def to_plaid(self, dataset, index: int, features=None): # noqa: ARG002 + self.last_features = list(features) if features is not None else None + return self._samples[index] + + +def test_list_available_features_only_exposes_field_paths( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + variable = { + "Base_2_2/Zone/VertexFields/pressure": None, + "Base_2_2/Zone/GridCoordinates/CoordinateX": None, + } + constant = { + "train": { + "Base_2_2/Zone/VertexFields/sdf": None, + "Base_2_2/Zone/VertexFields/sdf_times": None, + "Base_2_2/Zone/VertexFields/GridLocation": None, + "Global/angle_in": None, + } + } + _install_fake_metadata( + monkeypatch, variable_schema=variable, constant_schema=constant + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + fields = service.list_available_features("ds") + assert "Base_2_2/Zone/VertexFields/pressure" in fields + assert "Base_2_2/Zone/VertexFields/sdf" in fields + # Coordinates, time bookkeeping, GridLocation metadata and scalars + # must not appear in the user-facing feature list. 
+ assert "Base_2_2/Zone/GridCoordinates/CoordinateX" not in fields + assert "Base_2_2/Zone/VertexFields/sdf_times" not in fields + assert "Base_2_2/Zone/VertexFields/GridLocation" not in fields + assert "Global/angle_in" not in fields + + +def test_set_features_rejects_unknown_path( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + _install_fake_metadata( + monkeypatch, + variable_schema={"Base/Zone/VertexFields/pressure": None}, + constant_schema={"train": {}}, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + with pytest.raises(ValueError): + service.set_features("ds", ["not/a/feature"]) + + +def test_load_sample_forwards_selected_features_on_disk( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Disk path: ``to_plaid`` receives the filtered feature list.""" + _make_dataset_dir(tmp_path, "ds") + variable = {"Base/Zone/VertexFields/pressure": None} + constant = { + "train": { + "Base": None, + "Base/Zone": None, + "Base/Zone/VertexFields": None, + } + } + _install_fake_metadata( + monkeypatch, variable_schema=variable, constant_schema=constant + ) + target = object() + dataset_dict = {"train": _FakeDataset(range(1))} + converter = _FeatureAwareConverter( + {0: target}, + constant_features=set(constant["train"].keys()), + variable_features=set(variable.keys()), + ) + converter_dict = {"train": converter} + _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + assert service.load_sample(ref) is target + # The user-selected field is forwarded, but the split's constant + # features (mesh supports + globals) are always appended so the + # rendered sample keeps its scalars/globals on top of the + # user-selected variable fields. 
+ assert converter.last_features is not None + assert "Base/Zone/VertexFields/pressure" in converter.last_features + for path in constant["train"]: + assert path in converter.last_features + + +def test_load_sample_without_filter_does_not_forward_features( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + target = object() + dataset_dict = {"train": _FakeDataset(range(1))} + converter = _FeatureAwareConverter({0: target}) + converter_dict = {"train": converter} + _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + assert service.load_sample(ref) is target + assert converter.last_features is None + + +def test_streaming_open_expands_features_via_cgns_helper( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Streaming path: ``init_streaming_from_hub`` receives the expanded list. + + The expansion is delegated to + ``plaid.utils.cgns_helper.update_features_for_CGNS_compatibility``; + we patch that helper to a deterministic stub and assert the service + hands the stub's output through. 
+ """ + repo_id = "org/stream_filter" + variable = {"Base/Zone/VertexFields/pressure": None} + constant = {"train": {"Base": None, "Base/Zone": None}} + _install_fake_metadata( + monkeypatch, variable_schema=variable, constant_schema=constant + ) + + captured: dict[str, object] = {} + + def _fake_init_streaming_from_hub(_repo, features=None): + captured["features"] = features + return ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: object()})}, + ) + + import plaid.storage as storage # noqa: PLC0415 + + monkeypatch.setattr( + storage, + "init_streaming_from_hub", + _fake_init_streaming_from_hub, + raising=False, + ) + + from plaid.utils import cgns_helper # noqa: PLC0415 + + def _fake_expand(features, _constant, _variable): + # Deterministic: append a sentinel so we can verify that the + # service actually routes through the helper instead of + # forwarding the raw user selection. + return sorted(set(features) | {"__expanded__"}) + + monkeypatch.setattr( + cgns_helper, + "update_features_for_CGNS_compatibility", + _fake_expand, + ) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + service.set_features(repo_id, ["Base/Zone/VertexFields/pressure"]) + service.list_samples(repo_id) # triggers ``_open`` + assert captured["features"] == [ + "Base/Zone/VertexFields/pressure", + "__expanded__", + ] + + +def test_set_features_invalidates_store_cache( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Changing the feature selection must force a reload of the dataset.""" + _make_dataset_dir(tmp_path, "ds") + variable = {"Base/Zone/VertexFields/pressure": None} + _install_fake_metadata( + monkeypatch, + variable_schema=variable, + constant_schema={"train": {}}, + ) + dataset_dict = {"train": _FakeDataset(range(1))} + converter = _FeatureAwareConverter( + {0: object()}, + variable_features=set(variable.keys()), + ) + _install_fake_init_from_disk( + monkeypatch, {"ds": (dataset_dict, 
{"train": converter})} + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.list_samples("ds") # populates cache + assert "ds" in service._store_cache # noqa: SLF001 + service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + assert "ds" not in service._store_cache # noqa: SLF001 + + +def test_load_sample_auto_advances_cursor_on_first_access( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Calling ``load_sample`` with a cursor ref before any advance acts + like "give me the first sample". + """ + from plaid.viewer.models import SampleRef + + repo_id = "org/stream" + records, mapping = _install_fake_streaming_dataset(monkeypatch, repo_id) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + ref = SampleRef( + backend_id="hub", dataset_id=repo_id, split="train", sample_id="cursor" + ) + sample = service.load_sample(ref) + assert sample is mapping[records[0]] + assert service.stream_cursor_position(repo_id, "train") == 0 + + +class _KeyErrorOnFilteredConverter: + """Converter whose filtered ``to_plaid`` path raises like PLAID does. + + Mirrors the real failure mode: the converter declares + ``constant_features`` containing a path that its backing store + cannot materialise, so passing ``features=sorted(constant_features)`` + triggers ``KeyError("Missing features in …")`` deep inside PLAID. + The service must degrade gracefully and fall back to an unfiltered + load instead of letting the error surface to the user. 
+ """ + + def __init__( + self, + samples_by_index: dict[int, object], + *, + constant_features: set[str], + variable_features: set[str] | None = None, + ) -> None: + self._samples = samples_by_index + self.constant_features = constant_features + self.variable_features = variable_features or set() + self.unfiltered_calls = 0 + + def to_plaid(self, dataset, index: int, features=None): # noqa: ARG002 + if features is not None: + raise KeyError("Missing features in dataset/converter: ['bogus']") + self.unfiltered_calls += 1 + return self._samples[index] + + +def test_load_sample_falls_back_when_empty_filter_triggers_missing_features( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """Clearing the selection on a split whose ``constant_features`` trip + the CGNS expander should not raise ``Missing features``. + + Reproduces the viewer bug where, on a split that shares none of the + user-selected fields, the "geometry-only" fallback in + :meth:`PlaidDatasetService.load_sample` used to hand the split's + ``constant_features`` straight to ``Converter.to_plaid`` and crash + with ``KeyError("Missing features in …")``. The service must now + degrade to an unfiltered load so the user still sees the mesh. 
+ """ + _make_dataset_dir(tmp_path, "ds") + variable = {"Base/Zone/VertexFields/pressure": None} + constant = {"train": {"Base": None, "Base/Zone": None}} + _install_fake_metadata( + monkeypatch, variable_schema=variable, constant_schema=constant + ) + target = object() + dataset_dict = {"train": _FakeDataset(range(1))} + converter = _KeyErrorOnFilteredConverter( + {0: target}, + constant_features=set(constant["train"].keys()), + variable_features=set(), # split has no variable features at all + ) + converter_dict = {"train": converter} + _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + # Emulate the UI: the user selected a field that exists elsewhere in + # the dataset metadata but not in this split. + service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + + assert service.load_sample(ref) is target + assert converter.unfiltered_calls == 1 + + +def test_load_sample_does_not_reinject_deselected_constant_fields( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """A user-visible *field* declared as a split constant must not be + silently re-added to the request when the user deselects it. + + In PLAID, ``constant_features`` can hold genuine field paths (a + field whose values happen to be constant across the split's + samples, e.g. a signed-distance field precomputed offline). Those + fields appear in the UI feature list and are toggleable. An + earlier fix for the "Missing features" crash blindly re-injected + every split constant on top of the user's selection, which + defeated the filter: deselecting ``sdf`` still loaded ``sdf``. + + The service must only re-inject CGNS bookkeeping paths + (coordinates, connectivities, ...), not user-visible fields. 
+ """ + _make_dataset_dir(tmp_path, "ds") + variable = {"Base_2_2/Zone/VertexFields/pressure": None} + constant = { + "train": { + # User-visible field -> must drop when deselected. + "Base_2_2/Zone/VertexFields/sdf": None, + # Time-series bookkeeping for ``sdf`` -> must drop with it. + "Base_2_2/Zone/VertexFields/sdf_times": None, + # CGNS bookkeeping -> must always be kept. + "Base_2_2": None, + "Base_2_2/Zone": None, + "Base_2_2/Zone/GridCoordinates/CoordinateX": None, + } + } + _install_fake_metadata( + monkeypatch, variable_schema=variable, constant_schema=constant + ) + target = object() + dataset_dict = {"train": _FakeDataset(range(1))} + converter = _FeatureAwareConverter( + {0: target}, + constant_features=set(constant["train"].keys()), + variable_features=set(variable.keys()), + ) + _install_fake_init_from_disk( + monkeypatch, {"ds": (dataset_dict, {"train": converter})} + ) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + # User clears the selection -> load only the geometry. + service.set_features("ds", []) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + assert service.load_sample(ref) is target + # Bookkeeping paths are preserved so the renderer can draw the mesh... + assert converter.last_features is not None + assert "Base_2_2/Zone/GridCoordinates/CoordinateX" in converter.last_features + # ... but the deselected user-visible field must NOT be re-injected, + # and its ``_times`` bookkeeping path must follow the same fate. 
+ assert "Base_2_2/Zone/VertexFields/sdf" not in converter.last_features + assert "Base_2_2/Zone/VertexFields/sdf_times" not in converter.last_features + assert "Base_2_2/Zone/VertexFields/pressure" not in converter.last_features diff --git a/tests/viewer/test_trame_server.py b/tests/viewer/test_trame_server.py new file mode 100644 index 00000000..3fdc19d5 --- /dev/null +++ b/tests/viewer/test_trame_server.py @@ -0,0 +1,169 @@ +"""Smoke tests for the trame dataset viewer server.""" + +from __future__ import annotations + +import json +import sys +import types +from pathlib import Path + +import pytest + + +@pytest.fixture +def empty_datasets_root(tmp_path: Path) -> Path: + """Return an existing but empty datasets directory.""" + root = tmp_path / "datasets" + root.mkdir() + return root + + +# TODO: Re-enable after fixing VTK segfault in CI environment +# def test_build_server_returns_trame_server(empty_datasets_root: Path) -> None: +# """``build_server`` should return a configured trame server for empty roots.""" +# pytest.importorskip("vtk") +# pytest.importorskip("trame") +# from plaid.viewer.trame_app.server import build_server # noqa: PLC0415 + +# config = ViewerConfig(datasets_root=empty_datasets_root) +# dataset_service = PlaidDatasetService(config) + +# with CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: +# artifact_service = ParaviewArtifactService(dataset_service, cache.path) +# server = build_server(dataset_service, artifact_service) + +# # The server should expose state and controller attributes. +# assert hasattr(server, "state") +# assert hasattr(server, "controller") +# assert server.state.dataset_ids == [] +# assert server.state.status.startswith("Select a dataset") + +# TODO: Re-enable after fixing VTK segfault in CI environment +# def test_browse_cd_updates_browse_state(tmp_path: Path) -> None: +# """``ctrl.browse_cd`` must load the given directory into the browser state. 
+ +# Regression guard for a bug where the file-browser list items dispatched +# through the client-side ``trigger(...)`` helper (which only resolves +# names registered as server triggers), while ``browse_cd`` was only +# registered as a controller method via ``@ctrl.set``. Clicking a folder +# in the browser dialog was therefore a no-op. +# """ +# pytest.importorskip("vtk") +# pytest.importorskip("trame") +# from plaid.viewer.trame_app.server import build_server # noqa: PLC0415 + +# datasets_root = tmp_path / "datasets" +# datasets_root.mkdir() +# child = datasets_root / "child" +# child.mkdir() + +# config = ViewerConfig( +# datasets_root=datasets_root, +# browse_roots=(tmp_path,), +# ) +# dataset_service = PlaidDatasetService(config) + +# with CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: +# artifact_service = ParaviewArtifactService(dataset_service, cache.path) +# server = build_server(dataset_service, artifact_service) + +# assert hasattr(server.controller, "browse_cd") +# server.controller.browse_cd(str(child)) + +# assert Path(server.state.browse_cwd) == child.resolve() +# assert server.state.browse_parent == str(datasets_root.resolve()) + + +class _FakeSelection: + def __init__(self) -> None: + self.enabled: list[str] = [] + + def DisableAllArrays(self) -> None: # noqa: N802 - VTK API + self.enabled = [] + + def EnableArray(self, name: str) -> None: # noqa: N802 - VTK API + self.enabled.append(name) + + +class _FakeCGNSReader: + def __init__(self) -> None: + self.file_name: str | None = None + self.enable_calls: list[str] = [] + + def SetFileName(self, name: str) -> None: # noqa: N802 - VTK API + self.file_name = name + + def UpdateInformation(self) -> None: # noqa: N802 - VTK API + self.enable_calls.append("UpdateInformation") + + def EnableAllBases(self) -> None: # noqa: N802 - VTK API + self.enable_calls.append("EnableAllBases") + + def EnableAllPointArrays(self) -> None: # noqa: N802 - VTK API + 
self.enable_calls.append("EnableAllPointArrays") + + def EnableAllCellArrays(self) -> None: # noqa: N802 - VTK API + self.enable_calls.append("EnableAllCellArrays") + + +class _FakeCGNSFileSeriesReader: + def __init__(self) -> None: + self.inner: _FakeCGNSReader | None = None + self.file_names: list[str] = [] + self.update_information_calls = 0 + + def SetReader(self, inner) -> None: # noqa: N802 - VTK API + self.inner = inner + + def AddFileName(self, name: str) -> None: # noqa: N802 - VTK API + self.file_names.append(name) + + def UpdateInformation(self) -> None: # noqa: N802 - VTK API + self.update_information_calls += 1 + + +def test_load_reader_series_uses_vtk_cgns_file_series_reader( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + """A ``.cgns.series`` sidecar must drive a ``vtkCGNSFileSeriesReader``. + + This guards against regressing to ``vtkFileSeriesReader``, which is not + available in the ``vtk`` PyPI wheel and would silently break time-series + rendering. + """ + series_path = tmp_path / "meshes.cgns.series" + sidecar = { + "file-series-version": "1.0", + "files": [ + {"name": "meshes/mesh_000000001.cgns", "time": 1.5}, + {"name": "meshes/mesh_000000000.cgns", "time": 0.0}, + ], + } + series_path.write_text(json.dumps(sidecar)) + + fake_vtk = types.SimpleNamespace( + vtkCGNSReader=_FakeCGNSReader, + vtkCGNSFileSeriesReader=_FakeCGNSFileSeriesReader, + ) + monkeypatch.setitem(sys.modules, "vtk", fake_vtk) + + from plaid.viewer.trame_app.server import _load_reader # noqa: PLC0415 + + reader = _load_reader(series_path) + + assert isinstance(reader, _FakeCGNSFileSeriesReader) + assert isinstance(reader.inner, _FakeCGNSReader) + # File names are added in ascending time order, not sidecar order. 
+ expected_order = [ + str((tmp_path / "meshes/mesh_000000000.cgns").resolve()), + str((tmp_path / "meshes/mesh_000000001.cgns").resolve()), + ] + assert reader.file_names == expected_order + assert reader.update_information_calls == 1 + # Inner reader must have had its selections enabled so the pipeline + # produces non-empty output. + assert reader.inner.enable_calls == [ + "EnableAllBases", + "EnableAllPointArrays", + "EnableAllCellArrays", + ] diff --git a/uv.lock b/uv.lock index d71bc633..3d2e2a04 100644 --- a/uv.lock +++ b/uv.lock @@ -1067,7 +1067,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.12.0" +version = "1.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1080,9 +1080,9 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/52/1b54cb569509c725a32c1315261ac9fd0e6b91bbbf74d86fca10d3376164/huggingface_hub-1.12.0.tar.gz", hash = "sha256:7c3fe85e24b652334e5d456d7a812cd9a071e75630fac4365d9165ab5e4a34b6", size = 763091, upload-time = "2026-04-24T13:32:08.674Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/9f/3fda8b014db3ae239addc9b48b35c2cf7d318950b430712f34a2473ef81d/huggingface_hub-1.12.2.tar.gz", hash = "sha256:282c4999e641c89affdc4c02c265eddea944c1390dc19e89dac8ad3ae76dbdaf", size = 763393, upload-time = "2026-04-29T09:45:09.202Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/2b/ef03ddb96bd1123503c2bd6932001020292deea649e9bf4caa2cb65a85bf/huggingface_hub-1.12.0-py3-none-any.whl", hash = "sha256:d74939969585ee35748bd66de09baf84099d461bda7287cd9043bfb99b0e424d", size = 646806, upload-time = "2026-04-24T13:32:06.717Z" }, + { url = "https://files.pythonhosted.org/packages/71/c1/1fa4162f6dd53259daf2ad31385273341821fa0acce164cd03971937a60e/huggingface_hub-1.12.2-py3-none-any.whl", hash = "sha256:7968e897fdbc6343c871c240d87d4434efe0ad9f80d57daa1cc5678c6d148529", size = 647757, 
upload-time = "2026-04-29T09:45:07.63Z" }, ] [[package]] @@ -1573,6 +1573,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/89/5f/39cbadc320cd78f4834b0a9f7a2fa3c980dca942bf193f315837eacb8870/meshio-5.3.5-py3-none-any.whl", hash = "sha256:0736c6e34ecc768f62f2cde5d8233a3529512a9399b25c68ea2ca0d5900cdc10", size = 166162, upload-time = "2024-01-31T15:09:36.691Z" }, ] +[[package]] +name = "more-itertools" +version = "11.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/f7/139d22fef48ac78127d18e01d80cf1be40236ae489769d17f35c3d425293/more_itertools-11.0.2.tar.gz", hash = "sha256:392a9e1e362cbc106a2457d37cabf9b36e5e12efd4ebff1654630e76597df804", size = 144659, upload-time = "2026-04-09T15:01:33.297Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/98/6af411189d9413534c3eb691182bff1f5c6d44ed2f93f2edfe52a1bbceb8/more_itertools-11.0.2-py3-none-any.whl", hash = "sha256:6e35b35f818b01f691643c6c611bc0902f2e92b46c18fffa77ae1e7c46e912e4", size = 71939, upload-time = "2026-04-09T15:01:32.21Z" }, +] + [[package]] name = "mpmath" version = "1.3.0" @@ -1582,6 +1591,41 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/43/e3/7d92a15f894aa0c9c4b49b8ee9ac9850d6e63b03c9c32c0367a13ae62209/mpmath-1.3.0-py3-none-any.whl", hash = "sha256:a0b2b9fe80bbcd81a6647ff13108738cfb482d481d826cc0e02f5b35e5c88d2c", size = 536198, upload-time = "2023-03-07T16:47:09.197Z" }, ] +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2c/97/560d11202bcd537abca693fd85d81cebe2107ba17301de42b01ac1677b69/msgpack-1.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e86a607e558d22985d856948c12a3fa7b42efad264dca8a3ebbcfa2735d786c", size = 82271, upload-time = "2025-10-08T09:14:49.967Z" }, + { url = "https://files.pythonhosted.org/packages/83/04/28a41024ccbd67467380b6fb440ae916c1e4f25e2cd4c63abe6835ac566e/msgpack-1.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:283ae72fc89da59aa004ba147e8fc2f766647b1251500182fac0350d8af299c0", size = 84914, upload-time = "2025-10-08T09:14:50.958Z" }, + { url = "https://files.pythonhosted.org/packages/71/46/b817349db6886d79e57a966346cf0902a426375aadc1e8e7a86a75e22f19/msgpack-1.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:61c8aa3bd513d87c72ed0b37b53dd5c5a0f58f2ff9f26e1555d3bd7948fb7296", size = 416962, upload-time = "2025-10-08T09:14:51.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/e0/6cc2e852837cd6086fe7d8406af4294e66827a60a4cf60b86575a4a65ca8/msgpack-1.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:454e29e186285d2ebe65be34629fa0e8605202c60fbc7c4c650ccd41870896ef", size = 426183, upload-time = "2025-10-08T09:14:53.477Z" }, + { url = "https://files.pythonhosted.org/packages/25/98/6a19f030b3d2ea906696cedd1eb251708e50a5891d0978b012cb6107234c/msgpack-1.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7bc8813f88417599564fafa59fd6f95be417179f76b40325b500b3c98409757c", size = 411454, upload-time = "2025-10-08T09:14:54.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/cd/9098fcb6adb32187a70b7ecaabf6339da50553351558f37600e53a4a2a23/msgpack-1.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bafca952dc13907bdfdedfc6a5f579bf4f292bdd506fadb38389afa3ac5b208e", size = 422341, upload-time = "2025-10-08T09:14:56.328Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/ae/270cecbcf36c1dc85ec086b33a51a4d7d08fc4f404bdbc15b582255d05ff/msgpack-1.1.2-cp311-cp311-win32.whl", hash = "sha256:602b6740e95ffc55bfb078172d279de3773d7b7db1f703b2f1323566b878b90e", size = 64747, upload-time = "2025-10-08T09:14:57.882Z" }, + { url = "https://files.pythonhosted.org/packages/2a/79/309d0e637f6f37e83c711f547308b91af02b72d2326ddd860b966080ef29/msgpack-1.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:d198d275222dc54244bf3327eb8cbe00307d220241d9cec4d306d49a44e85f68", size = 71633, upload-time = "2025-10-08T09:14:59.177Z" }, + { url = "https://files.pythonhosted.org/packages/73/4d/7c4e2b3d9b1106cd0aa6cb56cc57c6267f59fa8bfab7d91df5adc802c847/msgpack-1.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:86f8136dfa5c116365a8a651a7d7484b65b13339731dd6faebb9a0242151c406", size = 64755, upload-time = "2025-10-08T09:15:00.48Z" }, + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = 
"https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, +] + [[package]] name = "multidict" version = "6.7.1" @@ -2460,6 +2504,12 @@ dev = [ { name = "sphinx-tabs" }, { name = "sphinxcontrib-bibtex" }, ] +viewer = [ + { name = "trame" }, + { name = "trame-vtk" }, + { name = "trame-vuetify" }, + { name = "vtk" }, +] [package.metadata] requires-dist = [ @@ -2492,6 +2542,12 @@ dev = [ { name = "sphinx-tabs", specifier = ">=3.4.7" }, { name = "sphinxcontrib-bibtex", specifier = ">=2.6.5" }, ] +viewer = [ + { name = "trame", specifier = ">=3.6,<4.0" }, + { name = "trame-vtk", specifier = ">=2.8,<3.0" }, + { name = "trame-vuetify", specifier = ">=2.7,<3.0" }, + { name = "vtk", specifier = ">=9.6.1" }, +] [[package]] name = "pytest" @@ -3303,6 +3359,80 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" }, ] +[[package]] +name = "trame" +version = "3.12.0" +source = { registry = "https://pypi.org/simple" } 
+dependencies = [ + { name = "pyyaml" }, + { name = "trame-client" }, + { name = "trame-common" }, + { name = "trame-server" }, + { name = "wslink" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/35/ac/ebd44ac237841d131314e41e0b1654926b77517b0553d7a7f4227778db07/trame-3.12.0.tar.gz", hash = "sha256:88b861162cb8b025e84e93f17dcfd43a84d02d2c1608c9f6d58e3cd646a50c05", size = 23493, upload-time = "2025-08-18T20:21:40.655Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/15/5869b2c7556fce52306b6b65b06ec7c088f063b865cdfa75ad30bc229b7c/trame-3.12.0-py3-none-any.whl", hash = "sha256:9b33020625e0d1710d060c0fabe7b3be0e31b5e5138439ec9a796faf6fe96915", size = 28516, upload-time = "2025-08-18T20:21:39.037Z" }, +] + +[[package]] +name = "trame-client" +version = "3.12.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "trame-common" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/75/5cfd6ee2c01d0d00eca322a07356072196b4583ad6f2709c564a00f17a69/trame_client-3.12.0.tar.gz", hash = "sha256:cf722b2bd9d36fda700ef7556f438269c1d7d84644fdae95cfa277dd54a51ff0", size = 245933, upload-time = "2026-04-29T16:48:59.577Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/b4/39f2e15dbce29c13841d58b1443fb3bb42c66249b3f2f72389f858e40cfa/trame_client-3.12.0-py3-none-any.whl", hash = "sha256:b30c4dc17e8941ed752d0910f3f1a63fff08982f024b17347ee52031886edc89", size = 250410, upload-time = "2026-04-29T16:48:57.371Z" }, +] + +[[package]] +name = "trame-common" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/86/cbb08d6b5229783781a4a1ee882c95ab7c905d163f610b841335e6ddd759/trame_common-1.1.3.tar.gz", hash = "sha256:25a3894823bebf509d3bad2b0c545fbeee9eed5d6320d94f781ec595c18d8068", size = 18632, upload-time = "2026-03-17T22:52:35.223Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c6/40/bf161cf981eebf94bffbe9c23f4b35bf592b44d20b47d734258a17f1729c/trame_common-1.1.3-py3-none-any.whl", hash = "sha256:8d93cda32cfea869aaabaec5d91ded369882b1e7f28c0dba2a101a7896cfa5b2", size = 21977, upload-time = "2026-03-17T22:52:34.191Z" }, +] + +[[package]] +name = "trame-server" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "more-itertools" }, + { name = "wslink" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/54/e5a974c09f94bd795c20c311405ffa132f189ac609211305552d238a46ad/trame_server-3.10.0.tar.gz", hash = "sha256:0c341de976f758ff8e6076991e7f30be180384d4f386cf29aefa3915b801d118", size = 39765, upload-time = "2026-01-13T23:22:34.653Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/3a/d895d2069c9bf9288efde97aaa22845d3c711a7af031605863ac4019b7fc/trame_server-3.10.0-py3-none-any.whl", hash = "sha256:eb282f6bc6fa8fdbb2c65b8e6d22e088a27b56fe0b7a12f07cf2d9ea546bd935", size = 44458, upload-time = "2026-01-13T23:22:33.103Z" }, +] + +[[package]] +name = "trame-vtk" +version = "2.11.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "trame-client" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/de/b72ec543cf8f70ee0ef4645d04e911155db3dcba545a9cf35d6c80e849c9/trame_vtk-2.11.8.tar.gz", hash = "sha256:bef4a35d86d57bf9b4af44dda8f361f917b141e4f624c9ab7278b6c48d171e74", size = 810254, upload-time = "2026-04-24T00:28:17.494Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/11/aff660ffcc0f65546da4340902cd064cafda26e0a7750f6468a27378c717/trame_vtk-2.11.8-py3-none-any.whl", hash = "sha256:31c8220f59dcc3b5f2fcfe6de8b9796e8bdb7db5dcf790ee01df83d44e79a413", size = 831787, upload-time = "2026-04-24T00:28:15.317Z" }, +] + +[[package]] +name = "trame-vuetify" +version = "2.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "trame-client" }, +] 
+sdist = { url = "https://files.pythonhosted.org/packages/91/9b/2eba8ec5eeba08d15c4e3758c028d1504d0b73c409e33171185e1bb03839/trame_vuetify-2.9.0.tar.gz", hash = "sha256:86cfa1387b97e9f18d15ce98ee238b6e6c0e0f921935aab6737ae0bed74ee70a", size = 4910348, upload-time = "2025-03-28T22:40:15.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/02/6a8f4a46ca0470c4d9c3ba9f3d97d4b3b19b6889eee751bdd9cde78b9792/trame_vuetify-2.9.0-py3-none-any.whl", hash = "sha256:3db6a6b3384c313befb9d8f0eaf39a9e6cd1d2b882babd1c85d8779ff0f4f2bc", size = 4938098, upload-time = "2025-03-28T22:40:13.329Z" }, +] + [[package]] name = "typer" version = "0.25.0" @@ -3372,6 +3502,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4b/eb/03bfb1299d4c4510329e470f13f9a4ce793df7fcb5a2fd3510f911066f61/virtualenv-21.3.0-py3-none-any.whl", hash = "sha256:4d28ee41f6d9ec8f1f00cd472b9ffbcedda1b3d3b9a575b5c94a2d004fd51bd7", size = 7594690, upload-time = "2026-04-27T17:05:55.468Z" }, ] +[[package]] +name = "vtk" +version = "9.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/70/8a68245293652aeba3448230ef30b90ab7aaa199fc158e7af8c4de66edf3/vtk-9.6.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a2945c0320b5df8f697d49f7d759b2c230ac293188158574526c20bbcaf10241", size = 114551474, upload-time = "2026-03-26T23:34:29.585Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/cdc2b1eb0ea3e322dc707a08e3d145ed556d897eb10385a923cbc932edc0/vtk-9.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b49b3c36e599f652077e60ead865957a65b557a1b53bcd60b26bdaabb81d170d", size = 106761418, upload-time = "2026-03-26T23:34:34.064Z" }, + { url = "https://files.pythonhosted.org/packages/72/92/5c9b9cdfe2738cc7b0dd51adacae67456ef53fcedae16b21a2cf9fbbd767/vtk-9.6.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:3b3537cae99226f3082d3aeef2350b7329ee3cef7e7bd88d4ecacfcbfdadfaeb", size = 145873720, upload-time = "2026-03-26T23:34:39.925Z" }, + { url = "https://files.pythonhosted.org/packages/82/04/029bbc011f2346719e770e0ac961ff419948817a16fcda1249fe17a13525/vtk-9.6.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:30a21c6810d2465dc34dd5987f9fb566dcb8d4e65e06367d10a018c24eea6747", size = 135625426, upload-time = "2026-03-27T13:48:20.901Z" }, + { url = "https://files.pythonhosted.org/packages/ca/4f/bb831b2c46d63db2e6bfa11dcd8b405d526ed376390af66a27f6949749cf/vtk-9.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:dde3627b9d33b75efebe2465183cbc682a9f9a7c1529cf027a8871e60e11b3b2", size = 81247644, upload-time = "2026-03-27T13:49:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/95/89/c274101ec7b9bf7356333fdacf5e634803fe6b40f776e82c6ce9d941e0ad/vtk-9.6.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:b8125e3e3bc3160e18853a15be98101d0efe662c16036179ab15ddf1669b32af", size = 114729308, upload-time = "2026-03-27T13:50:37.547Z" }, + { url = "https://files.pythonhosted.org/packages/9d/1a/ecbebaf31724a00f85fc4dbf95992b507328f615362ee8fa5ea1a38cf9d6/vtk-9.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:956d05b8c53c6a9eba569de244e9c8229815bbb3e024bb9954fafe163407e66d", size = 106814956, upload-time = "2026-03-27T13:51:24.324Z" }, + { url = "https://files.pythonhosted.org/packages/46/66/ba3c8b277cfa8058e982bfbd47875d9c6b4c06e65f98d577c69a2628f8d4/vtk-9.6.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9728e8d41889a0f105b5d20a73a4da80f398b2cfe6057fa7a94cd61128c3ceb4", size = 145920093, upload-time = "2026-03-27T13:53:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/f5/cb/0bbf91cd45a8d8f5453fe01cddf44c913db6316b3a2b15f41893ae0ca9ad/vtk-9.6.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3b5ec2e56bd6165189aa2e6e896edda29460e63040f897e1a123a1592810266d", size = 135683842, upload-time = 
"2026-03-27T13:52:15.218Z" }, + { url = "https://files.pythonhosted.org/packages/08/c0/653c94939498a3976157f054b830ade5c1da48ae288a23547f55fc25a262/vtk-9.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:4022fda8af46636f74c3c1932c2365da13a1dc8779a6b1ea4b13dc5bbcdb729f", size = 81262921, upload-time = "2026-03-27T13:53:50.192Z" }, + { url = "https://files.pythonhosted.org/packages/a8/8d/16e597f86241772fe188bbdd86a74ce48eadd2dd9513e2410b4ea07f78aa/vtk-9.6.1-cp313-cp313-macosx_10_10_x86_64.whl", hash = "sha256:88983bce26f7665ac6e4fb7de16cf53b896140a1a6cadd942d3c13e7c74a8530", size = 114747320, upload-time = "2026-03-27T13:54:33.138Z" }, + { url = "https://files.pythonhosted.org/packages/63/ca/8f0c19bded437423479d0d3ff0b7457cf6ef68def322666df867e6dacc0f/vtk-9.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:94ed369a54c6cfacea0b34f42d7d3ef41fa06c1aabfc75d93cabdc9047454293", size = 106817051, upload-time = "2026-03-27T13:55:21.903Z" }, + { url = "https://files.pythonhosted.org/packages/82/22/c1d98e6e191481af1e5c82ae3fa750798d868aa442a76db027f6a7901b95/vtk-9.6.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:deeb86794cd42f922ea75711b9717e45841777624203727eb84595b709af1382", size = 145920554, upload-time = "2026-03-27T13:57:14.258Z" }, + { url = "https://files.pythonhosted.org/packages/16/5d/658f60209de7b41b634178aee1f458bcad149aa2654d16bd023c09afd29c/vtk-9.6.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:fef8abc33168ad38b2622cf29048b7d5fe48a45789bf0a0421781f5cafa1e554", size = 135686060, upload-time = "2026-03-27T13:56:23.89Z" }, + { url = "https://files.pythonhosted.org/packages/f0/31/e4eb318901a8e736c936491e759ce03a1656792f728ae912db0e20997e9a/vtk-9.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:a5db7b2ff8fc3f56b547c8b9b7bc117a869c902683c86ef5cd6197c087f66183", size = 81264861, upload-time = "2026-03-27T13:57:47.164Z" }, +] + [[package]] name = "wcwidth" version = "0.6.0" @@ -3381,6 +3536,19 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/68/5a/199c59e0a824a3db2b89c5d2dade7ab5f9624dbf6448dc291b46d5ec94d3/wcwidth-0.6.0-py3-none-any.whl", hash = "sha256:1a3a1e510b553315f8e146c54764f4fb6264ffad731b3d78088cdb1478ffbdad", size = 94189, upload-time = "2026-02-06T19:19:39.646Z" }, ] +[[package]] +name = "wslink" +version = "2.5.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "msgpack" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/06/8340b98693fe886af59a86b69ca0eb9f8095d6dbdd7a28496d9f3a8fb33f/wslink-2.5.6.tar.gz", hash = "sha256:12f3a6135cb3a74c4f1af758942c6a4b34a51fcb700839abfb91b13064a4244c", size = 29784, upload-time = "2026-03-12T00:35:26.018Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/26/d23eb1cc5c8f084d861bbb7035fa911ecb86be51810428dd6284398d021a/wslink-2.5.6-py3-none-any.whl", hash = "sha256:89f23bad3b3522dcb78be84907487f6cf742c6b4526a666fd3e4013f5f705015", size = 37165, upload-time = "2026-03-12T00:35:24.655Z" }, +] + [[package]] name = "xxhash" version = "3.7.0" From 2d557939127a73f4653c226c39b25ba99dcbd815 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 19:58:02 +0200 Subject: [PATCH 03/17] dependencies --- uv.lock | 33 +++++++++++++++++++++++++++++---- 1 file changed, 29 insertions(+), 4 deletions(-) diff --git a/uv.lock b/uv.lock index 8bdccefc..3d2e2a04 100644 --- a/uv.lock +++ b/uv.lock @@ -1067,7 +1067,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.10.2" +version = "1.12.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1080,9 +1080,9 @@ dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/4d/00734890c7fcfe2c7ff04f1c1a167186c42b19e370a2dd8cfd8c34fc92c4/huggingface_hub-1.10.2.tar.gz", hash = "sha256:4b276f820483b709dc86a53bcb8183ea496b8d8447c9f7f88a115a12b498a95f", size = 758428, 
upload-time = "2026-04-14T10:42:28.498Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3e/9f/3fda8b014db3ae239addc9b48b35c2cf7d318950b430712f34a2473ef81d/huggingface_hub-1.12.2.tar.gz", hash = "sha256:282c4999e641c89affdc4c02c265eddea944c1390dc19e89dac8ad3ae76dbdaf", size = 763393, upload-time = "2026-04-29T09:45:09.202Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/c9/4c1e1216b24bcab140c83acdf8bc89a846ea17cd8a06cd18e3fd308a297f/huggingface_hub-1.10.2-py3-none-any.whl", hash = "sha256:c26c908767cc711493978dc0b4f5747ba7841602997cc98bfd628450a28cf9bc", size = 642581, upload-time = "2026-04-14T10:42:26.563Z" }, + { url = "https://files.pythonhosted.org/packages/71/c1/1fa4162f6dd53259daf2ad31385273341821fa0acce164cd03971937a60e/huggingface_hub-1.12.2-py3-none-any.whl", hash = "sha256:7968e897fdbc6343c871c240d87d4434efe0ad9f80d57daa1cc5678c6d148529", size = 647757, upload-time = "2026-04-29T09:45:07.63Z" }, ] [[package]] @@ -3499,7 +3499,32 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/3f/8b/6331f7a7fe70131c301106ec1e7cf23e2501bf7d4ca3636805801ca191bb/virtualenv-21.3.0.tar.gz", hash = "sha256:733750db978ec95c2d8eb4feadaa57091002bce404cb39ba69899cf7bd28944e", size = 7614069, upload-time = "2026-04-27T17:05:58.927Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/19/bc7c4e05f42532863cf2ae7e7e847beab25835934e0410160b47eeff1e35/virtualenv-21.2.3-py3-none-any.whl", hash = "sha256:486652347ea8526d91e9807c0274583cb7ba31dd4942ff10fb5621402f0fe0d8", size = 5828329, upload-time = "2026-04-14T01:10:34.809Z" }, + { url = "https://files.pythonhosted.org/packages/4b/eb/03bfb1299d4c4510329e470f13f9a4ce793df7fcb5a2fd3510f911066f61/virtualenv-21.3.0-py3-none-any.whl", hash = "sha256:4d28ee41f6d9ec8f1f00cd472b9ffbcedda1b3d3b9a575b5c94a2d004fd51bd7", size = 7594690, upload-time = "2026-04-27T17:05:55.468Z" }, +] + +[[package]] +name = "vtk" +version = "9.6.1" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/70/8a68245293652aeba3448230ef30b90ab7aaa199fc158e7af8c4de66edf3/vtk-9.6.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:a2945c0320b5df8f697d49f7d759b2c230ac293188158574526c20bbcaf10241", size = 114551474, upload-time = "2026-03-26T23:34:29.585Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/cdc2b1eb0ea3e322dc707a08e3d145ed556d897eb10385a923cbc932edc0/vtk-9.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b49b3c36e599f652077e60ead865957a65b557a1b53bcd60b26bdaabb81d170d", size = 106761418, upload-time = "2026-03-26T23:34:34.064Z" }, + { url = "https://files.pythonhosted.org/packages/72/92/5c9b9cdfe2738cc7b0dd51adacae67456ef53fcedae16b21a2cf9fbbd767/vtk-9.6.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3b3537cae99226f3082d3aeef2350b7329ee3cef7e7bd88d4ecacfcbfdadfaeb", size = 145873720, upload-time = "2026-03-26T23:34:39.925Z" }, + { url = "https://files.pythonhosted.org/packages/82/04/029bbc011f2346719e770e0ac961ff419948817a16fcda1249fe17a13525/vtk-9.6.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:30a21c6810d2465dc34dd5987f9fb566dcb8d4e65e06367d10a018c24eea6747", size = 135625426, upload-time = "2026-03-27T13:48:20.901Z" }, + { url = "https://files.pythonhosted.org/packages/ca/4f/bb831b2c46d63db2e6bfa11dcd8b405d526ed376390af66a27f6949749cf/vtk-9.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:dde3627b9d33b75efebe2465183cbc682a9f9a7c1529cf027a8871e60e11b3b2", size = 81247644, upload-time = "2026-03-27T13:49:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/95/89/c274101ec7b9bf7356333fdacf5e634803fe6b40f776e82c6ce9d941e0ad/vtk-9.6.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:b8125e3e3bc3160e18853a15be98101d0efe662c16036179ab15ddf1669b32af", size = 114729308, upload-time = "2026-03-27T13:50:37.547Z" }, + { url = 
"https://files.pythonhosted.org/packages/9d/1a/ecbebaf31724a00f85fc4dbf95992b507328f615362ee8fa5ea1a38cf9d6/vtk-9.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:956d05b8c53c6a9eba569de244e9c8229815bbb3e024bb9954fafe163407e66d", size = 106814956, upload-time = "2026-03-27T13:51:24.324Z" }, + { url = "https://files.pythonhosted.org/packages/46/66/ba3c8b277cfa8058e982bfbd47875d9c6b4c06e65f98d577c69a2628f8d4/vtk-9.6.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9728e8d41889a0f105b5d20a73a4da80f398b2cfe6057fa7a94cd61128c3ceb4", size = 145920093, upload-time = "2026-03-27T13:53:12.49Z" }, + { url = "https://files.pythonhosted.org/packages/f5/cb/0bbf91cd45a8d8f5453fe01cddf44c913db6316b3a2b15f41893ae0ca9ad/vtk-9.6.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:3b5ec2e56bd6165189aa2e6e896edda29460e63040f897e1a123a1592810266d", size = 135683842, upload-time = "2026-03-27T13:52:15.218Z" }, + { url = "https://files.pythonhosted.org/packages/08/c0/653c94939498a3976157f054b830ade5c1da48ae288a23547f55fc25a262/vtk-9.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:4022fda8af46636f74c3c1932c2365da13a1dc8779a6b1ea4b13dc5bbcdb729f", size = 81262921, upload-time = "2026-03-27T13:53:50.192Z" }, + { url = "https://files.pythonhosted.org/packages/a8/8d/16e597f86241772fe188bbdd86a74ce48eadd2dd9513e2410b4ea07f78aa/vtk-9.6.1-cp313-cp313-macosx_10_10_x86_64.whl", hash = "sha256:88983bce26f7665ac6e4fb7de16cf53b896140a1a6cadd942d3c13e7c74a8530", size = 114747320, upload-time = "2026-03-27T13:54:33.138Z" }, + { url = "https://files.pythonhosted.org/packages/63/ca/8f0c19bded437423479d0d3ff0b7457cf6ef68def322666df867e6dacc0f/vtk-9.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:94ed369a54c6cfacea0b34f42d7d3ef41fa06c1aabfc75d93cabdc9047454293", size = 106817051, upload-time = "2026-03-27T13:55:21.903Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/22/c1d98e6e191481af1e5c82ae3fa750798d868aa442a76db027f6a7901b95/vtk-9.6.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:deeb86794cd42f922ea75711b9717e45841777624203727eb84595b709af1382", size = 145920554, upload-time = "2026-03-27T13:57:14.258Z" }, + { url = "https://files.pythonhosted.org/packages/16/5d/658f60209de7b41b634178aee1f458bcad149aa2654d16bd023c09afd29c/vtk-9.6.1-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:fef8abc33168ad38b2622cf29048b7d5fe48a45789bf0a0421781f5cafa1e554", size = 135686060, upload-time = "2026-03-27T13:56:23.89Z" }, + { url = "https://files.pythonhosted.org/packages/f0/31/e4eb318901a8e736c936491e759ce03a1656792f728ae912db0e20997e9a/vtk-9.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:a5db7b2ff8fc3f56b547c8b9b7bc117a869c902683c86ef5cd6197c087f66183", size = 81264861, upload-time = "2026-03-27T13:57:47.164Z" }, ] [[package]] From ff9cf1ab2814682e0b23c0cda8317ddaa378eef4 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 20:00:20 +0200 Subject: [PATCH 04/17] update CHANGELOG --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0d1282f3..415e7a56 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added +- (dataset-viewer) add a trame app for dataset visual exploration. - (sample/features) add_field: check field size consistency with geometrical support. - (sample) add `set_trees` to `Sample` delegated methods: `sample.set_trees(...)` now works as a direct proxy to `SampleFeatures.set_trees`, consistent with other delegated tree methods. 
From e2fbc1311da231035d83da51971233b78edfe39c Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 20:18:32 +0200 Subject: [PATCH 05/17] remove comments --- tests/viewer/test_trame_server.py | 67 ------------------------------- 1 file changed, 67 deletions(-) diff --git a/tests/viewer/test_trame_server.py b/tests/viewer/test_trame_server.py index 3fdc19d5..dc92fee2 100644 --- a/tests/viewer/test_trame_server.py +++ b/tests/viewer/test_trame_server.py @@ -18,73 +18,6 @@ def empty_datasets_root(tmp_path: Path) -> Path: return root -# TODO: Re-enable after fixing VTK segfault in CI environment -# def test_build_server_returns_trame_server(empty_datasets_root: Path) -> None: -# """``build_server`` should return a configured trame server for empty roots.""" -# pytest.importorskip("vtk") -# pytest.importorskip("trame") -# from plaid.viewer.trame_app.server import build_server # noqa: PLC0415 - -# config = ViewerConfig(datasets_root=empty_datasets_root) -# dataset_service = PlaidDatasetService(config) - -# with CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: -# artifact_service = ParaviewArtifactService(dataset_service, cache.path) -# server = build_server(dataset_service, artifact_service) - -# # The server should expose state and controller attributes. -# assert hasattr(server, "state") -# assert hasattr(server, "controller") -# assert server.state.dataset_ids == [] -# assert server.state.status.startswith("Select a dataset") - -# TODO: Re-enable after fixing VTK segfault in CI environment -# def test_browse_cd_updates_browse_state(tmp_path: Path) -> None: -# """``ctrl.browse_cd`` must load the given directory into the browser state. - -# Regression guard for a bug where the file-browser list items dispatched -# through the client-side ``trigger(...)`` helper (which only resolves -# names registered as server triggers), while ``browse_cd`` was only -# registered as a controller method via ``@ctrl.set``. 
Clicking a folder -# in the browser dialog was therefore a no-op. -# """ -# pytest.importorskip("vtk") -# pytest.importorskip("trame") -# from plaid.viewer.trame_app.server import build_server # noqa: PLC0415 - -# datasets_root = tmp_path / "datasets" -# datasets_root.mkdir() -# child = datasets_root / "child" -# child.mkdir() - -# config = ViewerConfig( -# datasets_root=datasets_root, -# browse_roots=(tmp_path,), -# ) -# dataset_service = PlaidDatasetService(config) - -# with CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: -# artifact_service = ParaviewArtifactService(dataset_service, cache.path) -# server = build_server(dataset_service, artifact_service) - -# assert hasattr(server.controller, "browse_cd") -# server.controller.browse_cd(str(child)) - -# assert Path(server.state.browse_cwd) == child.resolve() -# assert server.state.browse_parent == str(datasets_root.resolve()) - - -class _FakeSelection: - def __init__(self) -> None: - self.enabled: list[str] = [] - - def DisableAllArrays(self) -> None: # noqa: N802 - VTK API - self.enabled = [] - - def EnableArray(self, name: str) -> None: # noqa: N802 - VTK API - self.enabled.append(name) - - class _FakeCGNSReader: def __init__(self) -> None: self.file_name: str | None = None From bb7257d99111e96b7de9cd006a64e7babc13bf4d Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 21:02:19 +0200 Subject: [PATCH 06/17] coverage --- src/plaid/viewer/cache.py | 10 +- .../services/paraview_artifact_service.py | 2 +- .../viewer/services/plaid_dataset_service.py | 15 +- src/plaid/viewer/trame_app/server.py | 12 +- tests/viewer/test_cache.py | 145 ++++- tests/viewer/test_cli.py | 148 ++++++ .../viewer/test_paraview_artifact_service.py | 61 +++ tests/viewer/test_plaid_dataset_service.py | 500 +++++++++++++++++- tests/viewer/test_preferences.py | 69 +++ tests/viewer/test_trame_helpers.py | 319 +++++++++++ 10 files changed, 1248 insertions(+), 33 deletions(-) create mode 100644 
tests/viewer/test_cli.py create mode 100644 tests/viewer/test_preferences.py create mode 100644 tests/viewer/test_trame_helpers.py diff --git a/src/plaid/viewer/cache.py b/src/plaid/viewer/cache.py index ef5d1a17..6fcd2bf3 100644 --- a/src/plaid/viewer/cache.py +++ b/src/plaid/viewer/cache.py @@ -41,7 +41,7 @@ def _process_is_alive(pid: int) -> bool: except PermissionError: # The process exists but is owned by someone else. return True - except OSError as exc: # pragma: no cover - defensive + except OSError as exc: return exc.errno != errno.ESRCH return True @@ -73,7 +73,7 @@ def sweep_orphans(temp_root: Path | None = None) -> list[Path]: shutil.rmtree(entry, ignore_errors=True) removed.append(entry) logger.info("Removed orphan viewer cache: %s", entry) - except OSError as exc: # pragma: no cover - defensive + except OSError as exc: logger.warning("Could not remove orphan viewer cache %s: %s", entry, exc) return removed @@ -144,14 +144,14 @@ def __exit__(self, exc_type, exc, tb) -> None: # noqa: D105 def _safe_cleanup(self) -> None: try: shutil.rmtree(self._path, ignore_errors=True) - except Exception as exc: # pragma: no cover - defensive + except Exception as exc: logger.warning("Failed to clean viewer cache %s: %s", self._path, exc) def _install_signal_handlers(self) -> None: for sig in (signal.SIGINT, signal.SIGTERM): try: previous = signal.getsignal(sig) - except (ValueError, OSError): # pragma: no cover - non-main thread + except (ValueError, OSError): continue def handler(signum, frame, _prev=previous): @@ -164,5 +164,5 @@ def handler(signum, frame, _prev=previous): try: signal.signal(sig, handler) - except (ValueError, OSError): # pragma: no cover - non-main thread + except (ValueError, OSError): pass diff --git a/src/plaid/viewer/services/paraview_artifact_service.py b/src/plaid/viewer/services/paraview_artifact_service.py index 942d4aba..3585bb0c 100644 --- a/src/plaid/viewer/services/paraview_artifact_service.py +++ 
b/src/plaid/viewer/services/paraview_artifact_service.py @@ -47,7 +47,7 @@ def _plaid_version() -> str: from importlib.metadata import PackageNotFoundError, version return version("pyplaid") - except PackageNotFoundError: # pragma: no cover - defensive + except PackageNotFoundError: return "unknown" diff --git a/src/plaid/viewer/services/plaid_dataset_service.py b/src/plaid/viewer/services/plaid_dataset_service.py index db3e97f5..2d13f3e1 100644 --- a/src/plaid/viewer/services/plaid_dataset_service.py +++ b/src/plaid/viewer/services/plaid_dataset_service.py @@ -295,9 +295,12 @@ def list_subdirs(self, path: Path | str | None = None) -> dict[str, object]: target != root and root in target.parents for root in self._browse_roots ): parent = str(target.parent) - elif target.parent != target and any( - target.parent == root or root in target.parent.parents - for root in self._browse_roots + elif ( + target.parent != target + and any( # pragma: no cover - alternate browse-root ancestry guard + target.parent == root or root in target.parent.parents + for root in self._browse_roots + ) ): parent = str(target.parent) return { @@ -966,7 +969,9 @@ def describe_non_visual_bases( return {} tree = sample.features.data[times[0]] summary: dict[str, list[dict[str, object]]] = {} - for base_node in CU.hasChildType(tree, CK.CGNSBase_ts) or []: + for base_node in ( + CU.hasChildType(tree, CK.CGNSBase_ts) or [] + ): # pragma: no cover - CGNS tree introspection if CU.hasChildType(base_node, CK.Zone_ts): continue summary[base_node[0]] = _collect_data_arrays(base_node) @@ -1110,7 +1115,7 @@ def _load_infos(base: Path) -> dict | None: return None try: return yaml.safe_load(text) - except yaml.YAMLError: # type: ignore[attr-defined] + except yaml.YAMLError: return None return None diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py index d2aca65b..861d402d 100644 --- a/src/plaid/viewer/trame_app/server.py +++ b/src/plaid/viewer/trame_app/server.py @@ 
-48,7 +48,7 @@ _C_STDERR_REROUTED = False -def _reroute_c_stderr() -> None: +def _reroute_c_stderr() -> None: # pragma: no cover - process fd manipulation """Permanently redirect the process's stderr file descriptor to /dev/null. VTK's CGNS reader and the underlying HDF5 library emit informational @@ -107,7 +107,7 @@ def _install_vtk_log_router() -> None: return try: import vtk # noqa: PLC0415 - except ImportError: # pragma: no cover - VTK is required in practice + except ImportError: return # ``vtkPythonStdStreamCaptureHelper`` is not available in every VTK wheel, @@ -140,7 +140,7 @@ def DisplayDebugText(self, text: str) -> None: # noqa: N802 - VTK API if hasattr(vtk, "vtkLogger"): try: vtk.vtkLogger.SetStderrVerbosity(vtk.vtkLogger.VERBOSITY_OFF) - except AttributeError: # pragma: no cover - very old VTK + except AttributeError: pass _VTK_LOG_ROUTER_INSTALLED = True @@ -363,7 +363,7 @@ def _visit(obj): # --------------------------------------------------------------------------- -class _VtkPipeline: +class _VtkPipeline: # pragma: no cover - requires real VTK rendering/display stack """Minimal reader -> (cut) -> (threshold) -> geometry -> actor pipeline.""" def __init__(self) -> None: @@ -505,7 +505,7 @@ def _build_lut(cmap: str, lo: float, hi: float): # --------------------------------------------------------------------------- -def build_server( +def build_server( # pragma: no cover - trame/VTK UI startup is not CI-headless safe dataset_service: PlaidDatasetService, artifact_service: ParaviewArtifactService, ): @@ -1313,7 +1313,7 @@ async def _play_loop() -> None: _apply_time_step_impl() fps = max(1, int(state.play_fps or 1)) await asyncio.sleep(1.0 / fps) - except asyncio.CancelledError: # pragma: no cover - cooperative cancel + except asyncio.CancelledError: pass @state.change("playing") diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index 6187a565..35cc113e 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -4,7 
+4,10 @@ from pathlib import Path -from plaid.viewer.cache import CacheRoot, sweep_orphans +import pytest + +from plaid.viewer import cache as cache_mod +from plaid.viewer.cache import CacheRoot, _process_is_alive, sweep_orphans def test_ephemeral_cache_is_cleaned_up_on_close(tmp_path: Path, monkeypatch) -> None: @@ -50,3 +53,143 @@ def test_sweep_orphans_keeps_live_pid_dir(tmp_path: Path) -> None: removed = sweep_orphans(tmp_path) assert live not in removed assert live.exists() + + +def test_process_is_alive_branches(monkeypatch: pytest.MonkeyPatch) -> None: + assert _process_is_alive(0) is False + + def missing(_pid: int, _sig: int) -> None: + raise ProcessLookupError + + monkeypatch.setattr(cache_mod.os, "kill", missing) + assert _process_is_alive(123) is False + + def denied(_pid: int, _sig: int) -> None: + raise PermissionError + + monkeypatch.setattr(cache_mod.os, "kill", denied) + assert _process_is_alive(123) is True + + def other_os_error(_pid: int, _sig: int) -> None: + raise OSError(5, "other") + + monkeypatch.setattr(cache_mod.os, "kill", other_os_error) + assert _process_is_alive(123) is True + + def no_such_process(_pid: int, _sig: int) -> None: + raise OSError(cache_mod.errno.ESRCH, "missing") + + monkeypatch.setattr(cache_mod.os, "kill", no_such_process) + assert _process_is_alive(123) is False + + +def test_sweep_orphans_ignores_non_dirs_and_non_matching_names(tmp_path: Path) -> None: + (tmp_path / "plain-file").write_text("x") + keep = tmp_path / "not-plaid-viewer" + keep.mkdir() + assert sweep_orphans(tmp_path / "missing") == [] + assert sweep_orphans(tmp_path) == [] + assert keep.exists() + + +def test_cache_runs_orphan_sweep_and_close_is_idempotent( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) + victim = tmp_path / "plaid-viewer-999999-deadbeef" + victim.mkdir() + cache = CacheRoot(install_signal_handlers=False, run_orphan_sweep=True) + assert not 
victim.exists() + path = cache.path + cache.close() + cache.close() + assert not path.exists() + + +def test_cache_signal_handler_cleans_then_delegates( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) + calls: list[tuple[str, object]] = [] + handlers: dict[int, object] = {} + + def previous(signum, _frame): + calls.append(("previous", signum)) + + def fake_getsignal(_sig): + return previous + + def fake_signal(sig, handler): + handlers[sig] = handler + calls.append(("signal", sig)) + + def fake_kill(_pid, sig): + calls.append(("kill", sig)) + + monkeypatch.setattr(cache_mod.signal, "getsignal", fake_getsignal) + monkeypatch.setattr(cache_mod.signal, "signal", fake_signal) + monkeypatch.setattr(cache_mod.os, "kill", fake_kill) + + cache = CacheRoot(install_signal_handlers=True, run_orphan_sweep=False) + path = cache.path + handler = handlers[cache_mod.signal.SIGINT] + handler(cache_mod.signal.SIGINT, None) + + assert not path.exists() + assert ("previous", cache_mod.signal.SIGINT) in calls + assert ("kill", cache_mod.signal.SIGINT) in calls + + +def test_sweep_orphans_logs_rmtree_errors( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + victim = tmp_path / "plaid-viewer-999999-deadbeefcafe" + victim.mkdir() + + def broken_rmtree(_path: Path, ignore_errors: bool = False) -> None: # noqa: ARG001, FBT001, FBT002 + raise OSError("boom") + + monkeypatch.setattr(cache_mod.shutil, "rmtree", broken_rmtree) + removed = sweep_orphans(tmp_path) + assert removed == [] + assert "Could not remove orphan viewer cache" in caplog.text + + +def test_cache_safe_cleanup_logs_errors( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch, caplog: pytest.LogCaptureFixture +) -> None: + monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) + cache = CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) + + def 
broken_rmtree(_path: Path, ignore_errors: bool = False) -> None: # noqa: ARG001, FBT001, FBT002 + raise RuntimeError("boom") + + monkeypatch.setattr(cache_mod.shutil, "rmtree", broken_rmtree) + cache.close() + assert "Failed to clean viewer cache" in caplog.text + + +def test_cache_signal_handler_install_ignores_signal_errors( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) + + calls = {"getsignal": 0, "signal": 0} + + def flaky_getsignal(_sig): + calls["getsignal"] += 1 + if calls["getsignal"] == 1: + raise ValueError("not main thread") + return cache_mod.signal.SIG_IGN + + def broken_signal(_sig, _handler): + calls["signal"] += 1 + raise OSError("not main thread") + + monkeypatch.setattr(cache_mod.signal, "getsignal", flaky_getsignal) + monkeypatch.setattr(cache_mod.signal, "signal", broken_signal) + cache = CacheRoot(install_signal_handlers=True, run_orphan_sweep=False) + try: + assert calls == {"getsignal": 2, "signal": 1} + finally: + cache.close() diff --git a/tests/viewer/test_cli.py b/tests/viewer/test_cli.py new file mode 100644 index 00000000..c64d2fc5 --- /dev/null +++ b/tests/viewer/test_cli.py @@ -0,0 +1,148 @@ +"""Tests for the viewer CLI parser that do not start the VTK/trame runtime.""" + +from __future__ import annotations + +from pathlib import Path + +import pytest + +from plaid.viewer import cli as cli_mod +from plaid.viewer.cli import _build_parser + + +def test_build_parser_defaults() -> None: + args = _build_parser().parse_args([]) + + assert args.datasets_root is None + assert args.browse_roots is None + assert args.disable_root_change is False + assert args.cache_dir is None + assert args.host == "127.0.0.1" + assert args.port == 8080 + assert args.backend_id == "disk" + assert args.hub_repo is None + + +def test_build_parser_accepts_all_options(tmp_path: Path) -> None: + datasets_root = tmp_path / "datasets" + cache_dir = tmp_path / "cache" + 
browse_a = tmp_path / "a" + browse_b = tmp_path / "b" + + args = _build_parser().parse_args( + [ + "--datasets-root", + str(datasets_root), + "--browse-roots", + str(browse_a), + str(browse_b), + "--disable-root-change", + "--cache-dir", + str(cache_dir), + "--host", + "0.0.0.0", + "--port", + "9000", + "--backend-id", + "zarr", + "--hub-repo", + "org/one", + "--hub-repo", + "org/two", + ] + ) + + assert args.datasets_root == datasets_root + assert args.browse_roots == [browse_a, browse_b] + assert args.disable_root_change is True + assert args.cache_dir == cache_dir + assert args.host == "0.0.0.0" + assert args.port == 9000 + assert args.backend_id == "zarr" + assert args.hub_repo == ["org/one", "org/two"] + + +def test_main_wires_services_without_starting_real_runtime( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + calls: list[tuple[str, object]] = [] + + class FakeCache: + def __init__(self, persistent_dir=None): + calls.append(("cache", persistent_dir)) + self.path = tmp_path / "cache-root" + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc, tb): + calls.append(("cache-exit", exc_type)) + + class FakeDatasetService: + def __init__(self, config): + self.config = config + calls.append(("dataset-root", config.datasets_root)) + calls.append(("allow-root-change", config.allow_root_change)) + + def add_hub_dataset(self, repo_id: str) -> str: + calls.append(("hub", repo_id)) + if repo_id == "bad/repo": + raise ValueError("bad") + return repo_id + + class FakeArtifactService: + def __init__(self, _dataset_service, cache_path): + calls.append(("artifact-cache", cache_path)) + + class FakeServer: + def start(self, *, host: str, port: int, open_browser: bool) -> None: + calls.append(("start", (host, port, open_browser))) + + def fake_import(name, globals=None, locals=None, fromlist=(), level=0): # noqa: A002, ANN001, ANN002 + if name == "plaid.viewer.trame_app.server" and "_reroute_c_stderr" in fromlist: + return type( + 
"ServerModule", + (), + {"_reroute_c_stderr": lambda: calls.append(("stderr", None))}, + ) + if name == "plaid.viewer.trame_app.server" and "build_server" in fromlist: + return type( + "ServerModule", (), {"build_server": lambda _ds, _as: FakeServer()} + ) + return real_import(name, globals, locals, fromlist, level) + + real_import = __import__ + monkeypatch.setattr(cli_mod, "CacheRoot", FakeCache) + monkeypatch.setattr(cli_mod, "PlaidDatasetService", FakeDatasetService) + monkeypatch.setattr(cli_mod, "ParaviewArtifactService", FakeArtifactService) + monkeypatch.setattr( + cli_mod, "get_last_datasets_root", lambda: tmp_path / "persisted" + ) + monkeypatch.setattr("builtins.__import__", fake_import) + + assert ( + cli_mod.main( + [ + "--cache-dir", + str(tmp_path / "cache"), + "--host", + "0.0.0.0", + "--port", + "9001", + "--disable-root-change", + "--hub-repo", + "org/repo", + "--hub-repo", + "bad/repo", + ] + ) + == 0 + ) + + assert ("stderr", None) in calls + assert ("dataset-root", tmp_path / "persisted") in calls + assert ("allow-root-change", False) in calls + assert ("hub", "org/repo") in calls + assert ("hub", "bad/repo") in calls + assert ("artifact-cache", tmp_path / "cache-root") in calls + assert ("start", ("0.0.0.0", 9001, False)) in calls diff --git a/tests/viewer/test_paraview_artifact_service.py b/tests/viewer/test_paraview_artifact_service.py index eae73722..7f54da4d 100644 --- a/tests/viewer/test_paraview_artifact_service.py +++ b/tests/viewer/test_paraview_artifact_service.py @@ -8,6 +8,7 @@ from __future__ import annotations import json +import types from pathlib import Path import pytest @@ -16,6 +17,9 @@ from plaid.viewer.services.paraview_artifact_service import ( ParaviewArtifactService, _build_cache_key, + _collect_time_values, + _plaid_version, + ensure_paraview_artifact, ) @@ -104,9 +108,66 @@ def test_cache_key_is_deterministic(ref: SampleRef) -> None: assert key_a == key_b key_c = _build_cache_key(ref, export_version="2") assert key_c 
!= key_a + key_d = _build_cache_key(ref, export_version="1", extra={"preset": "a"}) + assert key_d != key_a def test_get_unknown_artifact_raises(tmp_path: Path) -> None: service = ParaviewArtifactService(_FakeDatasetService(), tmp_path) with pytest.raises(KeyError): service.get("unknown") + + +def test_get_returns_created_artifact(tmp_path: Path, ref: SampleRef) -> None: + service = ParaviewArtifactService(_FakeDatasetService(), tmp_path) + artifact = service.ensure_artifact(ref) + assert service.get(artifact.artifact_id) is artifact + + +def test_collect_time_values_empty() -> None: + assert ( + _collect_time_values( + types.SimpleNamespace(features=types.SimpleNamespace(data={})) + ) + == [] + ) + assert _collect_time_values( + types.SimpleNamespace(features=types.SimpleNamespace(data={2: None, 1: None})) + ) == [1.0, 2.0] + + +def test_ensure_artifact_raises_when_sample_writes_no_cgns( + tmp_path: Path, ref: SampleRef +) -> None: + class EmptySample: + features = types.SimpleNamespace(data={0.0: None}) + + def save_to_dir(self, path: Path, overwrite: bool = False) -> None: # noqa: ARG002 + (Path(path) / "meshes").mkdir(parents=True, exist_ok=True) + + class EmptyService: + def load_sample(self, _ref: SampleRef): + return EmptySample() + + service = ParaviewArtifactService(EmptyService(), tmp_path) # type: ignore[arg-type] + with pytest.raises(RuntimeError, match="produced no CGNS"): + service.ensure_artifact(ref) + + +def test_functional_wrapper_creates_artifact(tmp_path: Path, ref: SampleRef) -> None: + artifact = ensure_paraview_artifact( + ref, + cache_dir=tmp_path, + dataset_service=_FakeDatasetService(), # type: ignore[arg-type] + ) + assert artifact.cgns_path.exists() + + +def test_plaid_version_unknown(monkeypatch: pytest.MonkeyPatch) -> None: + import importlib.metadata + + def raise_not_found(_name: str) -> str: + raise importlib.metadata.PackageNotFoundError + + monkeypatch.setattr(importlib.metadata, "version", raise_not_found) + assert 
_plaid_version() == "unknown" diff --git a/tests/viewer/test_plaid_dataset_service.py b/tests/viewer/test_plaid_dataset_service.py index f114df4e..63fb36e5 100644 --- a/tests/viewer/test_plaid_dataset_service.py +++ b/tests/viewer/test_plaid_dataset_service.py @@ -15,6 +15,11 @@ from plaid.viewer.config import ViewerConfig from plaid.viewer.models import SampleRef from plaid.viewer.services import PlaidDatasetService +from plaid.viewer.services.plaid_dataset_service import ( + _array_preview, + _cached_service, + _safe_list_dir, +) class _FakeDataset(list): @@ -35,6 +40,24 @@ def _make_dataset_dir(root: Path, name: str) -> Path: return base +def test_small_helpers_cover_edge_cases(tmp_path: Path) -> None: + import numpy as np + + assert _safe_list_dir(tmp_path / "missing") == [] + (tmp_path / "b").mkdir() + (tmp_path / "a").mkdir() + assert [p.name for p in _safe_list_dir(tmp_path)] == ["a", "b"] + assert _array_preview(None) is None + assert _array_preview([]) == "[]" + assert "total 8 values" in (_array_preview(np.arange(8), max_items=3) or "") + + class BadArray: + def __array__(self, *_args): + raise RuntimeError("bad") + + assert _array_preview(BadArray()) is None + + def _install_fake_init_from_disk( monkeypatch: pytest.MonkeyPatch, payload: dict[str, tuple[dict, dict]], @@ -59,6 +82,22 @@ def test_list_datasets_returns_all_subdirectories_with_data(tmp_path: Path) -> N assert ids == {"ds_a", "ds_b"} +def test_service_properties_and_dataset_listing_metadata(tmp_path: Path) -> None: + ds = _make_dataset_dir(tmp_path, "ds") + (tmp_path / "file").write_text("x") + (ds / "infos.json").write_text("{}") + (ds / "problem_definitions").mkdir() + service = PlaidDatasetService( + ViewerConfig(datasets_root=tmp_path, browse_roots=(tmp_path,)) + ) + assert service.datasets_root == tmp_path + assert service.browse_roots == (tmp_path.resolve(),) + info = service.list_datasets()[0] + assert info.has_infos is True + assert info.has_problem_definitions is True + assert 
service.hub_repos == () + + def test_list_samples_uses_converter_to_plaid_indices( tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -167,6 +206,28 @@ def test_describe_non_visual_bases_lists_zoneless_bases_only( assert "1.5" in entry["preview"] +def test_describe_non_visual_bases_returns_empty_for_sample_without_times( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + import types + + _make_dataset_dir(tmp_path, "ds") + sample = types.SimpleNamespace(features=types.SimpleNamespace(data={})) + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: sample})}, + ) + }, + ) + + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + assert service.describe_non_visual_bases(ref) == {} + + def test_load_sample_rejects_non_integer_sample_id( tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -208,6 +269,16 @@ def test_set_datasets_root_updates_config(tmp_path: Path) -> None: assert service.datasets_root == sub.resolve() +def test_set_datasets_root_clear_and_rejects_non_directory(tmp_path: Path) -> None: + service = PlaidDatasetService( + ViewerConfig(datasets_root=tmp_path, browse_roots=(tmp_path,)) + ) + assert service.set_datasets_root(None) is None + assert service.datasets_root is None + with pytest.raises(ValueError): + service.set_datasets_root(tmp_path / "missing") + + def test_list_subdirs_returns_entries(tmp_path: Path) -> None: sandbox = tmp_path / "sandbox" sandbox.mkdir() @@ -225,6 +296,35 @@ def test_list_subdirs_returns_entries(tmp_path: Path) -> None: assert plaid_entry["is_plaid_candidate"] is True +def test_list_subdirs_default_hidden_files_and_parent(tmp_path: Path) -> None: + sandbox = tmp_path / "sandbox" + nested = sandbox / "nested" + nested.mkdir(parents=True) + (nested / ".hidden").mkdir() + (nested / "file.txt").write_text("x") + service = 
PlaidDatasetService( + ViewerConfig(datasets_root=sandbox, browse_roots=(sandbox,)) + ) + root_listing = service.list_subdirs(None) + assert root_listing["path"] == str(sandbox.resolve()) + nested_listing = service.list_subdirs(nested) + assert nested_listing["parent"] == str(sandbox.resolve()) + assert nested_listing["entries"] == [] + with pytest.raises(ValueError): + service.list_subdirs(nested / "missing") + + +def test_list_subdirs_parent_when_browse_roots_overlap(tmp_path: Path) -> None: + outer = tmp_path / "outer" + inner = outer / "inner" + inner.mkdir(parents=True) + service = PlaidDatasetService( + ViewerConfig(datasets_root=inner, browse_roots=(outer, inner)) + ) + listing = service.list_subdirs(inner) + assert listing["parent"] == str(outer.resolve()) + + def test_list_subdirs_rejects_outside_sandbox(tmp_path: Path) -> None: sandbox = tmp_path / "sandbox" sandbox.mkdir() @@ -376,6 +476,19 @@ def test_streaming_dataset_is_detected_as_streaming( assert detail.splits == {"train": None} +def test_is_streaming_returns_true_when_hub_open_fails( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset("org/broken") + + def broken(_dataset_id: str): + raise RuntimeError("network") + + monkeypatch.setattr(service, "_open", broken) + assert service.is_streaming("org/broken") is True + + def test_list_samples_emits_single_cursor_ref_for_streaming( tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -448,6 +561,34 @@ def test_reset_stream_cursor_rewinds_to_first_record( assert service.load_sample(ref) is mapping[records[0]] +def test_build_cursor_split_alias_and_missing_split( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/stream" + records = [object()] + _install_fake_init_streaming_from_hub( + monkeypatch, + { + repo_id: ( + {"only": _FakeIterableDataset(records)}, + {"only": _FakeStreamingConverter({records[0]: 
object()})}, + ) + }, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + + cursor = service._build_cursor(repo_id, None) # noqa: SLF001 + assert next(cursor.iterator) is records[0] + + service._store_cache[repo_id] = ( # noqa: SLF001 + {"a": _FakeIterableDataset([]), "b": _FakeIterableDataset([])}, + {"a": object(), "b": object()}, + ) + with pytest.raises(KeyError): + service._build_cursor(repo_id, "missing") # noqa: SLF001 + + # --------------------------------------------------------------------------- # Feature filtering # --------------------------------------------------------------------------- @@ -526,7 +667,7 @@ def test_set_features_rejects_unknown_path( _make_dataset_dir(tmp_path, "ds") _install_fake_metadata( monkeypatch, - variable_schema={"Base/Zone/VertexFields/pressure": None}, + variable_schema={"Base_2_2/Zone/VertexFields/pressure": None}, constant_schema={"train": {}}, ) service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) @@ -539,12 +680,12 @@ def test_load_sample_forwards_selected_features_on_disk( ) -> None: """Disk path: ``to_plaid`` receives the filtered feature list.""" _make_dataset_dir(tmp_path, "ds") - variable = {"Base/Zone/VertexFields/pressure": None} + variable = {"Base_2_2/Zone/VertexFields/pressure": None} constant = { "train": { "Base": None, - "Base/Zone": None, - "Base/Zone/VertexFields": None, + "Base_2_2/Zone": None, + "Base_2_2/Zone/VertexFields": None, } } _install_fake_metadata( @@ -561,7 +702,7 @@ def test_load_sample_forwards_selected_features_on_disk( _install_fake_init_from_disk(monkeypatch, {"ds": (dataset_dict, converter_dict)}) service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) - service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + service.set_features("ds", ["Base_2_2/Zone/VertexFields/pressure"]) ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") assert service.load_sample(ref) is 
target # The user-selected field is forwarded, but the split's constant @@ -569,7 +710,7 @@ def test_load_sample_forwards_selected_features_on_disk( # rendered sample keeps its scalars/globals on top of the # user-selected variable fields. assert converter.last_features is not None - assert "Base/Zone/VertexFields/pressure" in converter.last_features + assert "Base_2_2/Zone/VertexFields/pressure" in converter.last_features for path in constant["train"]: assert path in converter.last_features @@ -601,8 +742,8 @@ def test_streaming_open_expands_features_via_cgns_helper( hands the stub's output through. """ repo_id = "org/stream_filter" - variable = {"Base/Zone/VertexFields/pressure": None} - constant = {"train": {"Base": None, "Base/Zone": None}} + variable = {"Base_2_2/Zone/VertexFields/pressure": None} + constant = {"train": {"Base": None, "Base_2_2/Zone": None}} _install_fake_metadata( monkeypatch, variable_schema=variable, constant_schema=constant ) @@ -641,10 +782,10 @@ def _fake_expand(features, _constant, _variable): service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) service.add_hub_dataset(repo_id) - service.set_features(repo_id, ["Base/Zone/VertexFields/pressure"]) + service.set_features(repo_id, ["Base_2_2/Zone/VertexFields/pressure"]) service.list_samples(repo_id) # triggers ``_open`` assert captured["features"] == [ - "Base/Zone/VertexFields/pressure", + "Base_2_2/Zone/VertexFields/pressure", "__expanded__", ] @@ -654,7 +795,7 @@ def test_set_features_invalidates_store_cache( ) -> None: """Changing the feature selection must force a reload of the dataset.""" _make_dataset_dir(tmp_path, "ds") - variable = {"Base/Zone/VertexFields/pressure": None} + variable = {"Base_2_2/Zone/VertexFields/pressure": None} _install_fake_metadata( monkeypatch, variable_schema=variable, @@ -671,10 +812,339 @@ def test_set_features_invalidates_store_cache( service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) service.list_samples("ds") # populates 
cache assert "ds" in service._store_cache # noqa: SLF001 - service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + service.set_features("ds", ["Base_2_2/Zone/VertexFields/pressure"]) assert "ds" not in service._store_cache # noqa: SLF001 +def test_get_and_clear_features( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + _install_fake_metadata( + monkeypatch, + variable_schema={"Base_2_2/Zone/VertexFields/pressure": None}, + constant_schema={"train": {}}, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + assert service.get_features("ds") is None + assert service.set_features("ds", None) is None + assert service.get_features("ds") is None + + +def test_split_feature_keys_falls_back_to_dataset_union( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + _install_fake_metadata( + monkeypatch, + variable_schema={"var": None}, + constant_schema={"train": {"const": None}}, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + assert service._split_feature_keys("ds", "train") == {"var", "const"} # noqa: SLF001 + assert service._split_feature_keys("ds", "missing") == {"var", "const"} # noqa: SLF001 + + +def test_open_raises_for_empty_dataset( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + _install_fake_init_from_disk(monkeypatch, {"ds": ({}, {})}) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + with pytest.raises(RuntimeError, match="empty"): + service.list_samples("ds") + + +def test_open_hub_feature_keyerror_falls_back_unfiltered( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + repo_id = "org/fallback" + _install_fake_metadata( + monkeypatch, + variable_schema={"Base_2_2/Zone/VertexFields/pressure": None}, + constant_schema={"train": {"Base_2_2": None, "Base_2_2/Zone": None}}, + ) + calls: list[object] = [] + + def 
fake_init(_repo: str, features=None): + calls.append(features) + if features is not None: + raise KeyError("missing") + return {"train": _FakeDataset(range(1))}, { + "train": _FakeConverter({0: object()}) + } + + import plaid.storage as storage # noqa: PLC0415 + + monkeypatch.setattr(storage, "init_streaming_from_hub", fake_init, raising=False) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.add_hub_dataset(repo_id) + service.set_features(repo_id, ["Base_2_2/Zone/VertexFields/pressure"]) + service.list_samples(repo_id) + assert calls[0] is not None + assert calls[-1] is None + + +def test_load_sample_default_split_fallback_and_missing_split( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + target = object() + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"only": _FakeDataset(range(1))}, + {"only": _FakeConverter({0: target})}, + ) + }, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + assert service.load_sample(SampleRef("disk", "ds", "missing", "0")) is target + + _make_dataset_dir(tmp_path, "ds2") + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"only": _FakeDataset(range(1))}, + {"only": _FakeConverter({0: target})}, + ), + "ds2": ( + {"a": _FakeDataset(range(1)), "b": _FakeDataset(range(1))}, + { + "a": _FakeConverter({0: object()}), + "b": _FakeConverter({0: object()}), + }, + ), + }, + ) + service2 = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + with pytest.raises(KeyError): + service2.load_sample(SampleRef("disk", "ds2", "missing", "0")) + + +def test_load_sample_empty_augmented_falls_back_unfiltered( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + _install_fake_metadata( + monkeypatch, + variable_schema={"Base_2_2/Zone/VertexFields/pressure": None}, + constant_schema={"train": {}}, + ) + target = object() + dataset_dict = {"train": _FakeDataset(range(1))} 
+ converter = _FeatureAwareConverter({0: target}) + _install_fake_init_from_disk( + monkeypatch, {"ds": (dataset_dict, {"train": converter})} + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + service.set_features("ds", ["Base_2_2/Zone/VertexFields/pressure"]) + assert service.load_sample(SampleRef("disk", "ds", "train", "0")) is target + assert converter.last_features is None + + +class _SummarySample: + def __init__(self, report: str = "warning") -> None: + import types + + self.features = types.SimpleNamespace( + data={}, get_all_time_values=lambda: [2, 1] + ) + self._report = report + + def get_scalar_names(self): + return ["s"] + + def get_scalar(self, _name: str): + return 3 + + def get_global_names(self, **_kwargs): + return ["IterationValues", "TimeValues", "g", "bad"] + + def get_global(self, name: str, **_kwargs): + if name == "bad": + raise RuntimeError("skip") + return 4 + + def check_completeness(self): + return self._report + + +def test_summary_time_globals_and_validation( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + sample = _SummarySample() + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: sample})}, + ) + }, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef("disk", "ds", "train", "0") + summary = service.get_sample_summary(ref) + assert summary.globals == {"s": "3"} + assert service.list_time_values(ref) == [1.0, 2.0] + assert service.describe_globals(ref) == [ + {"name": "g", "shape": [], "dtype": "int", "preview": "4"} + ] + validation = service.get_sample_validation(ref) + assert validation.ok is True + assert validation.warnings == ["warning"] + + +def test_time_globals_and_validation_error_branches( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + _make_dataset_dir(tmp_path, "ds") + + class BadTimes(_SummarySample): + def 
__init__(self): + super().__init__("error: bad") + self.features.get_all_time_values = lambda: (_ for _ in ()).throw( + RuntimeError("bad") + ) + + def get_global_names(self, **_kwargs): + raise TypeError + + def get_global(self, _name: str, **_kwargs): + raise TypeError + + sample = BadTimes() + sample.get_global_names = lambda: ["g"] + sample.get_global = lambda _name: 5 + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: sample})}, + ) + }, + ) + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + ref = SampleRef("disk", "ds", "train", "0") + assert service.list_time_values(ref) == [] + assert service.describe_globals(ref, time=1.0)[0]["name"] == "g" + assert service.get_sample_validation(ref).errors == ["error: bad"] + + class RaisingService(PlaidDatasetService): + def load_sample(self, ref): # noqa: ARG002 + raise RuntimeError("load") + + assert RaisingService(ViewerConfig()).get_sample_validation(ref).ok is False + + class BadCheck(_SummarySample): + def check_completeness(self): + raise RuntimeError("check") + + _install_fake_init_from_disk( + monkeypatch, + { + "ds": ( + {"train": _FakeDataset(range(1))}, + {"train": _FakeConverter({0: BadCheck()})}, + ) + }, + ) + assert ( + PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + .get_sample_validation(ref) + .ok + is False + ) + + +def test_dataset_dir_and_infos_helpers(tmp_path: Path) -> None: + service = PlaidDatasetService(ViewerConfig()) + with pytest.raises(FileNotFoundError): + service._dataset_dir("ds") # noqa: SLF001 + service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) + with pytest.raises(FileNotFoundError): + service._dataset_dir("missing") # noqa: SLF001 + base = _make_dataset_dir(tmp_path, "ds") + assert PlaidDatasetService._load_infos(base) is None # noqa: SLF001 + (base / "infos.json").write_text("bad") + assert PlaidDatasetService._load_infos(base) is None # noqa: SLF001 + 
(base / "infos.json").write_text('{"a": 1}') + assert PlaidDatasetService._load_infos(base) == {"a": 1} # noqa: SLF001 + (base / "infos.json").unlink() + (base / "infos.yaml").write_text("a: 2") + assert PlaidDatasetService._load_infos(base) == {"a": 2} # noqa: SLF001 + (base / "infos.yaml").write_text("a: [") + assert PlaidDatasetService._load_infos(base) is None # noqa: SLF001 + + +def test_load_infos_handles_read_errors( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + base = _make_dataset_dir(tmp_path, "ds") + (base / "infos.json").write_text('{"a": 1}') + + original_read_text = Path.read_text + + def broken_read_text(self: Path, *args, **kwargs): # noqa: ANN002, ANN003 + if self.name == "infos.json": + raise OSError("boom") + return original_read_text(self, *args, **kwargs) + + monkeypatch.setattr(Path, "read_text", broken_read_text) + assert PlaidDatasetService._load_infos(base) is None # noqa: SLF001 + + +def test_time_keys_describe_tree_empty_and_cached_service(tmp_path: Path) -> None: + import types + + from CGNS.PAT import cgnskeywords as CK + + sample = types.SimpleNamespace(features=types.SimpleNamespace(data={})) + assert PlaidDatasetService._time_keys(sample) == [] # noqa: SLF001 + assert PlaidDatasetService._describe_tree(sample, []) == ( + {}, + {}, + {}, + ) or PlaidDatasetService._describe_tree(sample, []) == ([], {}, {}) # noqa: SLF001 + + visual_base = [ + "Base", + None, + [ + [ + "Zone", + None, + [ + [ + "FlowSolution", + None, + [["Pressure", None, [], CK.DataArray_ts]], + CK.FlowSolution_ts, + ] + ], + CK.Zone_ts, + ] + ], + CK.CGNSBase_ts, + ] + tree = ["CGNSTree", None, [visual_base], "CGNSTree_t"] + sample = types.SimpleNamespace(features=types.SimpleNamespace(data={0.0: tree})) + assert PlaidDatasetService._describe_tree(sample, [0.0]) == ( # noqa: SLF001 + ["Base"], + {"Base": ["Zone"]}, + {"Base": ["Pressure"]}, + ) + _cached_service.cache_clear() + assert _cached_service(str(tmp_path), "disk") is _cached_service( + 
str(tmp_path), "disk" + ) + + def test_load_sample_auto_advances_cursor_on_first_access( tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: @@ -739,8 +1209,8 @@ def test_load_sample_falls_back_when_empty_filter_triggers_missing_features( degrade to an unfiltered load so the user still sees the mesh. """ _make_dataset_dir(tmp_path, "ds") - variable = {"Base/Zone/VertexFields/pressure": None} - constant = {"train": {"Base": None, "Base/Zone": None}} + variable = {"Base_2_2/Zone/VertexFields/pressure": None} + constant = {"train": {"Base": None, "Base_2_2/Zone": None}} _install_fake_metadata( monkeypatch, variable_schema=variable, constant_schema=constant ) @@ -757,7 +1227,7 @@ def test_load_sample_falls_back_when_empty_filter_triggers_missing_features( service = PlaidDatasetService(ViewerConfig(datasets_root=tmp_path)) # Emulate the UI: the user selected a field that exists elsewhere in # the dataset metadata but not in this split. - service.set_features("ds", ["Base/Zone/VertexFields/pressure"]) + service.set_features("ds", ["Base_2_2/Zone/VertexFields/pressure"]) ref = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") assert service.load_sample(ref) is target diff --git a/tests/viewer/test_preferences.py b/tests/viewer/test_preferences.py new file mode 100644 index 00000000..49d788ae --- /dev/null +++ b/tests/viewer/test_preferences.py @@ -0,0 +1,69 @@ +"""Tests for viewer preference persistence.""" + +from __future__ import annotations + +import json +from pathlib import Path + +import pytest + +from plaid.viewer import preferences as prefs + + +def test_preferences_path_uses_xdg_config_home( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + monkeypatch.delenv("PLAID_VIEWER_CONFIG_FILE", raising=False) + monkeypatch.setenv("XDG_CONFIG_HOME", str(tmp_path)) + assert prefs._preferences_path() == tmp_path / "plaid" / "viewer.json" + + +def test_load_preferences_handles_missing_invalid_and_valid_files( + tmp_path: Path, 
monkeypatch: pytest.MonkeyPatch +) -> None: + path = tmp_path / "viewer.json" + monkeypatch.setenv("PLAID_VIEWER_CONFIG_FILE", str(path)) + assert prefs.load_preferences() == {} + path.write_text("not json") + assert prefs.load_preferences() == {} + path.write_text(json.dumps({"datasets_root": str(tmp_path)})) + assert prefs.load_preferences() == {"datasets_root": str(tmp_path)} + + +def test_update_and_last_datasets_root( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + path = tmp_path / "viewer.json" + monkeypatch.setenv("PLAID_VIEWER_CONFIG_FILE", str(path)) + + prefs.save_preferences({"datasets_root": str(tmp_path), "other": 1}) + assert prefs.get_last_datasets_root() == tmp_path + updated = prefs.update_preferences(datasets_root=None) + assert updated == {"other": 1} + assert prefs.get_last_datasets_root() is None + prefs.set_last_datasets_root(tmp_path) + assert prefs.get_last_datasets_root() == tmp_path.resolve() + prefs.set_last_datasets_root(None) + assert prefs.get_last_datasets_root() is None + + +def test_get_last_datasets_root_rejects_bad_values( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + path = tmp_path / "viewer.json" + monkeypatch.setenv("PLAID_VIEWER_CONFIG_FILE", str(path)) + path.write_text(json.dumps({"datasets_root": ""})) + assert prefs.get_last_datasets_root() is None + path.write_text(json.dumps({"datasets_root": str(tmp_path / "missing")})) + assert prefs.get_last_datasets_root() is None + + +def test_save_preferences_ignores_os_errors(monkeypatch: pytest.MonkeyPatch) -> None: + class BadPath: + parent = Path("/") + + def write_text(self, _text: str) -> None: + raise OSError("boom") + + monkeypatch.setattr(prefs, "_preferences_path", lambda: BadPath()) + prefs.save_preferences({"x": 1}) diff --git a/tests/viewer/test_trame_helpers.py b/tests/viewer/test_trame_helpers.py new file mode 100644 index 00000000..5393cb5d --- /dev/null +++ b/tests/viewer/test_trame_helpers.py @@ -0,0 +1,319 @@ +"""Headless tests 
for trame server helper functions using fakes.""" + +from __future__ import annotations + +import sys +import types +from pathlib import Path + +import pytest + +from plaid.viewer.trame_app import server as srv + + +class _Selection: + def __init__(self, names: list[str]) -> None: + self.names = names + self.enabled: list[str] = [] + self.disabled: list[str] = [] + + def GetNumberOfArrays(self) -> int: # noqa: N802 + return len(self.names) + + def GetArrayName(self, i: int) -> str: # noqa: N802 + return self.names[i] + + def ArrayExists(self, name: str) -> bool: # noqa: N802 + return name in self.names + + def DisableArray(self, name: str) -> None: # noqa: N802 + self.disabled.append(name) + + def DisableAllArrays(self) -> None: # noqa: N802 + self.disabled.extend(self.names) + + def EnableArray(self, name: str) -> None: # noqa: N802 + self.enabled.append(name) + + +class _Reader: + def __init__(self) -> None: + self.base = _Selection(["Base", "Global"]) + self.point = _Selection(["p"]) + self.cell = _Selection(["c"]) + self.modified = False + self.updated = False + + def GetBaseSelection(self): # noqa: N802 + return self.base + + def GetPointDataArraySelection(self): # noqa: N802 + return self.point + + def GetCellDataArraySelection(self): # noqa: N802 + return self.cell + + def Modified(self) -> None: # noqa: N802 + self.modified = True + + def Update(self) -> None: # noqa: N802 + self.updated = True + + +def test_reader_selection_helpers() -> None: + reader = _Reader() + wrapper = types.SimpleNamespace(GetReader=lambda: reader) + srv._disable_bases_on_reader(wrapper, ["Global", "Missing"]) + assert reader.base.disabled == ["Global"] + assert reader.modified is True + assert srv._reader_bases_and_fields(wrapper) == (["Base", "Global"], ["p"], ["c"]) + srv._apply_base_selection(reader, ["Base"]) + assert reader.base.enabled == ["Base"] + assert reader.updated is True + + +def test_advance_reader_time_update_and_fallback() -> None: + calls: list[object] = [] + + 
class WithUpdate: + def UpdateTimeStep(self, value: float) -> None: # noqa: N802 + calls.append(("time", value)) + + def Update(self) -> None: # noqa: N802 + calls.append("update") + + srv._advance_reader_time(WithUpdate(), 2.5) + assert calls == [("time", 2.5), "update"] + + class Exec: + def SetUpdateTimeStep(self, port: int, value: float) -> None: # noqa: N802 + calls.append(("exec", port, value)) + + class WithoutUpdate: + def GetExecutive(self): # noqa: N802 + return Exec() + + def Update(self) -> None: # noqa: N802 + calls.append("fallback-update") + + srv._advance_reader_time(WithoutUpdate(), 3.0) + assert ("exec", 0, 3.0) in calls + + +def test_advance_reader_time_swallows_reader_errors() -> None: + class Broken: + def UpdateTimeStep(self, _value: float) -> None: # noqa: N802 + raise RuntimeError("boom") + + srv._advance_reader_time(Broken(), 1.0) + + +class _Data: + def __init__(self, arrays: dict[str, tuple[float, float]]) -> None: + self.arrays = arrays + + def GetNumberOfArrays(self) -> int: # noqa: N802 + return len(self.arrays) + + def GetArrayName(self, i: int) -> str: # noqa: N802 + return list(self.arrays)[i] + + def GetArray(self, name: str): # noqa: N802 + rng = self.arrays.get(name) + return None if rng is None else types.SimpleNamespace(GetRange=lambda _idx: rng) + + +class _Leaf: + def __init__( + self, + point: dict[str, tuple[float, float]], + cell: dict[str, tuple[float, float]], + ) -> None: + self.point = _Data(point) + self.cell = _Data(cell) + + def GetPointData(self): # noqa: N802 + return self.point + + def GetCellData(self): # noqa: N802 + return self.cell + + +class _Blocks: + def __init__(self, blocks: list[object | None]) -> None: + self.blocks = blocks + + def GetNumberOfBlocks(self) -> int: # noqa: N802 + return len(self.blocks) + + def GetBlock(self, i: int): # noqa: N802 + return self.blocks[i] + + +def test_dataset_field_helpers() -> None: + dataset = _Blocks( + [ + None, + _Leaf({"p": (1.0, 2.0)}, {}), + _Leaf({"p": (-1.0, 
4.0)}, {"c": (5.0, 6.0)}), + ] + ) + assert srv._list_point_and_cell_fields(dataset) == (["p"], ["c"]) + assert srv._compute_field_range(dataset, "p", "point") == (-1.0, 4.0) + assert srv._compute_field_range(dataset, "missing", "point") == (0.0, 1.0) + + class NoData: + def GetPointData(self): # noqa: N802 + return None + + def GetCellData(self): # noqa: N802 + return _Data({}) + + assert srv._compute_field_range(_Blocks([NoData()]), "p", "point") == (0.0, 1.0) + + +def test_load_reader_plain_and_build_lut( + monkeypatch: pytest.MonkeyPatch, tmp_path: Path +) -> None: + class FakeCGNSReader: + def __init__(self) -> None: + self.file_name = None + self.calls: list[str] = [] + + def SetFileName(self, name: str) -> None: # noqa: N802 + self.file_name = name + + def UpdateInformation(self) -> None: # noqa: N802 + self.calls.append("info") + + def EnableAllBases(self) -> None: # noqa: N802 + self.calls.append("bases") + + def EnableAllPointArrays(self) -> None: # noqa: N802 + self.calls.append("points") + + def EnableAllCellArrays(self) -> None: # noqa: N802 + self.calls.append("cells") + + class FakeLookupTable: + def __init__(self) -> None: + self.hue = None + + def SetTableRange(self, *_args): + pass # noqa: ANN002 + + def SetNumberOfColors(self, *_args): + pass # noqa: ANN002 + + def SetHueRange(self, *args): + self.hue = args # noqa: ANN002 + + def SetSaturationRange(self, *_args): + pass # noqa: ANN002 + + def SetValueRange(self, *_args): + pass # noqa: ANN002 + + def Build(self): + pass + + fake_vtk = types.SimpleNamespace( + vtkCGNSReader=FakeCGNSReader, vtkLookupTable=FakeLookupTable + ) + monkeypatch.setitem(sys.modules, "vtk", fake_vtk) + path = tmp_path / "mesh.cgns" + reader = srv._load_reader(path) + assert reader.file_name == str(path) + assert reader.calls == ["info", "bases", "points", "cells"] + assert srv._build_lut("unknown", 0.0, 1.0).hue == (0.667, 0.0) + + +def test_install_vtk_log_router_with_fake_vtk(monkeypatch: pytest.MonkeyPatch) -> None: + 
calls: list[object] = [] + captured: dict[str, object] = {} + + class FakeOutputWindow: + @staticmethod + def SetInstance(instance) -> None: # noqa: N802 + captured["instance"] = instance + calls.append(("output", instance.__class__.__name__)) + + class FakeObject: + @staticmethod + def GlobalWarningDisplayOff() -> None: # noqa: N802 + calls.append("warnings-off") + + class FakeLogger: + VERBOSITY_OFF = 0 + + @staticmethod + def SetStderrVerbosity(value: int) -> None: # noqa: N802 + calls.append(("verbosity", value)) + + fake_vtk = types.SimpleNamespace( + vtkOutputWindow=FakeOutputWindow, + vtkObject=FakeObject, + vtkLogger=FakeLogger, + ) + monkeypatch.setitem(sys.modules, "vtk", fake_vtk) + monkeypatch.setattr(srv, "_VTK_LOG_ROUTER_INSTALLED", False) + + srv._install_vtk_log_router() + srv._install_vtk_log_router() + + output = captured["instance"] + output.DisplayText("text") + output.DisplayErrorText("error") + output.DisplayWarningText("warning") + output.DisplayGenericWarningText("generic") + output.DisplayDebugText("debug") + assert calls == [ + ("output", "_LoggingOutputWindow"), + "warnings-off", + ("verbosity", 0), + ] + + +def test_install_vtk_log_router_ignores_missing_and_old_vtk( + monkeypatch: pytest.MonkeyPatch, +) -> None: + real_import = __import__ + + def missing_vtk(name, globals=None, locals=None, fromlist=(), level=0): # noqa: A002, ANN001, ANN002 + if name == "vtk": + raise ImportError("no vtk") + return real_import(name, globals, locals, fromlist, level) + + monkeypatch.delitem(sys.modules, "vtk", raising=False) + monkeypatch.setattr("builtins.__import__", missing_vtk) + monkeypatch.setattr(srv, "_VTK_LOG_ROUTER_INSTALLED", False) + srv._install_vtk_log_router() + assert srv._VTK_LOG_ROUTER_INSTALLED is False + + class FakeOutputWindow: + @staticmethod + def SetInstance(_instance) -> None: # noqa: N802 + pass + + class FakeObject: + @staticmethod + def GlobalWarningDisplayOff() -> None: # noqa: N802 + pass + + class OldLogger: + 
@staticmethod + def SetStderrVerbosity(_value: int) -> None: # noqa: N802 + raise AttributeError("old") + + monkeypatch.setattr("builtins.__import__", real_import) + monkeypatch.setitem( + sys.modules, + "vtk", + types.SimpleNamespace( + vtkOutputWindow=FakeOutputWindow, + vtkObject=FakeObject, + vtkLogger=OldLogger, + ), + ) + srv._install_vtk_log_router() + assert srv._VTK_LOG_ROUTER_INSTALLED is True From 12aabf10e71cdddf264ff6cef7ed07ce8181b5a3 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 21:11:40 +0200 Subject: [PATCH 07/17] Potential fix for pull request finding 'Empty except' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/plaid/viewer/trame_app/server.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py index 861d402d..6d88edf0 100644 --- a/src/plaid/viewer/trame_app/server.py +++ b/src/plaid/viewer/trame_app/server.py @@ -141,6 +141,8 @@ def DisplayDebugText(self, text: str) -> None: # noqa: N802 - VTK API try: vtk.vtkLogger.SetStderrVerbosity(vtk.vtkLogger.VERBOSITY_OFF) except AttributeError: + # Some VTK builds expose ``vtkLogger`` but not this verbosity API. + # Ignore to keep compatibility and continue without hard failure. 
pass _VTK_LOG_ROUTER_INSTALLED = True From 1abf3afaf3715d66a951fa36bd0e6b65ae401e0b Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 21:12:27 +0200 Subject: [PATCH 08/17] Potential fix for pull request finding 'Empty except' Co-authored-by: Copilot Autofix powered by AI <223894421+github-code-quality[bot]@users.noreply.github.com> --- src/plaid/viewer/trame_app/server.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py index 6d88edf0..07a93589 100644 --- a/src/plaid/viewer/trame_app/server.py +++ b/src/plaid/viewer/trame_app/server.py @@ -1316,7 +1316,8 @@ async def _play_loop() -> None: fps = max(1, int(state.play_fps or 1)) await asyncio.sleep(1.0 / fps) except asyncio.CancelledError: - pass + # Expected when playback is stopped or restarted: allow task to exit silently. + return @state.change("playing") def _on_playing(**_: object) -> None: From c3fd893455721b9c19b1255bdc2481040a784d48 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 21:12:59 +0200 Subject: [PATCH 09/17] code quality --- src/plaid/viewer/cache.py | 5 +++-- tests/viewer/test_cache.py | 5 +---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/plaid/viewer/cache.py b/src/plaid/viewer/cache.py index 6fcd2bf3..1dae7b98 100644 --- a/src/plaid/viewer/cache.py +++ b/src/plaid/viewer/cache.py @@ -164,5 +164,6 @@ def handler(signum, frame, _prev=previous): try: signal.signal(sig, handler) - except (ValueError, OSError): - pass + except (ValueError, OSError) as exc: + logger.debug("Unable to install handler for signal %s: %s", sig, exc) + continue diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index 35cc113e..c7ad4fc4 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -188,8 +188,5 @@ def broken_signal(_sig, _handler): monkeypatch.setattr(cache_mod.signal, "getsignal", flaky_getsignal) 
monkeypatch.setattr(cache_mod.signal, "signal", broken_signal) - cache = CacheRoot(install_signal_handlers=True, run_orphan_sweep=False) - try: + with CacheRoot(install_signal_handlers=True, run_orphan_sweep=False): assert calls == {"getsignal": 2, "signal": 1} - finally: - cache.close() From efe45e59692266d6841f56d57994851090785cc0 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 21:54:03 +0200 Subject: [PATCH 10/17] windows hack --- tests/viewer/test_cache.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index c7ad4fc4..83e9e7e6 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from pathlib import Path import pytest @@ -37,7 +38,10 @@ def test_context_manager_removes_ephemeral_dir(tmp_path: Path, monkeypatch) -> N assert not path.exists() -def test_sweep_orphans_removes_dead_pid_dir(tmp_path: Path) -> None: +def test_sweep_orphans_removes_dead_pid_dir( + tmp_path: Path, monkeypatch: pytest.MonkeyPatch +) -> None: + monkeypatch.setattr(cache_mod, "_process_is_alive", lambda _pid: False) victim = tmp_path / "plaid-viewer-999999-deadbeefcafe" victim.mkdir() removed = sweep_orphans(tmp_path) @@ -96,6 +100,7 @@ def test_cache_runs_orphan_sweep_and_close_is_idempotent( tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) + monkeypatch.setattr(cache_mod, "_process_is_alive", lambda _pid: False) victim = tmp_path / "plaid-viewer-999999-deadbeef" victim.mkdir() cache = CacheRoot(install_signal_handlers=False, run_orphan_sweep=True) @@ -111,7 +116,7 @@ def test_cache_signal_handler_cleans_then_delegates( ) -> None: monkeypatch.setattr(cache_mod.tempfile, "gettempdir", lambda: str(tmp_path)) calls: list[tuple[str, object]] = [] - handlers: dict[int, object] = {} + handlers: 
dict[int, Callable[[int, object], None]] = {} def previous(signum, _frame): calls.append(("previous", signum)) @@ -119,7 +124,7 @@ def previous(signum, _frame): def fake_getsignal(_sig): return previous - def fake_signal(sig, handler): + def fake_signal(sig, handler: Callable[[int, object], None]): handlers[sig] = handler calls.append(("signal", sig)) From b4b2a9133a3e350a0d96b9ada06ccc6b3f7a0157 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Wed, 29 Apr 2026 22:07:24 +0200 Subject: [PATCH 11/17] win trick --- src/plaid/viewer/cache.py | 29 ++++++++++++++++++++++++++++- tests/viewer/test_cache.py | 8 ++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/src/plaid/viewer/cache.py b/src/plaid/viewer/cache.py index 1dae7b98..88b0aacb 100644 --- a/src/plaid/viewer/cache.py +++ b/src/plaid/viewer/cache.py @@ -7,7 +7,7 @@ 2. Signal handlers for ``SIGINT`` / ``SIGTERM``. 3. A FastAPI lifespan context (provided by callers). 4. An orphan sweep at startup that removes directories left behind by - previously-crashed processes (detected via ``os.kill(pid, 0)``). + previously-crashed processes. 
""" from __future__ import annotations @@ -30,10 +30,36 @@ _EPHEMERAL_PATTERN = re.compile(r"^plaid-viewer-(?P\d+)-(?P[0-9a-f]+)$") +def _windows_process_is_alive(pid: int) -> bool: # pragma: no cover + """Return process liveness on Windows without sending a signal.""" + import ctypes # noqa: PLC0415 + + error_access_denied = 5 + process_query_limited_information = 0x1000 + still_active = 259 + + windll = getattr(ctypes, "WinDLL") + get_last_error = getattr(ctypes, "get_last_error") + kernel32 = windll("kernel32", use_last_error=True) + handle = kernel32.OpenProcess(process_query_limited_information, False, pid) + if not handle: + return get_last_error() == error_access_denied + + try: + exit_code = ctypes.c_ulong() + if not kernel32.GetExitCodeProcess(handle, ctypes.byref(exit_code)): + return False + return exit_code.value == still_active + finally: + kernel32.CloseHandle(handle) + + def _process_is_alive(pid: int) -> bool: """Return ``True`` if a process with the given pid is still running.""" if pid <= 0: return False + if os.name == "nt": + return _windows_process_is_alive(pid) try: os.kill(pid, 0) except ProcessLookupError: @@ -109,6 +135,7 @@ def __init__( if install_signal_handlers: self._install_signal_handlers() else: + assert persistent_dir is not None self._path = Path(persistent_dir) self._path.mkdir(parents=True, exist_ok=True) self._closed = False diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index 83e9e7e6..a387c859 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -87,6 +87,14 @@ def no_such_process(_pid: int, _sig: int) -> None: assert _process_is_alive(123) is False +def test_process_is_alive_uses_windows_probe(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr(cache_mod.os, "name", "nt") + monkeypatch.setattr(cache_mod, "_windows_process_is_alive", lambda pid: pid == 123) + + assert _process_is_alive(123) is True + assert _process_is_alive(456) is False + + def 
test_sweep_orphans_ignores_non_dirs_and_non_matching_names(tmp_path: Path) -> None: (tmp_path / "plain-file").write_text("x") keep = tmp_path / "not-plaid-viewer" From 7bcc1cae585e8f841cc799a80c255b7038a58c93 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Thu, 30 Apr 2026 06:49:36 +0200 Subject: [PATCH 12/17] windows fix --- tests/viewer/test_cache.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index a387c859..70409933 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -60,6 +60,9 @@ def test_sweep_orphans_keeps_live_pid_dir(tmp_path: Path) -> None: def test_process_is_alive_branches(monkeypatch: pytest.MonkeyPatch) -> None: + # Force the POSIX-style branch; Windows delegation is tested separately below. + monkeypatch.setattr(cache_mod.os, "name", "posix") + assert _process_is_alive(0) is False def missing(_pid: int, _sig: int) -> None: From 7eaab56343bdbf24ee1a5ca4218ede128f358094 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Thu, 30 Apr 2026 20:04:53 +0200 Subject: [PATCH 13/17] update deps --- .github/workflows/doc.yml | 2 +- .github/workflows/testing.yml | 2 +- pyproject.toml | 16 ++++++++-------- uv.lock | 25 +++++++++++++------------ 4 files changed, 23 insertions(+), 22 deletions(-) diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 5b3dd093..a032cc9a 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -32,7 +32,7 @@ jobs: - name: Install dependencies run: | - uv sync --dev + uv sync --dev --extra viewer - name: Compile documentation run: | diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 26d53e48..33d5eeae 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -60,7 +60,7 @@ jobs: - name: Install dependencies (Linux) if: runner.os == 'Linux' run: | - uv sync --dev + uv sync --dev --extra viewer - name: Run tests (Linux) if: runner.os == 'Linux' diff --git 
a/pyproject.toml b/pyproject.toml index 3974d34f..bed9b34a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,7 @@ name = "pyplaid" authors = [{name = "Safran", email = "fabien.casenave@safrangroup.com"}] description = "A package that implements a data model tailored for AI and ML in the context of physics problems" requires-python = ">=3.11, <3.14" +dynamic = ["version"] keywords=[ "machine learning", "physics", @@ -37,8 +38,13 @@ dependencies = [ "matplotlib>=3.8,<4", "pydantic>=2.6,<3", ] - -dynamic = ["version"] +[project.optional-dependencies] +viewer = [ + "trame>=3.6,<4.0", + "trame-vtk>=2.8,<3.0", + "trame-vuetify>=2.7,<3.0", + "vtk>=9.6.1", +] [tool.setuptools_scm] write_to = "src/plaid/_version.py" @@ -85,12 +91,6 @@ dev = [ "sphinx-tabs>=3.4.7", "sphinxcontrib-bibtex>=2.6.5", ] -viewer = [ - "trame>=3.6,<4.0", - "trame-vtk>=2.8,<3.0", - "trame-vuetify>=2.7,<3.0", - "vtk>=9.6.1", -] [tool.coverage.run] omit = ["src/plaid/examples/*"] diff --git a/uv.lock b/uv.lock index 3d2e2a04..ceabff0d 100644 --- a/uv.lock +++ b/uv.lock @@ -2485,6 +2485,14 @@ dependencies = [ { name = "zarr" }, ] +[package.optional-dependencies] +viewer = [ + { name = "trame" }, + { name = "trame-vtk" }, + { name = "trame-vuetify" }, + { name = "vtk" }, +] + [package.dev-dependencies] dev = [ { name = "furo" }, @@ -2504,12 +2512,6 @@ dev = [ { name = "sphinx-tabs" }, { name = "sphinxcontrib-bibtex" }, ] -viewer = [ - { name = "trame" }, - { name = "trame-vtk" }, - { name = "trame-vuetify" }, - { name = "vtk" }, -] [package.metadata] requires-dist = [ @@ -2521,8 +2523,13 @@ requires-dist = [ { name = "pyyaml", specifier = ">=6,<7" }, { name = "scikit-learn", specifier = ">=1.4,<2" }, { name = "tqdm", specifier = ">=4.60,<5" }, + { name = "trame", marker = "extra == 'viewer'", specifier = ">=3.6,<4.0" }, + { name = "trame-vtk", marker = "extra == 'viewer'", specifier = ">=2.8,<3.0" }, + { name = "trame-vuetify", marker = "extra == 'viewer'", specifier = ">=2.7,<3.0" }, + { name 
= "vtk", marker = "extra == 'viewer'", specifier = ">=9.6.1" }, { name = "zarr", specifier = ">=3.1,<4" }, ] +provides-extras = ["viewer"] [package.metadata.requires-dev] dev = [ @@ -2542,12 +2549,6 @@ dev = [ { name = "sphinx-tabs", specifier = ">=3.4.7" }, { name = "sphinxcontrib-bibtex", specifier = ">=2.6.5" }, ] -viewer = [ - { name = "trame", specifier = ">=3.6,<4.0" }, - { name = "trame-vtk", specifier = ">=2.8,<3.0" }, - { name = "trame-vuetify", specifier = ">=2.7,<3.0" }, - { name = "vtk", specifier = ">=9.6.1" }, -] [[package]] name = "pytest" From 5e9839146e7389fa582326cabb9c283eba365f37 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Fri, 1 May 2026 08:10:37 +0200 Subject: [PATCH 14/17] update --- src/plaid/viewer/cli.py | 18 ++++++ src/plaid/viewer/config.py | 8 +++ src/plaid/viewer/trame_app/server.py | 86 +++++++++++++++++++++------- tests/viewer/test_cli.py | 14 +++++ tests/viewer/test_trame_server.py | 21 +++++++ 5 files changed, 125 insertions(+), 22 deletions(-) diff --git a/src/plaid/viewer/cli.py b/src/plaid/viewer/cli.py index f60bc460..15ce4b65 100644 --- a/src/plaid/viewer/cli.py +++ b/src/plaid/viewer/cli.py @@ -54,6 +54,22 @@ def _build_parser() -> argparse.ArgumentParser: "for public deployments (e.g. Hugging Face Spaces)." ), ) + parser.add_argument( + "--dataset-id", + default=None, + help=( + "Dataset id selected when the viewer starts. Use together with " + "--disable-dataset-change to pin the UI to that dataset." + ), + ) + parser.add_argument( + "--disable-dataset-change", + action="store_true", + help=( + "Hide the 'Dataset' dropdown; the selected dataset stays fixed " + "for the lifetime of the server." 
+ ), + ) parser.add_argument( "--cache-dir", @@ -129,6 +145,8 @@ def main(argv: list[str] | None = None) -> int: backend_id=args.backend_id, browse_roots=browse_roots, allow_root_change=not args.disable_root_change, + initial_dataset_id=args.dataset_id, + allow_dataset_change=not args.disable_dataset_change, ) with CacheRoot(persistent_dir=config.cache_dir) as cache: diff --git a/src/plaid/viewer/config.py b/src/plaid/viewer/config.py index d94bedb2..4e93c866 100644 --- a/src/plaid/viewer/config.py +++ b/src/plaid/viewer/config.py @@ -32,6 +32,12 @@ class ViewerConfig: panel to change the datasets root at runtime. Set to ``False`` for public deployments (e.g. Hugging Face Spaces) where the root must remain fixed to what the operator configured. + initial_dataset_id: Dataset selected when the viewer starts. When + ``None``, the first discovered local dataset is selected, falling + back to the first Hub dataset. + allow_dataset_change: When ``True`` (default), the trame UI exposes the + dataset dropdown. Set to ``False`` to pin the selection configured + by ``initial_dataset_id`` / startup discovery. """ datasets_root: Path | None = None @@ -41,3 +47,5 @@ class ViewerConfig: extra_cache_key_fields: dict[str, str] = field(default_factory=dict) browse_roots: tuple[Path, ...] = () allow_root_change: bool = True + initial_dataset_id: str | None = None + allow_dataset_change: bool = True diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py index 07a93589..af3eb75c 100644 --- a/src/plaid/viewer/trame_app/server.py +++ b/src/plaid/viewer/trame_app/server.py @@ -48,6 +48,24 @@ _C_STDERR_REROUTED = False +def _select_initial_dataset_id( + configured_id: str | None, + local_dataset_ids: list[str], + hub_dataset_ids: list[str], +) -> str | None: + """Return the startup dataset id for the given discovered datasets. + + A CLI-provided id wins when it exists in either source list. 
Otherwise the + viewer keeps its historical default: first local dataset, then first Hub + dataset, then ``None``. + """ + if configured_id in (local_dataset_ids + hub_dataset_ids): + return configured_id + if local_dataset_ids: + return local_dataset_ids[0] + return hub_dataset_ids[0] if hub_dataset_ids else None + + def _reroute_c_stderr() -> None: # pragma: no cover - process fd manipulation """Permanently redirect the process's stderr file descriptor to /dev/null. @@ -584,20 +602,27 @@ def build_server( # pragma: no cover - trame/VTK UI startup is not CI-headless # is a registered repo id. state.setdefault("hub_repos", list(dataset_service.hub_repos)) state.setdefault("hub_repo_input", "") - # Active side-panel tab: "local" drives ``datasets_root_text`` and - # directory browsing, "hub" drives the Hugging Face repo input. The - # selection only gates which form is rendered; registered datasets - # from either source always land in ``dataset_ids`` together. - state.setdefault("source_tab", "local") - # Initial ``dataset_id`` follows the default ``source_tab`` ("local"): # pick the first local dataset when any is available, otherwise fall # back to the first hub dataset (so a viewer launched with only # ``--hub-repo`` still has something selected). 
- initial_dataset_id = ( - local_dataset_ids[0] - if local_dataset_ids - else (hub_dataset_ids[0] if hub_dataset_ids else None) + initial_dataset_id = _select_initial_dataset_id( + dataset_service._config.initial_dataset_id, + local_dataset_ids, + hub_dataset_ids, + ) + if ( + dataset_service._config.initial_dataset_id is not None + and initial_dataset_id != dataset_service._config.initial_dataset_id + ): + logger.warning( + "Configured initial dataset %r was not found; falling back to %r", + dataset_service._config.initial_dataset_id, + initial_dataset_id, + ) + initial_source_tab = "hub" if initial_dataset_id in hub_dataset_ids else "local" + state.setdefault( + "allow_dataset_change", dataset_service._config.allow_dataset_change ) state.setdefault("dataset_id", initial_dataset_id) # Separate lists per source so the dropdown only shows datasets that @@ -610,6 +635,11 @@ def build_server( # pragma: no cover - trame/VTK UI startup is not CI-headless state.setdefault("splits", []) state.setdefault("split", None) + # Active side-panel tab: "local" drives ``datasets_root_text`` and + # directory browsing, "hub" drives the Hugging Face repo input. When an + # initial Hub dataset is configured, start on the Hub tab so state and UI + # remain coherent. + state.setdefault("source_tab", initial_source_tab) state.setdefault("sample_ids", []) state.setdefault("sample_id", None) state.setdefault("sample_index", 0) @@ -1113,6 +1143,8 @@ def _on_source_tab(**_: object) -> None: proactively pick the first id from the active list (or ``None`` when empty) so the dropdown always reflects the active tab. """ + if not state.allow_dataset_change: + return active_ids = ( list(state.hub_dataset_ids or []) if state.source_tab == "hub" @@ -1392,8 +1424,15 @@ def _reload_dataset_list() -> None: # Force ``dataset_id`` to change so ``@state.change('dataset_id')`` # fires and cascades through splits / samples / view refresh. # Pick from the list that matches the active source tab. 
- active_ids = hub_ids if state.source_tab == "hub" else local_ids - state.dataset_id = active_ids[0] if active_ids else None + if state.allow_dataset_change: + active_ids = hub_ids if state.source_tab == "hub" else local_ids + state.dataset_id = active_ids[0] if active_ids else None + elif state.dataset_id not in new_ids: + state.dataset_id = _select_initial_dataset_id( + dataset_service._config.initial_dataset_id, + local_ids, + hub_ids, + ) if not new_ids: state.splits = [] @@ -1491,8 +1530,10 @@ def _add_hub_repo() -> None: state.hub_repos = list(dataset_service.hub_repos) state.hub_repo_input = "" _reload_dataset_list() - # Select the newly added hub dataset to give immediate feedback. - if normalised in (state.dataset_ids or []): + # Select the newly added hub dataset to give immediate feedback when + # dataset selection is user-controlled. Pinned deployments keep their + # configured dataset. + if state.allow_dataset_change and normalised in (state.dataset_ids or []): state.dataset_id = normalised state.status = f"Streaming from {normalised}" @@ -1658,14 +1699,15 @@ def _reset_camera() -> None: # datasets), Hub tab -> ``hub_dataset_ids`` # (``init_streaming_from_hub`` datasets). The user never # sees ids from the inactive source in the same menu. - v3.VSelect( - label="Dataset", - v_model=("dataset_id",), - items=( - "source_tab === 'hub' ? hub_dataset_ids : local_dataset_ids", - ), - density="compact", - ) + with html.Div(v_if=("allow_dataset_change",)): + v3.VSelect( + label="Dataset", + v_model=("dataset_id",), + items=( + "source_tab === 'hub' ? 
hub_dataset_ids : local_dataset_ids", + ), + density="compact", + ) v3.VSelect( label="Split", diff --git a/tests/viewer/test_cli.py b/tests/viewer/test_cli.py index c64d2fc5..8817804f 100644 --- a/tests/viewer/test_cli.py +++ b/tests/viewer/test_cli.py @@ -16,6 +16,8 @@ def test_build_parser_defaults() -> None: assert args.datasets_root is None assert args.browse_roots is None assert args.disable_root_change is False + assert args.dataset_id is None + assert args.disable_dataset_change is False assert args.cache_dir is None assert args.host == "127.0.0.1" assert args.port == 8080 @@ -37,6 +39,9 @@ def test_build_parser_accepts_all_options(tmp_path: Path) -> None: str(browse_a), str(browse_b), "--disable-root-change", + "--dataset-id", + "dataset-b", + "--disable-dataset-change", "--cache-dir", str(cache_dir), "--host", @@ -55,6 +60,8 @@ def test_build_parser_accepts_all_options(tmp_path: Path) -> None: assert args.datasets_root == datasets_root assert args.browse_roots == [browse_a, browse_b] assert args.disable_root_change is True + assert args.dataset_id == "dataset-b" + assert args.disable_dataset_change is True assert args.cache_dir == cache_dir assert args.host == "0.0.0.0" assert args.port == 9000 @@ -83,6 +90,8 @@ def __init__(self, config): self.config = config calls.append(("dataset-root", config.datasets_root)) calls.append(("allow-root-change", config.allow_root_change)) + calls.append(("initial-dataset-id", config.initial_dataset_id)) + calls.append(("allow-dataset-change", config.allow_dataset_change)) def add_hub_dataset(self, repo_id: str) -> str: calls.append(("hub", repo_id)) @@ -130,6 +139,9 @@ def fake_import(name, globals=None, locals=None, fromlist=(), level=0): # noqa: "--port", "9001", "--disable-root-change", + "--dataset-id", + "dataset-b", + "--disable-dataset-change", "--hub-repo", "org/repo", "--hub-repo", @@ -142,6 +154,8 @@ def fake_import(name, globals=None, locals=None, fromlist=(), level=0): # noqa: assert ("stderr", None) in calls 
assert ("dataset-root", tmp_path / "persisted") in calls assert ("allow-root-change", False) in calls + assert ("initial-dataset-id", "dataset-b") in calls + assert ("allow-dataset-change", False) in calls assert ("hub", "org/repo") in calls assert ("hub", "bad/repo") in calls assert ("artifact-cache", tmp_path / "cache-root") in calls diff --git a/tests/viewer/test_trame_server.py b/tests/viewer/test_trame_server.py index dc92fee2..5f237a43 100644 --- a/tests/viewer/test_trame_server.py +++ b/tests/viewer/test_trame_server.py @@ -100,3 +100,24 @@ def test_load_reader_series_uses_vtk_cgns_file_series_reader( "EnableAllPointArrays", "EnableAllCellArrays", ] + + +def test_select_initial_dataset_id_prefers_configured_dataset() -> None: + from plaid.viewer.trame_app.server import ( + _select_initial_dataset_id, # noqa: PLC0415 + ) + + assert _select_initial_dataset_id("b", ["a", "b"], ["org/repo"]) == "b" + assert ( + _select_initial_dataset_id("org/repo", ["a", "b"], ["org/repo"]) == "org/repo" + ) + + +def test_select_initial_dataset_id_falls_back_to_existing_dataset() -> None: + from plaid.viewer.trame_app.server import ( + _select_initial_dataset_id, # noqa: PLC0415 + ) + + assert _select_initial_dataset_id("missing", ["a", "b"], ["org/repo"]) == "a" + assert _select_initial_dataset_id(None, [], ["org/repo"]) == "org/repo" + assert _select_initial_dataset_id(None, [], []) is None From 76d71944fa025a57e994b8fbacdad21abbc10b17 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Fri, 1 May 2026 09:07:38 +0200 Subject: [PATCH 15/17] wip --- uv.lock | 60 ++++++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 44 insertions(+), 16 deletions(-) diff --git a/uv.lock b/uv.lock index ceabff0d..d25a8384 100644 --- a/uv.lock +++ b/uv.lock @@ -1067,7 +1067,7 @@ wheels = [ [[package]] name = "huggingface-hub" -version = "1.12.2" +version = "1.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "filelock" }, @@ -1080,9 +1080,9 @@ 
dependencies = [ { name = "typer" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3e/9f/3fda8b014db3ae239addc9b48b35c2cf7d318950b430712f34a2473ef81d/huggingface_hub-1.12.2.tar.gz", hash = "sha256:282c4999e641c89affdc4c02c265eddea944c1390dc19e89dac8ad3ae76dbdaf", size = 763393, upload-time = "2026-04-29T09:45:09.202Z" } +sdist = { url = "https://files.pythonhosted.org/packages/89/ff/ec7ed2eb43bd7ce8bb2233d109cc235c3e807ffe5e469dc09db261fac05e/huggingface_hub-1.13.0.tar.gz", hash = "sha256:f6df2dac5abe82ce2fe05873d10d5ff47bc677d616a2f521f4ee26db9415d9d0", size = 781788, upload-time = "2026-04-30T11:57:33.858Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/71/c1/1fa4162f6dd53259daf2ad31385273341821fa0acce164cd03971937a60e/huggingface_hub-1.12.2-py3-none-any.whl", hash = "sha256:7968e897fdbc6343c871c240d87d4434efe0ad9f80d57daa1cc5678c6d148529", size = 647757, upload-time = "2026-04-29T09:45:07.63Z" }, + { url = "https://files.pythonhosted.org/packages/93/db/4b1cdae9460ae1f3ca020cd767f013430ce23eb1d9c890ae3a0609b38d26/huggingface_hub-1.13.0-py3-none-any.whl", hash = "sha256:e942cb50d6a08dd5306688b1ac05bda157fd2fcc88b63dae405f7bd0d3234005", size = 660643, upload-time = "2026-04-30T11:57:31.802Z" }, ] [[package]] @@ -2482,7 +2482,8 @@ dependencies = [ { name = "pyyaml" }, { name = "scikit-learn" }, { name = "tqdm" }, - { name = "zarr" }, + { name = "zarr", version = "3.1.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, + { name = "zarr", version = "3.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, ] [package.optional-dependencies] @@ -3378,14 +3379,14 @@ wheels = [ [[package]] name = "trame-client" -version = "3.12.0" +version = "3.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "trame-common" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/5c/75/5cfd6ee2c01d0d00eca322a07356072196b4583ad6f2709c564a00f17a69/trame_client-3.12.0.tar.gz", hash = "sha256:cf722b2bd9d36fda700ef7556f438269c1d7d84644fdae95cfa277dd54a51ff0", size = 245933, upload-time = "2026-04-29T16:48:59.577Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/69/472f6e77e549b4a3129523ae959321ad751425fd92d75cbd5d0fe427685c/trame_client-3.12.1.tar.gz", hash = "sha256:7c310bce0a1d21e978f8c5e55d9b14e07111749164046f6678c2b2edbaf7bfc1", size = 246229, upload-time = "2026-04-29T22:33:55.69Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b0/b4/39f2e15dbce29c13841d58b1443fb3bb42c66249b3f2f72389f858e40cfa/trame_client-3.12.0-py3-none-any.whl", hash = "sha256:b30c4dc17e8941ed752d0910f3f1a63fff08982f024b17347ee52031886edc89", size = 250410, upload-time = "2026-04-29T16:48:57.371Z" }, + { url = "https://files.pythonhosted.org/packages/ac/98/4906ab32589659039a9dc4d3c1a606fd8cacbef436c27e191e74864d5d0d/trame_client-3.12.1-py3-none-any.whl", hash = "sha256:e72306222cd5520a468b5ca28bb65d8e44fe7981ddc861b78eea13c62abbcd43", size = 250749, upload-time = "2026-04-29T22:33:53.859Z" }, ] [[package]] @@ -3436,7 +3437,7 @@ wheels = [ [[package]] name = "typer" -version = "0.25.0" +version = "0.25.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -3444,9 +3445,9 @@ dependencies = [ { name = "rich" }, { name = "shellingham" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7b/27/ede8cec7596e0041ba7e7b80b47d132562f56ff454313a16f6084e555c9f/typer-0.25.0.tar.gz", hash = "sha256:123eaf9f19bb40fd268310e12a542c0c6b4fab9c98d9d23342a01ff95e3ce930", size = 120150, upload-time = "2026-04-26T08:46:14.767Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/51/9aed62104cea109b820bbd6c14245af756112017d309da813ef107d42e7e/typer-0.25.1.tar.gz", hash = "sha256:9616eb8853a09ffeabab1698952f33c6f29ffdbceb4eaeecf571880e8d7664cc", size = 
122276, upload-time = "2026-04-30T19:32:16.964Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9a/72/193d4e586ec5a4db834a36bbeb47641a62f951f114ffd0fe5b1b46e8d56f/typer-0.25.0-py3-none-any.whl", hash = "sha256:ac01b48823d3db9a83c9e164338057eadbb1c9957a2a6b4eeb486669c560b5dc", size = 55993, upload-time = "2026-04-26T08:46:15.889Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f9/2b3ff4e56e5fa7debfaf9eb135d0da96f3e9a1d5b27222223c7296336e5f/typer-0.25.1-py3-none-any.whl", hash = "sha256:75caa44ed46a03fb2dab8808753ffacdbfea88495e74c85a28c5eefcf5f39c89", size = 58409, upload-time = "2026-04-30T19:32:18.271Z" }, ] [[package]] @@ -3735,19 +3736,46 @@ wheels = [ name = "zarr" version = "3.1.6" source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.12' and sys_platform == 'win32'", + "python_full_version < '3.12' and sys_platform == 'emscripten'", + "python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] dependencies = [ - { name = "donfig" }, - { name = "google-crc32c" }, - { name = "numcodecs" }, - { name = "numpy" }, - { name = "packaging" }, - { name = "typing-extensions" }, + { name = "donfig", marker = "python_full_version < '3.12'" }, + { name = "google-crc32c", marker = "python_full_version < '3.12'" }, + { name = "numcodecs", marker = "python_full_version < '3.12'" }, + { name = "numpy", marker = "python_full_version < '3.12'" }, + { name = "packaging", marker = "python_full_version < '3.12'" }, + { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/31/5a/b8a0cf39a14c770c30bd1f2d120c54000c8cd9e84e8e79f38d9a7ce58071/zarr-3.1.6.tar.gz", hash = "sha256:d95e72cbea4b90e9a70679468b8266400331756232576ae2b43400ac5108d0eb", size = 386531, upload-time = "2026-03-23T17:25:18.748Z" } wheels = [ { url = 
"https://files.pythonhosted.org/packages/de/7c/ba8ca8cbe9dbef8e83a95fc208fed8e6686c98b4719aaa0aa7f3d31fe390/zarr-3.1.6-py3-none-any.whl", hash = "sha256:b5a82c5079d1c3d4ee8f06746fa3b9a98a7d804300fa3f4be154362a33e1207e", size = 295655, upload-time = "2026-03-23T17:25:17.189Z" }, ] +[[package]] +name = "zarr" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12' and sys_platform == 'win32'", + "python_full_version >= '3.12' and sys_platform == 'emscripten'", + "python_full_version >= '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'", +] +dependencies = [ + { name = "donfig", marker = "python_full_version >= '3.12'" }, + { name = "google-crc32c", marker = "python_full_version >= '3.12'" }, + { name = "numcodecs", marker = "python_full_version >= '3.12'" }, + { name = "numpy", marker = "python_full_version >= '3.12'" }, + { name = "packaging", marker = "python_full_version >= '3.12'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.12'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/27/8f391a4304f503ab6f4df6e1724380ea2e35e78a5d1ba973ba2b1347df5b/zarr-3.2.0.tar.gz", hash = "sha256:5867fa8dd7910541075531368c8eaa6f35957ab5413c68c168830e83948665ed", size = 454948, upload-time = "2026-04-30T22:18:03.074Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/9e/2e99d08824f300046eba83b480d6be17f771f57eed80dd7c162381cbe4de/zarr-3.2.0-py3-none-any.whl", hash = "sha256:c693bd4ae24328f242e47e9e1ced221e919d9f62cad71030fd059e398320e555", size = 318784, upload-time = "2026-04-30T22:18:01.13Z" }, +] + [[package]] name = "zipp" version = "3.23.1" From b4db6b71b633aaa12d80cbbd9f33c903817120dc Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Fri, 1 May 2026 07:50:52 +0000 Subject: [PATCH 16/17] wip --- docs/source/core_concepts/viewer.md | 21 +++++-- src/plaid/viewer/cache.py | 55 +++++++------------ src/plaid/viewer/cli.py | 12 +--- 
src/plaid/viewer/config.py | 4 -- .../services/paraview_artifact_service.py | 32 ++++++++--- tests/viewer/test_cache.py | 10 ---- tests/viewer/test_cli.py | 11 +--- .../viewer/test_paraview_artifact_service.py | 20 +++++++ 8 files changed, 82 insertions(+), 83 deletions(-) diff --git a/docs/source/core_concepts/viewer.md b/docs/source/core_concepts/viewer.md index b89508cc..56da518c 100644 --- a/docs/source/core_concepts/viewer.md +++ b/docs/source/core_concepts/viewer.md @@ -40,7 +40,6 @@ Useful options: | Option | Default | Description | | ----------------- | ----------- | ------------------------------------------------------------------------------------------------ | | `--datasets-root` | *required* | Directory containing one sub-directory per PLAID dataset. A single-dataset directory also works. | -| `--cache-dir` | `None` | Persistent artifact cache. When omitted, an ephemeral temp dir is used and cleaned at shutdown. | | `--host` | `127.0.0.1` | Bind address for the trame HTTP server. | | `--port` | `8080` | Port exposed by the trame HTTP server. | | `--backend-id` | `disk` | PLAID backend identifier embedded in sample references and the cache key. | @@ -145,7 +144,9 @@ error. ## Cache layout -Artifacts are written under: +Artifacts are written under an **ephemeral** per-process temp directory +created by `plaid.viewer.cache.CacheRoot` (named +`plaid-viewer-{pid}-{token}` under `tempfile.gettempdir()`): ``` /datasets///// @@ -155,10 +156,18 @@ Artifacts are written under: metadata.json # cache key, sample ref, export version, ... ``` +The cache holds **at most one artifact at a time**: once VTK has loaded +a sample's CGNS into memory the on-disk copy is no longer needed, so +the next `ensure_artifact` call removes the previous folder before +writing the new one. 
+ +The whole cache root is deleted at shutdown through four complementary +layers: `atexit`, `SIGINT` / `SIGTERM` handlers, the `with CacheRoot()` +context manager used by the CLI, and an orphan sweep at startup that +removes directories left behind by previously-crashed processes. + The cache key is a SHA-256 of the sample reference, backend id, PLAID -version and `ViewerConfig.export_version`. Re-running the viewer with -the same inputs reuses existing artifacts; bumping `export_version` -invalidates them. +version and `ViewerConfig.export_version`. ## Programmatic usage @@ -170,7 +179,7 @@ from plaid.viewer.services import ParaviewArtifactService, PlaidDatasetService from plaid.viewer.trame_app.server import build_server config = ViewerConfig(datasets_root=Path("/path/to/datasets")) -with CacheRoot(persistent_dir=config.cache_dir) as cache: +with CacheRoot() as cache: datasets = PlaidDatasetService(config) artifacts = ParaviewArtifactService(datasets, cache.path) server = build_server(datasets, artifacts) diff --git a/src/plaid/viewer/cache.py b/src/plaid/viewer/cache.py index 88b0aacb..451988b6 100644 --- a/src/plaid/viewer/cache.py +++ b/src/plaid/viewer/cache.py @@ -1,11 +1,11 @@ -"""Ephemeral-by-default artifact cache for the dataset viewer. +"""Ephemeral artifact cache for the dataset viewer. -The cache lives under a per-process temporary directory by default and is -removed at shutdown. Four cleanup layers cover all practical failure modes: +The cache lives under a per-process temporary directory and is removed at +shutdown. Four cleanup layers cover all practical failure modes: 1. ``atexit.register`` for normal Python exit. 2. Signal handlers for ``SIGINT`` / ``SIGTERM``. -3. A FastAPI lifespan context (provided by callers). +3. A context manager (``with CacheRoot() as cache:`` in the CLI). 4. An orphan sweep at startup that removes directories left behind by previously-crashed processes. 
""" @@ -105,39 +105,28 @@ def sweep_orphans(temp_root: Path | None = None) -> list[Path]: class CacheRoot: - """Context-manager-friendly artifact cache directory. + """Context-manager-friendly ephemeral artifact cache directory. - When ``persistent_dir`` is ``None`` (the default), a new ephemeral tempdir - named ``plaid-viewer-{pid}-{token}`` is created. The directory is - removed at process exit (``atexit``), on ``SIGINT`` / ``SIGTERM``, and - when the context manager is closed. - - When ``persistent_dir`` is provided, that directory is used as-is and is - **not** removed. Callers wanting persistence pass this. + Creates a new tempdir named ``plaid-viewer-{pid}-{token}`` under the OS + temp root. The directory is removed at process exit (``atexit``), on + ``SIGINT`` / ``SIGTERM``, and when the context manager is closed. """ def __init__( self, - persistent_dir: Path | None = None, *, install_signal_handlers: bool = True, run_orphan_sweep: bool = True, ) -> None: - self._ephemeral = persistent_dir is None - if self._ephemeral: - if run_orphan_sweep: - sweep_orphans() - token = uuid.uuid4().hex[:12] - base = Path(tempfile.gettempdir()) - self._path = base / f"{_EPHEMERAL_PREFIX}{os.getpid()}-{token}" - self._path.mkdir(parents=True, exist_ok=False) - atexit.register(self._safe_cleanup) - if install_signal_handlers: - self._install_signal_handlers() - else: - assert persistent_dir is not None - self._path = Path(persistent_dir) - self._path.mkdir(parents=True, exist_ok=True) + if run_orphan_sweep: + sweep_orphans() + token = uuid.uuid4().hex[:12] + base = Path(tempfile.gettempdir()) + self._path = base / f"{_EPHEMERAL_PREFIX}{os.getpid()}-{token}" + self._path.mkdir(parents=True, exist_ok=False) + atexit.register(self._safe_cleanup) + if install_signal_handlers: + self._install_signal_handlers() self._closed = False # ------------------------------------------------------------------ API @@ -147,18 +136,12 @@ def path(self) -> Path: """Root directory of the cache.""" 
return self._path - @property - def is_ephemeral(self) -> bool: - """Whether the cache directory is automatically cleaned up.""" - return self._ephemeral - def close(self) -> None: - """Remove the cache directory if it is ephemeral.""" + """Remove the cache directory.""" if self._closed: return self._closed = True - if self._ephemeral: - self._safe_cleanup() + self._safe_cleanup() def __enter__(self) -> "CacheRoot": # noqa: D105 return self diff --git a/src/plaid/viewer/cli.py b/src/plaid/viewer/cli.py index 15ce4b65..90e94636 100644 --- a/src/plaid/viewer/cli.py +++ b/src/plaid/viewer/cli.py @@ -71,15 +71,6 @@ def _build_parser() -> argparse.ArgumentParser: ), ) - parser.add_argument( - "--cache-dir", - type=Path, - default=None, - help=( - "Persistent artifact cache directory. When omitted, an ephemeral " - "per-process temp directory is used and cleaned up at shutdown." - ), - ) parser.add_argument("--host", default="127.0.0.1", help="Trame server host.") parser.add_argument("--port", type=int, default=8080, help="Trame server port.") parser.add_argument( @@ -141,7 +132,6 @@ def main(argv: list[str] | None = None) -> int: browse_roots = tuple(args.browse_roots) if args.browse_roots else () config = ViewerConfig( datasets_root=effective_datasets_root, - cache_dir=args.cache_dir, backend_id=args.backend_id, browse_roots=browse_roots, allow_root_change=not args.disable_root_change, @@ -149,7 +139,7 @@ def main(argv: list[str] | None = None) -> int: allow_dataset_change=not args.disable_dataset_change, ) - with CacheRoot(persistent_dir=config.cache_dir) as cache: + with CacheRoot() as cache: dataset_service = PlaidDatasetService(config) for repo_id in args.hub_repo or []: try: diff --git a/src/plaid/viewer/config.py b/src/plaid/viewer/config.py index 4e93c866..0c075d69 100644 --- a/src/plaid/viewer/config.py +++ b/src/plaid/viewer/config.py @@ -16,9 +16,6 @@ class ViewerConfig: (or the root may itself be such a folder). 
When ``None``, the viewer starts without a root and the user is expected to pick one interactively (when ``allow_root_change`` is True). - cache_dir: Root directory for ParaView artifacts. When ``None``, an - ephemeral per-process directory is created under the OS temp root - and cleaned up at shutdown. backend_id: PLAID backend identifier embedded in :class:`SampleRef` objects and in the artifact cache key. export_version: Opaque string mixed into the artifact cache key. Bump @@ -41,7 +38,6 @@ class ViewerConfig: """ datasets_root: Path | None = None - cache_dir: Path | None = None backend_id: str = "disk" export_version: str = "1" extra_cache_key_fields: dict[str, str] = field(default_factory=dict) diff --git a/src/plaid/viewer/services/paraview_artifact_service.py b/src/plaid/viewer/services/paraview_artifact_service.py index 3585bb0c..e9e4488e 100644 --- a/src/plaid/viewer/services/paraview_artifact_service.py +++ b/src/plaid/viewer/services/paraview_artifact_service.py @@ -149,29 +149,47 @@ def __init__( self._export_version = export_version self._extra = dict(extra_cache_key_fields or {}) self._by_id: dict[str, ParaviewArtifact] = {} + # Path of the most recently ensured artifact. The cache keeps at most + # one artifact on disk at any time: once VTK has read the CGNS file + # into memory (``vtkCGNSReader.Update()`` in the trame pipeline), the + # on-disk copy is no longer needed, so we delete it as soon as the + # user asks for another sample. + self._current_root: Path | None = None # ------------------------------------------------------------ Public API def ensure_artifact( self, ref: SampleRef, *, force: bool = False ) -> ParaviewArtifact: - """Return a :class:`ParaviewArtifact` for ``ref``, creating it if needed.""" + """Return a :class:`ParaviewArtifact` for ``ref``, creating it if needed. + + The cache holds at most one artifact: any previously-ensured artifact + whose layout root differs from ``ref``'s is removed from disk. 
+ """ cache_key = _build_cache_key( ref, export_version=self._export_version, extra=self._extra ) layout = _artifact_layout(self._cache_root, ref, cache_key) + # Evict the previous artifact (if any) as soon as the user requests + # a different one. ``force`` always rebuilds the current one. + if ( + self._current_root is not None + and self._current_root != layout.root + and self._current_root.exists() + ): + shutil.rmtree(self._current_root, ignore_errors=True) + self._by_id.clear() if force and layout.root.exists(): shutil.rmtree(layout.root) if layout.metadata_path.is_file() and not force: artifact = self._load_existing(layout, cache_key) - self._by_id[artifact.artifact_id] = artifact - return artifact - - layout.root.mkdir(parents=True, exist_ok=True) - artifact = self._create(ref, layout, cache_key) - self._by_id[artifact.artifact_id] = artifact + else: + layout.root.mkdir(parents=True, exist_ok=True) + artifact = self._create(ref, layout, cache_key) + self._by_id = {artifact.artifact_id: artifact} + self._current_root = layout.root return artifact def get(self, artifact_id: str) -> ParaviewArtifact: diff --git a/tests/viewer/test_cache.py b/tests/viewer/test_cache.py index 70409933..4bd4a9ac 100644 --- a/tests/viewer/test_cache.py +++ b/tests/viewer/test_cache.py @@ -16,20 +16,10 @@ def test_ephemeral_cache_is_cleaned_up_on_close(tmp_path: Path, monkeypatch) -> cache = CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) path = cache.path assert path.exists() - assert cache.is_ephemeral is True cache.close() assert not path.exists() -def test_persistent_cache_is_preserved(tmp_path: Path) -> None: - target = tmp_path / "persistent" - cache = CacheRoot(persistent_dir=target, install_signal_handlers=False) - assert cache.path == target - assert cache.is_ephemeral is False - cache.close() - assert target.exists() - - def test_context_manager_removes_ephemeral_dir(tmp_path: Path, monkeypatch) -> None: monkeypatch.setenv("TMPDIR", str(tmp_path)) with 
CacheRoot(install_signal_handlers=False, run_orphan_sweep=False) as cache: diff --git a/tests/viewer/test_cli.py b/tests/viewer/test_cli.py index 8817804f..ef010d1b 100644 --- a/tests/viewer/test_cli.py +++ b/tests/viewer/test_cli.py @@ -18,7 +18,6 @@ def test_build_parser_defaults() -> None: assert args.disable_root_change is False assert args.dataset_id is None assert args.disable_dataset_change is False - assert args.cache_dir is None assert args.host == "127.0.0.1" assert args.port == 8080 assert args.backend_id == "disk" @@ -27,7 +26,6 @@ def test_build_parser_defaults() -> None: def test_build_parser_accepts_all_options(tmp_path: Path) -> None: datasets_root = tmp_path / "datasets" - cache_dir = tmp_path / "cache" browse_a = tmp_path / "a" browse_b = tmp_path / "b" @@ -42,8 +40,6 @@ def test_build_parser_accepts_all_options(tmp_path: Path) -> None: "--dataset-id", "dataset-b", "--disable-dataset-change", - "--cache-dir", - str(cache_dir), "--host", "0.0.0.0", "--port", @@ -62,7 +58,6 @@ def test_build_parser_accepts_all_options(tmp_path: Path) -> None: assert args.disable_root_change is True assert args.dataset_id == "dataset-b" assert args.disable_dataset_change is True - assert args.cache_dir == cache_dir assert args.host == "0.0.0.0" assert args.port == 9000 assert args.backend_id == "zarr" @@ -75,8 +70,8 @@ def test_main_wires_services_without_starting_real_runtime( calls: list[tuple[str, object]] = [] class FakeCache: - def __init__(self, persistent_dir=None): - calls.append(("cache", persistent_dir)) + def __init__(self): + calls.append(("cache", None)) self.path = tmp_path / "cache-root" def __enter__(self): @@ -132,8 +127,6 @@ def fake_import(name, globals=None, locals=None, fromlist=(), level=0): # noqa: assert ( cli_mod.main( [ - "--cache-dir", - str(tmp_path / "cache"), "--host", "0.0.0.0", "--port", diff --git a/tests/viewer/test_paraview_artifact_service.py b/tests/viewer/test_paraview_artifact_service.py index 7f54da4d..013d9620 100644 --- 
a/tests/viewer/test_paraview_artifact_service.py +++ b/tests/viewer/test_paraview_artifact_service.py @@ -102,6 +102,26 @@ def test_force_recreates_artifact(tmp_path: Path, ref: SampleRef) -> None: assert second.artifact_id == first.artifact_id # cache key is deterministic +def test_ensure_artifact_evicts_previous_artifact(tmp_path: Path) -> None: + """The cache keeps at most one artifact on disk.""" + service = ParaviewArtifactService(_FakeDatasetService(), tmp_path) + ref_a = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="0") + ref_b = SampleRef(backend_id="disk", dataset_id="ds", split="train", sample_id="1") + + first = service.ensure_artifact(ref_a) + first_root = first.cgns_path.parent + assert first_root.exists() + + second = service.ensure_artifact(ref_b) + second_root = second.cgns_path.parent + assert second_root.exists() + assert not first_root.exists() + # The by-id lookup only exposes the current artifact. + with pytest.raises(KeyError): + service.get(first.artifact_id) + assert service.get(second.artifact_id) is second + + def test_cache_key_is_deterministic(ref: SampleRef) -> None: key_a = _build_cache_key(ref, export_version="1") key_b = _build_cache_key(ref, export_version="1") From d4dc11a12e506dc7362fd987f7498cb5933fe8f2 Mon Sep 17 00:00:00 2001 From: Fabien Casenave Date: Fri, 1 May 2026 14:39:40 +0200 Subject: [PATCH 17/17] wip --- src/plaid/viewer/trame_app/server.py | 44 ++++++++++++++++++++++++++++ tests/viewer/test_trame_helpers.py | 28 ++++++++++++++++++ 2 files changed, 72 insertions(+) diff --git a/src/plaid/viewer/trame_app/server.py b/src/plaid/viewer/trame_app/server.py index af3eb75c..7f39c910 100644 --- a/src/plaid/viewer/trame_app/server.py +++ b/src/plaid/viewer/trame_app/server.py @@ -378,6 +378,20 @@ def _visit(obj): return lo, hi +def _show_scalar_bar_for_field( + scalar_bar, lut, field_name: str, association: str +) -> None: + """Display ``scalar_bar`` as the legend for the active coloured field.""" + 
scalar_bar.SetLookupTable(lut) + scalar_bar.SetTitle(f"{field_name} ({association})") + scalar_bar.SetVisibility(True) + + +def _hide_scalar_bar(scalar_bar) -> None: + """Hide ``scalar_bar`` when no scalar field is currently coloured.""" + scalar_bar.SetVisibility(False) + + # --------------------------------------------------------------------------- # Pipeline # --------------------------------------------------------------------------- @@ -411,6 +425,17 @@ def __init__(self) -> None: self.interactor.Initialize() self._interactor_style = interactor_style # keep a reference alive + # ParaView-like orientation marker anchored in the bottom-left corner. + # The widget is attached to the server-side interactor so it is rendered + # directly into the frames streamed by ``VtkRemoteView``. + self.axes_actor = vtk.vtkAxesActor() + self.orientation_marker = vtk.vtkOrientationMarkerWidget() + self.orientation_marker.SetOrientationMarker(self.axes_actor) + self.orientation_marker.SetInteractor(self.interactor) + self.orientation_marker.SetViewport(0.0, 0.0, 0.18, 0.18) + self.orientation_marker.SetEnabled(1) + self.orientation_marker.InteractiveOff() + self.reader = None self.actor = vtk.vtkActor() # Gouraud shading (per-vertex normals interpolated across the @@ -427,6 +452,19 @@ def __init__(self) -> None: self.lut.SetHueRange(0.667, 0.0) # blue -> red self.lut.Build() + # Colour legend for the selected point/cell field. It stays hidden + # until scalar colouring is enabled by the field dropdown. 
+ self.scalar_bar = vtk.vtkScalarBarActor() + self.scalar_bar.SetLookupTable(self.lut) + self.scalar_bar.SetNumberOfLabels(5) + self.scalar_bar.SetPosition(0.88, 0.08) + self.scalar_bar.SetWidth(0.1) + self.scalar_bar.SetHeight(0.35) + self.scalar_bar.GetTitleTextProperty().SetColor(1.0, 1.0, 1.0) + self.scalar_bar.GetLabelTextProperty().SetColor(1.0, 1.0, 1.0) + _hide_scalar_bar(self.scalar_bar) + self.renderer.AddActor2D(self.scalar_bar) + self._current_dataset = None def load(self, cgns_path: Path) -> None: @@ -468,8 +506,10 @@ def update( self.lut = _build_lut(cmap, lo, hi) self.mapper.SetLookupTable(self.lut) self.mapper.SetScalarRange(lo, hi) + _show_scalar_bar_for_field(self.scalar_bar, self.lut, field, association) else: self.mapper.ScalarVisibilityOff() + _hide_scalar_bar(self.scalar_bar) self.actor.GetProperty().SetEdgeVisibility(bool(show_edges)) self.actor.GetProperty().SetLineWidth(1.0) @@ -737,6 +777,7 @@ def _clear_scene(status: str | None = None) -> None: pipeline.reader = None pipeline.mapper.RemoveAllInputConnections(0) pipeline.mapper.ScalarVisibilityOff() + _hide_scalar_bar(pipeline.scalar_bar) state.base_options = [] state.active_base = None state.field_options = [] @@ -807,6 +848,7 @@ def _refresh_samples() -> None: pipeline.reader = None pipeline.mapper.RemoveAllInputConnections(0) pipeline.mapper.ScalarVisibilityOff() + _hide_scalar_bar(pipeline.scalar_bar) state.base_options = [] state.active_base = None state.field_options = [] @@ -934,6 +976,8 @@ def _refresh_sample_view_impl() -> None: with _silence_stderr(): artifact = artifact_service.ensure_artifact(ref, force=force) pipeline.load(artifact.cgns_path) + if pipeline.reader is None: + raise RuntimeError("VTK reader was not initialised") # Disable zone-less bases *before* the reader's first Update() # so ``vtkCGNSReader`` does not log ``No zones in base ...`` # warnings for auxiliary bases like ``Global``. 
diff --git a/tests/viewer/test_trame_helpers.py b/tests/viewer/test_trame_helpers.py index 5393cb5d..06f2b745 100644 --- a/tests/viewer/test_trame_helpers.py +++ b/tests/viewer/test_trame_helpers.py @@ -172,6 +172,34 @@ def GetCellData(self): # noqa: N802 assert srv._compute_field_range(_Blocks([NoData()]), "p", "point") == (0.0, 1.0) +def test_scalar_bar_helpers() -> None: + class ScalarBar: + def __init__(self) -> None: + self.lut = None + self.title = None + self.visible = None + + def SetLookupTable(self, lut) -> None: # noqa: N802, ANN001 + self.lut = lut + + def SetTitle(self, title: str) -> None: # noqa: N802 + self.title = title + + def SetVisibility(self, visible: bool) -> None: # noqa: N802, FBT001 + self.visible = visible + + scalar_bar = ScalarBar() + lut = object() + + srv._show_scalar_bar_for_field(scalar_bar, lut, "pressure", "point") + assert scalar_bar.lut is lut + assert scalar_bar.title == "pressure (point)" + assert scalar_bar.visible is True + + srv._hide_scalar_bar(scalar_bar) + assert scalar_bar.visible is False + + def test_load_reader_plain_and_build_lut( monkeypatch: pytest.MonkeyPatch, tmp_path: Path ) -> None: