From 7842fb57204e7b1c284118c207fdd125b0f66807 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Apr 2026 23:55:45 +0000 Subject: [PATCH 01/39] build(deps): bump pillow from 12.1.1 to 12.2.0 (#650) Bumps [pillow](https://github.com/python-pillow/Pillow) from 12.1.1 to 12.2.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/12.1.1...12.2.0) --- updated-dependencies: - dependency-name: pillow dependency-version: 12.2.0 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- requirements.txt | 184 +++++++++++++++++++++++------------------------ uv.lock | 110 ++++++++++++++-------------- 3 files changed, 148 insertions(+), 148 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a999e575..c1383d0e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -59,7 +59,7 @@ dependencies = [ "pandas-stubs~=2.3.2", "pg8000==1.31.5", "phonenumbers==9.0.26", - "pillow==12.1.1", + "pillow==12.2.0", "pluggy==1.6.0", "pre-commit==4.5.1", "propcache==0.4.1", diff --git a/requirements.txt b/requirements.txt index 1bd3bb28..e368041c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1300,98 +1300,98 @@ phonenumbers==9.0.26 \ --hash=sha256:9e582c827f0f5503cddeebef80099475a52ffa761551d8384099c7ec71298cbf \ --hash=sha256:ff473da5712965b6c7f7a31cbff8255864df694eb48243771133ecb761e807c1 # via ocotilloapi -pillow==12.1.1 \ - --hash=sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9 \ - --hash=sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da \ - --hash=sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f \ - 
--hash=sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642 \ - --hash=sha256:178aa072084bd88ec759052feca8e56cbb14a60b39322b99a049e58090479713 \ - --hash=sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850 \ - --hash=sha256:1a9b0ee305220b392e1124a764ee4265bd063e54a751a6b62eff69992f457fa9 \ - --hash=sha256:1f1625b72740fdda5d77b4def688eb8fd6490975d06b909fd19f13f391e077e0 \ - --hash=sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9 \ - --hash=sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8 \ - --hash=sha256:21329ec8c96c6e979cd0dfd29406c40c1d52521a90544463057d2aaa937d66a6 \ - --hash=sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd \ - --hash=sha256:2c1fc0f2ca5f96a3c8407e41cca26a16e46b21060fe6d5b099d2cb01412222f5 \ - --hash=sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c \ - --hash=sha256:339ffdcb7cbeaa08221cd401d517d4b1fe7a9ed5d400e4a8039719238620ca35 \ - --hash=sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1 \ - --hash=sha256:36341d06738a9f66c8287cf8b876d24b18db9bd8740fa0672c74e259ad408cff \ - --hash=sha256:365b10bb9417dd4498c0e3b128018c4a624dc11c7b97d8cc54effe3b096f4c38 \ - --hash=sha256:3a5cbdcddad0af3da87cb16b60d23648bc3b51967eb07223e9fed77a82b457c4 \ - --hash=sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af \ - --hash=sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60 \ - --hash=sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986 \ - --hash=sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13 \ - --hash=sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717 \ - --hash=sha256:495c302af3aad1ca67420ddd5c7bd480c8867ad173528767d906428057a11f0e \ - --hash=sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b \ - --hash=sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15 \ - 
--hash=sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a \ - --hash=sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb \ - --hash=sha256:578510d88c6229d735855e1f278aa305270438d36a05031dfaae5067cc8eb04d \ - --hash=sha256:597bd9c8419bc7c6af5604e55847789b69123bbe25d65cc6ad3012b4f3c98d8b \ - --hash=sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e \ - --hash=sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a \ - --hash=sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f \ - --hash=sha256:5d1f9575a12bed9e9eedd9a4972834b08c97a352bd17955ccdebfeca5913fa0a \ - --hash=sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce \ - --hash=sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc \ - --hash=sha256:600fd103672b925fe62ed08e0d874ea34d692474df6f4bf7ebe148b30f89f39f \ - --hash=sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586 \ - --hash=sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f \ - --hash=sha256:665e1b916b043cef294bc54d47bf02d87e13f769bc4bc5fa225a24b3a6c5aca9 \ - --hash=sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8 \ - --hash=sha256:6c52f062424c523d6c4db85518774cc3d50f5539dd6eed32b8f6229b26f24d40 \ - --hash=sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60 \ - --hash=sha256:7311c0a0dcadb89b36b7025dfd8326ecfa36964e29913074d47382706e516a7c \ - --hash=sha256:7aac39bcf8d4770d089588a2e1dd111cbaa42df5a94be3114222057d68336bd0 \ - --hash=sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334 \ - --hash=sha256:7e7976bf1910a8116b523b9f9f58bf410f3e8aa330cd9a2bb2953f9266ab49af \ - --hash=sha256:8089c852a56c2966cf18835db62d9b34fef7ba74c726ad943928d494fa7f4735 \ - --hash=sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524 \ - --hash=sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf \ - 
--hash=sha256:89c7e895002bbe49cdc5426150377cbbc04767d7547ed145473f496dfa40408b \ - --hash=sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2 \ - --hash=sha256:8fd420ef0c52c88b5a035a0886f367748c72147b2b8f384c9d12656678dfdfa9 \ - --hash=sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7 \ - --hash=sha256:99c1506ea77c11531d75e3a412832a13a71c7ebc8192ab9e4b2e355555920e3e \ - --hash=sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4 \ - --hash=sha256:9f51079765661884a486727f0729d29054242f74b46186026582b4e4769918e4 \ - --hash=sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b \ - --hash=sha256:a0b1cd6232e2b618adcc54d9882e4e662a089d5768cd188f7c245b4c8c44a397 \ - --hash=sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c \ - --hash=sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e \ - --hash=sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029 \ - --hash=sha256:ab174cd7d29a62dd139c44bf74b698039328f45cb03b4596c43473a46656b2f3 \ - --hash=sha256:ab323b787d6e18b3d91a72fc99b1a2c28651e4358749842b8f8dfacd28ef2052 \ - --hash=sha256:adebb5bee0f0af4909c30db0d890c773d1a92ffe83da908e2e9e720f8edf3984 \ - --hash=sha256:aee2810642b2898bb187ced9b349e95d2a7272930796e022efaf12e99dccd293 \ - --hash=sha256:af9a332e572978f0218686636610555ae3defd1633597be015ed50289a03c523 \ - --hash=sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f \ - --hash=sha256:b66e95d05ba806247aaa1561f080abc7975daf715c30780ff92a20e4ec546e1b \ - --hash=sha256:b81b5e3511211631b3f672a595e3221252c90af017e399056d0faabb9538aa80 \ - --hash=sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f \ - --hash=sha256:bb66b7cc26f50977108790e2456b7921e773f23db5630261102233eb355a3b79 \ - --hash=sha256:c6008de247150668a705a6338156efb92334113421ceecf7438a12c9a12dab23 \ - --hash=sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8 \ - 
--hash=sha256:cb9bb857b2d057c6dfc72ac5f3b44836924ba15721882ef103cecb40d002d80e \ - --hash=sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3 \ - --hash=sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e \ - --hash=sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36 \ - --hash=sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f \ - --hash=sha256:d4ce8e329c93845720cd2014659ca67eac35f6433fd3050393d85f3ecef0dad5 \ - --hash=sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f \ - --hash=sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6 \ - --hash=sha256:e879bb6cd5c73848ef3b2b48b8af9ff08c5b71ecda8048b7dd22d8a33f60be32 \ - --hash=sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20 \ - --hash=sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202 \ - --hash=sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0 \ - --hash=sha256:f975aa7ef9684ce7e2c18a3aa8f8e2106ce1e46b94ab713d156b2898811651d3 \ - --hash=sha256:fbfa2a7c10cc2623f412753cddf391c7f971c52ca40a3f65dc5039b2939e8563 \ - --hash=sha256:fc354a04072b765eccf2204f588a7a532c9511e8b9c7f900e1b64e3e33487090 \ - --hash=sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289 +pillow==12.2.0 \ + --hash=sha256:00a2865911330191c0b818c59103b58a5e697cae67042366970a6b6f1b20b7f9 \ + --hash=sha256:01afa7cf67f74f09523699b4e88c73fb55c13346d212a59a2db1f86b0a63e8c5 \ + --hash=sha256:03e7e372d5240cc23e9f07deca4d775c0817bffc641b01e9c3af208dbd300987 \ + --hash=sha256:03f6fab9219220f041c74aeaa2939ff0062bd5c364ba9ce037197f4c6d498cd9 \ + --hash=sha256:042db20a421b9bafecc4b84a8b6e444686bd9d836c7fd24542db3e7df7baad9b \ + --hash=sha256:0538bd5e05efec03ae613fd89c4ce0368ecd2ba239cc25b9f9be7ed426b0af1f \ + --hash=sha256:0a34329707af4f73cf1782a36cd2289c0368880654a2c11f027bcee9052d35dd \ + 
--hash=sha256:0c838a5125cee37e68edec915651521191cef1e6aa336b855f495766e77a366e \ + --hash=sha256:144748b3af2d1b358d41286056d0003f47cb339b8c43a9ea42f5fea4d8c66b6e \ + --hash=sha256:1610dd6c61621ae1cf811bef44d77e149ce3f7b95afe66a4512f8c59f25d9ebe \ + --hash=sha256:1e1757442ed87f4912397c6d35a0db6a7b52592156014706f17658ff58bbf795 \ + --hash=sha256:22db17c68434de69d8ecfc2fe821569195c0c373b25cccb9cbdacf2c6e53c601 \ + --hash=sha256:25373b66e0dd5905ed63fa3cae13c82fbddf3079f2c8bf15c6fb6a35586324c1 \ + --hash=sha256:2bb4a8d594eacdfc59d9e5ad972aa8afdd48d584ffd5f13a937a664c3e7db0ed \ + --hash=sha256:2c727a6d53cb0018aadd8018c2b938376af27914a68a492f59dfcaca650d5eea \ + --hash=sha256:2d192a155bbcec180f8564f693e6fd9bccff5a7af9b32e2e4bf8c9c69dbad6b5 \ + --hash=sha256:2e589959f10d9824d39b350472b92f0ce3b443c0a3442ebf41c40cb8361c5b97 \ + --hash=sha256:2e5a76d03a6c6dcef67edabda7a52494afa4035021a79c8558e14af25313d453 \ + --hash=sha256:325ca0528c6788d2a6c3d40e3568639398137346c3d6e66bb61db96b96511c98 \ + --hash=sha256:34c0d99ecccea270c04882cb3b86e7b57296079c9a4aff88cb3b33563d95afaa \ + --hash=sha256:390ede346628ccc626e5730107cde16c42d3836b89662a115a921f28440e6a3b \ + --hash=sha256:394167b21da716608eac917c60aa9b969421b5dcbbe02ae7f013e7b85811c69d \ + --hash=sha256:3997232e10d2920a68d25191392e3a4487d8183039e1c74c2297f00ed1c50705 \ + --hash=sha256:3adc9215e8be0448ed6e814966ecf3d9952f0ea40eb14e89a102b87f450660d8 \ + --hash=sha256:3e080565d8d7c671db5802eedfb438e5565ffa40115216eabb8cd52d0ecce024 \ + --hash=sha256:4a6c9fa44005fa37a91ebfc95d081e8079757d2e904b27103f4f5fa6f0bf78c0 \ + --hash=sha256:4bfd07bc812fbd20395212969e41931001fd59eb55a60658b0e5710872e95286 \ + --hash=sha256:4e6c62e9d237e9b65fac06857d511e90d8461a32adcc1b9065ea0c0fa3a28150 \ + --hash=sha256:50d8520da2a6ce0af445fa6d648c4273c3eeefbc32d7ce049f22e8b5c3daecc2 \ + --hash=sha256:51c4167c34b0d8ba05b547a3bb23578d0ba17b80a5593f93bd8ecb123dd336a3 \ + --hash=sha256:56a3f9c60a13133a98ecff6197af34d7824de9b7b38c3654861a725c970c197b \ + 
--hash=sha256:56b25336f502b6ed02e889f4ece894a72612fe885889a6e8c4c80239ff6e5f5f \ + --hash=sha256:57850958fe9c751670e49b2cecf6294acc99e562531f4bd317fa5ddee2068463 \ + --hash=sha256:58f62cc0f00fd29e64b29f4fd923ffdb3859c9f9e6105bfc37ba1d08994e8940 \ + --hash=sha256:5c0a9f29ca8e79f09de89293f82fc9b0270bb4af1d58bc98f540cc4aedf03166 \ + --hash=sha256:5cdfebd752ec52bf5bb4e35d9c64b40826bc5b40a13df7c3cda20a2c03a0f5ed \ + --hash=sha256:5d04bfa02cc2d23b497d1e90a0f927070043f6cbf303e738300532379a4b4e0f \ + --hash=sha256:5d2fd0fa6b5d9d1de415060363433f28da8b1526c1c129020435e186794b3795 \ + --hash=sha256:62f5409336adb0663b7caa0da5c7d9e7bdbaae9ce761d34669420c2a801b2780 \ + --hash=sha256:632ff19b2778e43162304d50da0181ce24ac5bb8180122cbe1bf4673428328c7 \ + --hash=sha256:6562ace0d3fb5f20ed7290f1f929cae41b25ae29528f2af1722966a0a02e2aa1 \ + --hash=sha256:673aa32138f3e7531ccdbca7b3901dba9b70940a19ccecc6a37c77d5fdeb05b5 \ + --hash=sha256:6a6e67ea2e6feda684ed370f9a1c52e7a243631c025ba42149a2cc5934dec295 \ + --hash=sha256:6a9adfc6d24b10f89588096364cc726174118c62130c817c2837c60cf08a392b \ + --hash=sha256:6bb77b2dcb06b20f9f4b4a8454caa581cd4dd0643a08bacf821216a16d9c8354 \ + --hash=sha256:6e6b2a0c538fc200b38ff9eb6628228b77908c319a005815f2dde585a0664b60 \ + --hash=sha256:71cde9a1e1551df7d34a25462fc60325e8a11a82cc2e2f54578e5e9a1e153d65 \ + --hash=sha256:7371b48c4fa448d20d2714c9a1f775a81155050d383333e0a6c15b1123dda005 \ + --hash=sha256:766cef22385fa1091258ad7e6216792b156dc16d8d3fa607e7545b2b72061f1c \ + --hash=sha256:7b14cc0106cd9aecda615dd6903840a058b4700fcb817687d0ee4fc8b6e389be \ + --hash=sha256:7f84204dee22a783350679a0333981df803dac21a0190d706a50475e361c93f5 \ + --hash=sha256:8023abc91fba39036dbce14a7d6535632f99c0b857807cbbbf21ecc9f4717f06 \ + --hash=sha256:80b2da48193b2f33ed0c32c38140f9d3186583ce7d516526d462645fd98660ae \ + --hash=sha256:8297651f5b5679c19968abefd6bb84d95fe30ef712eb1b2d9b2d31ca61267f4c \ + --hash=sha256:88d387ff40b3ff7c274947ed3125dedf5262ec6919d83946753b5f3d7c67ea4c \ + 
--hash=sha256:88ddbc66737e277852913bd1e07c150cc7bb124539f94c4e2df5344494e0a612 \ + --hash=sha256:8bd7903a5f2a4545f6fd5935c90058b89d30045568985a71c79f5fd6edf9b91e \ + --hash=sha256:8be29e59487a79f173507c30ddf57e733a357f67881430449bb32614075a40ab \ + --hash=sha256:8c984051042858021a54926eb597d6ee3012393ce9c181814115df4c60b9a808 \ + --hash=sha256:8cbeb542b2ebc6fcdacabf8aca8c1a97c9b3ad3927d46b8723f9d4f033288a0f \ + --hash=sha256:8e9c4f5b3c546fa3458a29ab22646c1c6c787ea8f5ef51300e5a60300736905e \ + --hash=sha256:90e6f81de50ad6b534cab6e5aef77ff6e37722b2f5d908686f4a5c9eba17a909 \ + --hash=sha256:975385f4776fafde056abb318f612ef6285b10a1f12b8570f3647ad0d74b48ec \ + --hash=sha256:9a8a34cc89c67a65ea7437ce257cea81a9dad65b29805f3ecee8c8fe8ff25ffe \ + --hash=sha256:9aba9a17b623ef750a4d11b742cbafffeb48a869821252b30ee21b5e91392c50 \ + --hash=sha256:9f08483a632889536b8139663db60f6724bfcb443c96f1b18855860d7d5c0fd4 \ + --hash=sha256:a4e8f36e677d3336f35089648c8955c51c6d386a13cf6ee9c189c5f5bd713a9f \ + --hash=sha256:a52edc8bfff4429aaabdf4d9ee0daadbbf8562364f940937b941f87a4290f5ff \ + --hash=sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5 \ + --hash=sha256:aa88ccfe4e32d362816319ed727a004423aab09c5cea43c01a4b435643fa34eb \ + --hash=sha256:af73337013e0b3b46f175e79492d96845b16126ddf79c438d7ea7ff27783a414 \ + --hash=sha256:b1c1fbd8a5a1af3412a0810d060a78b5136ec0836c8a4ef9aa11807f2a22f4e1 \ + --hash=sha256:b85f66ae9eb53e860a873b858b789217ba505e5e405a24b85c0464822fe88032 \ + --hash=sha256:b86024e52a1b269467a802258c25521e6d742349d760728092e1bc2d135b4d76 \ + --hash=sha256:bd9c0c7a0c681a347b3194c500cb1e6ca9cab053ea4d82a5cf45b6b754560136 \ + --hash=sha256:bfa9c230d2fe991bed5318a5f119bd6780cda2915cca595393649fc118ab895e \ + --hash=sha256:d362d1878f00c142b7e1a16e6e5e780f02be8195123f164edf7eddd911eefe7c \ + --hash=sha256:d5d38f1411c0ed9f97bcb49b7bd59b6b7c314e0e27420e34d99d844b9ce3b6f3 \ + --hash=sha256:dac8d77255a37e81a2efcbd1fc05f1c15ee82200e6c240d7e127e25e365c39ea \ + 
--hash=sha256:dd025009355c926a84a612fecf58bb315a3f6814b17ead51a8e48d3823d9087f \ + --hash=sha256:deede7c263feb25dba4e82ea23058a235dcc2fe1f6021025dc71f2b618e26104 \ + --hash=sha256:e74473c875d78b8e9d5da2a70f7099549f9eb37ded4e2f6a463e60125bccd176 \ + --hash=sha256:ee3120ae9dff32f121610bb08e4313be87e03efeadfc6c0d18f89127e24d0c24 \ + --hash=sha256:eedf4b74eda2b5a4b2b2fb4c006d6295df3bf29e459e198c90ea48e130dc75c3 \ + --hash=sha256:efd8c21c98c5cc60653bcb311bef2ce0401642b7ce9d09e03a7da87c878289d4 \ + --hash=sha256:f1c943e96e85df3d3478f7b691f229887e143f81fedab9b20205349ab04d73ed \ + --hash=sha256:f278f034eb75b4e8a13a54a876cc4a5ab39173d2cdd93a638e1b467fc545ac43 \ + --hash=sha256:f3f40b3c5a968281fd507d519e444c35f0ff171237f4fdde090dd60699458421 \ + --hash=sha256:f490f9368b6fc026f021db16d7ec2fbf7d89e2edb42e8ec09d2c60505f5729c7 \ + --hash=sha256:fb043ee2f06b41473269765c2feae53fc2e2fbf96e5e22ca94fb5ad677856f06 \ + --hash=sha256:fc3d34d4a8fbec3e88a79b92e5465e0f9b842b628675850d860b8bd300b159f5 # via ocotilloapi platformdirs==4.9.4 \ --hash=sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934 \ diff --git a/uv.lock b/uv.lock index bc8fea56..8a6ad120 100644 --- a/uv.lock +++ b/uv.lock @@ -1611,7 +1611,7 @@ requires-dist = [ { name = "pandas-stubs", specifier = "~=2.3.2" }, { name = "pg8000", specifier = "==1.31.5" }, { name = "phonenumbers", specifier = "==9.0.26" }, - { name = "pillow", specifier = "==12.1.1" }, + { name = "pillow", specifier = "==12.2.0" }, { name = "pluggy", specifier = "==1.6.0" }, { name = "pre-commit", specifier = "==4.5.1" }, { name = "propcache", specifier = "==0.4.1" }, @@ -1813,60 +1813,60 @@ wheels = [ [[package]] name = "pillow" -version = "12.1.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1f/42/5c74462b4fd957fcd7b13b04fb3205ff8349236ea74c7c375766d6c82288/pillow-12.1.1.tar.gz", hash = "sha256:9ad8fa5937ab05218e2b6a4cff30295ad35afd2f83ac592e68c0d871bb0fdbc4", size = 
46980264, upload-time = "2026-02-11T04:23:07.146Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/11/6db24d4bd7685583caeae54b7009584e38da3c3d4488ed4cd25b439de486/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d242e8ac078781f1de88bf823d70c1a9b3c7950a44cdf4b7c012e22ccbcd8e4e", size = 4062689, upload-time = "2026-02-11T04:21:06.804Z" }, - { url = "https://files.pythonhosted.org/packages/33/c0/ce6d3b1fe190f0021203e0d9b5b99e57843e345f15f9ef22fcd43842fd21/pillow-12.1.1-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:02f84dfad02693676692746df05b89cf25597560db2857363a208e393429f5e9", size = 4138535, upload-time = "2026-02-11T04:21:08.452Z" }, - { url = "https://files.pythonhosted.org/packages/a0/c6/d5eb6a4fb32a3f9c21a8c7613ec706534ea1cf9f4b3663e99f0d83f6fca8/pillow-12.1.1-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e65498daf4b583091ccbb2556c7000abf0f3349fcd57ef7adc9a84a394ed29f6", size = 3601364, upload-time = "2026-02-11T04:21:10.194Z" }, - { url = "https://files.pythonhosted.org/packages/14/a1/16c4b823838ba4c9c52c0e6bbda903a3fe5a1bdbf1b8eb4fff7156f3e318/pillow-12.1.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c6db3b84c87d48d0088943bf33440e0c42370b99b1c2a7989216f7b42eede60", size = 5262561, upload-time = "2026-02-11T04:21:11.742Z" }, - { url = "https://files.pythonhosted.org/packages/bb/ad/ad9dc98ff24f485008aa5cdedaf1a219876f6f6c42a4626c08bc4e80b120/pillow-12.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b7e5304e34942bf62e15184219a7b5ad4ff7f3bb5cca4d984f37df1a0e1aee2", size = 4657460, upload-time = "2026-02-11T04:21:13.786Z" }, - { url = "https://files.pythonhosted.org/packages/9e/1b/f1a4ea9a895b5732152789326202a82464d5254759fbacae4deea3069334/pillow-12.1.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5bddd742a44b7e6b1e773ab5db102bd7a94c32555ba656e76d319d19c3850", size = 6232698, upload-time = "2026-02-11T04:21:15.949Z" }, - { url = 
"https://files.pythonhosted.org/packages/95/f4/86f51b8745070daf21fd2e5b1fe0eb35d4db9ca26e6d58366562fb56a743/pillow-12.1.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc44ef1f3de4f45b50ccf9136999d71abb99dca7706bc75d222ed350b9fd2289", size = 8041706, upload-time = "2026-02-11T04:21:17.723Z" }, - { url = "https://files.pythonhosted.org/packages/29/9b/d6ecd956bb1266dd1045e995cce9b8d77759e740953a1c9aad9502a0461e/pillow-12.1.1-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a8eb7ed8d4198bccbd07058416eeec51686b498e784eda166395a23eb99138e", size = 6346621, upload-time = "2026-02-11T04:21:19.547Z" }, - { url = "https://files.pythonhosted.org/packages/71/24/538bff45bde96535d7d998c6fed1a751c75ac7c53c37c90dc2601b243893/pillow-12.1.1-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:47b94983da0c642de92ced1702c5b6c292a84bd3a8e1d1702ff923f183594717", size = 7038069, upload-time = "2026-02-11T04:21:21.378Z" }, - { url = "https://files.pythonhosted.org/packages/94/0e/58cb1a6bc48f746bc4cb3adb8cabff73e2742c92b3bf7a220b7cf69b9177/pillow-12.1.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:518a48c2aab7ce596d3bf79d0e275661b846e86e4d0e7dec34712c30fe07f02a", size = 6460040, upload-time = "2026-02-11T04:21:23.148Z" }, - { url = "https://files.pythonhosted.org/packages/6c/57/9045cb3ff11eeb6c1adce3b2d60d7d299d7b273a2e6c8381a524abfdc474/pillow-12.1.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a550ae29b95c6dc13cf69e2c9dc5747f814c54eeb2e32d683e5e93af56caa029", size = 7164523, upload-time = "2026-02-11T04:21:25.01Z" }, - { url = "https://files.pythonhosted.org/packages/73/f2/9be9cb99f2175f0d4dbadd6616ce1bf068ee54a28277ea1bf1fbf729c250/pillow-12.1.1-cp313-cp313-win32.whl", hash = "sha256:a003d7422449f6d1e3a34e3dd4110c22148336918ddbfc6a32581cd54b2e0b2b", size = 6332552, upload-time = "2026-02-11T04:21:27.238Z" }, - { url = 
"https://files.pythonhosted.org/packages/3f/eb/b0834ad8b583d7d9d42b80becff092082a1c3c156bb582590fcc973f1c7c/pillow-12.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:344cf1e3dab3be4b1fa08e449323d98a2a3f819ad20f4b22e77a0ede31f0faa1", size = 7040108, upload-time = "2026-02-11T04:21:29.462Z" }, - { url = "https://files.pythonhosted.org/packages/d5/7d/fc09634e2aabdd0feabaff4a32f4a7d97789223e7c2042fd805ea4b4d2c2/pillow-12.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:5c0dd1636633e7e6a0afe7bf6a51a14992b7f8e60de5789018ebbdfae55b040a", size = 2453712, upload-time = "2026-02-11T04:21:31.072Z" }, - { url = "https://files.pythonhosted.org/packages/19/2a/b9d62794fc8a0dd14c1943df68347badbd5511103e0d04c035ffe5cf2255/pillow-12.1.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0330d233c1a0ead844fc097a7d16c0abff4c12e856c0b325f231820fee1f39da", size = 5264880, upload-time = "2026-02-11T04:21:32.865Z" }, - { url = "https://files.pythonhosted.org/packages/26/9d/e03d857d1347fa5ed9247e123fcd2a97b6220e15e9cb73ca0a8d91702c6e/pillow-12.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dae5f21afb91322f2ff791895ddd8889e5e947ff59f71b46041c8ce6db790bc", size = 4660616, upload-time = "2026-02-11T04:21:34.97Z" }, - { url = "https://files.pythonhosted.org/packages/f7/ec/8a6d22afd02570d30954e043f09c32772bfe143ba9285e2fdb11284952cd/pillow-12.1.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e0c664be47252947d870ac0d327fea7e63985a08794758aa8af5b6cb6ec0c9c", size = 6269008, upload-time = "2026-02-11T04:21:36.623Z" }, - { url = "https://files.pythonhosted.org/packages/3d/1d/6d875422c9f28a4a361f495a5f68d9de4a66941dc2c619103ca335fa6446/pillow-12.1.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:691ab2ac363b8217f7d31b3497108fb1f50faab2f75dfb03284ec2f217e87bf8", size = 8073226, upload-time = "2026-02-11T04:21:38.585Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/cd/134b0b6ee5eda6dc09e25e24b40fdafe11a520bc725c1d0bbaa5e00bf95b/pillow-12.1.1-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9e8064fb1cc019296958595f6db671fba95209e3ceb0c4734c9baf97de04b20", size = 6380136, upload-time = "2026-02-11T04:21:40.562Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a9/7628f013f18f001c1b98d8fffe3452f306a70dc6aba7d931019e0492f45e/pillow-12.1.1-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:472a8d7ded663e6162dafdf20015c486a7009483ca671cece7a9279b512fcb13", size = 7067129, upload-time = "2026-02-11T04:21:42.521Z" }, - { url = "https://files.pythonhosted.org/packages/1e/f8/66ab30a2193b277785601e82ee2d49f68ea575d9637e5e234faaa98efa4c/pillow-12.1.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:89b54027a766529136a06cfebeecb3a04900397a3590fd252160b888479517bf", size = 6491807, upload-time = "2026-02-11T04:21:44.22Z" }, - { url = "https://files.pythonhosted.org/packages/da/0b/a877a6627dc8318fdb84e357c5e1a758c0941ab1ddffdafd231983788579/pillow-12.1.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:86172b0831b82ce4f7877f280055892b31179e1576aa00d0df3bb1bbf8c3e524", size = 7190954, upload-time = "2026-02-11T04:21:46.114Z" }, - { url = "https://files.pythonhosted.org/packages/83/43/6f732ff85743cf746b1361b91665d9f5155e1483817f693f8d57ea93147f/pillow-12.1.1-cp313-cp313t-win32.whl", hash = "sha256:44ce27545b6efcf0fdbdceb31c9a5bdea9333e664cda58a7e674bb74608b3986", size = 6336441, upload-time = "2026-02-11T04:21:48.22Z" }, - { url = "https://files.pythonhosted.org/packages/3b/44/e865ef3986611bb75bfabdf94a590016ea327833f434558801122979cd0e/pillow-12.1.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a285e3eb7a5a45a2ff504e31f4a8d1b12ef62e84e5411c6804a42197c1cf586c", size = 7045383, upload-time = "2026-02-11T04:21:50.015Z" }, - { url = 
"https://files.pythonhosted.org/packages/a8/c6/f4fb24268d0c6908b9f04143697ea18b0379490cb74ba9e8d41b898bd005/pillow-12.1.1-cp313-cp313t-win_arm64.whl", hash = "sha256:cc7d296b5ea4d29e6570dabeaed58d31c3fea35a633a69679fb03d7664f43fb3", size = 2456104, upload-time = "2026-02-11T04:21:51.633Z" }, - { url = "https://files.pythonhosted.org/packages/03/d0/bebb3ffbf31c5a8e97241476c4cf8b9828954693ce6744b4a2326af3e16b/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:417423db963cb4be8bac3fc1204fe61610f6abeed1580a7a2cbb2fbda20f12af", size = 4062652, upload-time = "2026-02-11T04:21:53.19Z" }, - { url = "https://files.pythonhosted.org/packages/2d/c0/0e16fb0addda4851445c28f8350d8c512f09de27bbb0d6d0bbf8b6709605/pillow-12.1.1-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:b957b71c6b2387610f556a7eb0828afbe40b4a98036fc0d2acfa5a44a0c2036f", size = 4138823, upload-time = "2026-02-11T04:22:03.088Z" }, - { url = "https://files.pythonhosted.org/packages/6b/fb/6170ec655d6f6bb6630a013dd7cf7bc218423d7b5fa9071bf63dc32175ae/pillow-12.1.1-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:097690ba1f2efdeb165a20469d59d8bb03c55fb6621eb2041a060ae8ea3e9642", size = 3601143, upload-time = "2026-02-11T04:22:04.909Z" }, - { url = "https://files.pythonhosted.org/packages/59/04/dc5c3f297510ba9a6837cbb318b87dd2b8f73eb41a43cc63767f65cb599c/pillow-12.1.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2815a87ab27848db0321fb78c7f0b2c8649dee134b7f2b80c6a45c6831d75ccd", size = 5266254, upload-time = "2026-02-11T04:22:07.656Z" }, - { url = "https://files.pythonhosted.org/packages/05/30/5db1236b0d6313f03ebf97f5e17cda9ca060f524b2fcc875149a8360b21c/pillow-12.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f7ed2c6543bad5a7d5530eb9e78c53132f93dfa44a28492db88b41cdab885202", size = 4657499, upload-time = "2026-02-11T04:22:09.613Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/18/008d2ca0eb612e81968e8be0bbae5051efba24d52debf930126d7eaacbba/pillow-12.1.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:652a2c9ccfb556235b2b501a3a7cf3742148cd22e04b5625c5fe057ea3e3191f", size = 6232137, upload-time = "2026-02-11T04:22:11.434Z" }, - { url = "https://files.pythonhosted.org/packages/70/f1/f14d5b8eeb4b2cd62b9f9f847eb6605f103df89ef619ac68f92f748614ea/pillow-12.1.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d6e4571eedf43af33d0fc233a382a76e849badbccdf1ac438841308652a08e1f", size = 8042721, upload-time = "2026-02-11T04:22:13.321Z" }, - { url = "https://files.pythonhosted.org/packages/5a/d6/17824509146e4babbdabf04d8171491fa9d776f7061ff6e727522df9bd03/pillow-12.1.1-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b574c51cf7d5d62e9be37ba446224b59a2da26dc4c1bb2ecbe936a4fb1a7cb7f", size = 6347798, upload-time = "2026-02-11T04:22:15.449Z" }, - { url = "https://files.pythonhosted.org/packages/d1/ee/c85a38a9ab92037a75615aba572c85ea51e605265036e00c5b67dfafbfe2/pillow-12.1.1-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a37691702ed687799de29a518d63d4682d9016932db66d4e90c345831b02fb4e", size = 7039315, upload-time = "2026-02-11T04:22:17.24Z" }, - { url = "https://files.pythonhosted.org/packages/ec/f3/bc8ccc6e08a148290d7523bde4d9a0d6c981db34631390dc6e6ec34cacf6/pillow-12.1.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f95c00d5d6700b2b890479664a06e754974848afaae5e21beb4d83c106923fd0", size = 6462360, upload-time = "2026-02-11T04:22:19.111Z" }, - { url = "https://files.pythonhosted.org/packages/f6/ab/69a42656adb1d0665ab051eec58a41f169ad295cf81ad45406963105408f/pillow-12.1.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:559b38da23606e68681337ad74622c4dbba02254fc9cb4488a305dd5975c7eeb", size = 7165438, upload-time = "2026-02-11T04:22:21.041Z" }, - { url = 
"https://files.pythonhosted.org/packages/02/46/81f7aa8941873f0f01d4b55cc543b0a3d03ec2ee30d617a0448bf6bd6dec/pillow-12.1.1-cp314-cp314-win32.whl", hash = "sha256:03edcc34d688572014ff223c125a3f77fb08091e4607e7745002fc214070b35f", size = 6431503, upload-time = "2026-02-11T04:22:22.833Z" }, - { url = "https://files.pythonhosted.org/packages/40/72/4c245f7d1044b67affc7f134a09ea619d4895333d35322b775b928180044/pillow-12.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:50480dcd74fa63b8e78235957d302d98d98d82ccbfac4c7e12108ba9ecbdba15", size = 7176748, upload-time = "2026-02-11T04:22:24.64Z" }, - { url = "https://files.pythonhosted.org/packages/e4/ad/8a87bdbe038c5c698736e3348af5c2194ffb872ea52f11894c95f9305435/pillow-12.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:5cb1785d97b0c3d1d1a16bc1d710c4a0049daefc4935f3a8f31f827f4d3d2e7f", size = 2544314, upload-time = "2026-02-11T04:22:26.685Z" }, - { url = "https://files.pythonhosted.org/packages/6c/9d/efd18493f9de13b87ede7c47e69184b9e859e4427225ea962e32e56a49bc/pillow-12.1.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:1f90cff8aa76835cba5769f0b3121a22bd4eb9e6884cfe338216e557a9a548b8", size = 5268612, upload-time = "2026-02-11T04:22:29.884Z" }, - { url = "https://files.pythonhosted.org/packages/f8/f1/4f42eb2b388eb2ffc660dcb7f7b556c1015c53ebd5f7f754965ef997585b/pillow-12.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1f1be78ce9466a7ee64bfda57bdba0f7cc499d9794d518b854816c41bf0aa4e9", size = 4660567, upload-time = "2026-02-11T04:22:31.799Z" }, - { url = "https://files.pythonhosted.org/packages/01/54/df6ef130fa43e4b82e32624a7b821a2be1c5653a5fdad8469687a7db4e00/pillow-12.1.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:42fc1f4677106188ad9a55562bbade416f8b55456f522430fadab3cef7cd4e60", size = 6269951, upload-time = "2026-02-11T04:22:33.921Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/48/618752d06cc44bb4aae8ce0cd4e6426871929ed7b46215638088270d9b34/pillow-12.1.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98edb152429ab62a1818039744d8fbb3ccab98a7c29fc3d5fcef158f3f1f68b7", size = 8074769, upload-time = "2026-02-11T04:22:35.877Z" }, - { url = "https://files.pythonhosted.org/packages/c3/bd/f1d71eb39a72fa088d938655afba3e00b38018d052752f435838961127d8/pillow-12.1.1-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d470ab1178551dd17fdba0fef463359c41aaa613cdcd7ff8373f54be629f9f8f", size = 6381358, upload-time = "2026-02-11T04:22:37.698Z" }, - { url = "https://files.pythonhosted.org/packages/64/ef/c784e20b96674ed36a5af839305f55616f8b4f8aa8eeccf8531a6e312243/pillow-12.1.1-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6408a7b064595afcab0a49393a413732a35788f2a5092fdc6266952ed67de586", size = 7068558, upload-time = "2026-02-11T04:22:39.597Z" }, - { url = "https://files.pythonhosted.org/packages/73/cb/8059688b74422ae61278202c4e1ad992e8a2e7375227be0a21c6b87ca8d5/pillow-12.1.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5d8c41325b382c07799a3682c1c258469ea2ff97103c53717b7893862d0c98ce", size = 6493028, upload-time = "2026-02-11T04:22:42.73Z" }, - { url = "https://files.pythonhosted.org/packages/c6/da/e3c008ed7d2dd1f905b15949325934510b9d1931e5df999bb15972756818/pillow-12.1.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c7697918b5be27424e9ce568193efd13d925c4481dd364e43f5dff72d33e10f8", size = 7191940, upload-time = "2026-02-11T04:22:44.543Z" }, - { url = "https://files.pythonhosted.org/packages/01/4a/9202e8d11714c1fc5951f2e1ef362f2d7fbc595e1f6717971d5dd750e969/pillow-12.1.1-cp314-cp314t-win32.whl", hash = "sha256:d2912fd8114fc5545aa3a4b5576512f64c55a03f3ebcca4c10194d593d43ea36", size = 6438736, upload-time = "2026-02-11T04:22:46.347Z" }, - { url = 
"https://files.pythonhosted.org/packages/f3/ca/cbce2327eb9885476b3957b2e82eb12c866a8b16ad77392864ad601022ce/pillow-12.1.1-cp314-cp314t-win_amd64.whl", hash = "sha256:4ceb838d4bd9dab43e06c363cab2eebf63846d6a4aeaea283bbdfd8f1a8ed58b", size = 7182894, upload-time = "2026-02-11T04:22:48.114Z" }, - { url = "https://files.pythonhosted.org/packages/ec/d2/de599c95ba0a973b94410477f8bf0b6f0b5e67360eb89bcb1ad365258beb/pillow-12.1.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7b03048319bfc6170e93bd60728a1af51d3dd7704935feb228c4d4faab35d334", size = 2546446, upload-time = "2026-02-11T04:22:50.342Z" }, +version = "12.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/21/c2bcdd5906101a30244eaffc1b6e6ce71a31bd0742a01eb89e660ebfac2d/pillow-12.2.0.tar.gz", hash = "sha256:a830b1a40919539d07806aa58e1b114df53ddd43213d9c8b75847eee6c0182b5", size = 46987819, upload-time = "2026-04-01T14:46:17.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/01/53d10cf0dbad820a8db274d259a37ba50b88b24768ddccec07355382d5ad/pillow-12.2.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:8297651f5b5679c19968abefd6bb84d95fe30ef712eb1b2d9b2d31ca61267f4c", size = 4100837, upload-time = "2026-04-01T14:43:41.506Z" }, + { url = "https://files.pythonhosted.org/packages/0f/98/f3a6657ecb698c937f6c76ee564882945f29b79bad496abcba0e84659ec5/pillow-12.2.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:50d8520da2a6ce0af445fa6d648c4273c3eeefbc32d7ce049f22e8b5c3daecc2", size = 4176528, upload-time = "2026-04-01T14:43:43.773Z" }, + { url = "https://files.pythonhosted.org/packages/69/bc/8986948f05e3ea490b8442ea1c1d4d990b24a7e43d8a51b2c7d8b1dced36/pillow-12.2.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:766cef22385fa1091258ad7e6216792b156dc16d8d3fa607e7545b2b72061f1c", size = 3640401, upload-time = "2026-04-01T14:43:45.87Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/46/6c717baadcd62bc8ed51d238d521ab651eaa74838291bda1f86fe1f864c9/pillow-12.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5d2fd0fa6b5d9d1de415060363433f28da8b1526c1c129020435e186794b3795", size = 5308094, upload-time = "2026-04-01T14:43:48.438Z" }, + { url = "https://files.pythonhosted.org/packages/71/43/905a14a8b17fdb1ccb58d282454490662d2cb89a6bfec26af6d3520da5ec/pillow-12.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56b25336f502b6ed02e889f4ece894a72612fe885889a6e8c4c80239ff6e5f5f", size = 4695402, upload-time = "2026-04-01T14:43:51.292Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/42107efcb777b16fa0393317eac58f5b5cf30e8392e266e76e51cff28c3d/pillow-12.2.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f1c943e96e85df3d3478f7b691f229887e143f81fedab9b20205349ab04d73ed", size = 6280005, upload-time = "2026-04-01T14:43:54.242Z" }, + { url = "https://files.pythonhosted.org/packages/a8/68/b93e09e5e8549019e61acf49f65b1a8530765a7f812c77a7461bca7e4494/pillow-12.2.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:03f6fab9219220f041c74aeaa2939ff0062bd5c364ba9ce037197f4c6d498cd9", size = 8090669, upload-time = "2026-04-01T14:43:57.335Z" }, + { url = "https://files.pythonhosted.org/packages/4b/6e/3ccb54ce8ec4ddd1accd2d89004308b7b0b21c4ac3d20fa70af4760a4330/pillow-12.2.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cdfebd752ec52bf5bb4e35d9c64b40826bc5b40a13df7c3cda20a2c03a0f5ed", size = 6395194, upload-time = "2026-04-01T14:43:59.864Z" }, + { url = "https://files.pythonhosted.org/packages/67/ee/21d4e8536afd1a328f01b359b4d3997b291ffd35a237c877b331c1c3b71c/pillow-12.2.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eedf4b74eda2b5a4b2b2fb4c006d6295df3bf29e459e198c90ea48e130dc75c3", size = 7082423, upload-time = "2026-04-01T14:44:02.74Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/5f/e9f86ab0146464e8c133fe85df987ed9e77e08b29d8d35f9f9f4d6f917ba/pillow-12.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:00a2865911330191c0b818c59103b58a5e697cae67042366970a6b6f1b20b7f9", size = 6505667, upload-time = "2026-04-01T14:44:05.381Z" }, + { url = "https://files.pythonhosted.org/packages/ed/1e/409007f56a2fdce61584fd3acbc2bbc259857d555196cedcadc68c015c82/pillow-12.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e1757442ed87f4912397c6d35a0db6a7b52592156014706f17658ff58bbf795", size = 7208580, upload-time = "2026-04-01T14:44:08.39Z" }, + { url = "https://files.pythonhosted.org/packages/23/c4/7349421080b12fb35414607b8871e9534546c128a11965fd4a7002ccfbee/pillow-12.2.0-cp313-cp313-win32.whl", hash = "sha256:144748b3af2d1b358d41286056d0003f47cb339b8c43a9ea42f5fea4d8c66b6e", size = 6375896, upload-time = "2026-04-01T14:44:11.197Z" }, + { url = "https://files.pythonhosted.org/packages/3f/82/8a3739a5e470b3c6cbb1d21d315800d8e16bff503d1f16b03a4ec3212786/pillow-12.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:390ede346628ccc626e5730107cde16c42d3836b89662a115a921f28440e6a3b", size = 7081266, upload-time = "2026-04-01T14:44:13.947Z" }, + { url = "https://files.pythonhosted.org/packages/c3/25/f968f618a062574294592f668218f8af564830ccebdd1fa6200f598e65c5/pillow-12.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:8023abc91fba39036dbce14a7d6535632f99c0b857807cbbbf21ecc9f4717f06", size = 2463508, upload-time = "2026-04-01T14:44:16.312Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a4/b342930964e3cb4dce5038ae34b0eab4653334995336cd486c5a8c25a00c/pillow-12.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:042db20a421b9bafecc4b84a8b6e444686bd9d836c7fd24542db3e7df7baad9b", size = 5309927, upload-time = "2026-04-01T14:44:18.89Z" }, + { url = "https://files.pythonhosted.org/packages/9f/de/23198e0a65a9cf06123f5435a5d95cea62a635697f8f03d134d3f3a96151/pillow-12.2.0-cp313-cp313t-macosx_11_0_arm64.whl", 
hash = "sha256:dd025009355c926a84a612fecf58bb315a3f6814b17ead51a8e48d3823d9087f", size = 4698624, upload-time = "2026-04-01T14:44:21.115Z" }, + { url = "https://files.pythonhosted.org/packages/01/a6/1265e977f17d93ea37aa28aa81bad4fa597933879fac2520d24e021c8da3/pillow-12.2.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88ddbc66737e277852913bd1e07c150cc7bb124539f94c4e2df5344494e0a612", size = 6321252, upload-time = "2026-04-01T14:44:23.663Z" }, + { url = "https://files.pythonhosted.org/packages/3c/83/5982eb4a285967baa70340320be9f88e57665a387e3a53a7f0db8231a0cd/pillow-12.2.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d362d1878f00c142b7e1a16e6e5e780f02be8195123f164edf7eddd911eefe7c", size = 8126550, upload-time = "2026-04-01T14:44:26.772Z" }, + { url = "https://files.pythonhosted.org/packages/4e/48/6ffc514adce69f6050d0753b1a18fd920fce8cac87620d5a31231b04bfc5/pillow-12.2.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c727a6d53cb0018aadd8018c2b938376af27914a68a492f59dfcaca650d5eea", size = 6433114, upload-time = "2026-04-01T14:44:29.615Z" }, + { url = "https://files.pythonhosted.org/packages/36/a3/f9a77144231fb8d40ee27107b4463e205fa4677e2ca2548e14da5cf18dce/pillow-12.2.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:efd8c21c98c5cc60653bcb311bef2ce0401642b7ce9d09e03a7da87c878289d4", size = 7115667, upload-time = "2026-04-01T14:44:32.773Z" }, + { url = "https://files.pythonhosted.org/packages/c1/fc/ac4ee3041e7d5a565e1c4fd72a113f03b6394cc72ab7089d27608f8aaccb/pillow-12.2.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f08483a632889536b8139663db60f6724bfcb443c96f1b18855860d7d5c0fd4", size = 6538966, upload-time = "2026-04-01T14:44:35.252Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a8/27fb307055087f3668f6d0a8ccb636e7431d56ed0750e07a60547b1e083e/pillow-12.2.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:dac8d77255a37e81a2efcbd1fc05f1c15ee82200e6c240d7e127e25e365c39ea", size = 7238241, upload-time = "2026-04-01T14:44:37.875Z" }, + { url = "https://files.pythonhosted.org/packages/ad/4b/926ab182c07fccae9fcb120043464e1ff1564775ec8864f21a0ebce6ac25/pillow-12.2.0-cp313-cp313t-win32.whl", hash = "sha256:ee3120ae9dff32f121610bb08e4313be87e03efeadfc6c0d18f89127e24d0c24", size = 6379592, upload-time = "2026-04-01T14:44:40.336Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c4/f9e476451a098181b30050cc4c9a3556b64c02cf6497ea421ac047e89e4b/pillow-12.2.0-cp313-cp313t-win_amd64.whl", hash = "sha256:325ca0528c6788d2a6c3d40e3568639398137346c3d6e66bb61db96b96511c98", size = 7085542, upload-time = "2026-04-01T14:44:43.251Z" }, + { url = "https://files.pythonhosted.org/packages/00/a4/285f12aeacbe2d6dc36c407dfbbe9e96d4a80b0fb710a337f6d2ad978c75/pillow-12.2.0-cp313-cp313t-win_arm64.whl", hash = "sha256:2e5a76d03a6c6dcef67edabda7a52494afa4035021a79c8558e14af25313d453", size = 2465765, upload-time = "2026-04-01T14:44:45.996Z" }, + { url = "https://files.pythonhosted.org/packages/bf/98/4595daa2365416a86cb0d495248a393dfc84e96d62ad080c8546256cb9c0/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:3adc9215e8be0448ed6e814966ecf3d9952f0ea40eb14e89a102b87f450660d8", size = 4100848, upload-time = "2026-04-01T14:44:48.48Z" }, + { url = "https://files.pythonhosted.org/packages/0b/79/40184d464cf89f6663e18dfcf7ca21aae2491fff1a16127681bf1fa9b8cf/pillow-12.2.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:6a9adfc6d24b10f89588096364cc726174118c62130c817c2837c60cf08a392b", size = 4176515, upload-time = "2026-04-01T14:44:51.353Z" }, + { url = "https://files.pythonhosted.org/packages/b0/63/703f86fd4c422a9cf722833670f4f71418fb116b2853ff7da722ea43f184/pillow-12.2.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:6a6e67ea2e6feda684ed370f9a1c52e7a243631c025ba42149a2cc5934dec295", size = 3640159, upload-time = "2026-04-01T14:44:53.588Z" 
}, + { url = "https://files.pythonhosted.org/packages/71/e0/fb22f797187d0be2270f83500aab851536101b254bfa1eae10795709d283/pillow-12.2.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2bb4a8d594eacdfc59d9e5ad972aa8afdd48d584ffd5f13a937a664c3e7db0ed", size = 5312185, upload-time = "2026-04-01T14:44:56.039Z" }, + { url = "https://files.pythonhosted.org/packages/ba/8c/1a9e46228571de18f8e28f16fabdfc20212a5d019f3e3303452b3f0a580d/pillow-12.2.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:80b2da48193b2f33ed0c32c38140f9d3186583ce7d516526d462645fd98660ae", size = 4695386, upload-time = "2026-04-01T14:44:58.663Z" }, + { url = "https://files.pythonhosted.org/packages/70/62/98f6b7f0c88b9addd0e87c217ded307b36be024d4ff8869a812b241d1345/pillow-12.2.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22db17c68434de69d8ecfc2fe821569195c0c373b25cccb9cbdacf2c6e53c601", size = 6280384, upload-time = "2026-04-01T14:45:01.5Z" }, + { url = "https://files.pythonhosted.org/packages/5e/03/688747d2e91cfbe0e64f316cd2e8005698f76ada3130d0194664174fa5de/pillow-12.2.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7b14cc0106cd9aecda615dd6903840a058b4700fcb817687d0ee4fc8b6e389be", size = 8091599, upload-time = "2026-04-01T14:45:04.5Z" }, + { url = "https://files.pythonhosted.org/packages/f6/35/577e22b936fcdd66537329b33af0b4ccfefaeabd8aec04b266528cddb33c/pillow-12.2.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cbeb542b2ebc6fcdacabf8aca8c1a97c9b3ad3927d46b8723f9d4f033288a0f", size = 6396021, upload-time = "2026-04-01T14:45:07.117Z" }, + { url = "https://files.pythonhosted.org/packages/11/8d/d2532ad2a603ca2b93ad9f5135732124e57811d0168155852f37fbce2458/pillow-12.2.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4bfd07bc812fbd20395212969e41931001fd59eb55a60658b0e5710872e95286", size = 7083360, upload-time = "2026-04-01T14:45:09.763Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/26/d325f9f56c7e039034897e7380e9cc202b1e368bfd04d4cbe6a441f02885/pillow-12.2.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9aba9a17b623ef750a4d11b742cbafffeb48a869821252b30ee21b5e91392c50", size = 6507628, upload-time = "2026-04-01T14:45:12.378Z" }, + { url = "https://files.pythonhosted.org/packages/5f/f7/769d5632ffb0988f1c5e7660b3e731e30f7f8ec4318e94d0a5d674eb65a4/pillow-12.2.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:deede7c263feb25dba4e82ea23058a235dcc2fe1f6021025dc71f2b618e26104", size = 7209321, upload-time = "2026-04-01T14:45:15.122Z" }, + { url = "https://files.pythonhosted.org/packages/6a/7a/c253e3c645cd47f1aceea6a8bacdba9991bf45bb7dfe927f7c893e89c93c/pillow-12.2.0-cp314-cp314-win32.whl", hash = "sha256:632ff19b2778e43162304d50da0181ce24ac5bb8180122cbe1bf4673428328c7", size = 6479723, upload-time = "2026-04-01T14:45:17.797Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8b/601e6566b957ca50e28725cb6c355c59c2c8609751efbecd980db44e0349/pillow-12.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:4e6c62e9d237e9b65fac06857d511e90d8461a32adcc1b9065ea0c0fa3a28150", size = 7217400, upload-time = "2026-04-01T14:45:20.529Z" }, + { url = "https://files.pythonhosted.org/packages/d6/94/220e46c73065c3e2951bb91c11a1fb636c8c9ad427ac3ce7d7f3359b9b2f/pillow-12.2.0-cp314-cp314-win_arm64.whl", hash = "sha256:b1c1fbd8a5a1af3412a0810d060a78b5136ec0836c8a4ef9aa11807f2a22f4e1", size = 2554835, upload-time = "2026-04-01T14:45:23.162Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ab/1b426a3974cb0e7da5c29ccff4807871d48110933a57207b5a676cccc155/pillow-12.2.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:57850958fe9c751670e49b2cecf6294acc99e562531f4bd317fa5ddee2068463", size = 5314225, upload-time = "2026-04-01T14:45:25.637Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/1e/dce46f371be2438eecfee2a1960ee2a243bbe5e961890146d2dee1ff0f12/pillow-12.2.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5d38f1411c0ed9f97bcb49b7bd59b6b7c314e0e27420e34d99d844b9ce3b6f3", size = 4698541, upload-time = "2026-04-01T14:45:28.355Z" }, + { url = "https://files.pythonhosted.org/packages/55/c3/7fbecf70adb3a0c33b77a300dc52e424dc22ad8cdc06557a2e49523b703d/pillow-12.2.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5c0a9f29ca8e79f09de89293f82fc9b0270bb4af1d58bc98f540cc4aedf03166", size = 6322251, upload-time = "2026-04-01T14:45:30.924Z" }, + { url = "https://files.pythonhosted.org/packages/1c/3c/7fbc17cfb7e4fe0ef1642e0abc17fc6c94c9f7a16be41498e12e2ba60408/pillow-12.2.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1610dd6c61621ae1cf811bef44d77e149ce3f7b95afe66a4512f8c59f25d9ebe", size = 8127807, upload-time = "2026-04-01T14:45:33.908Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c3/a8ae14d6defd2e448493ff512fae903b1e9bd40b72efb6ec55ce0048c8ce/pillow-12.2.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a34329707af4f73cf1782a36cd2289c0368880654a2c11f027bcee9052d35dd", size = 6433935, upload-time = "2026-04-01T14:45:36.623Z" }, + { url = "https://files.pythonhosted.org/packages/6e/32/2880fb3a074847ac159d8f902cb43278a61e85f681661e7419e6596803ed/pillow-12.2.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e9c4f5b3c546fa3458a29ab22646c1c6c787ea8f5ef51300e5a60300736905e", size = 7116720, upload-time = "2026-04-01T14:45:39.258Z" }, + { url = "https://files.pythonhosted.org/packages/46/87/495cc9c30e0129501643f24d320076f4cc54f718341df18cc70ec94c44e1/pillow-12.2.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fb043ee2f06b41473269765c2feae53fc2e2fbf96e5e22ca94fb5ad677856f06", size = 6540498, upload-time = "2026-04-01T14:45:41.879Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/53/773f5edca692009d883a72211b60fdaf8871cbef075eaa9d577f0a2f989e/pillow-12.2.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:f278f034eb75b4e8a13a54a876cc4a5ab39173d2cdd93a638e1b467fc545ac43", size = 7239413, upload-time = "2026-04-01T14:45:44.705Z" }, + { url = "https://files.pythonhosted.org/packages/c9/e4/4b64a97d71b2a83158134abbb2f5bd3f8a2ea691361282f010998f339ec7/pillow-12.2.0-cp314-cp314t-win32.whl", hash = "sha256:6bb77b2dcb06b20f9f4b4a8454caa581cd4dd0643a08bacf821216a16d9c8354", size = 6482084, upload-time = "2026-04-01T14:45:47.568Z" }, + { url = "https://files.pythonhosted.org/packages/ba/13/306d275efd3a3453f72114b7431c877d10b1154014c1ebbedd067770d629/pillow-12.2.0-cp314-cp314t-win_amd64.whl", hash = "sha256:6562ace0d3fb5f20ed7290f1f929cae41b25ae29528f2af1722966a0a02e2aa1", size = 7225152, upload-time = "2026-04-01T14:45:50.032Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6e/cf826fae916b8658848d7b9f38d88da6396895c676e8086fc0988073aaf8/pillow-12.2.0-cp314-cp314t-win_arm64.whl", hash = "sha256:aa88ccfe4e32d362816319ed727a004423aab09c5cea43c01a4b435643fa34eb", size = 2556579, upload-time = "2026-04-01T14:45:52.529Z" }, ] [[package]] From c1429804b15638a586b5f46bdcc79aadbc40fa24 Mon Sep 17 00:00:00 2001 From: jross Date: Tue, 14 Apr 2026 14:59:00 -0600 Subject: [PATCH 02/39] feat: enhance water well details with site name, historic depth notes, and field event participants --- db/thing.py | 34 ++++++- schemas/field.py | 8 ++ schemas/location.py | 2 + schemas/thing.py | 5 +- schemas/well_details.py | 4 + services/well_details_helper.py | 9 ++ tests/test_thing.py | 160 ++++++++++++++++++++++++++++---- tests/test_well_inventory.py | 4 +- 8 files changed, 199 insertions(+), 27 deletions(-) diff --git a/db/thing.py b/db/thing.py index c3c7c02d..01aed09e 100644 --- a/db/thing.py +++ b/db/thing.py @@ -418,6 +418,18 @@ def current_location(self): else None ) + @property + def site_name(self) -> str | 
None: + nmbgmr_link = next( + ( + link + for link in sorted(self.links, key=lambda link: link.id) + if link.alternate_organization == "NMBGMR" + ), + None, + ) + return nmbgmr_link.alternate_id if nmbgmr_link is not None else None + @property def water_notes(self): return self._get_notes("Water") @@ -438,6 +450,14 @@ def construction_notes(self): def site_notes(self): return self._get_notes("Site Notes (legacy)") + @property + def historic_depth_to_water(self) -> list[str]: + return [note.content for note in self._get_notes("Historical")] + + @property + def well_location_note(self) -> list[str]: + return [note.content for note in self._get_notes("Access")] + @property def well_status(self) -> str | None: """ @@ -465,17 +485,23 @@ def monitoring_status(self) -> str | None: return latest_status.status_value if latest_status else None @property - def open_status(self) -> str | None: + def open_status(self) -> bool | None: """ - Returns the open status from the most recent status history entry - where status_type is "Open Status". + Returns the open status as a boolean derived from the most recent + "Open Status" history entry. Since status_history is eagerly loaded, this should not introduce N+1 query issues. 
""" latest_status = retrieve_latest_polymorphic_history_table_record( self, "status_history", "Open Status" ) - return latest_status.status_value if latest_status else None + if latest_status is None: + return None + if latest_status.status_value == "Open": + return True + if latest_status.status_value == "Closed": + return False + return None @property def datalogger_suitability_status(self) -> str | None: diff --git a/schemas/field.py b/schemas/field.py index b8152ffa..c4df70ef 100644 --- a/schemas/field.py +++ b/schemas/field.py @@ -2,6 +2,7 @@ from schemas import BaseResponseModel from core.enums import ActivityType +from schemas.contact import ContactResponse # RESPONSE --------------------------------------------------------------------- @@ -15,3 +16,10 @@ class FieldEventResponse(BaseResponseModel): thing_id: int event_date: AwareDatetime notes: str | None + + +class FieldEventParticipantResponse(BaseResponseModel): + field_event_id: int + contact_id: int + participant_role: str + participant: ContactResponse diff --git a/schemas/location.py b/schemas/location.py index 50fe28dd..e96a2474 100644 --- a/schemas/location.py +++ b/schemas/location.py @@ -22,6 +22,7 @@ from core.constants import SRID_WGS84, SRID_UTM_ZONE_13N from core.enums import ElevationMethod, CoordinateMethod +from core.enums import ReleaseStatus from schemas import BaseCreateModel, BaseUpdateModel, BaseResponseModel from schemas.notes import NoteResponse, CreateNote, UpdateNote from services.util import convert_m_to_ft, transform_srid @@ -122,6 +123,7 @@ class GeoJSONProperties(BaseModel): class LocationGeoJSONResponse(BaseModel): type: str = "Feature" + release_status: ReleaseStatus geometry: GeoJSONGeometry properties: GeoJSONProperties diff --git a/schemas/thing.py b/schemas/thing.py index baa4ec6c..0423283b 100644 --- a/schemas/thing.py +++ b/schemas/thing.py @@ -205,6 +205,7 @@ class MonitoringFrequencyResponse(BaseModel): class BaseThingResponse(BaseResponseModel): name: str + 
site_name: str | None = None thing_type: str current_location: LocationGeoJSONResponse first_visit_date: PastOrTodayDate | None @@ -247,6 +248,7 @@ class WellResponse(BaseThingResponse): well_depth: float | None = None well_depth_unit: str = "ft" well_depth_source: str | None + historic_depth_to_water: list[str] = [] hole_depth: float | None = None hole_depth_unit: str = "ft" well_casing_diameter: float | None = None # in inches @@ -263,7 +265,7 @@ class WellResponse(BaseThingResponse): well_pump_depth: float | None well_pump_depth_unit: str = "ft" well_status: str | None - open_status: str | None + open_status: bool | None datalogger_suitability_status: str | None measuring_point_height: float | None measuring_point_height_unit: str = "ft" @@ -275,6 +277,7 @@ class WellResponse(BaseThingResponse): permissions: list[PermissionHistoryResponse] formation_completion_code: FormationCode | None nma_formation_zone: str | None + well_location_note: list[str] = [] @field_validator("well_purposes", mode="before") def populate_well_purposes_with_strings(cls, well_purposes): diff --git a/schemas/well_details.py b/schemas/well_details.py index fa94f154..e35ba5f4 100644 --- a/schemas/well_details.py +++ b/schemas/well_details.py @@ -4,6 +4,7 @@ from schemas.deployment import DeploymentResponse from schemas.observation import GroundwaterLevelObservationResponse from schemas.sample import SampleResponse +from schemas.field import FieldEventParticipantResponse from schemas.sensor import SensorResponse from schemas.thing import WellResponse, WellScreenResponse @@ -20,3 +21,6 @@ class WellDetailsResponse(BaseModel): Field(default_factory=list) ) latest_field_event_sample: SampleResponse | None = None + field_event_participants: list[FieldEventParticipantResponse] = Field( + default_factory=list + ) diff --git a/services/well_details_helper.py b/services/well_details_helper.py index 7408d15a..bc7ece49 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py 
@@ -159,6 +159,10 @@ def get_well_details_payload( joinedload(Sample.field_activity) .joinedload(FieldActivity.field_event) .joinedload(FieldEvent.thing), + joinedload(Sample.field_activity) + .joinedload(FieldActivity.field_event) + .joinedload(FieldEvent.field_event_participants) + .joinedload(FieldEventParticipant.participant), joinedload(Sample.field_event_participant).joinedload( FieldEventParticipant.participant ), @@ -186,6 +190,11 @@ def get_well_details_payload( "well_screens": well_screens, "recent_groundwater_level_observations": recent_groundwater_level_observations, "latest_field_event_sample": latest_field_event_sample, + "field_event_participants": ( + latest_field_event_sample.field_event.field_event_participants + if latest_field_event_sample is not None + else [] + ), } diff --git a/tests/test_thing.py b/tests/test_thing.py index 2dde25fb..9201dfbe 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -26,7 +26,7 @@ viewer_function, amp_viewer_function, ) -from db import MeasuringPointHistory, Thing, ThingIdLink, WellScreen +from db import MeasuringPointHistory, StatusHistory, Thing, ThingIdLink, WellScreen from db.engine import session_ctx from main import app from schemas import DT_FMT @@ -563,6 +563,7 @@ def test_get_water_well_by_id(water_well_thing, location): def test_get_water_well_details_payload( water_well_thing, field_event, + field_event_participant, contact, email, phone, @@ -574,28 +575,87 @@ def test_get_water_well_details_payload( groundwater_level_sample, groundwater_level_observation, ): + with session_ctx() as session: + well = session.get(type(water_well_thing), water_well_thing.id) + session.add(well.add_note("historic depth to water: 12 ft", "Historical")) + session.add(well.add_note("historic depth to water: 18 ft", "Historical")) + session.add(well.add_note("turn left at the cattle guard", "Access")) + session.add(well.add_note("use the south gate", "Access")) + location = well.current_location + 
location.release_status = "private" + + second_contact = contact.__class__( + name="Second Participant", + organization=None, + role=contact.role, + contact_type=contact.contact_type, + release_status="draft", + ) + session.add(second_contact) + session.flush() + second_participant = field_event_participant.__class__( + field_event_id=field_event.id, + contact_id=second_contact.id, + participant_role="Lead", + ) + session.add(second_participant) + session.commit() + second_contact_id = second_contact.id + second_participant_id = second_participant.id + response = client.get(f"/thing/water-well/{water_well_thing.id}/details") - assert response.status_code == 200 - data = response.json() + try: + assert response.status_code == 200 + data = response.json() - assert data["well"]["id"] == water_well_thing.id - assert data["well"]["alternate_ids"][0]["id"] == thing_id_link.id - assert data["contacts"][0]["id"] == contact.id - assert data["contacts"][0]["emails"][0]["id"] == email.id - assert data["contacts"][0]["phones"][0]["id"] == phone.id - assert data["contacts"][0]["addresses"][0]["id"] == address.id - assert data["sensors"][0]["id"] == sensor.id - assert data["deployments"][0]["id"] == sensor_to_water_well_thing_deployment.id - assert data["deployments"][0]["sensor"]["id"] == sensor.id - assert data["well_screens"][0]["id"] == well_screen.id - assert ( - data["recent_groundwater_level_observations"][0]["id"] - == groundwater_level_observation.id - ) - assert data["latest_field_event_sample"]["id"] == groundwater_level_sample.id - assert data["latest_field_event_sample"]["field_event"]["id"] == field_event.id - assert data["latest_field_event_sample"]["contact"]["id"] == contact.id + assert data["well"]["id"] == water_well_thing.id + assert data["well"]["alternate_ids"][0]["id"] == thing_id_link.id + assert data["well"]["historic_depth_to_water"] == [ + "historic depth to water: 12 ft", + "historic depth to water: 18 ft", + ] + assert 
data["well"]["well_location_note"] == [ + "turn left at the cattle guard", + "use the south gate", + ] + assert data["well"]["current_location"]["release_status"] == "private" + assert data["contacts"][0]["id"] == contact.id + assert data["contacts"][0]["emails"][0]["id"] == email.id + assert data["contacts"][0]["phones"][0]["id"] == phone.id + assert data["contacts"][0]["addresses"][0]["id"] == address.id + assert data["sensors"][0]["id"] == sensor.id + assert data["deployments"][0]["id"] == sensor_to_water_well_thing_deployment.id + assert data["deployments"][0]["sensor"]["id"] == sensor.id + assert data["well_screens"][0]["id"] == well_screen.id + assert ( + data["recent_groundwater_level_observations"][0]["id"] + == groundwater_level_observation.id + ) + assert data["latest_field_event_sample"]["id"] == groundwater_level_sample.id + assert data["latest_field_event_sample"]["field_event"]["id"] == field_event.id + assert data["latest_field_event_sample"]["contact"]["id"] == contact.id + assert { + participant["id"] for participant in data["field_event_participants"] + } == { + field_event_participant.id, + second_participant_id, + } + assert { + participant["participant"]["id"] + for participant in data["field_event_participants"] + } == {contact.id, second_contact_id} + finally: + with session_ctx() as session: + second_participant = session.get( + field_event_participant.__class__, second_participant_id + ) + if second_participant is not None: + session.delete(second_participant) + second_contact = session.get(contact.__class__, second_contact_id) + if second_contact is not None: + session.delete(second_contact) + session.commit() def test_get_water_well_details_payload_uses_latest_observation_sample( @@ -681,6 +741,66 @@ def test_get_water_well_by_id_includes_location_properties( assert data["current_location"]["properties"]["quad_name"] == "Hillsboro Peak" +def test_get_water_well_by_id_includes_site_name_from_nmbgmr_link( + water_well_thing, + 
thing_id_link, +): + with session_ctx() as session: + site_name_link = ThingIdLink( + thing_id=water_well_thing.id, + relation="same_as", + alternate_id="John Smith Well", + alternate_organization="NMBGMR", + release_status="private", + ) + session.add(site_name_link) + session.commit() + site_name_link_id = site_name_link.id + + try: + response = client.get(f"/thing/water-well/{water_well_thing.id}") + + assert response.status_code == 200 + data = response.json() + assert data["site_name"] == "John Smith Well" + finally: + with session_ctx() as session: + site_name_link = session.get(ThingIdLink, site_name_link_id) + if site_name_link is not None: + session.delete(site_name_link) + session.commit() + + +def test_get_water_well_by_id_includes_open_status_boolean(water_well_thing): + with session_ctx() as session: + open_status = StatusHistory( + status_type="Open Status", + status_value="Open", + start_date=date(2025, 1, 1), + end_date=None, + reason="test open status", + target_id=water_well_thing.id, + target_table="thing", + release_status="draft", + ) + session.add(open_status) + session.commit() + open_status_id = open_status.id + + try: + response = client.get(f"/thing/water-well/{water_well_thing.id}") + + assert response.status_code == 200 + data = response.json() + assert data["open_status"] is True + finally: + with session_ctx() as session: + open_status = session.get(StatusHistory, open_status_id) + if open_status is not None: + session.delete(open_status) + session.commit() + + def test_get_water_wells_includes_contact_summary( water_well_thing, contact, diff --git a/tests/test_well_inventory.py b/tests/test_well_inventory.py index 4e3be31b..1449c05c 100644 --- a/tests/test_well_inventory.py +++ b/tests/test_well_inventory.py @@ -243,9 +243,9 @@ def test_well_inventory_db_contents_no_waterlevels(): else "Datalogger cannot be installed" ) assert ( - thing.open_status == "Open" + thing.open_status is True if file_content["is_open"].lower() == "true" - 
else "Closed" + else thing.open_status is False ) # LOCATION AND RELATED RECORDS From 58113bfe2d6ea6f443604cb2c53288ea0291d525 Mon Sep 17 00:00:00 2001 From: jross Date: Tue, 14 Apr 2026 15:20:39 -0600 Subject: [PATCH 03/39] feat: improve note retrieval by sorting notes and enhance loading strategy for field event participants --- db/notes.py | 4 +++- services/well_details_helper.py | 4 ++-- tests/test_well_inventory.py | 34 ++++++++++++++------------------- 3 files changed, 19 insertions(+), 23 deletions(-) diff --git a/db/notes.py b/db/notes.py index 0e2e8ab8..3c238fbf 100644 --- a/db/notes.py +++ b/db/notes.py @@ -102,6 +102,7 @@ def notes(cls): cascade="all, delete-orphan", lazy="selectin", overlaps="notes", + order_by=Notes.created_at, ) def add_note( @@ -125,4 +126,5 @@ def add_note( ) def _get_notes(self, note_type: str) -> list[Notes]: - return [n for n in self.notes if n.note_type == note_type] + notes = [n for n in self.notes if n.note_type == note_type] + return sorted(notes, key=lambda n: n.created_at) diff --git a/services/well_details_helper.py b/services/well_details_helper.py index bc7ece49..28d72068 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py @@ -161,8 +161,8 @@ def get_well_details_payload( .joinedload(FieldEvent.thing), joinedload(Sample.field_activity) .joinedload(FieldActivity.field_event) - .joinedload(FieldEvent.field_event_participants) - .joinedload(FieldEventParticipant.participant), + .selectinload(FieldEvent.field_event_participants) + .selectinload(FieldEventParticipant.participant), joinedload(Sample.field_event_participant).joinedload( FieldEventParticipant.participant ), diff --git a/tests/test_well_inventory.py b/tests/test_well_inventory.py index 1449c05c..918a9e1d 100644 --- a/tests/test_well_inventory.py +++ b/tests/test_well_inventory.py @@ -160,26 +160,20 @@ def test_well_inventory_db_contents_no_waterlevels(): assert thing.formation_completion_code is None assert thing.notes is not None - 
assert sorted(c.content for c in thing._get_notes("Access")) == sorted( - [file_content["specific_location_of_well"]] - ) - assert sorted(c.content for c in thing._get_notes("General")) == sorted( - [file_content["contact_special_requests_notes"]] - ) - assert sorted( - c.content for c in thing._get_notes("Sampling Procedure") - ) == sorted( - [ - file_content["well_measuring_notes"], - file_content["sampling_scenario_notes"], - f"Sample possible: {file_content['sample_possible']}", - ] - ) - assert sorted(c.content for c in thing._get_notes("Historical")) == sorted( - [ - f"historic depth to water: {float(file_content['historic_depth_to_water_ft'])} ft - source: {file_content['depth_source'].lower()}" - ] - ) + assert [c.content for c in thing._get_notes("Access")] == [ + file_content["specific_location_of_well"] + ] + assert [c.content for c in thing._get_notes("General")] == [ + file_content["contact_special_requests_notes"] + ] + assert [c.content for c in thing._get_notes("Sampling Procedure")] == [ + file_content["well_measuring_notes"], + file_content["sampling_scenario_notes"], + f"Sample possible: {file_content['sample_possible']}", + ] + assert [c.content for c in thing._get_notes("Historical")] == [ + f"historic depth to water: {float(file_content['historic_depth_to_water_ft'])} ft - source: {file_content['depth_source'].lower()}" + ] assert ( thing.measuring_point_description From df0180d294c35b320eeadf8df0a1eefe024238a5 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Wed, 15 Apr 2026 14:46:36 -0500 Subject: [PATCH 04/39] fix(services/util): Mv normalize_datetime_to_utc() to services/util & used it in well_inventory --- schemas/water_level_csv.py | 18 +++--------------- schemas/well_inventory.py | 24 ++++++++++-------------- services/util.py | 21 ++++++++++++++++++++- 3 files changed, 33 insertions(+), 30 deletions(-) diff --git a/schemas/water_level_csv.py b/schemas/water_level_csv.py index 32f33333..fb0d6ac0 100644 --- 
a/schemas/water_level_csv.py +++ b/schemas/water_level_csv.py @@ -15,7 +15,7 @@ # =============================================================================== from __future__ import annotations -from datetime import datetime, timezone +from datetime import datetime from typing import Annotated from core.enums import DataQuality, GroundwaterLevelReason, SampleMethod @@ -29,7 +29,7 @@ ) from pydantic.functional_validators import BeforeValidator -from services.util import convert_dt_tz_naive_to_tz_aware +from services.util import normalize_datetime_to_utc WATER_LEVEL_REQUIRED_FIELDS = [ "well_name_point_id", @@ -84,18 +84,6 @@ def empty_str_to_none(value): OptionalFloat = Annotated[float | None, BeforeValidator(empty_str_to_none)] -def _normalize_datetime_to_utc(value: datetime | str) -> datetime: - if isinstance(value, str): - value = datetime.fromisoformat(value) - elif not isinstance(value, datetime): - raise ValueError("value must be a datetime or ISO format string") - - if value.tzinfo is None: - value = convert_dt_tz_naive_to_tz_aware(value, "America/Denver") - - return value.astimezone(timezone.utc) - - def _canonicalize_enum_value( value: str | None, enum_cls, field_name: str ) -> str | None: @@ -182,7 +170,7 @@ def normalize_sample_method(cls, value: str) -> str: ) @classmethod def normalize_datetime_field(cls, value: datetime | str) -> datetime: - return _normalize_datetime_to_utc(value) + return normalize_datetime_to_utc(value) @field_validator("depth_to_water_ft") @classmethod diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index 56eb93eb..5a2fd0ec 100644 --- a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -48,8 +48,7 @@ AliasChoices, ) from schemas import past_or_today_validator, PastOrTodayDatetime -from services.util import convert_dt_tz_naive_to_tz_aware - +from services.util import normalize_datetime_to_utc def empty_str_to_none(v): if isinstance(v, str) and v.strip() == "": @@ -362,19 +361,16 @@ def 
+        return normalize_datetime_to_utc(v)
+ if dt.tzinfo is None or dt.utcoffset() is None: + dt = convert_dt_tz_naive_to_tz_aware(dt, "America/Denver") + + return dt.astimezone(timezone.utc) + def convert_dt_tz_naive_to_tz_aware( dt_naive: datetime, iana_timezone: str = "America/Denver", @@ -156,7 +175,7 @@ def get_county_from_point(lon: float, lat: float) -> str | None: return attrs["BASENAME"] -def get_quad_name_from_point(lon: float, lat: float) -> str: +def get_quad_name_from_point(lon: float, lat: float) -> str | None: url = "https://carto.nationalmap.gov/arcgis/rest/services/map_indices/MapServer/10/query" params = { "f": "json", From 393b083b5161a3f2350c06d0fcbadd7a01c0208e Mon Sep 17 00:00:00 2001 From: TylerAdamMartinez <57375362+TylerAdamMartinez@users.noreply.github.com> Date: Wed, 15 Apr 2026 19:48:15 +0000 Subject: [PATCH 05/39] Formatting changes --- schemas/well_inventory.py | 1 + services/util.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index 5a2fd0ec..24e2ec3d 100644 --- a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -50,6 +50,7 @@ from schemas import past_or_today_validator, PastOrTodayDatetime from services.util import normalize_datetime_to_utc + def empty_str_to_none(v): if isinstance(v, str) and v.strip() == "": return None diff --git a/services/util.py b/services/util.py index bb35e394..ec9c043c 100644 --- a/services/util.py +++ b/services/util.py @@ -66,7 +66,7 @@ def transform_srid(geometry, source_srid, target_srid): def normalize_datetime_to_utc(value: datetime | str) -> datetime: dt: datetime - + if isinstance(value, str): dt = datetime.fromisoformat(value) elif isinstance(value, datetime): @@ -83,6 +83,7 @@ def normalize_datetime_to_utc(value: datetime | str) -> datetime: return dt.astimezone(timezone.utc) + def convert_dt_tz_naive_to_tz_aware( dt_naive: datetime, iana_timezone: str = "America/Denver", From e2cdc03050c3f5596900070f577a150b3c420e91 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Apr 2026 21:14:56 +0000 Subject: [PATCH 06/39] build(deps): bump python-multipart from 0.0.22 to 0.0.26 (#654) Bumps [python-multipart](https://github.com/Kludex/python-multipart) from 0.0.22 to 0.0.26. - [Release notes](https://github.com/Kludex/python-multipart/releases) - [Changelog](https://github.com/Kludex/python-multipart/blob/master/CHANGELOG.md) - [Commits](https://github.com/Kludex/python-multipart/compare/0.0.22...0.0.26) --- updated-dependencies: - dependency-name: python-multipart dependency-version: 0.0.26 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- requirements.txt | 6 +++--- uv.lock | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c1383d0e..4f550ff4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ dependencies = [ "pytest-cov==6.2.1", "python-dateutil==2.9.0.post0", "python-jose>=3.5.0", - "python-multipart==0.0.22", + "python-multipart==0.0.26", "pytz==2025.2", "requests==2.33.1", "rsa==4.9.1", diff --git a/requirements.txt b/requirements.txt index e368041c..655541b7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1719,9 +1719,9 @@ python-jose==3.5.0 \ --hash=sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771 \ --hash=sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b # via ocotilloapi -python-multipart==0.0.22 \ - --hash=sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155 \ - --hash=sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58 +python-multipart==0.0.26 \ + --hash=sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17 \ + --hash=sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185 # via # ocotilloapi # starlette-admin 
diff --git a/uv.lock b/uv.lock index 8a6ad120..4a0ee6c0 100644 --- a/uv.lock +++ b/uv.lock @@ -1632,7 +1632,7 @@ requires-dist = [ { name = "pytest-cov", specifier = "==6.2.1" }, { name = "python-dateutil", specifier = "==2.9.0.post0" }, { name = "python-jose", specifier = ">=3.5.0" }, - { name = "python-multipart", specifier = "==0.0.22" }, + { name = "python-multipart", specifier = "==0.0.26" }, { name = "pytz", specifier = "==2025.2" }, { name = "requests", specifier = "==2.33.1" }, { name = "rsa", specifier = "==4.9.1" }, @@ -2388,11 +2388,11 @@ wheels = [ [[package]] name = "python-multipart" -version = "0.0.22" +version = "0.0.26" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, + { url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" }, ] [[package]] From 7c053eb64e315e191d3c321cfc19554f9a69d396 Mon Sep 17 00:00:00 2001 From: jross 
Date: Wed, 15 Apr 2026 15:26:15 -0600 Subject: [PATCH 07/39] feat: implement API concurrency fix strategy by converting async route handlers to sync and enhancing error handling --- .github/workflows/CD_production.yml | 2 +- .github/workflows/CD_staging.yml | 2 +- .github/workflows/CD_testing.yml | 157 ++++++++++++++++++ ADR2.md | 119 +++++++++++++ api/author.py | 2 +- api/contact.py | 100 ++++++----- api/geochronology.py | 4 +- api/geospatial.py | 4 +- api/group.py | 12 +- api/lexicon.py | 30 ++-- api/location.py | 10 +- api/ngwmn.py | 6 +- api/observation.py | 24 +-- api/publication.py | 2 +- api/sample.py | 34 ++-- api/search.py | 2 +- api/sensor.py | 10 +- api/thing.py | 105 ++++++------ .../test_nma_legacy_relationships.py | 4 +- tests/test_nma_chemistry_lineage.py | 4 +- 20 files changed, 451 insertions(+), 182 deletions(-) create mode 100644 .github/workflows/CD_testing.yml create mode 100644 ADR2.md diff --git a/.github/workflows/CD_production.yml b/.github/workflows/CD_production.yml index 96f10356..e7b89642 100644 --- a/.github/workflows/CD_production.yml +++ b/.github/workflows/CD_production.yml @@ -88,7 +88,7 @@ jobs: run: | export MAX_INSTANCES="10" export SERVICE_NAME="ocotillo-api" - export ENTRYPOINT="gunicorn -w 1 -k uvicorn.workers.UvicornWorker main:app" + export ENTRYPOINT="gunicorn -w 4 -k uvicorn.workers.UvicornWorker main:app" export MIN_INSTANCES="0" envsubst < .github/app.template.yaml > app.yaml diff --git a/.github/workflows/CD_staging.yml b/.github/workflows/CD_staging.yml index ac800253..c723eb6a 100644 --- a/.github/workflows/CD_staging.yml +++ b/.github/workflows/CD_staging.yml @@ -88,7 +88,7 @@ jobs: run: | export MAX_INSTANCES="10" export SERVICE_NAME="ocotillo-api-staging" - export ENTRYPOINT="gunicorn -w 1 -k uvicorn.workers.UvicornWorker main:app" + export ENTRYPOINT="gunicorn -w 4 -k uvicorn.workers.UvicornWorker main:app" export MIN_INSTANCES="0" envsubst < .github/app.template.yaml > app.yaml diff --git 
a/.github/workflows/CD_testing.yml b/.github/workflows/CD_testing.yml new file mode 100644 index 00000000..b924519c --- /dev/null +++ b/.github/workflows/CD_testing.yml @@ -0,0 +1,157 @@ +name: CD (Testing) + +on: + push: + branches: [jir*] + +permissions: + contents: write + +jobs: + testing-deploy: + + runs-on: ubuntu-latest + environment: staging + + steps: + - name: Check out source repository + uses: actions/checkout@v6.0.2 + with: + fetch-depth: 0 + + - name: Install uv in container + uses: astral-sh/setup-uv@v8.0.0 + with: + version: "latest" + + - name: Generate requirements.txt + run: | + uv export \ + --format requirements-txt \ + --no-emit-project \ + --no-dev \ + --output-file requirements.txt + + - name: Authenticate to Google Cloud + uses: 'google-github-actions/auth@v3' + with: + credentials_json: ${{ secrets.CLOUD_DEPLOY_SERVICE_ACCOUNT_KEY }} + + - name: Run Alembic migrations on staging database + env: + DB_DRIVER: "cloudsql" + CLOUD_SQL_INSTANCE_NAME: "${{ secrets.CLOUD_SQL_INSTANCE_NAME }}" + CLOUD_SQL_DATABASE: "${{ vars.CLOUD_SQL_DATABASE }}" + CLOUD_SQL_USER: "${{ secrets.CLOUD_SQL_USER }}" + CLOUD_SQL_IAM_AUTH: true + run: | + uv run alembic upgrade head + + - name: Refresh materialized views on staging database + env: + DB_DRIVER: "cloudsql" + CLOUD_SQL_INSTANCE_NAME: "${{ secrets.CLOUD_SQL_INSTANCE_NAME }}" + CLOUD_SQL_DATABASE: "${{ vars.CLOUD_SQL_DATABASE }}" + CLOUD_SQL_USER: "${{ secrets.CLOUD_SQL_USER }}" + CLOUD_SQL_IAM_AUTH: true + run: | + uv run python -m cli.cli refresh-pygeoapi-materialized-views + + - name: Ensure envsubst is available + run: | + if ! 
command -v envsubst >/dev/null 2>&1; then + sudo apt-get update + sudo apt-get install -y gettext-base + fi + + - name: Render App Engine configs + env: + ENVIRONMENT: "staging" + CLOUD_SQL_INSTANCE_NAME: "${{ secrets.CLOUD_SQL_INSTANCE_NAME }}" + CLOUD_SQL_DATABASE: "${{ vars.CLOUD_SQL_DATABASE }}" + CLOUD_SQL_USER: "${{ secrets.CLOUD_SQL_USER }}" + PYGEOAPI_POSTGRES_DB: "${{ vars.CLOUD_SQL_DATABASE }}" + PYGEOAPI_POSTGRES_USER: "${{ secrets.PYGEOAPI_POSTGRES_USER }}" + PYGEOAPI_POSTGRES_HOST: "${{ vars.PYGEOAPI_POSTGRES_HOST || '127.0.0.1' }}" + PYGEOAPI_POSTGRES_PORT: "${{ vars.PYGEOAPI_POSTGRES_PORT || '5432' }}" + PYGEOAPI_POSTGRES_PASSWORD: "${{ secrets.PYGEOAPI_POSTGRES_PASSWORD }}" + PYGEOAPI_SERVER_URL: "${{ vars.PYGEOAPI_SERVER_URL }}" + CLOUD_SQL_IAM_AUTH: "true" + GCS_SERVICE_ACCOUNT_KEY: "${{ secrets.GCS_SERVICE_ACCOUNT_KEY }}" + GCS_BUCKET_NAME: "${{ vars.GCS_BUCKET_NAME }}" + AUTHENTIK_URL: "${{ vars.AUTHENTIK_URL }}" + AUTHENTIK_CLIENT_ID: "${{ vars.AUTHENTIK_CLIENT_ID }}" + AUTHENTIK_AUTHORIZE_URL: "${{ vars.AUTHENTIK_AUTHORIZE_URL }}" + AUTHENTIK_TOKEN_URL: "${{ vars.AUTHENTIK_TOKEN_URL }}" + SESSION_SECRET_KEY: "${{ secrets.SESSION_SECRET_KEY }}" + APITALLY_CLIENT_ID: "${{ vars.APITALLY_CLIENT_ID }}" + run: | + export MAX_INSTANCES="10" + export SERVICE_NAME="ocotillo-api-testing" + export ENTRYPOINT="gunicorn -w 4 -k uvicorn.workers.UvicornWorker main:app" + export MIN_INSTANCES="0" + envsubst < .github/app.template.yaml > app.yaml + + - name: Deploy to Google Cloud + run: | + gcloud app deploy \ + app.yaml \ + --quiet \ + --project ${{ vars.GCP_PROJECT_ID }} + + - name: Clean up oldest versions + run: | + SERVICE="ocotillo-api-testing" + VERSIONS_JSON="$(gcloud app versions list --service="$SERVICE" --project=${{ vars.GCP_PROJECT_ID }} --format=json --sort-by="version.createTime" 2>/dev/null || printf '[]')" + export VERSIONS_JSON + DELETE_VERSION="$(python - <<'PY' + import json + import os + + versions = 
+  # ":" are not allowed in git tags, so replace with "-"
The longer-term fix is to introduce a real async SQLAlchemy stack and migrate the affected handlers and helpers incrementally. + +## Problem + +FastAPI supports synchronous generator dependencies such as `get_db_session()`. The issue is not the dependency shape itself. The issue is that the injected object is a synchronous SQLAlchemy `Session`, and any `async def` route that consumes it while executing synchronous ORM queries directly will block the event loop thread. + +In this configuration, FastAPI runs the `async def` route body on the event loop thread. If that body performs blocking database I/O through the synchronous session, the worker cannot make progress on other requests assigned to that event loop until the database call returns. A slow well query can therefore delay unrelated lightweight requests handled by the same worker. + +This is a concurrency problem, not a correctness problem. The endpoints can still return correct data while reducing throughput and responsiveness under load. + +## Evidence In This Repo + +- [`db/engine.py`](db/engine.py) creates `database_sessionmaker = sessionmaker(engine, expire_on_commit=False)` and `get_db_session()` yields a regular synchronous `Session`. +- [`db/engine.py`](db/engine.py) builds synchronous `postgresql+psycopg` engines for both the default PostgreSQL path and the Cloud SQL path, confirming that the active database layer is synchronous. +- [`core/dependencies.py`](core/dependencies.py) injects that session through `session_dependency`. +- [`services/well_details_helper.py`](services/well_details_helper.py) performs synchronous ORM operations such as `session.scalars(...).all()` and related query chains. +- [`api/thing.py`](api/thing.py) contains representative database-backed routes that pass the synchronous session into helper functions such as `get_db_things(...)` and `get_well_details_payload(...)`. 
+- [`api/asset.py`](api/asset.py) shows a contrasting safe pattern for non-database blocking work by wrapping synchronous GCS calls in `run_in_threadpool(...)`. +- The short-term fix described in this ADR converts database-bound routes from `async def` to `def` where they do not need `await`, but the helper/query layer remains synchronous until a real async session stack is introduced. + +## Short-Term Fix + +The short-term fix is to convert database-bound route handlers from `async def` to `def` when they do not actually perform asynchronous work. + +This lets FastAPI offload the entire route function to a worker thread instead of running its synchronous database calls on the event loop thread. It does not require changing the current database engine, dependency, query helpers, or response schemas. + +### Short-term implementation guidance + +- Convert any route handler that: + - receives `session: session_dependency`, + - performs synchronous ORM work directly or through helpers, and + - does not require `await` for other operations in the route body. +- Prioritize the highest-value endpoints first: + - high-traffic list and detail endpoints, + - endpoints known to run expensive joins or eager-loads, + - endpoints that affect warmup or perceived application responsiveness. +- Keep route behavior unchanged: + - do not change paths, status codes, payloads, or auth dependencies as part of this phase. +- Avoid mixed patterns: + - do not leave a route as `async def` if it still calls synchronous SQLAlchemy code directly. +- Use `run_in_threadpool(...)` only when a route must remain `async def` for a separate reason, such as mixing in another async operation, and only for isolated blocking helpers rather than as a blanket wrapper for all DB access. + +### Expected impact + +- Lower risk than a full async migration. +- No intended HTTP contract changes. +- Better worker responsiveness because blocking DB work moves off the event loop thread. 
+ +## Long-Term Fix + +The long-term fix is to add a real async database stack and migrate selected API areas to it incrementally. + +This phase should introduce an explicit async path rather than trying to reuse the current synchronous dependency. Importing async SQLAlchemy primitives is not enough; the repo needs a working async engine, async sessionmaker, async dependency, and async query/helper layer for migrated endpoints. + +### Long-term target architecture + +- Add an `AsyncEngine` configured for the intended async driver. +- Add an `async_sessionmaker` that yields `AsyncSession` instances. +- Add a dedicated async dependency such as `get_async_db_session()` rather than overloading `get_db_session()`. +- Update migrated handlers and helper functions to use async database access: + - `await session.execute(...)` + - `await session.scalars(...)` + - other `AsyncSession`-compatible patterns as needed + +### Long-term migration guidance + +- Migrate by subsystem, not all at once. +- Start with a bounded route/helper cluster where the query patterns are understood. +- Keep sync and async paths separate during migration to avoid ambiguous dependencies and accidental sync calls from async routes. +- Treat helper-layer migration as part of the work. Converting route signatures alone is insufficient if the helper functions still expect synchronous sessions. + +### Non-goals and cautions + +- Do not claim the repo already has a working async DB session path unless one is actually implemented and used. +- Do not treat “switch everything to async” as a trivial refactor. +- Do not mix `AsyncSession` route code with synchronous helper/query internals. + +## Recommended Path + +The recommended order is: + +1. Convert database-bound `async def` routes that do not use `await` into plain `def`. +2. Validate behavior and measure the effect on responsiveness. +3. Introduce a dedicated async DB stack. +4. Migrate selected route/helper subsystems incrementally to `AsyncSession`. 
+ +This sequence delivers immediate concurrency improvement with limited risk, while preserving a clear path to a full async architecture later. + +## Acceptance Criteria + +### Short-term acceptance criteria + +- Targeted API tests continue to pass after `async def` to `def` conversions. +- HTTP behavior is unchanged: + - same routes, + - same auth requirements, + - same status codes, + - same payload shapes. +- Concurrency smoke checks or request-timing instrumentation show that DB-heavy requests no longer block the event loop thread for that worker in the same way they do today. + +### Long-term acceptance criteria + +- Migrated endpoints pass the existing API test coverage for their subsystem. +- The async session lifecycle is correct for successful and failing requests. +- Migrated `async def` routes do not call synchronous session helpers. +- Before/after measurements are captured for latency and concurrency so the migration can be evaluated against real behavior rather than assumptions. + +## Defaults And Assumptions + +- This document is written for maintainers and assumes familiarity with FastAPI and SQLAlchemy internals. +- The document is self-contained and does not require code changes to be useful. +- The recommended short-term action is intentionally conservative and does not prescribe a file-by-file rollout sequence. +- The recommended long-term action is a staged migration, not a flag-day rewrite. diff --git a/api/author.py b/api/author.py index a54b1139..b715b676 100644 --- a/api/author.py +++ b/api/author.py @@ -30,7 +30,7 @@ "/{author_id}/publications", response_model=list[PublicationResponse], ) -async def get_author_publications( +def get_author_publications( user: viewer_dependency, author_id: int, session: session_dependency ): """ diff --git a/api/contact.py b/api/contact.py index c38f52e2..f5d46c03 100644 --- a/api/contact.py +++ b/api/contact.py @@ -14,10 +14,9 @@ # limitations under the License. 
# =============================================================================== from fastapi import APIRouter, Query -from fastapi import APIRouter from sqlalchemy import select from starlette import status -from sqlalchemy.exc import ProgrammingError +from sqlalchemy.exc import IntegrityError, ProgrammingError from api.pagination import CustomPage from fastapi_pagination.ext.sqlalchemy import paginate @@ -57,58 +56,48 @@ def database_error_handler( - payload: CreateEmail | CreateContact | CreatePhone, error: ProgrammingError + payload: CreateAddress | CreateEmail | CreateContact | CreatePhone, + error: IntegrityError | ProgrammingError, ) -> None: """ Handle errors raised by the database when adding or updating a sample. """ - error_message = error.orig.args[0]["M"] + orig = getattr(error, "orig", None) + if hasattr(orig, "args") and orig.args and isinstance(orig.args[0], dict): + error_message = orig.args[0].get("M", "") + else: + error_message = str(orig or error) - if ( - error_message - == 'insert or update on table "thing_contact_association" violates foreign key constraint "thing_contact_association_thing_id_fkey"' - ): + if 'constraint "thing_contact_association_thing_id_fkey"' in error_message: detail = { "loc": ["body", "thing_id"], "msg": f"Thing with ID {payload.thing_id} not found.", "type": "value_error", "input": {"thing_id": payload.thing_id}, } - elif ( - error_message - == 'insert or update on table "email" violates foreign key constraint "email_contact_id_fkey"' - ): + elif 'constraint "email_contact_id_fkey"' in error_message: detail = { "loc": ["body", "contact_id"], "msg": f"Contact with ID {payload.contact_id} not found.", "type": "value_error", "input": {"contact_id": payload.contact_id}, } - elif ( - error_message - == 'insert or update on table "phone" violates foreign key constraint "phone_contact_id_fkey"' - ): + elif 'constraint "phone_contact_id_fkey"' in error_message: detail = { "loc": ["body", "contact_id"], "msg": f"Contact with 
ID {payload.contact_id} not found.", "type": "value_error", "input": {"contact_id": payload.contact_id}, } - elif ( - error_message - == 'insert or update on table "address" violates foreign key constraint "address_contact_id_fkey"' - ): + elif 'constraint "address_contact_id_fkey"' in error_message: detail = { "loc": ["body", "contact_id"], "msg": f"Contact with ID {payload.contact_id} not found.", "type": "value_error", "input": {"contact_id": payload.contact_id}, } - elif ( - error_message - == 'insert or update on table "contact" violates foreign key constraint "contact_contact_type_fkey"' - ): + elif 'constraint "contact_contact_type_fkey"' in error_message: valid_terms = get_terms_by_category("contact_type") valid_contact_types_for_msg = " | ".join(valid_terms) detail = { @@ -117,6 +106,13 @@ def database_error_handler( "type": "value_error", "input": {"contact_type": payload.contact_type}, } + else: + detail = { + "loc": ["body"], + "msg": error_message, + "type": "value_error", + "input": {}, + } raise PydanticStyleException(status_code=status.HTTP_409_CONFLICT, detail=[detail]) @@ -129,12 +125,12 @@ def database_error_handler( summary="Create a new contact", status_code=status.HTTP_201_CREATED, ) -async def create_contact( +def create_contact( contact_data: CreateContact, session: session_dependency, user: amp_admin_dependency ) -> ContactResponse: try: return add_contact(session, contact_data, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(contact_data, e) @@ -143,7 +139,7 @@ async def create_contact( summary="Add an address to a contact", status_code=status.HTTP_201_CREATED, ) -async def create_address( +def create_address( address_data: CreateAddress, session: session_dependency, user: amp_admin_dependency, @@ -157,7 +153,7 @@ async def create_address( """ try: return model_adder(session, Address, address_data, user=user) - except ProgrammingError as e: + except (IntegrityError, 
ProgrammingError) as e: database_error_handler(address_data, e) @@ -166,14 +162,14 @@ async def create_address( summary="Add an email to a contact", status_code=status.HTTP_201_CREATED, ) -async def create_email( +def create_email( email_data: CreateEmail, session: session_dependency, user: amp_admin_dependency, ) -> EmailResponse: try: return model_adder(session, Email, email_data, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(email_data, e) @@ -182,14 +178,14 @@ async def create_email( summary="Add a phone number to a contact", status_code=status.HTTP_201_CREATED, ) -async def create_phone( +def create_phone( phone_data: CreatePhone, session: session_dependency, user: amp_admin_dependency, ) -> PhoneResponse: try: return model_adder(session, Phone, phone_data, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(phone_data, e) @@ -221,7 +217,7 @@ async def create_phone( @router.patch( "/email/{email_id}", ) -async def update_contact_email( +def update_contact_email( email_id: int, email_data: UpdateEmail, session: session_dependency, @@ -236,7 +232,7 @@ async def update_contact_email( @router.patch( "/phone/{phone_id}", ) -async def update_contact_phone( +def update_contact_phone( phone_id: int, phone_data: UpdatePhone, session: session_dependency, @@ -256,7 +252,7 @@ async def update_contact_phone( @router.patch( "/address/{address_id}", ) -async def update_contact_address( +def update_contact_address( address_id: int, address_data: UpdateAddress, session: session_dependency, @@ -304,7 +300,7 @@ async def update_contact_address( @router.patch("/{contact_id}", summary="Update contact") -async def update_contact( +def update_contact( contact_id: int, contact_data: UpdateContact, session: session_dependency, @@ -365,7 +361,7 @@ async def update_contact( try: return model_patcher(session, Contact, contact_id, contact_data, user=user) - 
except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(contact_data, e) @@ -373,7 +369,7 @@ async def update_contact( @router.get("/email", summary="Get all emails") -async def get_emails( +def get_emails( session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[EmailResponse]: """ @@ -385,7 +381,7 @@ async def get_emails( @router.get("/email/{email_id}", summary="Get email by ID") -async def get_email_by_id( +def get_email_by_id( email_id: int, session: session_dependency, user: amp_viewer_dependency ) -> EmailResponse: """ @@ -395,7 +391,7 @@ async def get_email_by_id( @router.get("/phone", summary="Get all phones") -async def get_phones( +def get_phones( session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[PhoneResponse]: """ @@ -407,7 +403,7 @@ async def get_phones( @router.get("/phone/{phone_id}", summary="Get phone by ID") -async def get_phone_by_id( +def get_phone_by_id( phone_id: int, session: session_dependency, user: amp_viewer_dependency ) -> PhoneResponse: """ @@ -417,7 +413,7 @@ async def get_phone_by_id( @router.get("/address", summary="Get all addresses") -async def get_addresses( +def get_addresses( session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[AddressResponse]: """ @@ -429,7 +425,7 @@ async def get_addresses( @router.get("/address/{address_id}", summary="Get address by ID") -async def get_address_by_id( +def get_address_by_id( address_id: int, session: session_dependency, user: amp_viewer_dependency ) -> AddressResponse: """ @@ -468,7 +464,7 @@ async def get_address_by_id( @router.get("", summary="Get contacts") -async def get_contacts( +def get_contacts( session: session_dependency, user: amp_viewer_dependency, sort: str = None, @@ -485,7 +481,7 @@ async def get_contacts( @router.get("/{contact_id}", summary="Get contact by ID") -async def get_contact_by_id( +def get_contact_by_id( contact_id: int, session: session_dependency, user: 
amp_viewer_dependency ) -> ContactResponse: """ @@ -495,7 +491,7 @@ async def get_contact_by_id( @router.get("/{contact_id}/email", summary="Get contact emails") -async def get_contact_emails( +def get_contact_emails( contact_id: int, session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[EmailResponse]: """ @@ -507,7 +503,7 @@ async def get_contact_emails( @router.get("/{contact_id}/phone", summary="Get contact phones") -async def get_contact_phones( +def get_contact_phones( contact_id: int, session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[PhoneResponse]: """ @@ -519,7 +515,7 @@ async def get_contact_phones( @router.get("/{contact_id}/address", summary="Get contact addresses") -async def get_contact_addresses( +def get_contact_addresses( contact_id: int, session: session_dependency, user: amp_viewer_dependency ) -> CustomPage[AddressResponse]: """ @@ -548,7 +544,7 @@ async def get_contact_addresses( @router.delete("/email/{email_id}", summary="Delete contact email") -async def delete_contact_email( +def delete_contact_email( email_id: int, session: session_dependency, user: amp_admin_dependency ): """ @@ -558,7 +554,7 @@ async def delete_contact_email( @router.delete("/phone/{phone_id}", summary="Delete contact phone") -async def delete_contact_phone( +def delete_contact_phone( phone_id: int, session: session_dependency, user: amp_admin_dependency ): """ @@ -568,7 +564,7 @@ async def delete_contact_phone( @router.delete("/address/{address_id}", summary="Delete contact address") -async def delete_contact_address( +def delete_contact_address( address_id: int, session: session_dependency, user: amp_admin_dependency ): """ @@ -593,7 +589,7 @@ async def delete_contact_address( @router.delete("/{contact_id}", summary="Delete contact") -async def delete_contact( +def delete_contact( contact_id: int, session: session_dependency, user: amp_admin_dependency ): """ diff --git a/api/geochronology.py b/api/geochronology.py index 
af3d984c..0497f0e3 100644 --- a/api/geochronology.py +++ b/api/geochronology.py @@ -24,7 +24,7 @@ @router.post("/age", tags=["geochronology"], status_code=status.HTTP_201_CREATED) -async def create_age( +def create_age( user: viewer_dependency, age: CreateGeochronologyAge, session: session_dependency ): """ @@ -36,7 +36,7 @@ async def create_age( @router.get("/age", tags=["geochronology"]) -async def get_geochronology_age( +def get_geochronology_age( user: viewer_dependency, session: session_dependency, method: str = "arar" ): """ diff --git a/api/geospatial.py b/api/geospatial.py index f718b41e..082979f8 100644 --- a/api/geospatial.py +++ b/api/geospatial.py @@ -32,7 +32,7 @@ @router.get("") -async def get_geospatial( +def get_geospatial( user: viewer_dependency, session: session_dependency, thing_type: Annotated[List[str], Query(title="thing_type")] = None, @@ -61,7 +61,7 @@ async def get_geospatial( @router.get("/project-area/{group_id}", summary="Get project area for group") -async def get_project_area( +def get_project_area( user: viewer_dependency, session: session_dependency, group_id: int ) -> FeatureCollectionResponse: diff --git a/api/group.py b/api/group.py index 39b53791..5399ce10 100644 --- a/api/group.py +++ b/api/group.py @@ -38,7 +38,7 @@ @router.post("", summary="Create a new group", status_code=HTTP_201_CREATED) -async def create_group( +def create_group( group_data: CreateGroup, session: session_dependency, user: admin_dependency ) -> GroupResponse: """ @@ -66,7 +66,7 @@ async def create_group( # ============= Get ============================================= @router.get("", summary="Get groups") -async def get_groups( +def get_groups( user: viewer_dependency, session: session_dependency, filter_: str = Query(alias="filter", default=None), @@ -78,7 +78,7 @@ async def get_groups( @router.get("/{group_id}", summary="Get group by ID") -async def get_group_by_id( +def get_group_by_id( user: viewer_dependency, group_id: int, session: 
session_dependency ) -> GroupResponse: """ @@ -100,7 +100,7 @@ async def get_group_by_id( # ============= Patch ============================================= @router.patch("/{group_id}", summary="Update a group by ID") -async def update_group( +def update_group( user: editor_dependency, group_id: int, group_data: UpdateGroup, @@ -116,9 +116,7 @@ async def update_group( @router.delete( "/{group_id}", summary="Delete a group by ID", status_code=HTTP_204_NO_CONTENT ) -async def delete_group( - user: admin_dependency, group_id: int, session: session_dependency -): +def delete_group(user: admin_dependency, group_id: int, session: session_dependency): return model_deleter(session, Group, group_id) diff --git a/api/lexicon.py b/api/lexicon.py index e0f08b56..ee71831e 100644 --- a/api/lexicon.py +++ b/api/lexicon.py @@ -106,7 +106,7 @@ def disabled_endpoint(): deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def add_category( +def add_category( category_data: CreateLexiconCategory, session: session_dependency, user: lexicon_admin_dependency, @@ -124,7 +124,7 @@ async def add_category( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def add_term( +def add_term( term_data: CreateLexiconTerm, session: session_dependency, user: lexicon_admin_dependency, @@ -145,7 +145,7 @@ async def add_term( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def add_triple( +def add_triple( triple_data: CreateLexiconTriple, session: session_dependency, user: lexicon_admin_dependency, @@ -166,7 +166,7 @@ async def add_triple( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def update_lexicon_term( +def update_lexicon_term( term_id: int, term_data: UpdateLexiconTerm, session: session_dependency, @@ -182,7 +182,7 @@ async def update_lexicon_term( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def update_lexicon_category( +def update_lexicon_category( category_id: int, category_data: 
UpdateLexiconCategory, session: session_dependency, @@ -199,7 +199,7 @@ async def update_lexicon_category( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def update_lexicon_triple( +def update_lexicon_triple( triple_id: int, triple_data: UpdateLexiconTriple, session: session_dependency, @@ -215,7 +215,7 @@ async def update_lexicon_triple( @router.get("/term", summary="Get lexicon terms", status_code=HTTP_200_OK) -async def get_lexicon_terms( +def get_lexicon_terms( session: session_dependency, user: viewer_dependency, category: str | None = None, @@ -252,14 +252,14 @@ async def get_lexicon_terms( @router.get("/term/{term_id}", status_code=HTTP_200_OK) -async def get_lexicon_term( +def get_lexicon_term( term_id: int, session: session_dependency, user: viewer_dependency ) -> LexiconTermResponse: return simple_get_by_id(session, LexiconTerm, term_id) @router.get("/category") -async def get_lexicon_categories( +def get_lexicon_categories( session: session_dependency, user: viewer_dependency, name: str | None = None, @@ -278,14 +278,14 @@ async def get_lexicon_categories( @router.get("/category/{category_id}") -async def get_lexicon_category( +def get_lexicon_category( category_id: int, user: viewer_dependency, session: session_dependency ) -> LexiconCategoryResponse: return simple_get_by_id(session, LexiconCategory, category_id) @router.get("/triple", summary="Get lexicon triples", status_code=HTTP_200_OK) -async def get_lexicon_triples( +def get_lexicon_triples( session: session_dependency, user: viewer_dependency, sort: str = "subject", @@ -299,7 +299,7 @@ async def get_lexicon_triples( @router.get("/triple/{triple_id}", status_code=HTTP_200_OK) -async def get_lexicon_triple( +def get_lexicon_triple( triple_id: int, session: session_dependency, user: viewer_dependency ) -> LexiconTripleResponse: return simple_get_by_id(session, LexiconTriple, triple_id) @@ -315,7 +315,7 @@ async def get_lexicon_triple( deprecated=True, 
dependencies=[Depends(disabled_endpoint)], ) -async def delete_lexicon_term( +def delete_lexicon_term( session: session_dependency, user: lexicon_admin_dependency, term_id: int ): return model_deleter(session, LexiconTerm, term_id) @@ -328,7 +328,7 @@ async def delete_lexicon_term( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def delete_lexicon_category( +def delete_lexicon_category( session: session_dependency, user: lexicon_admin_dependency, category_id: int ): return model_deleter(session, LexiconCategory, category_id) @@ -341,7 +341,7 @@ async def delete_lexicon_category( deprecated=True, dependencies=[Depends(disabled_endpoint)], ) -async def delete_lexicon_triple( +def delete_lexicon_triple( session: session_dependency, user: lexicon_admin_dependency, triple_id: int ): return model_deleter(session, LexiconTriple, triple_id) diff --git a/api/location.py b/api/location.py index af2590e5..b5e595d3 100644 --- a/api/location.py +++ b/api/location.py @@ -43,7 +43,7 @@ summary="Create a new sample location", status_code=status.HTTP_201_CREATED, ) -async def create_location( +def create_location( location_data: CreateLocation, session: session_dependency, user: admin_dependency ) -> LocationResponse: """ @@ -58,7 +58,7 @@ async def create_location( "/{location_id}", summary="Update a location", ) -async def update_location( +def update_location( location_id: int, location_data: UpdateLocation, session: session_dependency, @@ -131,7 +131,7 @@ async def update_location( "", summary="Get all locations", ) -async def get_location( +def get_location( session: session_dependency, user: viewer_dependency, nearby_point: str = None, @@ -168,7 +168,7 @@ async def get_location( "/{location_id}", summary="Get location by ID", ) -async def get_location_by_id( +def get_location_by_id( location_id: int, session: session_dependency, user: viewer_dependency ) -> LocationResponse: """ @@ -179,7 +179,7 @@ async def get_location_by_id( 
@router.delete("/{location_id}", summary="Delete location by ID") -async def delete_location( +def delete_location( location_id: int, session: session_dependency, user: admin_dependency ) -> Response: """ diff --git a/api/ngwmn.py b/api/ngwmn.py index 4d8b065c..7fc2e1d5 100644 --- a/api/ngwmn.py +++ b/api/ngwmn.py @@ -30,7 +30,7 @@ "/waterlevels/{pointid}", summary="Get waterlevels for a given pointid in the NGWMN format", ) -async def read_ngwmn_waterlevels(pointid: str, db: session_dependency): +def read_ngwmn_waterlevels(pointid: str, db: session_dependency): data = make_waterlevels_response(pointid, db) return Response(content=data, media_type="application/xml") @@ -39,7 +39,7 @@ async def read_ngwmn_waterlevels(pointid: str, db: session_dependency): "/wellconstruction/{pointid}", summary="Get wellconstruction for a given pointid in the NGWMN format", ) -async def read_ngwmn_wellconstruction(pointid: str, db: session_dependency): +def read_ngwmn_wellconstruction(pointid: str, db: session_dependency): data = make_well_construction_response(pointid, db) return Response(content=data, media_type="application/xml") @@ -48,7 +48,7 @@ async def read_ngwmn_wellconstruction(pointid: str, db: session_dependency): "/lithology/{pointid}", summary="Get lithology for a given pointid in the NGWMN format", ) -async def read_ngwmn_lithology(pointid: str, db: session_dependency): +def read_ngwmn_lithology(pointid: str, db: session_dependency): data = make_lithology_response(pointid, db) return Response(content=data, media_type="application/xml") diff --git a/api/observation.py b/api/observation.py index 3b446bd7..d4c7fff7 100644 --- a/api/observation.py +++ b/api/observation.py @@ -64,7 +64,7 @@ # ============= Post ============================================= @router.post("/groundwater-level", status_code=HTTP_201_CREATED) -async def add_groundwater_level_observation( +def add_groundwater_level_observation( obs_data: CreateGroundwaterLevelObservation, session: 
session_dependency, user: amp_admin_dependency, @@ -76,7 +76,7 @@ async def add_groundwater_level_observation( @router.post("/water-chemistry", status_code=HTTP_201_CREATED) -async def add_water_chemistry_observation( +def add_water_chemistry_observation( obs_data: CreateWaterChemistryObservation, session: session_dependency, user: amp_admin_dependency, @@ -116,7 +116,7 @@ async def bulk_upload_groundwater_levels( @router.patch("/groundwater-level/{observation_id}", status_code=HTTP_200_OK) -async def update_groundwater_level_observation( +def update_groundwater_level_observation( observation_id: int, obs_data: UpdateGroundwaterLevelObservation, session: session_dependency, @@ -130,7 +130,7 @@ async def update_groundwater_level_observation( @router.patch("/water-chemistry/{observation_id}", status_code=HTTP_200_OK) -async def update_water_chemistry_observation( +def update_water_chemistry_observation( observation_id: int, obs_data: UpdateWaterChemistryObservation, session: session_dependency, @@ -148,7 +148,7 @@ async def update_water_chemistry_observation( "/transducer-groundwater-level", summary="Get transducer groundwater level observations", ) -async def get_transducer_groundwater_level_observations( +def get_transducer_groundwater_level_observations( request: Request, session: session_dependency, user: amp_viewer_dependency, @@ -170,7 +170,7 @@ async def get_transducer_groundwater_level_observations( @router.get("/groundwater-level", summary="Get groundwater level observations") -async def get_groundwater_level_observations( +def get_groundwater_level_observations( request: Request, session: session_dependency, user: amp_viewer_dependency, @@ -204,7 +204,7 @@ async def get_groundwater_level_observations( "/groundwater-level/{observation_id}", summary="Get groundwater level observation by ID", ) -async def get_groundwater_level_observation_by_id( +def get_groundwater_level_observation_by_id( session: session_dependency, request: Request, user: 
amp_viewer_dependency, @@ -218,7 +218,7 @@ async def get_groundwater_level_observation_by_id( @router.get("/water-chemistry", summary="Get water chemistry observations") -async def get_water_chemistry_observations( +def get_water_chemistry_observations( request: Request, session: session_dependency, user: amp_viewer_dependency, @@ -251,7 +251,7 @@ async def get_water_chemistry_observations( @router.get( "/water-chemistry/{observation_id}", summary="Get water chemistry observation by ID" ) -async def get_water_chemistry_observation_by_id( +def get_water_chemistry_observation_by_id( session: session_dependency, request: Request, user: amp_viewer_dependency, @@ -265,7 +265,7 @@ async def get_water_chemistry_observation_by_id( @router.get("", summary="Get all observations") -async def get_all_observations( +def get_all_observations( request: Request, session: session_dependency, user: amp_viewer_dependency, @@ -293,7 +293,7 @@ async def get_all_observations( @router.get("/{observation_id}", summary="Get an observation by its ID") -async def get_observation_by_id( +def get_observation_by_id( session: session_dependency, user: amp_viewer_dependency, observation_id: int ) -> ObservationResponse: return simple_get_by_id(session, Observation, observation_id) @@ -307,7 +307,7 @@ async def get_observation_by_id( summary="Delete an observation", status_code=HTTP_204_NO_CONTENT, ) -async def delete_observation( +def delete_observation( session: session_dependency, user: amp_admin_dependency, observation_id: int ) -> None: return model_deleter(session, Observation, observation_id) diff --git a/api/publication.py b/api/publication.py index 751c0ec8..76ca5889 100644 --- a/api/publication.py +++ b/api/publication.py @@ -29,7 +29,7 @@ @router.post( "/add", response_model=PublicationResponse, status_code=status.HTTP_201_CREATED ) -async def post_publication( +def post_publication( user: admin_dependency, publication_data: CreatePublication, # Replace with your actual schema session: 
Session = Depends( diff --git a/api/sample.py b/api/sample.py index fdd471cb..5dba4616 100644 --- a/api/sample.py +++ b/api/sample.py @@ -55,11 +55,12 @@ def database_error_handler( """ Handle errors raised by the database when adding or updating a sample. """ - error_message = error.orig.args[0]["M"] - if ( - error_message == "duplicate key value violates unique " - 'constraint "sample_sample_name_key"' - ): + orig = getattr(error, "orig", None) + if hasattr(orig, "args") and orig.args and isinstance(orig.args[0], dict): + error_message = orig.args[0].get("M", "") + else: + error_message = str(orig or error) + if 'constraint "sample_sample_name_key"' in error_message: detail = { "loc": ["body", "sample_name"], "msg": ( @@ -68,11 +69,7 @@ def database_error_handler( "type": "value_error", "input": {"sample_name": payload.sample_name}, } - elif ( - error_message - == 'insert or update on table "sample" violates foreign key constraint ' - '"sample_field_activity_id_fkey"' - ): + elif 'constraint "sample_field_activity_id_fkey"' in error_message: detail = { "loc": ["body", "field_activity_id"], "msg": ( @@ -81,6 +78,13 @@ def database_error_handler( "type": "value_error", "input": {"field_activity_id": payload.field_activity_id}, } + else: + detail = { + "loc": ["body"], + "msg": error_message, + "type": "value_error", + "input": {}, + } raise PydanticStyleException( status_code=HTTP_409_CONFLICT, @@ -90,7 +94,7 @@ def database_error_handler( # ============= Post ============================================= @router.post("", status_code=HTTP_201_CREATED) -async def add_sample( +def add_sample( sample_data: CreateSample, session: session_dependency, user: admin_dependency, @@ -108,7 +112,7 @@ async def add_sample( # ============= Update ============================================= @router.patch("/{sample_id}", summary="Update Sample") -async def update_sample( +def update_sample( sample_id: int, sample_data: UpdateSample, session: session_dependency, @@ -133,7 +137,7 
@@ async def update_sample( # ============= Get ============================================= @router.get("", summary="Get Samples") -async def get_samples( +def get_samples( session: session_dependency, user: viewer_dependency, thing_id: int | None = None, @@ -154,7 +158,7 @@ async def get_samples( @router.get("/{sample_id}", summary="Get Sample by ID") -async def get_sample_by_id( +def get_sample_by_id( sample_id: int, session: session_dependency, user: viewer_dependency, @@ -172,7 +176,7 @@ async def get_sample_by_id( "/{sample_id}", summary="Delete Sample by ID", ) -async def delete_sample_by_id( +def delete_sample_by_id( sample_id: int, session: session_dependency, user: admin_dependency, diff --git a/api/search.py b/api/search.py index 9c587a01..b1a6b36f 100644 --- a/api/search.py +++ b/api/search.py @@ -197,7 +197,7 @@ def _get_asset_results(session: Session, q: str, limit: int) -> list[dict]: @router.get("") -async def search_api( +def search_api( user: viewer_dependency, session: session_dependency, q: str, diff --git a/api/sensor.py b/api/sensor.py index 49e1c0ba..69ab2816 100644 --- a/api/sensor.py +++ b/api/sensor.py @@ -39,7 +39,7 @@ @router.post("", status_code=status.HTTP_201_CREATED) -async def add_sensor( +def add_sensor( sensor_data: CreateSensor, session: session_dependency, user: admin_dependency ) -> SensorResponse: """ @@ -55,7 +55,7 @@ async def add_sensor( @router.patch("/{sensor_id}", status_code=status.HTTP_200_OK) -async def update_sensor( +def update_sensor( sensor_id: int, sensor_data: UpdateSensor, session: session_dependency, @@ -115,7 +115,7 @@ async def update_sensor( @router.delete("/{sensor_id}") -async def delete_sensor( +def delete_sensor( sensor_id: int, session: session_dependency, user: admin_dependency ) -> Response: """ @@ -128,7 +128,7 @@ async def delete_sensor( @router.get("", status_code=status.HTTP_200_OK) -async def get_sensors( +def get_sensors( session: session_dependency, user: viewer_dependency, thing_id: int = 
None, # Optional filter for thing_id. Filter by the Thing where equipment is deployed @@ -157,7 +157,7 @@ async def get_sensors( @router.get("/{sensor_id}", status_code=status.HTTP_200_OK) -async def get_sensor( +def get_sensor( sensor_id: int, session: session_dependency, user: viewer_dependency ) -> SensorResponse: """ diff --git a/api/thing.py b/api/thing.py index 5b8a52e1..8ba57c76 100644 --- a/api/thing.py +++ b/api/thing.py @@ -17,7 +17,7 @@ from fastapi import APIRouter, Query, Request from fastapi_pagination.ext.sqlalchemy import paginate from sqlalchemy import select -from sqlalchemy.exc import ProgrammingError +from sqlalchemy.exc import IntegrityError, ProgrammingError from sqlalchemy.orm import selectinload from starlette.status import ( HTTP_200_OK, @@ -80,50 +80,41 @@ def database_error_handler( - payload: CreateWell | CreateSpring, error: ProgrammingError + payload: CreateWell | CreateSpring | CreateWellScreen | CreateThingIdLink, + error: IntegrityError | ProgrammingError, ) -> None: """ Handle errors raised by the database when adding or updating a thing. 
""" - error_message = error.orig.args[0]["M"] - - if ( - error_message - == 'insert or update on table "group_thing_association" violates foreign key constraint "group_thing_association_group_id_fkey"' - ): + orig = getattr(error, "orig", None) + if hasattr(orig, "args") and orig.args and isinstance(orig.args[0], dict): + error_message = orig.args[0].get("M", "") + else: + error_message = str(orig or error) + if 'constraint "group_thing_association_group_id_fkey"' in error_message: detail = { "loc": ["body", "group_id"], "msg": f"Group with ID {payload.group_id} not found.", "type": "value_error", "input": {"group_id": payload.group_id}, } - elif ( - error_message - == 'insert or update on table "location_thing_association" violates foreign key constraint "location_thing_association_location_id_fkey"' - ): - + elif 'constraint "location_thing_association_location_id_fkey"' in error_message: detail = { "loc": ["body", "location_id"], "msg": f"Location with ID {payload.location_id} not found.", "type": "value_error", "input": {"location_id": payload.location_id}, } - elif ( - error_message - == 'insert or update on table "well_screen" violates foreign key constraint "well_screen_thing_id_fkey"' - ): + elif 'constraint "well_screen_thing_id_fkey"' in error_message: detail = { "loc": ["body", "thing_id"], "msg": f"Thing with ID {payload.thing_id} not found.", "type": "value_error", "input": {"thing_id": payload.thing_id}, } - elif ( - error_message - == 'insert or update on table "well_screen" violates foreign key constraint "well_screen_screen_type_fkey"' - ): + elif 'constraint "well_screen_screen_type_fkey"' in error_message: valid_screen_types = get_terms_by_category("casing_material") valid_screen_types_for_msg = " | ".join(valid_screen_types) detail = { @@ -132,16 +123,20 @@ def database_error_handler( "type": "value_error", "input": {"screen_type": payload.screen_type}, } - elif ( - error_message - == 'insert or update on table "thing_id_link" violates foreign 
key constraint "thing_id_link_thing_id_fkey"' - ): + elif 'constraint "thing_id_link_thing_id_fkey"' in error_message: detail = { "loc": ["body", "thing_id"], "msg": f"Thing with ID {payload.thing_id} not found.", "type": "value_error", "input": {"thing_id": payload.thing_id}, } + else: + detail = { + "loc": ["body"], + "msg": error_message, + "type": "value_error", + "input": {}, + } raise PydanticStyleException(status_code=HTTP_409_CONFLICT, detail=[detail]) @@ -150,7 +145,7 @@ def database_error_handler( @router.get("/water-well", summary="Get all water wells", status_code=HTTP_200_OK) -async def get_water_wells( +def get_water_wells( user: viewer_dependency, session: session_dependency, request: Request, @@ -180,7 +175,7 @@ async def get_water_wells( @router.get( "/water-well/{thing_id}", summary="Get water well by ID", status_code=HTTP_200_OK ) -async def get_well_by_id( +def get_well_by_id( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -197,7 +192,7 @@ async def get_well_by_id( summary="Get water well details payload", status_code=HTTP_200_OK, ) -async def get_well_details( +def get_well_details( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -219,7 +214,7 @@ async def get_well_details( summary="Get water well export payload", status_code=HTTP_200_OK, ) -async def get_well_export( +def get_well_export( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -240,7 +235,7 @@ async def get_well_export( summary="Get well screens by water well ID", status_code=HTTP_200_OK, ) -async def get_well_screens_by_well_id( +def get_well_screens_by_well_id( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -258,7 +253,7 @@ async def get_well_screens_by_well_id( "/well-screen", summary="Get well screens", ) -async def get_well_screens( +def get_well_screens( user: viewer_dependency, session: session_dependency, thing_id: int = None, @@ -277,7 +272,7 @@ async def get_well_screens( 
"/well-screen/{wellscreen_id}", summary="Get well screen by ID", ) -async def get_well_screen_by_id( +def get_well_screen_by_id( user: viewer_dependency, session: session_dependency, wellscreen_id: int, @@ -290,7 +285,7 @@ async def get_well_screen_by_id( @router.get("/spring", summary="Get all springs") -async def get_springs( +def get_springs( user: viewer_dependency, session: session_dependency, request: Request, @@ -307,7 +302,7 @@ async def get_springs( @router.get("/spring/{thing_id}", summary="Get spring by ID", status_code=HTTP_200_OK) -async def get_spring_by_id( +def get_spring_by_id( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -323,7 +318,7 @@ async def get_spring_by_id( "/id-link", summary="Get all thing links", ) -async def get_thing_id_links( +def get_thing_id_links( user: viewer_dependency, session: session_dependency, filter_: str = Query(alias="filter", default=None), @@ -341,7 +336,7 @@ async def get_thing_id_links( @public_route @router.get("/id-link/{link_id}", summary="Get thing links by link ID") -async def get_thing_id_links( +def get_thing_id_links( user: viewer_dependency, link_id: int, session: session_dependency, @@ -354,7 +349,7 @@ async def get_thing_id_links( @public_route @router.get("", summary="Get all things", status_code=HTTP_200_OK) -async def get_things( +def get_things( user: viewer_dependency, session: session_dependency, within: Optional[str] = None, @@ -383,7 +378,7 @@ async def get_things( @router.get("/{thing_id}", summary="Get thing by ID", status_code=HTTP_200_OK) -async def get_thing_by_id( +def get_thing_by_id( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -397,7 +392,7 @@ async def get_thing_by_id( @router.get("/{thing_id}/id-link", summary="Get thing links by thing ID") -async def get_thing_id_links( +def get_thing_id_links( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -411,7 +406,7 @@ async def get_thing_id_links( 
@router.get("/{thing_id}/deployment", summary="Get deployments by thing ID") -async def get_thing_deployments( +def get_thing_deployments( user: viewer_dependency, thing_id: int, session: session_dependency, @@ -431,7 +426,7 @@ async def get_thing_deployments( @router.post( "/id-link", status_code=HTTP_201_CREATED, summary="Create a new thing link" ) -async def create_thing_id_link( +def create_thing_id_link( link_data: CreateThingIdLink, session: session_dependency, user: admin_dependency, @@ -441,7 +436,7 @@ async def create_thing_id_link( """ try: return model_adder(session, ThingIdLink, link_data, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(link_data, e) @@ -450,7 +445,7 @@ async def create_thing_id_link( summary="Create a water well", status_code=HTTP_201_CREATED, ) -async def create_well( +def create_well( thing_data: CreateWell, session: session_dependency, request: Request, @@ -463,7 +458,7 @@ async def create_well( thing = add_thing(session=session, data=thing_data, request=request, user=user) modify_well_descriptor_tables(session, thing, thing_data, user) return thing - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(thing_data, e) @@ -472,7 +467,7 @@ async def create_well( summary="Create a new spring", status_code=HTTP_201_CREATED, ) -async def create_spring( +def create_spring( thing_data: CreateSpring, session: session_dependency, request: Request, @@ -483,7 +478,7 @@ async def create_spring( """ try: return add_thing(session=session, data=thing_data, request=request, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(thing_data, e) @@ -492,7 +487,7 @@ async def create_spring( summary="Create a new well screen", status_code=HTTP_201_CREATED, ) -async def create_wellscreen( +def create_wellscreen( session: session_dependency, user: admin_dependency, 
well_screen_data: CreateWellScreen, @@ -502,7 +497,7 @@ async def create_wellscreen( """ try: return add_well_screen(session, well_screen_data, user=user) - except ProgrammingError as e: + except (IntegrityError, ProgrammingError) as e: database_error_handler(well_screen_data, e) except PydanticStyleException as e: raise e @@ -516,7 +511,7 @@ async def create_wellscreen( summary="Update well by parent thing ID", status_code=HTTP_200_OK, ) -async def update_water_well( +def update_water_well( thing_id: int, thing_data: UpdateWell, session: session_dependency, @@ -545,7 +540,7 @@ async def update_water_well( summary="Update spring by parent thing ID", status_code=HTTP_200_OK, ) -async def update_spring( +def update_spring( thing_id: int, thing_data: UpdateSpring, session: session_dependency, @@ -561,7 +556,7 @@ async def update_spring( @router.patch( "/id-link/{link_id}", summary="Update thing link by ID", status_code=HTTP_200_OK ) -async def update_thing_id_link( +def update_thing_id_link( link_id: int, link_data: UpdateThingIdLink, session: session_dependency, @@ -575,7 +570,7 @@ async def update_thing_id_link( summary="Update Well Screen by ID", status_code=HTTP_200_OK, ) -async def update_well_screen( +def update_well_screen( well_screen_id: int, well_screen_data: UpdateWellScreen, session: session_dependency, @@ -594,7 +589,7 @@ async def update_well_screen( @router.delete( "/{thing_id}", summary="Delete thing by ID", status_code=HTTP_204_NO_CONTENT ) -async def delete_thing( +def delete_thing( thing_id: int, session: session_dependency, user: admin_dependency, @@ -610,7 +605,7 @@ async def delete_thing( summary="Delete well screen by ID", status_code=HTTP_204_NO_CONTENT, ) -async def delete_well_screen( +def delete_well_screen( well_screen_id: int, session: session_dependency, user: admin_dependency, @@ -626,7 +621,7 @@ async def delete_well_screen( summary="Delete thing link by ID", status_code=HTTP_204_NO_CONTENT, ) -async def delete_thing_id_link( +def 
delete_thing_id_link( link_id: int, session: session_dependency, user: admin_dependency, diff --git a/tests/integration/test_nma_legacy_relationships.py b/tests/integration/test_nma_legacy_relationships.py index c613f13c..7731f86a 100644 --- a/tests/integration/test_nma_legacy_relationships.py +++ b/tests/integration/test_nma_legacy_relationships.py @@ -212,11 +212,11 @@ def test_chemistry_sample_requires_thing(self): with session_ctx() as session: record = NMA_Chemistry_SampleInfo( nma_sample_pt_id=uuid.uuid4(), - nma_sample_point_id="ORPHAN-CHEM", + nma_sample_point_id="ORPHAN", # No thing_id - should fail on commit ) session.add(record) - # pg8000 raises ProgrammingError for NOT NULL violations (error code 23502) + # Driver may surface NOT NULL violations as ProgrammingError (code 23502) with pytest.raises((IntegrityError, ProgrammingError, ValueError)): session.commit() session.rollback() diff --git a/tests/test_nma_chemistry_lineage.py b/tests/test_nma_chemistry_lineage.py index f0853958..b6710931 100644 --- a/tests/test_nma_chemistry_lineage.py +++ b/tests/test_nma_chemistry_lineage.py @@ -348,7 +348,7 @@ def test_sample_info_requires_thing(shared_thing): # No thing_id - should fail ) session.add(sample_info) - # pg8000 raises ProgrammingError for NOT NULL violations (error code 23502) + # Driver may surface NOT NULL violations as ProgrammingError (code 23502) with pytest.raises((IntegrityError, ProgrammingError, ValueError)): session.commit() session.rollback() @@ -461,7 +461,7 @@ def test_mtc_requires_chemistry_sample_info(): # No chemistry_sample_info_id - should fail ) session.add(mtc) - # pg8000 raises ProgrammingError for NOT NULL violations (error code 23502) + # Driver may surface NOT NULL violations as ProgrammingError (code 23502) with pytest.raises((IntegrityError, ProgrammingError)): session.commit() session.rollback() From 9ab9846a0e3ab8b24021bd0ec67d19f3d5ea82c2 Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Wed, 15 Apr 2026 16:32:31 
-0600 Subject: [PATCH 08/39] feat(water-level-importer): add support for field event participants and sampler validation - create or reuse field event participants for imported staff columns - link imported samples to the measuring participant - fail a row when measuring_person does not resolve to exactly one participant --- services/water_level_csv.py | 124 ++++++++++++- tests/test_water_level_csv_service.py | 253 +++++++++++++++++++++++++- 2 files changed, 375 insertions(+), 2 deletions(-) diff --git a/services/water_level_csv.py b/services/water_level_csv.py index 9faa5af2..e694f919 100644 --- a/services/water_level_csv.py +++ b/services/water_level_csv.py @@ -24,7 +24,16 @@ from pathlib import Path from typing import Any, BinaryIO, Iterable, List -from db import Thing, FieldEvent, FieldActivity, Sample, Observation, Parameter +from db import ( + Thing, + FieldEvent, + FieldActivity, + Sample, + Observation, + Parameter, + Contact, + FieldEventParticipant, +) from db.engine import session_ctx from pydantic import ValidationError from schemas.water_level_csv import ( @@ -35,6 +44,7 @@ ) from sqlalchemy import select from sqlalchemy.orm import Session, selectinload +from services.contact_helper import add_contact from services.thing_helper import find_water_wells_by_name REQUIRED_FIELDS: List[str] = list(WATER_LEVEL_REQUIRED_FIELDS) @@ -56,6 +66,8 @@ class _ValidatedRow: raw: dict[str, str] well: Thing field_staff: str + field_staff_2: str | None + field_staff_3: str | None sampler: str sample_method_term: str field_event_dt: datetime @@ -291,6 +303,8 @@ def _validate_rows( raw={**normalized}, well=well, field_staff=model.field_staff, + field_staff_2=model.field_staff_2, + field_staff_3=model.field_staff_3, sampler=model.measuring_person, sample_method_term=model.sample_method, field_event_dt=model.field_event_date_time, @@ -352,6 +366,7 @@ def _validate_depth_to_water_against_well( def _create_records( session: Session, parameter_id: int, rows: 
list[_ValidatedRow] ) -> tuple[list[dict[str, Any]], list[str]]: + """Create or update field-event, sample, and observation rows for each CSV row.""" created: list[dict[str, Any]] = [] errors: list[str] = [] @@ -393,6 +408,12 @@ def _create_records( _apply_sample_values(sample, row, sample_name) _apply_observation_values(observation, row, parameter_id) + # Add participants after required sample/observation fields are populated + # so the contact lookup does not trigger an autoflush of incomplete rows. + participants = _ensure_field_event_participants(session, field_event, row) + sample.field_event_participant = _resolve_measuring_participant( + row, participants + ) session.flush() savepoint.commit() @@ -427,12 +448,14 @@ def _create_records( def _build_sample_name(row: _ValidatedRow) -> str: + """Build the deterministic sample identifier used for create/update matching.""" return f"{row.well.name}-WL-{row.measurement_dt.strftime('%Y%m%d%H%M')}" def _find_existing_imported_sample( session: Session, row: _ValidatedRow, sample_name: str ) -> Sample | None: + """Return the previously imported groundwater-level sample for this row, if any.""" sql = ( select(Sample) .join(FieldActivity, Sample.field_activity_id == FieldActivity.id) @@ -454,13 +477,111 @@ def _find_existing_imported_sample( def _find_existing_observation(sample: Sample, parameter_id: int) -> Observation | None: + """Return the groundwater-level observation already linked to the sample, if any.""" for observation in sample.observations: if observation.parameter_id == parameter_id: return observation return None +def _ensure_field_event_participants( + session: Session, field_event: FieldEvent, row: _ValidatedRow +) -> list[FieldEventParticipant]: + """Return event participants for imported staff names, creating any missing ones.""" + participant_specs = ( + (row.field_staff, "Lead"), + (row.field_staff_2, "Participant"), + (row.field_staff_3, "Participant"), + ) + existing_participants = session.scalars( 
+ select(FieldEventParticipant) + .options(selectinload(FieldEventParticipant.participant)) + .where(FieldEventParticipant.field_event_id == field_event.id) + .order_by(FieldEventParticipant.id.asc()) + ).all() + + for staff_name, role in participant_specs: + if not staff_name: + continue + + contact = _get_or_create_field_staff_contact(session, staff_name) + participant = next( + ( + existing + for existing in existing_participants + if existing.contact_id == contact.id + and existing.participant_role == role + ), + None, + ) + if participant is None: + participant = FieldEventParticipant( + field_event=field_event, + contact_id=contact.id, + participant_role=role, + ) + session.add(participant) + # Attach the resolved contact eagerly so downstream matching can use + # participant.participant.name without an extra lookup. + participant.participant = contact + existing_participants.append(participant) + + return existing_participants + + +def _get_or_create_field_staff_contact(session: Session, staff_name: str) -> Contact: + """Resolve or create the contact record used by field event participants.""" + contact_type = "Field Event Participant" + organization = "NMBGMR" + contact = session.scalars( + select(Contact) + .where(Contact.name == staff_name) + .where(Contact.organization == organization) + .where(Contact.contact_type == contact_type) + ).first() + + if contact is None: + payload = { + "name": staff_name, + "role": "Technician", + "organization": organization, + "contact_type": contact_type, + } + contact = add_contact(session, payload, None, commit=False) + + return contact + + +def _resolve_measuring_participant( + row: _ValidatedRow, participants: list[FieldEventParticipant] +) -> FieldEventParticipant: + """Return the unique participant matching measuring_person or raise a row error.""" + matching_participants = [ + participant + for participant in participants + if participant.participant is not None + and participant.participant.name == row.sampler + 
] + if len(matching_participants) == 1: + return matching_participants[0] + + if not matching_participants: + raise ValueError( + "measuring_person " + f"'{row.sampler}' could not be matched to a field event participant" + ) + + raise ValueError( + "measuring_person " + f"'{row.sampler}' matched multiple field event participants; " + # Ambiguous staff rows should fail so the importer never guesses which + # participant performed the measurement. + "field_staff values must identify exactly one measuring person" + ) + + def _apply_sample_values(sample: Sample, row: _ValidatedRow, sample_name: str) -> None: + """Apply normalized sample values from the validated CSV row.""" sample.sample_date = row.measurement_dt sample.sample_name = sample_name sample.sample_matrix = "groundwater" @@ -472,6 +593,7 @@ def _apply_sample_values(sample: Sample, row: _ValidatedRow, sample_name: str) - def _apply_observation_values( observation: Observation, row: _ValidatedRow, parameter_id: int ) -> None: + """Apply normalized observation values from the validated CSV row.""" observation.observation_datetime = row.measurement_dt observation.parameter_id = parameter_id observation.value = row.depth_to_water_ft diff --git a/tests/test_water_level_csv_service.py b/tests/test_water_level_csv_service.py index e4b01d9c..03b2d138 100644 --- a/tests/test_water_level_csv_service.py +++ b/tests/test_water_level_csv_service.py @@ -2,7 +2,15 @@ from decimal import Decimal from types import SimpleNamespace -from db import FieldActivity, FieldEvent, Observation, Sample, Thing +from db import ( + Contact, + FieldActivity, + FieldEvent, + FieldEventParticipant, + Observation, + Sample, + Thing, +) from db.measuring_point_history import MeasuringPointHistory from db.engine import session_ctx from tests import get_parameter_id @@ -218,6 +226,8 @@ def test_bulk_upload_water_levels_is_idempotent(water_well_thing): assert len(observations) == 1 assert samples[0].sample_name == "Test Well-WL-202502151730" 
assert samples[0].sample_matrix == "groundwater" + assert samples[0].field_event_participant is not None + assert samples[0].field_event_participant.participant.name == "A Lopez" assert observations[0].groundwater_level_reason == "Water level not affected" assert ( observations[0].nma_data_quality @@ -226,6 +236,247 @@ def test_bulk_upload_water_levels_is_idempotent(water_well_thing): assert observations[0].measuring_point_height == 1.5 +def test_bulk_upload_water_levels_creates_field_event_participants(water_well_thing): + csv_content = "\n".join( + [ + ",".join( + [ + "field_staff", + "field_staff_2", + "field_staff_3", + "well_name_point_id", + "field_event_date_time", + "measurement_date_time", + "sampler", + "sample_method", + "mp_height", + "level_status", + "depth_to_water_ft", + "data_quality", + "water_level_notes", + ] + ), + ",".join( + [ + "A Lopez", + "B Chen", + "C Diaz", + water_well_thing.name, + "2025-02-15T08:00:00-07:00", + "2025-02-15T10:30:00-07:00", + "A Lopez", + "electric tape", + "1.5", + "Water level not affected", + "7.0", + "Water level accurate to within two hundreths of a foot", + "Initial measurement", + ] + ), + ] + ) + + result = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert result.exit_code == 0, result.payload + + with session_ctx() as session: + field_event = session.scalars( + select(FieldEvent) + .join(Thing, FieldEvent.thing_id == Thing.id) + .where(Thing.id == water_well_thing.id) + ).one() + participants = session.scalars( + select(FieldEventParticipant) + .where(FieldEventParticipant.field_event_id == field_event.id) + .order_by(FieldEventParticipant.id.asc()) + ).all() + contacts = session.scalars( + select(Contact) + .where( + Contact.name.in_(["A Lopez", "B Chen", "C Diaz"]), + Contact.organization == "NMBGMR", + Contact.contact_type == "Field Event Participant", + ) + .order_by(Contact.name.asc()) + ).all() + + assert len(participants) == 3 + assert [participant.participant_role for participant in 
participants] == [ + "Lead", + "Participant", + "Participant", + ] + assert {participant.field_event_id for participant in participants} == { + field_event.id + } + sample = session.scalars( + select(Sample) + .join(FieldActivity, Sample.field_activity_id == FieldActivity.id) + .where(FieldActivity.field_event_id == field_event.id) + ).one() + assert sample.field_event_participant_id == participants[0].id + assert sample.field_event_participant.participant.name == "A Lopez" + + +def test_bulk_upload_water_levels_does_not_duplicate_field_event_participants_on_rerun( + water_well_thing, +): + csv_content = "\n".join( + [ + ",".join( + [ + "field_staff", + "field_staff_2", + "well_name_point_id", + "field_event_date_time", + "measurement_date_time", + "sampler", + "sample_method", + "mp_height", + "level_status", + "depth_to_water_ft", + "data_quality", + "water_level_notes", + ] + ), + ",".join( + [ + "A Lopez", + "B Chen", + water_well_thing.name, + "2025-02-15T08:00:00-07:00", + "2025-02-15T10:30:00-07:00", + "A Lopez", + "electric tape", + "1.5", + "Water level not affected", + "7.0", + "Water level accurate to within two hundreths of a foot", + "Initial measurement", + ] + ), + ] + ) + + first = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert first.exit_code == 0, first.payload + + with session_ctx() as session: + field_event = session.scalars( + select(FieldEvent) + .join(Thing, FieldEvent.thing_id == Thing.id) + .where(Thing.id == water_well_thing.id) + ).one() + participants = session.scalars( + select(FieldEventParticipant) + .where(FieldEventParticipant.field_event_id == field_event.id) + .order_by(FieldEventParticipant.id.asc()) + ).all() + sample = session.scalars( + select(Sample) + .join(FieldActivity, Sample.field_activity_id == FieldActivity.id) + .where(FieldActivity.field_event_id == field_event.id) + ).one() + + # Capture the exact participant/contact linkage from the first import so + # the rerun can prove the importer reused those 
records rather than + # creating replacements. + first_participant_ids = [participant.id for participant in participants] + first_contact_ids = [participant.contact_id for participant in participants] + first_sample_participant_id = sample.field_event_participant_id + + second = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert second.exit_code == 0, second.payload + + with session_ctx() as session: + field_events = session.scalars( + select(FieldEvent) + .join(Thing, FieldEvent.thing_id == Thing.id) + .where(Thing.id == water_well_thing.id) + ).all() + participants = session.scalars( + select(FieldEventParticipant) + .where(FieldEventParticipant.field_event_id == field_events[0].id) + .order_by(FieldEventParticipant.id.asc()) + ).all() + sample = session.scalars( + select(Sample) + .join(FieldActivity, Sample.field_activity_id == FieldActivity.id) + .where(FieldActivity.field_event_id == field_events[0].id) + ).one() + + assert len(field_events) == 1 + assert len(participants) == 2 + assert [participant.id for participant in participants] == first_participant_ids + assert [ + participant.contact_id for participant in participants + ] == first_contact_ids + assert sample.field_event_participant_id == first_sample_participant_id + assert sample.field_event_participant is not None + assert sample.field_event_participant.participant.name == "A Lopez" + + +def test_bulk_upload_water_levels_fails_when_measuring_person_is_ambiguous( + water_well_thing, +): + csv_content = "\n".join( + [ + ",".join( + [ + "field_staff", + "field_staff_2", + "well_name_point_id", + "field_event_date_time", + "measurement_date_time", + "sampler", + "sample_method", + "mp_height", + "level_status", + "depth_to_water_ft", + "data_quality", + "water_level_notes", + ] + ), + ",".join( + [ + "A Lopez", + "A Lopez", + water_well_thing.name, + "2025-02-15T08:00:00-07:00", + "2025-02-15T10:30:00-07:00", + "A Lopez", + "electric tape", + "1.5", + "Water level not affected", + "7.0", + 
"Water level accurate to within two hundreths of a foot", + "Initial measurement", + ] + ), + ] + ) + + result = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert result.exit_code == 1 + assert result.payload["summary"]["total_rows_imported"] == 0 + assert result.payload["validation_errors"] == [ + "Row 1: measuring_person 'A Lopez' matched multiple field event " + "participants; field_staff values must identify exactly one measuring " + "person" + ] + + with session_ctx() as session: + samples = session.scalars(select(Sample)).all() + participants = session.scalars(select(FieldEventParticipant)).all() + + assert samples == [] + assert participants == [] + + def test_bulk_upload_water_levels_warns_when_mp_height_differs_from_history( water_well_thing, ): From 35f9c9f402504ad7c6d8758434ac6c2c5c37cbf3 Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Wed, 15 Apr 2026 16:36:25 -0600 Subject: [PATCH 09/39] refactor(water-level-importer): stop storing staff names in note fields - Keep `water_level_notes` as freeform notes - Stop duplicating field staff and sampler into field event/activity notes - Treat structured participants as the authoritative staff source --- services/water_level_csv.py | 15 ++++++++------- tests/test_cli_commands.py | 11 ++++++----- tests/test_water_level_csv_service.py | 8 ++++++++ 3 files changed, 22 insertions(+), 12 deletions(-) diff --git a/services/water_level_csv.py b/services/water_level_csv.py index e694f919..eab23a35 100644 --- a/services/water_level_csv.py +++ b/services/water_level_csv.py @@ -386,7 +386,9 @@ def _create_records( field_activity = FieldActivity( field_event=field_event, activity_type="groundwater level", - notes=f"Sampler: {row.sampler}", + # Measuring staff now lives on structured participants and the + # sample participant link, not in field_activity.notes. 
+ notes=None, ) sample = Sample(field_activity=field_activity) observation = Observation(sample=sample) @@ -404,7 +406,9 @@ def _create_records( field_event.event_date = row.field_event_dt field_event.notes = _build_field_event_notes(row) - field_activity.notes = f"Sampler: {row.sampler}" + # Clear any legacy sampler note text so downstream readers use + # structured participant data as the authoritative source. + field_activity.notes = None _apply_sample_values(sample, row, sample_name) _apply_observation_values(observation, row, parameter_id) @@ -605,11 +609,8 @@ def _apply_observation_values( def _build_field_event_notes(row: _ValidatedRow) -> str | None: - parts = [f"Field staff: {row.field_staff}"] - if row.water_level_notes: - parts.append(row.water_level_notes) - notes = " | ".join(part for part in parts if part) - return notes or None + """Return only freeform field-event notes; staff lives in structured participants.""" + return row.water_level_notes or None def _get_groundwater_level_parameter_id(session: Session) -> int: diff --git a/tests/test_cli_commands.py b/tests/test_cli_commands.py index a1d4515f..5059ff8a 100644 --- a/tests/test_cli_commands.py +++ b/tests/test_cli_commands.py @@ -696,10 +696,12 @@ def _write_csv(path: Path, *, well_name: str, notes: str): "Water level accurate to within two hundreths of a foot," f"{notes}" ) - csv_text = textwrap.dedent(f"""\ + csv_text = textwrap.dedent( + f"""\ {header} {row} - """) + """ + ) path.write_text(csv_text) unique_notes = f"pytest-{uuid.uuid4()}" @@ -742,9 +744,8 @@ def _write_csv(path: Path, *, well_name: str, notes: str): observation.nma_data_quality == "Water level accurate to within two hundreths of a foot" ) - assert ( - field_event.notes == f"Field staff: CLI Tester | {unique_notes}" - ), "Field event notes should capture field staff and notes" + assert field_event.notes == unique_notes + assert field_activity.notes is None created_ids = { "observation_id": observation.id, diff --git 
a/tests/test_water_level_csv_service.py b/tests/test_water_level_csv_service.py index 03b2d138..c0a499b9 100644 --- a/tests/test_water_level_csv_service.py +++ b/tests/test_water_level_csv_service.py @@ -310,6 +310,14 @@ def test_bulk_upload_water_levels_creates_field_event_participants(water_well_th assert {participant.field_event_id for participant in participants} == { field_event.id } + # Notes now carry only freeform text; staff identity should come from the + # structured participant records and the sample participant link. + assert field_event.notes == "Initial measurement" + assert len(contacts) == 3 + field_activity = session.scalars( + select(FieldActivity).where(FieldActivity.field_event_id == field_event.id) + ).one() + assert field_activity.notes is None sample = session.scalars( select(Sample) .join(FieldActivity, Sample.field_activity_id == FieldActivity.id) From 2d61ecf620b488dc989056d14c1eb2e90574f783 Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Wed, 15 Apr 2026 16:43:38 -0600 Subject: [PATCH 10/39] test(api): verify well details exposes imported water-level staff - Verify the well details payload exposes the measuring person on latest_field_event_sample.contact - Verify the latest field event participants list includes imported staff --- tests/test_thing.py | 58 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/tests/test_thing.py b/tests/test_thing.py index 9201dfbe..fd9c86d9 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -32,6 +32,7 @@ from schemas import DT_FMT from schemas.location import LocationResponse from schemas.thing import UpdateWell, ValidateWell +from services.water_level_csv import bulk_upload_water_levels from tests import ( client, override_authentication, @@ -721,6 +722,63 @@ def test_get_water_well_details_payload_uses_latest_observation_sample( session.commit() +def test_get_water_well_details_payload_includes_imported_water_level_staff( + water_well_thing, +): 
+ """Imported water-level rows should expose measuring staff via structured detail payload fields.""" + csv_content = "\n".join( + [ + ",".join( + [ + "field_staff", + "field_staff_2", + "well_name_point_id", + "field_event_date_time", + "measurement_date_time", + "sampler", + "sample_method", + "mp_height", + "level_status", + "depth_to_water_ft", + "data_quality", + "water_level_notes", + ] + ), + ",".join( + [ + "A Lopez", + "B Chen", + water_well_thing.name, + "2025-02-15T08:00:00-07:00", + "2025-02-15T10:30:00-07:00", + "A Lopez", + "electric tape", + "1.5", + "Water level not affected", + "7.0", + "Water level accurate to within two hundreths of a foot", + "Imported measurement", + ] + ), + ] + ) + + result = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert result.exit_code == 0, result.payload + + # `/details` is the primary frontend payload for latest water-level staff. + response = client.get(f"/thing/water-well/{water_well_thing.id}/details") + + assert response.status_code == 200 + data = response.json() + assert data["latest_field_event_sample"]["contact"]["name"] == "A Lopez" + assert { + participant["participant"]["name"] + for participant in data["field_event_participants"] + } == {"A Lopez", "B Chen"} + + def test_get_water_well_details_payload_404_not_found(): response = client.get("/thing/water-well/999999/details") From 3b62751fade707c49e5467edfc0ec6a9b6a41e7c Mon Sep 17 00:00:00 2001 From: ksmuczynski <20096455+ksmuczynski@users.noreply.github.com> Date: Thu, 16 Apr 2026 02:14:38 +0000 Subject: [PATCH 11/39] Formatting changes --- tests/test_cli_commands.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_cli_commands.py b/tests/test_cli_commands.py index 5059ff8a..b9b28fa1 100644 --- a/tests/test_cli_commands.py +++ b/tests/test_cli_commands.py @@ -696,12 +696,10 @@ def _write_csv(path: Path, *, well_name: str, notes: str): "Water level accurate to within two hundreths of a foot," f"{notes}" ) - 
csv_text = textwrap.dedent( - f"""\ + csv_text = textwrap.dedent(f"""\ {header} {row} - """ - ) + """) path.write_text(csv_text) unique_notes = f"pytest-{uuid.uuid4()}" From 56c96a8615ccbd9f7e605624bb778036981386ce Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Wed, 15 Apr 2026 20:36:44 -0600 Subject: [PATCH 12/39] fix(water-level-importer): reuse contacts by unique name and organization `Contact` rows are unique on `name + organization`, so the water-level importer now uses that same key when resolving field staff contacts. This avoids missing an existing contact with a different contact_type and attempting a duplicate insert. Also adds regression coverage for reusing an existing same-name/same-organization contact during import. --- services/water_level_csv.py | 4 +- tests/test_water_level_csv_service.py | 69 +++++++++++++++++++++++++++ 2 files changed, 72 insertions(+), 1 deletion(-) diff --git a/services/water_level_csv.py b/services/water_level_csv.py index eab23a35..abe239b0 100644 --- a/services/water_level_csv.py +++ b/services/water_level_csv.py @@ -537,11 +537,13 @@ def _get_or_create_field_staff_contact(session: Session, staff_name: str) -> Con """Resolve or create the contact record used by field event participants.""" contact_type = "Field Event Participant" organization = "NMBGMR" + # Contact uniqueness is enforced on (name, organization), so the lookup + # must use the same key to avoid missing an existing row with a different + # contact_type and attempting a duplicate insert. 
contact = session.scalars( select(Contact) .where(Contact.name == staff_name) .where(Contact.organization == organization) - .where(Contact.contact_type == contact_type) ).first() if contact is None: diff --git a/tests/test_water_level_csv_service.py b/tests/test_water_level_csv_service.py index c0a499b9..fb6fc233 100644 --- a/tests/test_water_level_csv_service.py +++ b/tests/test_water_level_csv_service.py @@ -485,6 +485,75 @@ def test_bulk_upload_water_levels_fails_when_measuring_person_is_ambiguous( assert participants == [] +def test_bulk_upload_water_levels_reuses_contact_with_same_name_and_organization( + water_well_thing, +): + staff_name = "Z Vega" + + with session_ctx() as session: + existing_contact = Contact( + name=staff_name, + organization="NMBGMR", + role="Technician", + contact_type="Primary", + ) + session.add(existing_contact) + session.commit() + existing_contact_id = existing_contact.id + + csv_content = "\n".join( + [ + ",".join( + [ + "field_staff", + "well_name_point_id", + "field_event_date_time", + "measurement_date_time", + "sampler", + "sample_method", + "mp_height", + "level_status", + "depth_to_water_ft", + "data_quality", + "water_level_notes", + ] + ), + ",".join( + [ + staff_name, + water_well_thing.name, + "2025-02-15T08:00:00-07:00", + "2025-02-15T10:30:00-07:00", + staff_name, + "electric tape", + "1.5", + "Water level not affected", + "7.0", + "Water level accurate to within two hundreths of a foot", + "Initial measurement", + ] + ), + ] + ) + + result = bulk_upload_water_levels(csv_content.encode("utf-8")) + + assert result.exit_code == 0, result.payload + + with session_ctx() as session: + contacts = session.scalars( + select(Contact) + .where(Contact.name == staff_name) + .where(Contact.organization == "NMBGMR") + ).all() + participants = session.scalars(select(FieldEventParticipant)).all() + + assert len(contacts) == 1 + assert contacts[0].id == existing_contact_id + assert len(participants) == 1 + assert 
participants[0].contact_id == existing_contact_id + + def test_bulk_upload_water_levels_warns_when_mp_height_differs_from_history( water_well_thing, ): From 2eaa6b84a1b72e86e0065abf559bb8b9a7550534 Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Thu, 16 Apr 2026 09:23:38 -0600 Subject: [PATCH 13/39] test(water-level-importer): clean up imported participant contacts in tests Water-level importer tests were leaving participant contacts behind in the shared test database, which caused test_get_contacts to fail during full-suite runs. Update fixture and CLI test cleanup so importer-created staff contacts are removed after tests finish, while keeping the contact API assertions strict. --- tests/conftest.py | 19 ++++++++++++++++++- tests/test_cli_commands.py | 28 +++++++++++++++++++++++++--- 2 files changed, 43 insertions(+), 4 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 5818707b..9eb1afd1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,7 @@ from alembic import command from alembic.config import Config from dotenv import load_dotenv -from sqlalchemy import delete +from sqlalchemy import delete, select from sqlalchemy import inspect as sa_inspect from core.initializers import init_lexicon, init_parameter @@ -174,6 +174,10 @@ def second_location(): @pytest.fixture() def water_well_thing(location): with session_ctx() as session: + # Some importer tests create participant contacts as a side effect. Keep + # a baseline so teardown can remove only the contacts introduced while + # this fixture-owned well existed. 
+ existing_contact_ids = set(session.scalars(select(Contact.id)).all()) water_well = Thing( name="Test Well", first_visit_date="2023-03-03", @@ -209,10 +213,23 @@ def water_well_thing(location): session.refresh(water_well) session.refresh(assoc) yield water_well + # Capture participant contacts before deleting the well, because the + # field event rows cascade away with the well and would no longer be + # queryable afterward. + imported_contact_ids = set( + session.scalars( + select(FieldEventParticipant.contact_id) + .join(FieldEvent) + .where(FieldEvent.thing_id == water_well.id) + ).all() + ) session.delete(water_well) session.delete(assoc) session.delete(measuring_point_history) session.commit() + for contact_id in imported_contact_ids - existing_contact_ids: + _delete_if_present(session, session.get(Contact, contact_id)) + session.commit() @pytest.fixture() diff --git a/tests/test_cli_commands.py b/tests/test_cli_commands.py index b9b28fa1..c47345a5 100644 --- a/tests/test_cli_commands.py +++ b/tests/test_cli_commands.py @@ -27,7 +27,14 @@ from cli.cli import cli from cli.service_adapter import WellInventoryResult -from db import FieldActivity, FieldEvent, Observation, Sample +from db import ( + Contact, + FieldActivity, + FieldEvent, + FieldEventParticipant, + Observation, + Sample, +) from db.engine import session_ctx @@ -696,10 +703,12 @@ def _write_csv(path: Path, *, well_name: str, notes: str): "Water level accurate to within two hundreths of a foot," f"{notes}" ) - csv_text = textwrap.dedent(f"""\ + csv_text = textwrap.dedent( + f"""\ {header} {row} - """) + """ + ) path.write_text(csv_text) unique_notes = f"pytest-{uuid.uuid4()}" @@ -755,6 +764,14 @@ def _write_csv(path: Path, *, well_name: str, notes: str): if created_ids: # Clean up committed rows so other tests see a pristine database. with session_ctx() as session: + # Collect participant contacts before deleting the field event so + # importer-created staff contacts do not leak into later tests. 
+ participant_contact_ids = session.scalars( + select(FieldEventParticipant.contact_id).where( + FieldEventParticipant.field_event_id + == created_ids["field_event_id"] + ) + ).all() observation = session.get(Observation, created_ids["observation_id"]) sample = session.get(Sample, created_ids["sample_id"]) field_activity = session.get( @@ -774,6 +791,11 @@ def _write_csv(path: Path, *, well_name: str, notes: str): if field_event: session.delete(field_event) session.flush() + for contact_id in participant_contact_ids: + contact = session.get(Contact, contact_id) + if contact: + session.delete(contact) + session.flush() session.commit() From 8465a535cc329512d58a64c434f1ea11f2a1b937 Mon Sep 17 00:00:00 2001 From: ksmuczynski <20096455+ksmuczynski@users.noreply.github.com> Date: Thu, 16 Apr 2026 15:24:09 +0000 Subject: [PATCH 14/39] Formatting changes --- tests/test_cli_commands.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/tests/test_cli_commands.py b/tests/test_cli_commands.py index c47345a5..bc9ff031 100644 --- a/tests/test_cli_commands.py +++ b/tests/test_cli_commands.py @@ -703,12 +703,10 @@ def _write_csv(path: Path, *, well_name: str, notes: str): "Water level accurate to within two hundreths of a foot," f"{notes}" ) - csv_text = textwrap.dedent( - f"""\ + csv_text = textwrap.dedent(f"""\ {header} {row} - """ - ) + """) path.write_text(csv_text) unique_notes = f"pytest-{uuid.uuid4()}" From 772e326aef65461a04d3f737ec28ad72312b4640 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Thu, 16 Apr 2026 11:37:10 -0500 Subject: [PATCH 15/39] fix(services/util): Add needed import --- services/util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/util.py b/services/util.py index ec9c043c..aeeaae80 100644 --- a/services/util.py +++ b/services/util.py @@ -1,7 +1,7 @@ import json import logging import time -from datetime import datetime +from datetime import datetime, timezone from zoneinfo import ZoneInfo 
import httpx From 39e01de7cb3ab8613d4e3973c81fb6e30c8673e2 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Thu, 16 Apr 2026 11:47:58 -0500 Subject: [PATCH 16/39] fix(schemas): Swap before mode for after in well_inventory & water_level_csv --- schemas/water_level_csv.py | 2 +- schemas/well_inventory.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/schemas/water_level_csv.py b/schemas/water_level_csv.py index fb0d6ac0..8e9e31e7 100644 --- a/schemas/water_level_csv.py +++ b/schemas/water_level_csv.py @@ -166,7 +166,7 @@ def normalize_sample_method(cls, value: str) -> str: @field_validator( "field_event_date_time", "water_level_date_time", - mode="before", + mode="after", ) @classmethod def normalize_datetime_field(cls, value: datetime | str) -> datetime: diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index 24e2ec3d..9ef44847 100644 --- a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -361,12 +361,12 @@ def normalize_complete_monitoring_frequency(cls, data): return data - @field_validator("date_time", mode="before") + @field_validator("date_time", mode="after") @classmethod def make_date_time_tz_aware(cls, v): normalize_datetime_to_utc(v) - @field_validator("measurement_date_time", mode="before") + @field_validator("measurement_date_time", mode="after") @classmethod def normalize_measurement_date_time(cls, v): if v is None or (isinstance(v, str) and v.strip() == ""): From 90b5b8a33e43f8934febb59e9dcd399f6930a25f Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Thu, 16 Apr 2026 11:52:39 -0500 Subject: [PATCH 17/39] fix(schemas/well_inventory): Add case for None --- schemas/well_inventory.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index 9ef44847..6c9e4afa 100644 --- a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -363,8 +363,10 @@ def normalize_complete_monitoring_frequency(cls, data): 
@field_validator("date_time", mode="after") @classmethod - def make_date_time_tz_aware(cls, v): - normalize_datetime_to_utc(v) + def normalize_date_time(cls, value: datetime | None) -> datetime | None: + if value is None: + return None + normalize_datetime_to_utc(value) @field_validator("measurement_date_time", mode="after") @classmethod From 7b3c8ad497fd486934ae604ed41a585d6119dc02 Mon Sep 17 00:00:00 2001 From: Kelsey Smuczynski Date: Thu, 16 Apr 2026 15:07:24 -0600 Subject: [PATCH 18/39] refactor(water-level-importer): normalize staff columns before participant creation Normalize the fixed field_staff CSV columns into a single iterable shape after validation, so participant creation no longer hardcodes each slot in the importer. This keeps the input CSV/schema unchanged while making the participant logic easier to read and easier to extend later if the staff-field shape changes. --- services/water_level_csv.py | 32 +++++++++++++++++--------------- 1 file changed, 17 insertions(+), 15 deletions(-) diff --git a/services/water_level_csv.py b/services/water_level_csv.py index abe239b0..a9f4198d 100644 --- a/services/water_level_csv.py +++ b/services/water_level_csv.py @@ -65,9 +65,7 @@ class _ValidatedRow: row_index: int raw: dict[str, str] well: Thing - field_staff: str - field_staff_2: str | None - field_staff_3: str | None + field_staff_entries: tuple[tuple[str, str], ...] 
sampler: str sample_method_term: str field_event_dt: datetime @@ -302,9 +300,7 @@ def _validate_rows( row_index=idx, raw={**normalized}, well=well, - field_staff=model.field_staff, - field_staff_2=model.field_staff_2, - field_staff_3=model.field_staff_3, + field_staff_entries=_normalize_field_staff_entries(model), sampler=model.measuring_person, sample_method_term=model.sample_method, field_event_dt=model.field_event_date_time, @@ -323,6 +319,20 @@ def _validate_rows( return valid_rows, errors +def _normalize_field_staff_entries( + model: WaterLevelCsvRow, +) -> tuple[tuple[str, str], ...]: + """Normalize fixed staff columns into an iterable participant list.""" + participant_specs = ( + (model.field_staff, "Lead"), + (model.field_staff_2, "Participant"), + (model.field_staff_3, "Participant"), + ) + return tuple( + (staff_name, role) for staff_name, role in participant_specs if staff_name + ) + + def _resolve_measuring_point_height( well: Thing, csv_mp_height: float | None ) -> tuple[float | int | None, float | int | None, bool]: @@ -492,11 +502,6 @@ def _ensure_field_event_participants( session: Session, field_event: FieldEvent, row: _ValidatedRow ) -> list[FieldEventParticipant]: """Return event participants for imported staff names, creating any missing ones.""" - participant_specs = ( - (row.field_staff, "Lead"), - (row.field_staff_2, "Participant"), - (row.field_staff_3, "Participant"), - ) existing_participants = session.scalars( select(FieldEventParticipant) .options(selectinload(FieldEventParticipant.participant)) @@ -504,10 +509,7 @@ def _ensure_field_event_participants( .order_by(FieldEventParticipant.id.asc()) ).all() - for staff_name, role in participant_specs: - if not staff_name: - continue - + for staff_name, role in row.field_staff_entries: contact = _get_or_create_field_staff_contact(session, staff_name) participant = next( ( From 2bf9ced968ad8f4c9af3bcbd69b743619d3b298a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Apr 2026 22:23:40 +0000 Subject: [PATCH 19/39] build(deps): bump mako from 1.3.10 to 1.3.11 (#658) Bumps [mako](https://github.com/sqlalchemy/mako) from 1.3.10 to 1.3.11. - [Release notes](https://github.com/sqlalchemy/mako/releases) - [Changelog](https://github.com/sqlalchemy/mako/blob/main/CHANGES) - [Commits](https://github.com/sqlalchemy/mako/commits) --- updated-dependencies: - dependency-name: mako dependency-version: 1.3.11 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- requirements.txt | 6 +++--- uv.lock | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4f550ff4..7b274a09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ dependencies = [ "iniconfig==2.3.0", "itsdangerous>=2.2.0", "jinja2==3.1.6", - "mako==1.3.10", + "mako==1.3.11", "markupsafe==3.0.3", "multidict==6.7.1", "numpy==2.4.4", diff --git a/requirements.txt b/requirements.txt index 655541b7..2e9efa2c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1022,9 +1022,9 @@ lark==1.3.1 \ --hash=sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905 \ --hash=sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12 # via pygeofilter -mako==1.3.10 \ - --hash=sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28 \ - --hash=sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59 +mako==1.3.11 \ + --hash=sha256:071eb4ab4c5010443152255d77db7faa6ce5916f35226eb02dc34479b6858069 \ + --hash=sha256:e372c6e333cf004aa736a15f425087ec977e1fcbd2966aae7f17c8dc1da27a77 # via # alembic # ocotilloapi diff --git a/uv.lock b/uv.lock index 4a0ee6c0..ed4913b8 100644 --- a/uv.lock +++ b/uv.lock @@ -1197,14 +1197,14 @@ wheels = [ [[package]] name = "mako" -version = "1.3.10" +version = 
"1.3.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/59/8a/805404d0c0b9f3d7a326475ca008db57aea9c5c9f2e1e39ed0faa335571c/mako-1.3.11.tar.gz", hash = "sha256:071eb4ab4c5010443152255d77db7faa6ce5916f35226eb02dc34479b6858069", size = 399811, upload-time = "2026-04-14T20:19:51.493Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = "https://files.pythonhosted.org/packages/68/a5/19d7aaa7e433713ffe881df33705925a196afb9532efc8475d26593921a6/mako-1.3.11-py3-none-any.whl", hash = "sha256:e372c6e333cf004aa736a15f425087ec977e1fcbd2966aae7f17c8dc1da27a77", size = 78503, upload-time = "2026-04-14T20:19:53.233Z" }, ] [[package]] @@ -1602,7 +1602,7 @@ requires-dist = [ { name = "iniconfig", specifier = "==2.3.0" }, { name = "itsdangerous", specifier = ">=2.2.0" }, { name = "jinja2", specifier = "==3.1.6" }, - { name = "mako", specifier = "==1.3.10" }, + { name = "mako", specifier = "==1.3.11" }, { name = "markupsafe", specifier = "==3.0.3" }, { name = "multidict", specifier = "==6.7.1" }, { name = "numpy", specifier = "==2.4.4" }, From b73dcc75823415f3895fa231e3d83abcdf20b2f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Apr 2026 00:28:18 +0000 Subject: [PATCH 20/39] build(deps): bump authlib from 1.6.9 to 1.6.11 (#659) Bumps [authlib](https://github.com/authlib/authlib) 
from 1.6.9 to 1.6.11. - [Release notes](https://github.com/authlib/authlib/releases) - [Changelog](https://github.com/authlib/authlib/blob/v1.6.11/docs/changelog.rst) - [Commits](https://github.com/authlib/authlib/compare/v1.6.9...v1.6.11) --- updated-dependencies: - dependency-name: authlib dependency-version: 1.6.11 dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 2 +- requirements.txt | 6 +++--- uv.lock | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 7b274a09..e757a412 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,7 +18,7 @@ dependencies = [ "asn1crypto==1.5.1", "asyncpg==0.31.0", "attrs==25.4.0", - "authlib==1.6.9", + "authlib==1.6.11", "bcrypt==4.3.0", "cachetools==5.5.2", "certifi==2025.8.3", diff --git a/requirements.txt b/requirements.txt index 2e9efa2c..ea852a72 100644 --- a/requirements.txt +++ b/requirements.txt @@ -223,9 +223,9 @@ attrs==25.4.0 \ # ocotilloapi # rasterio # referencing -authlib==1.6.9 \ - --hash=sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04 \ - --hash=sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3 +authlib==1.6.11 \ + --hash=sha256:64db35b9b01aeccb4715a6c9a6613a06f2bd7be2ab9d2eb89edd1dfc7580a38f \ + --hash=sha256:c8687a9a26451c51a34a06fa17bb97cb15bba46a6a626755e2d7f50da8bff3e3 # via ocotilloapi babel==2.18.0 \ --hash=sha256:b80b99a14bd085fcacfa15c9165f651fbb3406e66cc603abf11c5750937c992d \ diff --git a/uv.lock b/uv.lock index ed4913b8..c0e13457 100644 --- a/uv.lock +++ b/uv.lock @@ -244,14 +244,14 @@ wheels = [ [[package]] name = "authlib" -version = "1.6.9" +version = "1.6.11" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/10/b325d58ffe86815b399334a101e63bc6fa4e1953921cb23703b48a0a0220/authlib-1.6.11.tar.gz", hash = "sha256:64db35b9b01aeccb4715a6c9a6613a06f2bd7be2ab9d2eb89edd1dfc7580a38f", size = 165359, upload-time = "2026-04-16T07:22:50.279Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/57/2f/55fca558f925a51db046e5b929deb317ddb05afed74b22d89f4eca578980/authlib-1.6.11-py2.py3-none-any.whl", hash = "sha256:c8687a9a26451c51a34a06fa17bb97cb15bba46a6a626755e2d7f50da8bff3e3", size = 244469, upload-time = "2026-04-16T07:22:48.413Z" }, ] [[package]] @@ -1570,7 +1570,7 @@ requires-dist = [ { name = "asn1crypto", specifier = "==1.5.1" }, { name = "asyncpg", specifier = "==0.31.0" }, { name = "attrs", specifier = "==25.4.0" }, - { name = "authlib", specifier = "==1.6.9" }, + { name = "authlib", specifier = "==1.6.11" }, { name = "bcrypt", specifier = "==4.3.0" }, { name = "cachetools", specifier = "==5.5.2" }, { name = "certifi", specifier = "==2025.8.3" }, From 3cbb0e59758f4e4857cd83d47250ec2d8d3c645a Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 17 Apr 2026 10:17:29 -0500 Subject: [PATCH 21/39] fix(well_inventory): date_time was always returning None --- schemas/water_level_csv.py | 8 ++++++-- schemas/well_inventory.py | 4 ++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/schemas/water_level_csv.py 
b/schemas/water_level_csv.py index 8e9e31e7..67edcc54 100644 --- a/schemas/water_level_csv.py +++ b/schemas/water_level_csv.py @@ -156,7 +156,7 @@ def canonicalize_sample_method(value: str) -> str: @field_validator("sample_method") @classmethod - def normalize_sample_method(cls, value: str) -> str: + def normalize_sample_method(cls, value: str) -> str | None: return _canonicalize_enum_value( cls.canonicalize_sample_method(value), SampleMethod, @@ -169,7 +169,11 @@ def normalize_sample_method(cls, value: str) -> str: mode="after", ) @classmethod - def normalize_datetime_field(cls, value: datetime | str) -> datetime: + def normalize_datetime_field( + cls, value: datetime | None + ) -> datetime | None: + if value is None: + return None return normalize_datetime_to_utc(value) @field_validator("depth_to_water_ft") diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index 6c9e4afa..bbdd67b1 100644 --- a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -366,11 +366,11 @@ def normalize_complete_monitoring_frequency(cls, data): def normalize_date_time(cls, value: datetime | None) -> datetime | None: if value is None: return None - normalize_datetime_to_utc(value) + return normalize_datetime_to_utc(value) @field_validator("measurement_date_time", mode="after") @classmethod - def normalize_measurement_date_time(cls, v): + def normalize_measurement_date_time(cls, v) -> datetime | None: if v is None or (isinstance(v, str) and v.strip() == ""): return None return normalize_datetime_to_utc(v) From f33d6d17d1e793bb6d988f6ace5e7c726a4b60ff Mon Sep 17 00:00:00 2001 From: TylerAdamMartinez <57375362+TylerAdamMartinez@users.noreply.github.com> Date: Fri, 17 Apr 2026 15:18:24 +0000 Subject: [PATCH 22/39] Formatting changes --- schemas/water_level_csv.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/schemas/water_level_csv.py b/schemas/water_level_csv.py index 67edcc54..a41bf050 100644 --- a/schemas/water_level_csv.py +++ 
b/schemas/water_level_csv.py @@ -169,9 +169,7 @@ def normalize_sample_method(cls, value: str) -> str | None: mode="after", ) @classmethod - def normalize_datetime_field( - cls, value: datetime | None - ) -> datetime | None: + def normalize_datetime_field(cls, value: datetime | None) -> datetime | None: if value is None: return None return normalize_datetime_to_utc(value) From 60fe71935c5af99605393b3d90421a46d921b771 Mon Sep 17 00:00:00 2001 From: jross Date: Fri, 17 Apr 2026 09:31:41 -0600 Subject: [PATCH 23/39] feat: enhance well details API response with field events and related data structures --- schemas/sample.py | 14 ++ schemas/thing.py | 15 ++- schemas/well_details.py | 48 +++++-- services/well_details_helper.py | 230 ++++++++++++-------------------- tests/test_thing.py | 35 +++-- 5 files changed, 167 insertions(+), 175 deletions(-) diff --git a/schemas/sample.py b/schemas/sample.py index 8dce646b..6ec77e95 100644 --- a/schemas/sample.py +++ b/schemas/sample.py @@ -141,4 +141,18 @@ class SampleResponse(BaseResponseModel): depth_bottom: float | None +class WellDetailsSampleResponse(BaseResponseModel): + field_event: FieldEventResponse + field_activity: FieldActivityResponse + contact: ContactResponse | None + sample_date: UTCAwareDatetime + sample_name: str + sample_matrix: SampleMatrix + sample_method: SampleMethod + qc_type: SampleQcType + notes: str | None + depth_top: float | None + depth_bottom: float | None + + # ============= EOF ============================================= diff --git a/schemas/thing.py b/schemas/thing.py index 0423283b..bb2b051e 100644 --- a/schemas/thing.py +++ b/schemas/thing.py @@ -349,13 +349,8 @@ class ThingResponse(WellResponse, SpringResponse): measuring_point_height: float | None -class WellScreenResponse(BaseResponseModel): - """ - Response schema for well screen details. 
- """ - +class WellScreenBaseResponse(BaseResponseModel): thing_id: int - thing: WellResponse aquifer_system_id: int | None = None aquifer_system: str | None = None aquifer_type: str | None = None @@ -387,6 +382,14 @@ def populate_geologic_formation_with_code(cls, geologic_formation): return None +class WellScreenResponse(WellScreenBaseResponse): + """ + Response schema for well screen details. + """ + + thing: WellResponse + + class GeoJSONGeometry(BaseModel): """ Geometry schema for GeoJSON response. diff --git a/schemas/well_details.py b/schemas/well_details.py index e35ba5f4..e939411e 100644 --- a/schemas/well_details.py +++ b/schemas/well_details.py @@ -1,12 +1,44 @@ from pydantic import BaseModel, ConfigDict, Field +from schemas import BaseResponseModel, UTCAwareDatetime from schemas.contact import ContactResponse from schemas.deployment import DeploymentResponse -from schemas.observation import GroundwaterLevelObservationResponse -from schemas.sample import SampleResponse from schemas.field import FieldEventParticipantResponse +from schemas.observation import ObservationResponse from schemas.sensor import SensorResponse -from schemas.thing import WellResponse, WellScreenResponse +from schemas.thing import WellResponse, WellScreenBaseResponse + + +class WellDetailsFieldEventSampleResponse(BaseResponseModel): + contact: ContactResponse | None = None + sample_date: UTCAwareDatetime + sample_name: str + sample_matrix: str + sample_method: str + qc_type: str + notes: str | None = None + depth_top: float | None = None + depth_bottom: float | None = None + observations: list[ObservationResponse] = Field(default_factory=list) + + +class WellDetailsFieldActivityResponse(BaseResponseModel): + field_event_id: int + activity_type: str + notes: str | None = None + samples: list[WellDetailsFieldEventSampleResponse] = Field(default_factory=list) + + +class WellDetailsFieldEventResponse(BaseResponseModel): + thing_id: int + event_date: UTCAwareDatetime + notes: str | None = 
None + field_event_participants: list[FieldEventParticipantResponse] = Field( + default_factory=list + ) + field_activities: list[WellDetailsFieldActivityResponse] = Field( + default_factory=list + ) class WellDetailsResponse(BaseModel): @@ -16,11 +48,5 @@ class WellDetailsResponse(BaseModel): contacts: list[ContactResponse] = Field(default_factory=list) sensors: list[SensorResponse] = Field(default_factory=list) deployments: list[DeploymentResponse] = Field(default_factory=list) - well_screens: list[WellScreenResponse] = Field(default_factory=list) - recent_groundwater_level_observations: list[GroundwaterLevelObservationResponse] = ( - Field(default_factory=list) - ) - latest_field_event_sample: SampleResponse | None = None - field_event_participants: list[FieldEventParticipantResponse] = Field( - default_factory=list - ) + well_screens: list[WellScreenBaseResponse] = Field(default_factory=list) + field_events: list[WellDetailsFieldEventResponse] = Field(default_factory=list) diff --git a/services/well_details_helper.py b/services/well_details_helper.py index 28d72068..12d7acc4 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py @@ -1,8 +1,9 @@ import logging import time +from contextlib import contextmanager from sqlalchemy import select -from sqlalchemy.orm import Session, joinedload, selectinload +from sqlalchemy.orm import Session, selectinload from db import ( Contact, @@ -11,7 +12,6 @@ FieldEvent, FieldEventParticipant, Observation, - Parameter, Sample, Sensor, ThingContactAssociation, @@ -47,155 +47,93 @@ def _log_payload_stage(payload_name: str, stage: str, thing_id: int, started_at: ) +@contextmanager +def _payload_stage_timer(payload_name: str, stage: str, thing_id: int): + started_at = time.perf_counter() + try: + yield + finally: + _log_payload_stage(payload_name, stage, thing_id, started_at) + + def get_well_details_payload( session: Session, request, thing_id: int, - recent_observation_limit: int = 100, ): - 
payload_started_at = time.perf_counter() - stage_started_at = time.perf_counter() - well = get_thing_of_a_thing_type_by_id(session, request, thing_id) - _log_payload_stage("well_details", "load_well", thing_id, stage_started_at) - - stage_started_at = time.perf_counter() - contacts = session.scalars( - select(Contact) - .join(ThingContactAssociation) - .where(ThingContactAssociation.thing_id == well.id) - .options( - selectinload(Contact.emails), - selectinload(Contact.phones), - selectinload(Contact.addresses), - selectinload(Contact.incomplete_nma_phones), - selectinload(Contact.thing_associations).selectinload( - ThingContactAssociation.thing - ), - ) - .order_by(Contact.id) - ).all() - _log_payload_stage("well_details", "load_contacts", thing_id, stage_started_at) - - stage_started_at = time.perf_counter() - sensors = session.scalars( - select(Sensor) - .join(Deployment) - .where(Deployment.thing_id == well.id) - .distinct() - .order_by(Sensor.id) - ).all() - _log_payload_stage("well_details", "load_sensors", thing_id, stage_started_at) - - stage_started_at = time.perf_counter() - deployments = session.scalars( - select(Deployment) - .where(Deployment.thing_id == well.id) - .options(selectinload(Deployment.sensor)) - .order_by(Deployment.installation_date.desc(), Deployment.id.desc()) - ).all() - _log_payload_stage( - "well_details", - "load_deployments", - thing_id, - stage_started_at, - ) - - stage_started_at = time.perf_counter() - well_screens = session.scalars( - select(WellScreen) - .where(WellScreen.thing_id == well.id) - .order_by(WellScreen.screen_depth_top.asc(), WellScreen.id.asc()) - ).all() - _log_payload_stage( - "well_details", - "load_well_screens", - thing_id, - stage_started_at, - ) - - stage_started_at = time.perf_counter() - groundwater_parameter_id = ( - session.query(Parameter) - .filter(Parameter.parameter_name == "groundwater level") - .one() - .id - ) - _log_payload_stage( - "well_details", - "resolve_groundwater_parameter", - thing_id, 
- stage_started_at, - ) - - stage_started_at = time.perf_counter() - recent_groundwater_level_observations = session.scalars( - select(Observation) - .join(Sample) - .join(FieldActivity) - .join(FieldEvent) - .where( - FieldEvent.thing_id == well.id, - Observation.parameter_id == groundwater_parameter_id, - ) - .options(selectinload(Observation.parameter)) - .order_by(Observation.observation_datetime.desc(), Observation.id.desc()) - .limit(recent_observation_limit) - ).all() - _log_payload_stage( - "well_details", - "load_recent_groundwater_level_observations", - thing_id, - stage_started_at, - ) - - latest_field_event_sample = None - if recent_groundwater_level_observations: - latest_sample_id = recent_groundwater_level_observations[0].sample_id - stage_started_at = time.perf_counter() - latest_field_event_sample = session.scalar( - select(Sample) - .where(Sample.id == latest_sample_id) - .options( - joinedload(Sample.field_activity) - .joinedload(FieldActivity.field_event) - .joinedload(FieldEvent.thing), - joinedload(Sample.field_activity) - .joinedload(FieldActivity.field_event) - .selectinload(FieldEvent.field_event_participants) - .selectinload(FieldEventParticipant.participant), - joinedload(Sample.field_event_participant).joinedload( - FieldEventParticipant.participant - ), - ) - ) - _log_payload_stage( - "well_details", - "load_latest_field_event_sample", - thing_id, - stage_started_at, - ) - - _log_payload_stage( - "well_details", - "payload_total", - thing_id, - payload_started_at, - ) - - return { - "well": well, - "contacts": contacts, - "sensors": sensors, - "deployments": deployments, - "well_screens": well_screens, - "recent_groundwater_level_observations": recent_groundwater_level_observations, - "latest_field_event_sample": latest_field_event_sample, - "field_event_participants": ( - latest_field_event_sample.field_event.field_event_participants - if latest_field_event_sample is not None - else [] - ), - } + with 
_payload_stage_timer("well_details", "payload_total", thing_id): + with _payload_stage_timer("well_details", "load_well", thing_id): + well = get_thing_of_a_thing_type_by_id(session, request, thing_id) + + with _payload_stage_timer("well_details", "load_contacts", thing_id): + contacts = session.scalars( + select(Contact) + .join(ThingContactAssociation) + .where(ThingContactAssociation.thing_id == well.id) + .options( + selectinload(Contact.emails), + selectinload(Contact.phones), + selectinload(Contact.addresses), + selectinload(Contact.incomplete_nma_phones), + selectinload(Contact.thing_associations).selectinload( + ThingContactAssociation.thing + ), + ) + .order_by(Contact.id) + ).all() + + with _payload_stage_timer("well_details", "load_sensors", thing_id): + sensors = session.scalars( + select(Sensor) + .join(Deployment) + .where(Deployment.thing_id == well.id) + .distinct() + .order_by(Sensor.id) + ).all() + + with _payload_stage_timer("well_details", "load_deployments", thing_id): + deployments = session.scalars( + select(Deployment) + .where(Deployment.thing_id == well.id) + .options(selectinload(Deployment.sensor)) + .order_by(Deployment.installation_date.desc(), Deployment.id.desc()) + ).all() + + with _payload_stage_timer("well_details", "load_well_screens", thing_id): + well_screens = session.scalars( + select(WellScreen) + .where(WellScreen.thing_id == well.id) + .order_by(WellScreen.screen_depth_top.asc(), WellScreen.id.asc()) + ).all() + + with _payload_stage_timer("well_details", "load_field_events", thing_id): + field_events = session.scalars( + select(FieldEvent) + .where(FieldEvent.thing_id == well.id) + .options( + selectinload(FieldEvent.field_event_participants).selectinload( + FieldEventParticipant.participant + ), + selectinload(FieldEvent.field_activities) + .selectinload(FieldActivity.samples) + .selectinload(Sample.field_event_participant) + .selectinload(FieldEventParticipant.participant), + selectinload(FieldEvent.field_activities) + 
.selectinload(FieldActivity.samples) + .selectinload(Sample.observations) + .selectinload(Observation.parameter), + ) + .order_by(FieldEvent.event_date.desc(), FieldEvent.id.desc()) + ).all() + + return { + "well": well, + "contacts": contacts, + "sensors": sensors, + "deployments": deployments, + "well_screens": well_screens, + "field_events": field_events, + } def get_well_export_payload( diff --git a/tests/test_thing.py b/tests/test_thing.py index 9201dfbe..8fdf1996 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -628,22 +628,31 @@ def test_get_water_well_details_payload( assert data["deployments"][0]["id"] == sensor_to_water_well_thing_deployment.id assert data["deployments"][0]["sensor"]["id"] == sensor.id assert data["well_screens"][0]["id"] == well_screen.id - assert ( - data["recent_groundwater_level_observations"][0]["id"] - == groundwater_level_observation.id + assert "thing" not in data["well_screens"][0] + assert len(data["field_events"]) == 1 + assert data["field_events"][0]["id"] == field_event.id + assert data["field_events"][0]["field_activities"][0]["id"] == ( + groundwater_level_sample.field_activity_id ) - assert data["latest_field_event_sample"]["id"] == groundwater_level_sample.id - assert data["latest_field_event_sample"]["field_event"]["id"] == field_event.id - assert data["latest_field_event_sample"]["contact"]["id"] == contact.id + assert data["field_events"][0]["field_activities"][0]["samples"][0]["id"] == ( + groundwater_level_sample.id + ) + assert { + observation["id"] + for observation in data["field_events"][0]["field_activities"][0][ + "samples" + ][0]["observations"] + } == {groundwater_level_observation.id} assert { - participant["id"] for participant in data["field_event_participants"] + participant["id"] + for participant in data["field_events"][0]["field_event_participants"] } == { field_event_participant.id, second_participant_id, } assert { participant["participant"]["id"] - for participant in 
data["field_event_participants"] + for participant in data["field_events"][0]["field_event_participants"] } == {contact.id, second_contact_id} finally: with session_ctx() as session: @@ -705,11 +714,13 @@ def test_get_water_well_details_payload_uses_latest_observation_sample( assert response.status_code == 200 data = response.json() - assert data["latest_field_event_sample"]["id"] == later_sample_id - assert ( - data["recent_groundwater_level_observations"][0]["id"] - == later_observation_id + activity_samples = data["field_events"][0]["field_activities"][0]["samples"] + matching_sample = next( + sample for sample in activity_samples if sample["id"] == later_sample_id ) + assert { + observation["id"] for observation in matching_sample["observations"] + } == {later_observation_id} finally: with session_ctx() as session: later_observation = session.get(Observation, later_observation_id) From 50eadb507ed112c4f0506b4be3c934abebb440f0 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 17 Apr 2026 10:32:02 -0500 Subject: [PATCH 24/39] chore(test_well_inventory): Update tests to reflect the new logic --- schemas/water_level_csv.py | 6 ++++-- schemas/well_inventory.py | 8 ++++---- tests/test_well_inventory.py | 17 ++++++++++------- 3 files changed, 18 insertions(+), 13 deletions(-) diff --git a/schemas/water_level_csv.py b/schemas/water_level_csv.py index a41bf050..fbafd7d6 100644 --- a/schemas/water_level_csv.py +++ b/schemas/water_level_csv.py @@ -169,8 +169,10 @@ def normalize_sample_method(cls, value: str) -> str | None: mode="after", ) @classmethod - def normalize_datetime_field(cls, value: datetime | None) -> datetime | None: - if value is None: + def normalize_datetime_field( + cls, value: datetime | None + ) -> datetime | None: + if value is None or (isinstance(value, str) and value.strip() == ""): return None return normalize_datetime_to_utc(value) diff --git a/schemas/well_inventory.py b/schemas/well_inventory.py index bbdd67b1..2b2dba97 100644 --- 
a/schemas/well_inventory.py +++ b/schemas/well_inventory.py @@ -364,16 +364,16 @@ def normalize_complete_monitoring_frequency(cls, data): @field_validator("date_time", mode="after") @classmethod def normalize_date_time(cls, value: datetime | None) -> datetime | None: - if value is None: + if value is None or (isinstance(value, str) and value.strip() == ""): return None return normalize_datetime_to_utc(value) @field_validator("measurement_date_time", mode="after") @classmethod - def normalize_measurement_date_time(cls, v) -> datetime | None: - if v is None or (isinstance(v, str) and v.strip() == ""): + def normalize_measurement_date_time(cls, value: datetime | None) -> datetime | None: + if value is None or (isinstance(value, str) and value.strip() == ""): return None - return normalize_datetime_to_utc(v) + return normalize_datetime_to_utc(value) @model_validator(mode="after") def validate_model(self): diff --git a/tests/test_well_inventory.py b/tests/test_well_inventory.py index 918a9e1d..0c5d36b7 100644 --- a/tests/test_well_inventory.py +++ b/tests/test_well_inventory.py @@ -655,7 +655,10 @@ def test_conflicting_mp_heights_raises_error(tmp_path): def test_blank_depth_to_water_still_creates_water_level_records(tmp_path): - """Blank depth-to-water is treated as missing while preserving the attempted measurement.""" + """ + Blank depth-to-water is treated as missing while preserving the attempted measurement. + Naive CSV datetimes are interpreted as America/Denver local time and stored as UTC. 
+ """ row = _minimal_valid_well_inventory_row() row.update( { @@ -677,16 +680,16 @@ def test_blank_depth_to_water_still_creates_water_level_records(tmp_path): result = well_inventory_csv(file_path) assert result.exit_code == 0, result.stderr + expected_utc = datetime.fromisoformat("2025-02-15T17:30:00+00:00") + with session_ctx() as session: samples = session.query(Sample).all() observations = session.query(Observation).all() assert len(samples) == 1 assert len(observations) == 1 - assert samples[0].sample_date == datetime.fromisoformat("2025-02-15T10:30:00Z") - assert observations[0].observation_datetime == datetime.fromisoformat( - "2025-02-15T10:30:00Z" - ) + assert samples[0].sample_date == expected_utc + assert observations[0].observation_datetime == expected_utc assert observations[0].value is None assert observations[0].measuring_point_height == 3.5 @@ -1398,7 +1401,7 @@ def test_water_level_aliases_are_mapped(self): assert model.sampler == "Tech 1" assert model.measurement_date_time == datetime.fromisoformat( - "2025-02-15T10:30:00" + "2025-02-15T17:30:00+00:00" ) assert model.mp_height == 2.5 assert model.depth_to_water_ft == 11.2 @@ -1416,7 +1419,7 @@ def test_blank_depth_to_water_is_treated_as_none(self): model = WellInventoryRow(**row) assert model.measurement_date_time == datetime.fromisoformat( - "2025-02-15T10:30:00" + "2025-02-15T17:30:00+00:00" ) assert model.depth_to_water_ft is None From eea99a6af3ad1b7e45b48905fcf6a25e6b8eba16 Mon Sep 17 00:00:00 2001 From: TylerAdamMartinez <57375362+TylerAdamMartinez@users.noreply.github.com> Date: Fri, 17 Apr 2026 15:34:50 +0000 Subject: [PATCH 25/39] Formatting changes --- schemas/water_level_csv.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/schemas/water_level_csv.py b/schemas/water_level_csv.py index fbafd7d6..f69ad0e2 100644 --- a/schemas/water_level_csv.py +++ b/schemas/water_level_csv.py @@ -169,9 +169,7 @@ def normalize_sample_method(cls, value: str) -> str | None: 
mode="after", ) @classmethod - def normalize_datetime_field( - cls, value: datetime | None - ) -> datetime | None: + def normalize_datetime_field(cls, value: datetime | None) -> datetime | None: if value is None or (isinstance(value, str) and value.strip() == ""): return None return normalize_datetime_to_utc(value) From ac76eba0cf1430fd9ec147be34063d747ff933f7 Mon Sep 17 00:00:00 2001 From: jross Date: Fri, 17 Apr 2026 14:56:49 -0600 Subject: [PATCH 26/39] feat: add field event limit to well details API and enhance response validation --- api/thing.py | 2 ++ schemas/sample.py | 14 ------------ schemas/well_details.py | 9 ++++---- services/well_details_helper.py | 6 +++++ tests/test_thing.py | 39 ++++++++++++++++++++++++++++++++- 5 files changed, 51 insertions(+), 19 deletions(-) diff --git a/api/thing.py b/api/thing.py index 8ba57c76..6beb474e 100644 --- a/api/thing.py +++ b/api/thing.py @@ -197,6 +197,7 @@ def get_well_details( thing_id: int, session: session_dependency, request: Request, + field_event_limit: int = Query(default=25, ge=1, le=100), ) -> WellDetailsResponse: """ Retrieve the consolidated payload needed to render the well details page. 
@@ -206,6 +207,7 @@ def get_well_details( session=session, request=request, thing_id=thing_id, + field_event_limit=field_event_limit, ) diff --git a/schemas/sample.py b/schemas/sample.py index 6ec77e95..8dce646b 100644 --- a/schemas/sample.py +++ b/schemas/sample.py @@ -141,18 +141,4 @@ class SampleResponse(BaseResponseModel): depth_bottom: float | None -class WellDetailsSampleResponse(BaseResponseModel): - field_event: FieldEventResponse - field_activity: FieldActivityResponse - contact: ContactResponse | None - sample_date: UTCAwareDatetime - sample_name: str - sample_matrix: SampleMatrix - sample_method: SampleMethod - qc_type: SampleQcType - notes: str | None - depth_top: float | None - depth_bottom: float | None - - # ============= EOF ============================================= diff --git a/schemas/well_details.py b/schemas/well_details.py index e939411e..2d058cd2 100644 --- a/schemas/well_details.py +++ b/schemas/well_details.py @@ -1,5 +1,6 @@ from pydantic import BaseModel, ConfigDict, Field +from core.enums import ActivityType, SampleMatrix, SampleMethod, SampleQcType from schemas import BaseResponseModel, UTCAwareDatetime from schemas.contact import ContactResponse from schemas.deployment import DeploymentResponse @@ -13,9 +14,9 @@ class WellDetailsFieldEventSampleResponse(BaseResponseModel): contact: ContactResponse | None = None sample_date: UTCAwareDatetime sample_name: str - sample_matrix: str - sample_method: str - qc_type: str + sample_matrix: SampleMatrix + sample_method: SampleMethod + qc_type: SampleQcType notes: str | None = None depth_top: float | None = None depth_bottom: float | None = None @@ -24,7 +25,7 @@ class WellDetailsFieldEventSampleResponse(BaseResponseModel): class WellDetailsFieldActivityResponse(BaseResponseModel): field_event_id: int - activity_type: str + activity_type: ActivityType notes: str | None = None samples: list[WellDetailsFieldEventSampleResponse] = Field(default_factory=list) diff --git 
a/services/well_details_helper.py b/services/well_details_helper.py index 12d7acc4..1ce6f8fd 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py @@ -60,6 +60,7 @@ def get_well_details_payload( session: Session, request, thing_id: int, + field_event_limit: int = 25, ): with _payload_stage_timer("well_details", "payload_total", thing_id): with _payload_stage_timer("well_details", "load_well", thing_id): @@ -103,6 +104,10 @@ def get_well_details_payload( well_screens = session.scalars( select(WellScreen) .where(WellScreen.thing_id == well.id) + .options( + selectinload(WellScreen.aquifer_system), + selectinload(WellScreen.geologic_formation), + ) .order_by(WellScreen.screen_depth_top.asc(), WellScreen.id.asc()) ).all() @@ -124,6 +129,7 @@ def get_well_details_payload( .selectinload(Observation.parameter), ) .order_by(FieldEvent.event_date.desc(), FieldEvent.id.desc()) + .limit(field_event_limit) ).all() return { diff --git a/tests/test_thing.py b/tests/test_thing.py index 8fdf1996..fce045f8 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -716,8 +716,10 @@ def test_get_water_well_details_payload_uses_latest_observation_sample( data = response.json() activity_samples = data["field_events"][0]["field_activities"][0]["samples"] matching_sample = next( - sample for sample in activity_samples if sample["id"] == later_sample_id + (sample for sample in activity_samples if sample["id"] == later_sample_id), + None, ) + assert matching_sample is not None, "Expected later sample in field event" assert { observation["id"] for observation in matching_sample["observations"] } == {later_observation_id} @@ -732,6 +734,41 @@ def test_get_water_well_details_payload_uses_latest_observation_sample( session.commit() +def test_get_water_well_details_payload_limits_field_events( + water_well_thing, + field_event, +): + from db import FieldEvent + + with session_ctx() as session: + later_field_event = FieldEvent( + thing_id=water_well_thing.id, + 
event_date="2025-01-02T00:00:00Z", + notes="later field event", + release_status="draft", + ) + session.add(later_field_event) + session.commit() + session.refresh(later_field_event) + later_field_event_id = later_field_event.id + + try: + response = client.get( + f"/thing/water-well/{water_well_thing.id}/details?field_event_limit=1" + ) + + assert response.status_code == 200 + data = response.json() + assert len(data["field_events"]) == 1 + assert data["field_events"][0]["id"] == later_field_event_id + finally: + with session_ctx() as session: + later_field_event = session.get(FieldEvent, later_field_event_id) + if later_field_event is not None: + session.delete(later_field_event) + session.commit() + + def test_get_water_well_details_payload_404_not_found(): response = client.get("/thing/water-well/999999/details") From 39af0e6643e9a5dd0ed00e619b33dd4247420c1a Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 17 Apr 2026 17:16:37 -0500 Subject: [PATCH 27/39] doc(well-inventory-csv): Add timezone-aware conversion to UTC notes --- tests/features/well-inventory-csv.feature | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tests/features/well-inventory-csv.feature b/tests/features/well-inventory-csv.feature index 0ee85bba..a7d66d6d 100644 --- a/tests/features/well-inventory-csv.feature +++ b/tests/features/well-inventory-csv.feature @@ -140,8 +140,10 @@ Feature: Bulk upload well inventory from CSV via CLI | depth_to_water_ft | | data_quality | | water_level_notes | - And the required "date_time" values are valid ISO 8601 timezone-naive datetime strings (e.g. "2025-02-15T10:30:00") - And the optional "water_level_date_time" values are valid ISO 8601 timezone-naive datetime strings (e.g. "2025-02-15T10:30:00") when provided + And the required "date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) + (e.g. 
"2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00") + And the optional "water_level_date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) + (e.g. "2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00") when provided # And all optional lexicon fields contain valid lexicon values when provided # And all optional numeric fields contain valid numeric values when provided @@ -149,7 +151,9 @@ Feature: Bulk upload well inventory from CSV via CLI When I run the well inventory bulk upload command # assumes users are entering datetimes as Mountain Time because location is restricted to New Mexico - Then all datetime objects are assigned the correct Mountain Time timezone offset based on the date value. + Then all datetime objects are normalized to UTC + And timezone-naive datetimes are interpreted as Mountain Time before conversion + And timezone-aware datetimes are converted to UTC using their provided offset And the command exits with code 0 # And null values in the response are represented as JSON null And the response includes a summary containing: From 2f938b50379ea08253c66b98e653d758d5ee291a Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 17 Apr 2026 17:22:11 -0500 Subject: [PATCH 28/39] test(well_inventory): Add test_timezone_aware_datetimes_are_normalized_to_utc test --- tests/test_well_inventory.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/tests/test_well_inventory.py b/tests/test_well_inventory.py index 0c5d36b7..23686fd7 100644 --- a/tests/test_well_inventory.py +++ b/tests/test_well_inventory.py @@ -1407,6 +1407,23 @@ def test_water_level_aliases_are_mapped(self): assert model.depth_to_water_ft == 11.2 assert model.water_level_notes == "Initial reading" + def test_timezone_aware_datetimes_are_normalized_to_utc(self): + row = _minimal_valid_well_inventory_row() + row.update( + { + "date_time": "2025-02-15T10:30:00-07:00", + "water_level_date_time": "2025-02-15T11:45:00-07:00", + 
"depth_to_water_ft": 11.2, + } + ) + + model = WellInventoryRow(**row) + + assert model.date_time == datetime.fromisoformat("2025-02-15T17:30:00+00:00") + assert model.measurement_date_time == datetime.fromisoformat( + "2025-02-15T18:45:00+00:00" + ) + def test_blank_depth_to_water_is_treated_as_none(self): row = _minimal_valid_well_inventory_row() row.update( From bcead1876fafb9cb95a50605a9e66f899c6fdb05 Mon Sep 17 00:00:00 2001 From: Tyler Adam Martinez Date: Fri, 17 Apr 2026 17:57:01 -0500 Subject: [PATCH 29/39] chore(steps/well-inventory-csv): Update the steps to match the feature file updates --- tests/features/steps/well-inventory-csv.py | 119 ++++++++++----------- tests/features/well-inventory-csv.feature | 6 +- 2 files changed, 59 insertions(+), 66 deletions(-) diff --git a/tests/features/steps/well-inventory-csv.py b/tests/features/steps/well-inventory-csv.py index bba4b679..63247f00 100644 --- a/tests/features/steps/well-inventory-csv.py +++ b/tests/features/steps/well-inventory-csv.py @@ -1,6 +1,6 @@ import json import tempfile -from datetime import datetime, timedelta +from datetime import datetime, timezone from pathlib import Path from behave import given, when, then @@ -9,8 +9,11 @@ from db import Thing from db.engine import session_ctx from db.lexicon import LexiconCategory -from services.util import convert_dt_tz_naive_to_tz_aware from sqlalchemy import select +from zoneinfo import ZoneInfo + + +MOUNTAIN_TZ = ZoneInfo("America/Denver") @given("valid lexicon values exist for:") @@ -64,32 +67,28 @@ def step_given_the_csv_includes_optional_water_level_entry_fields_when_available @given( - 'the required "date_time" values are valid ISO 8601 timezone-naive datetime strings (e.g. 
"2025-02-15T10:30:00")' + 'the required "date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware)' ) -def step_step_step(context: Context): +def step_validate_required_datetime(context: Context): """Verifies that "date_time" values are valid ISO 8601 timezone-naive datetime strings.""" for row in context.rows: try: - date_time = datetime.fromisoformat(row["date_time"]) - assert ( - date_time.tzinfo is None - ), f"date_time should be timezone-naive: {row['date_time']}" + value = row["date_time"].replace("Z", "+00:00") + datetime.fromisoformat(value) except ValueError as e: raise ValueError(f"Invalid date_time: {row['date_time']}") from e @given( - 'the optional "water_level_date_time" values are valid ISO 8601 timezone-naive datetime strings (e.g. "2025-02-15T10:30:00") when provided' + 'the optional "water_level_date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) when provided' ) -def step_step_step_2(context: Context): +def step_validate_optional_datetime(context: Context): """Verifies that "water_level_date_time" values are valid ISO 8601 timezone-naive datetime strings.""" for row in context.rows: if row.get("water_level_date_time", None): try: - date_time = datetime.fromisoformat(row["water_level_date_time"]) - assert ( - date_time.tzinfo is None - ), f"water_level_date_time should be timezone-naive: {row['water_level_date_time']}" + value = row["water_level_date_time"].replace("Z", "+00:00") + datetime.fromisoformat(value) except ValueError as e: raise ValueError( f"Invalid water_level_date_time: {row['water_level_date_time']}" @@ -99,6 +98,28 @@ def step_step_step_2(context: Context): @when("I upload the file to the bulk upload endpoint") @when("I run the well inventory bulk upload command") def step_when_i_run_the_well_inventory_bulk_upload_command(context: Context): + context.datetime_pairs = [] + context.normalized_datetimes = [] + + for row in getattr(context, "rows", []): + raw = 
row.get("date_time") + if not raw: + continue + try: + original = datetime.fromisoformat(raw.replace("Z", "+00:00")) + except ValueError: + continue + + if original.tzinfo is None: + aware = original.replace(tzinfo=MOUNTAIN_TZ) + else: + aware = original + + normalized = aware.astimezone(timezone.utc) + + context.datetime_pairs.append((original, normalized)) + context.normalized_datetimes.append(normalized) + suffix = Path(getattr(context, "file_name", "upload.csv")).suffix or ".csv" with tempfile.NamedTemporaryFile(mode="w", suffix=suffix, delete=False) as fp: fp.write(context.file_content) @@ -141,58 +162,30 @@ def json(self): return self._json -@then( - "all datetime objects are assigned the correct Mountain Time timezone offset based on the date value." -) -def step_step_step_3(context: Context): - """Converts all datetime strings in the CSV rows to timezone-aware datetime objects with Mountain Time offset.""" - for i, row in enumerate(context.rows): - # Convert date_time field - date_time_naive = datetime.fromisoformat(row["date_time"]) - date_time_aware = convert_dt_tz_naive_to_tz_aware( - date_time_naive, "America/Denver" - ) - row["date_time"] = date_time_aware.isoformat() - - # confirm correct time zone and offset - if i == 0: - # MST, offset -07:00 - assert date_time_aware.utcoffset() == timedelta( - hours=-7 - ), "date_time offset is not -07:00" - else: - # MDT, offset -06:00 - assert date_time_aware.utcoffset() == timedelta( - hours=-6 - ), "date_time offset is not -06:00" +@then(u'all datetime objects are normalized to UTC') +def step_all_normalized_to_utc(context): + for dt in context.normalized_datetimes: + assert dt.tzinfo == timezone.utc, f"Not UTC: {dt}" - # confirm the time was not changed from what was provided - assert ( - date_time_aware.replace(tzinfo=None) == date_time_naive - ), "date_time value was changed during timezone assignment" - # Convert water_level_date_time field if it exists - if row.get("water_level_date_time", None): - 
wl_date_time_naive = datetime.fromisoformat(row["water_level_date_time"]) - wl_date_time_aware = convert_dt_tz_naive_to_tz_aware( - wl_date_time_naive, "America/Denver" +@then(u'timezone-naive datetimes are interpreted as Mountain Time before conversion') +def step_naive_as_mountain(context): + for original, normalized in context.datetime_pairs: + if original.tzinfo is None: + expected = original.replace(tzinfo=MOUNTAIN_TZ).astimezone(timezone.utc) + assert normalized == expected, ( + f"Naive datetime not handled as Mountain Time: {original}" + ) + + +@then(u'timezone-aware datetimes are converted to UTC using their provided offset') +def step_aware_to_utc(context): + for original, normalized in context.datetime_pairs: + if original.tzinfo is not None: + expected = original.astimezone(timezone.utc) + assert normalized == expected, ( + f"Aware datetime not converted correctly: {original}" ) - row["water_level_date_time"] = wl_date_time_aware.isoformat() - - if wl_date_time_aware.dst(): - # MDT, offset -06:00 - assert wl_date_time_aware.utcoffset() == timedelta( - hours=-6 - ), "water_level_date_time offset is not -06:00" - else: - # MST, offset -07:00 - assert wl_date_time_aware.utcoffset() == timedelta( - hours=-7 - ), "water_level_date_time offset is not -07:00" - - assert ( - wl_date_time_aware.replace(tzinfo=None) == wl_date_time_naive - ), "water_level_date_time value was changed during timezone assignment" @then("the response includes a summary containing:") diff --git a/tests/features/well-inventory-csv.feature b/tests/features/well-inventory-csv.feature index a7d66d6d..e5442cc5 100644 --- a/tests/features/well-inventory-csv.feature +++ b/tests/features/well-inventory-csv.feature @@ -141,9 +141,9 @@ Feature: Bulk upload well inventory from CSV via CLI | data_quality | | water_level_notes | And the required "date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) - (e.g. 
"2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00") - And the optional "water_level_date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) - (e.g. "2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00") when provided + # e.g. "2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00" + And the optional "water_level_date_time" values are valid ISO 8601 datetime strings (timezone-naive or timezone-aware) when provided + # e.g. "2025-02-15T10:30:00" or "2025-02-15T10:30:00-07:00 # And all optional lexicon fields contain valid lexicon values when provided # And all optional numeric fields contain valid numeric values when provided From 5fc64e3e524145220668cf1e99494edb99b30599 Mon Sep 17 00:00:00 2001 From: TylerAdamMartinez <57375362+TylerAdamMartinez@users.noreply.github.com> Date: Fri, 17 Apr 2026 22:57:54 +0000 Subject: [PATCH 30/39] Formatting changes --- tests/features/steps/well-inventory-csv.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/tests/features/steps/well-inventory-csv.py b/tests/features/steps/well-inventory-csv.py index 63247f00..bfbd62b1 100644 --- a/tests/features/steps/well-inventory-csv.py +++ b/tests/features/steps/well-inventory-csv.py @@ -12,7 +12,6 @@ from sqlalchemy import select from zoneinfo import ZoneInfo - MOUNTAIN_TZ = ZoneInfo("America/Denver") @@ -162,30 +161,30 @@ def json(self): return self._json -@then(u'all datetime objects are normalized to UTC') +@then("all datetime objects are normalized to UTC") def step_all_normalized_to_utc(context): for dt in context.normalized_datetimes: assert dt.tzinfo == timezone.utc, f"Not UTC: {dt}" -@then(u'timezone-naive datetimes are interpreted as Mountain Time before conversion') +@then("timezone-naive datetimes are interpreted as Mountain Time before conversion") def step_naive_as_mountain(context): for original, normalized in context.datetime_pairs: if original.tzinfo is None: expected = 
original.replace(tzinfo=MOUNTAIN_TZ).astimezone(timezone.utc) - assert normalized == expected, ( - f"Naive datetime not handled as Mountain Time: {original}" - ) + assert ( + normalized == expected + ), f"Naive datetime not handled as Mountain Time: {original}" -@then(u'timezone-aware datetimes are converted to UTC using their provided offset') +@then("timezone-aware datetimes are converted to UTC using their provided offset") def step_aware_to_utc(context): for original, normalized in context.datetime_pairs: if original.tzinfo is not None: expected = original.astimezone(timezone.utc) - assert normalized == expected, ( - f"Aware datetime not converted correctly: {original}" - ) + assert ( + normalized == expected + ), f"Aware datetime not converted correctly: {original}" @then("the response includes a summary containing:") From c9124ca2852fafe64a3693dc45e8909aae8289ed Mon Sep 17 00:00:00 2001 From: jirhiker <2035568+jirhiker@users.noreply.github.com> Date: Sun, 19 Apr 2026 15:00:29 +0000 Subject: [PATCH 31/39] Formatting changes --- tests/test_thing.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_thing.py b/tests/test_thing.py index 884df6d5..04009c63 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -768,8 +768,8 @@ def test_get_water_well_details_payload_limits_field_events( if later_field_event is not None: session.delete(later_field_event) session.commit() - - + + def test_get_water_well_details_payload_includes_imported_water_level_staff( water_well_thing, ): From 7a90734893c97391c53acc443a1205ce38f55f35 Mon Sep 17 00:00:00 2001 From: jakeross Date: Sun, 19 Apr 2026 09:46:19 -0600 Subject: [PATCH 32/39] fix: update references from NMSampleLocations to OcotilloAPI in admin views and documentation --- .gitignore | 3 +++ admin/__init__.py | 2 +- admin/auth.py | 18 +++++++----------- admin/config.py | 5 ++--- admin/views/__init__.py | 8 ++++---- admin/views/aquifer_system.py | 2 +- admin/views/aquifer_type.py | 2 +- 
admin/views/asset.py | 2 +- admin/views/contact.py | 2 +- admin/views/data_provenance.py | 2 +- admin/views/deployment.py | 2 +- admin/views/field.py | 2 +- admin/views/geologic_formation.py | 2 +- admin/views/group.py | 2 +- admin/views/lexicon.py | 2 +- admin/views/location.py | 2 +- admin/views/notes.py | 2 +- admin/views/observation.py | 2 +- admin/views/parameter.py | 2 +- admin/views/sample.py | 2 +- admin/views/sensor.py | 2 +- admin/views/surface_water.py | 2 +- admin/views/thing.py | 2 +- features/admin/README.md | 4 ++-- tests/test_thing.py | 12 ++++++------ tests/test_transfer_legacy_dates.py | 9 ++++----- 26 files changed, 47 insertions(+), 50 deletions(-) diff --git a/.gitignore b/.gitignore index a6a2981b..92ab7e91 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,6 @@ cli/logs # deployment files app.yaml docs/ + +#Codex +.codex \ No newline at end of file diff --git a/admin/__init__.py b/admin/__init__.py index ece0358e..2816d389 100644 --- a/admin/__init__.py +++ b/admin/__init__.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -Starlette Admin package for NMSampleLocations. +Starlette Admin package for OcotilloAPI. Provides web-based administrative interface for managing database records. """ diff --git a/admin/auth.py b/admin/auth.py index 334588c3..903068ab 100644 --- a/admin/auth.py +++ b/admin/auth.py @@ -17,26 +17,22 @@ Admin authentication provider integrating with existing Authentik OIDC auth. This module provides a Starlette Admin AuthProvider that integrates with the -existing Authentik-based authentication system used by the NMSampleLocations API. +existing Authentik-based authentication system used by the OcotilloAPI API. 
""" +import base64 +import hashlib import os import secrets -from typing import Optional -from urllib.parse import urlencode - -import hashlib -import base64 - +from core.permissions import _get_token_payload, verify_token from dataclasses import dataclass -from typing import List - from starlette.requests import Request from starlette.responses import RedirectResponse from starlette_admin.auth import AdminUser, AuthProvider from starlette_admin.exceptions import LoginFailed - -from core.permissions import _get_token_payload, verify_token +from typing import List +from typing import Optional +from urllib.parse import urlencode @dataclass diff --git a/admin/config.py b/admin/config.py index 1c3bb14f..e559fef9 100644 --- a/admin/config.py +++ b/admin/config.py @@ -16,11 +16,9 @@ """ Starlette Admin configuration and initialization. -This module creates and configures the admin interface for NMSampleLocations. +This module creates and configures the admin interface for OcotilloAPI. """ -from starlette_admin.contrib.sqla import Admin - from admin.auth import NMSampleLocationsAuthProvider from admin.views import ( AquiferSystemAdmin, @@ -93,6 +91,7 @@ from db.sensor import Sensor from db.thing import Thing from db.transducer import TransducerObservation +from starlette_admin.contrib.sqla import Admin def create_admin(app): diff --git a/admin/views/__init__.py b/admin/views/__init__.py index 285d5ef5..c8d0f5ad 100644 --- a/admin/views/__init__.py +++ b/admin/views/__init__.py @@ -14,15 +14,15 @@ # limitations under the License. # =============================================================================== """ -Admin views package for NMSampleLocations. +Admin views package for OcotilloAPI. Provides MS Access-like interface for CRUD operations on database models. 
""" -from admin.views.asset import AssetAdmin -from admin.views.associated_data import AssociatedDataAdmin from admin.views.aquifer_system import AquiferSystemAdmin from admin.views.aquifer_type import AquiferTypeAdmin +from admin.views.asset import AssetAdmin +from admin.views.associated_data import AssociatedDataAdmin from admin.views.chemistry_sampleinfo import ChemistrySampleInfoAdmin from admin.views.contact import ContactAdmin from admin.views.data_provenance import DataProvenanceAdmin @@ -55,8 +55,8 @@ from admin.views.waterlevelscontinuous_pressure_daily import ( WaterLevelsContinuousPressureDailyAdmin, ) -from admin.views.weather_photos import WeatherPhotosAdmin from admin.views.weather_data import WeatherDataAdmin +from admin.views.weather_photos import WeatherPhotosAdmin __all__ = [ "AssetAdmin", diff --git a/admin/views/aquifer_system.py b/admin/views/aquifer_system.py index 85f79ddc..9b384e09 100644 --- a/admin/views/aquifer_system.py +++ b/admin/views/aquifer_system.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -AquiferSystemAdmin view for NMSampleLocations. +AquiferSystemAdmin view for OcotilloAPI. """ from admin.fields import WKTField diff --git a/admin/views/aquifer_type.py b/admin/views/aquifer_type.py index 41281f8b..ad319b6d 100644 --- a/admin/views/aquifer_type.py +++ b/admin/views/aquifer_type.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -AquiferTypeAdmin view for NMSampleLocations. +AquiferTypeAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/asset.py b/admin/views/asset.py index 7a1a5e96..acec3bb8 100644 --- a/admin/views/asset.py +++ b/admin/views/asset.py @@ -14,7 +14,7 @@ # limitations under the License. 
# =============================================================================== """ -AssetAdmin view for NMSampleLocations. +AssetAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Asset model. """ diff --git a/admin/views/contact.py b/admin/views/contact.py index 7614687c..36bea8ee 100644 --- a/admin/views/contact.py +++ b/admin/views/contact.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -ContactAdmin view for NMSampleLocations. +ContactAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Contact (Owners) model. """ diff --git a/admin/views/data_provenance.py b/admin/views/data_provenance.py index 4f313953..c1a91551 100644 --- a/admin/views/data_provenance.py +++ b/admin/views/data_provenance.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -DataProvenanceAdmin view for NMSampleLocations. +DataProvenanceAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/deployment.py b/admin/views/deployment.py index 511b6935..ccdf535d 100644 --- a/admin/views/deployment.py +++ b/admin/views/deployment.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -DeploymentAdmin view for NMSampleLocations. +DeploymentAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Deployment model. """ diff --git a/admin/views/field.py b/admin/views/field.py index 7d10598d..43a7b2cb 100644 --- a/admin/views/field.py +++ b/admin/views/field.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -Field admin views for NMSampleLocations. +Field admin views for OcotilloAPI. 
""" from admin.views.base import OcotilloModelView diff --git a/admin/views/geologic_formation.py b/admin/views/geologic_formation.py index 8e880304..bb621202 100644 --- a/admin/views/geologic_formation.py +++ b/admin/views/geologic_formation.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -GeologicFormationAdmin view for NMSampleLocations. +GeologicFormationAdmin view for OcotilloAPI. """ from admin.fields import WKTField diff --git a/admin/views/group.py b/admin/views/group.py index ddf9b0a8..f06a9ab7 100644 --- a/admin/views/group.py +++ b/admin/views/group.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -GroupAdmin view for NMSampleLocations. +GroupAdmin view for OcotilloAPI. """ from admin.fields import WKTField diff --git a/admin/views/lexicon.py b/admin/views/lexicon.py index 900a22c1..57cafa6a 100644 --- a/admin/views/lexicon.py +++ b/admin/views/lexicon.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -Lexicon admin views for NMSampleLocations. +Lexicon admin views for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/location.py b/admin/views/location.py index 8921eec5..2ec2f261 100644 --- a/admin/views/location.py +++ b/admin/views/location.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -LocationAdmin view for NMSampleLocations. +LocationAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Location model. """ diff --git a/admin/views/notes.py b/admin/views/notes.py index 2ce0f919..6be42f91 100644 --- a/admin/views/notes.py +++ b/admin/views/notes.py @@ -14,7 +14,7 @@ # limitations under the License. 
# =============================================================================== """ -NotesAdmin view for NMSampleLocations. +NotesAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/observation.py b/admin/views/observation.py index 3c5e8c4d..d2e206e3 100644 --- a/admin/views/observation.py +++ b/admin/views/observation.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -ObservationAdmin view for NMSampleLocations. +ObservationAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Observation (Water Levels) model. """ diff --git a/admin/views/parameter.py b/admin/views/parameter.py index 3c9eed50..50eb674a 100644 --- a/admin/views/parameter.py +++ b/admin/views/parameter.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -ParameterAdmin view for NMSampleLocations. +ParameterAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/sample.py b/admin/views/sample.py index 3617fc88..b5247a91 100644 --- a/admin/views/sample.py +++ b/admin/views/sample.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -SampleAdmin view for NMSampleLocations. +SampleAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/sensor.py b/admin/views/sensor.py index 9f81a338..28d41e44 100644 --- a/admin/views/sensor.py +++ b/admin/views/sensor.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -SensorAdmin view for NMSampleLocations. +SensorAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Sensor (Equipment) model. 
""" diff --git a/admin/views/surface_water.py b/admin/views/surface_water.py index ede5522c..be6da860 100644 --- a/admin/views/surface_water.py +++ b/admin/views/surface_water.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -SurfaceWaterDataAdmin view for NMSampleLocations. +SurfaceWaterDataAdmin view for OcotilloAPI. """ from admin.views.base import OcotilloModelView diff --git a/admin/views/thing.py b/admin/views/thing.py index d74e0b9d..da6d7acb 100644 --- a/admin/views/thing.py +++ b/admin/views/thing.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -ThingAdmin view for NMSampleLocations. +ThingAdmin view for OcotilloAPI. Provides MS Access-like interface for CRUD operations on Thing (Wells/Springs) model. """ diff --git a/features/admin/README.md b/features/admin/README.md index 39d02cce..536a714f 100644 --- a/features/admin/README.md +++ b/features/admin/README.md @@ -66,8 +66,8 @@ Documents Location admin CRUD operations and business rules: ### Run Tests ```bash -# From NMSampleLocations directory -cd /path/to/NMSampleLocations +# From OcotilloAPI directory +cd /path/to/OcotilloAPI # Run all admin feature tests behave features/admin/ diff --git a/tests/test_thing.py b/tests/test_thing.py index 04009c63..e36792ee 100644 --- a/tests/test_thing.py +++ b/tests/test_thing.py @@ -13,11 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# =============================================================================== -from datetime import date, timezone - import pytest -from sqlalchemy import delete - from core.dependencies import ( admin_function, editor_function, @@ -26,6 +22,7 @@ viewer_function, amp_viewer_function, ) +from datetime import date, timezone from db import MeasuringPointHistory, StatusHistory, Thing, ThingIdLink, WellScreen from db.engine import session_ctx from main import app @@ -33,6 +30,7 @@ from schemas.location import LocationResponse from schemas.thing import UpdateWell, ValidateWell from services.water_level_csv import bulk_upload_water_levels +from sqlalchemy import delete from tests import ( client, override_authentication, @@ -820,10 +818,12 @@ def test_get_water_well_details_payload_includes_imported_water_level_staff( assert response.status_code == 200 data = response.json() - assert data["latest_field_event_sample"]["contact"]["name"] == "A Lopez" + activity_samples = data["field_events"][0]["field_activities"][0]["samples"] + assert len(activity_samples) == 1 + assert activity_samples[0]["contact"]["name"] == "A Lopez" assert { participant["participant"]["name"] - for participant in data["field_event_participants"] + for participant in data["field_events"][0]["field_event_participants"] } == {"A Lopez", "B Chen"} diff --git a/tests/test_transfer_legacy_dates.py b/tests/test_transfer_legacy_dates.py index 32732b97..75b6a3c3 100644 --- a/tests/test_transfer_legacy_dates.py +++ b/tests/test_transfer_legacy_dates.py @@ -14,7 +14,7 @@ # limitations under the License. # =============================================================================== """ -Unit tests for AMPAPI date field population during AMPAPI → NMSampleLocations migration. +Unit tests for AMPAPI date field population during AMPAPI → OcotilloAPI migration. These tests verify that: 1. 
Location.nma_date_created is populated from CSV DateCreated (read-only post-migration) @@ -22,16 +22,15 @@ """ import datetime -from unittest.mock import patch - import numpy as np import pandas as pd import pytest - from db import Sample -from transfers.well_transfer import _normalize_completion_date from transfers.util import make_location from transfers.waterlevels_transfer import WaterLevelTransferer +from transfers.well_transfer import _normalize_completion_date +from unittest.mock import patch + # ============================================================================ # FIXTURES From f897becab2dca33d78ca6c8e08d38a64aca62733 Mon Sep 17 00:00:00 2001 From: jirhiker <2035568+jirhiker@users.noreply.github.com> Date: Sun, 19 Apr 2026 15:46:46 +0000 Subject: [PATCH 33/39] Formatting changes --- tests/test_transfer_legacy_dates.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/test_transfer_legacy_dates.py b/tests/test_transfer_legacy_dates.py index 75b6a3c3..f1f246f9 100644 --- a/tests/test_transfer_legacy_dates.py +++ b/tests/test_transfer_legacy_dates.py @@ -31,7 +31,6 @@ from transfers.well_transfer import _normalize_completion_date from unittest.mock import patch - # ============================================================================ # FIXTURES # ============================================================================ From 408cdff08c50ae1e5fabf3e2af1ea56c148b69a5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 20 Apr 2026 17:39:03 +0000 Subject: [PATCH 34/39] build(deps): bump astral-sh/setup-uv in the gha-minor-and-patch group (#661) Bumps the gha-minor-and-patch group with 1 update: [astral-sh/setup-uv](https://github.com/astral-sh/setup-uv). 
Updates `astral-sh/setup-uv` from 8.0.0 to 8.1.0 - [Release notes](https://github.com/astral-sh/setup-uv/releases) - [Commits](https://github.com/astral-sh/setup-uv/compare/v8.0.0...v8.1.0) --- updated-dependencies: - dependency-name: astral-sh/setup-uv dependency-version: 8.1.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: gha-minor-and-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/CD_production.yml | 2 +- .github/workflows/CD_staging.yml | 2 +- .github/workflows/CD_testing.yml | 2 +- .github/workflows/jira_codex_pr.yml | 2 +- .github/workflows/tests.yml | 4 ++-- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/CD_production.yml b/.github/workflows/CD_production.yml index e7b89642..f84629ed 100644 --- a/.github/workflows/CD_production.yml +++ b/.github/workflows/CD_production.yml @@ -20,7 +20,7 @@ jobs: fetch-depth: 0 - name: Install uv in container - uses: astral-sh/setup-uv@v8.0.0 + uses: astral-sh/setup-uv@v8.1.0 with: version: "latest" diff --git a/.github/workflows/CD_staging.yml b/.github/workflows/CD_staging.yml index c723eb6a..001d40a8 100644 --- a/.github/workflows/CD_staging.yml +++ b/.github/workflows/CD_staging.yml @@ -20,7 +20,7 @@ jobs: fetch-depth: 0 - name: Install uv in container - uses: astral-sh/setup-uv@v8.0.0 + uses: astral-sh/setup-uv@v8.1.0 with: version: "latest" diff --git a/.github/workflows/CD_testing.yml b/.github/workflows/CD_testing.yml index b924519c..d3df5105 100644 --- a/.github/workflows/CD_testing.yml +++ b/.github/workflows/CD_testing.yml @@ -20,7 +20,7 @@ jobs: fetch-depth: 0 - name: Install uv in container - uses: astral-sh/setup-uv@v8.0.0 + uses: astral-sh/setup-uv@v8.1.0 with: version: "latest" diff --git a/.github/workflows/jira_codex_pr.yml b/.github/workflows/jira_codex_pr.yml index 191c6b37..bd31d639 100644 --- 
a/.github/workflows/jira_codex_pr.yml +++ b/.github/workflows/jira_codex_pr.yml @@ -59,7 +59,7 @@ jobs: python-version: ${{ env.PYTHON_VERSION }} - name: Set up uv (with cache) - uses: astral-sh/setup-uv@cec208311dfd045dd5311c1add060b2062131d57 # v4 + uses: astral-sh/setup-uv@08807647e7069bb48b6ef5acd8ec9567f424441b # v4 with: enable-cache: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 85c26684..0bafd011 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -63,7 +63,7 @@ jobs: exit 1 - name: Install uv - uses: astral-sh/setup-uv@v8.0.0 + uses: astral-sh/setup-uv@v8.1.0 with: enable-cache: true cache-dependency-glob: uv.lock @@ -155,7 +155,7 @@ jobs: exit 1 - name: Install uv - uses: astral-sh/setup-uv@v8.0.0 + uses: astral-sh/setup-uv@v8.1.0 with: enable-cache: true cache-dependency-glob: uv.lock From 8701678bb635916db3ff9d5cc547d3abcf6457bd Mon Sep 17 00:00:00 2001 From: Jeremy Zilar Date: Tue, 21 Apr 2026 11:53:51 -0400 Subject: [PATCH 35/39] Add first_field_event to well details response The field_events query returns the 25 most recent events sorted newest-first. For wells with many visits the oldest event falls outside that window, making it impossible to retrieve first-visit participants from the paged list. Adds a separate targeted query that always fetches the single oldest field event (with participants) and returns it as first_field_event alongside the existing paged field_events list. 
--- schemas/well_details.py | 1 + services/well_details_helper.py | 40 ++++++++++++++++++++++----------- 2 files changed, 28 insertions(+), 13 deletions(-) diff --git a/schemas/well_details.py b/schemas/well_details.py index 2d058cd2..228e0847 100644 --- a/schemas/well_details.py +++ b/schemas/well_details.py @@ -51,3 +51,4 @@ class WellDetailsResponse(BaseModel): deployments: list[DeploymentResponse] = Field(default_factory=list) well_screens: list[WellScreenBaseResponse] = Field(default_factory=list) field_events: list[WellDetailsFieldEventResponse] = Field(default_factory=list) + first_field_event: WellDetailsFieldEventResponse | None = None diff --git a/services/well_details_helper.py b/services/well_details_helper.py index 1ce6f8fd..52f61580 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py @@ -111,27 +111,40 @@ def get_well_details_payload( .order_by(WellScreen.screen_depth_top.asc(), WellScreen.id.asc()) ).all() + _participant_options = ( + selectinload(FieldEvent.field_event_participants).selectinload( + FieldEventParticipant.participant + ) + ) + _activity_options = [ + selectinload(FieldEvent.field_activities) + .selectinload(FieldActivity.samples) + .selectinload(Sample.field_event_participant) + .selectinload(FieldEventParticipant.participant), + selectinload(FieldEvent.field_activities) + .selectinload(FieldActivity.samples) + .selectinload(Sample.observations) + .selectinload(Observation.parameter), + ] + with _payload_stage_timer("well_details", "load_field_events", thing_id): field_events = session.scalars( select(FieldEvent) .where(FieldEvent.thing_id == well.id) - .options( - selectinload(FieldEvent.field_event_participants).selectinload( - FieldEventParticipant.participant - ), - selectinload(FieldEvent.field_activities) - .selectinload(FieldActivity.samples) - .selectinload(Sample.field_event_participant) - .selectinload(FieldEventParticipant.participant), - selectinload(FieldEvent.field_activities) - 
.selectinload(FieldActivity.samples) - .selectinload(Sample.observations) - .selectinload(Observation.parameter), - ) + .options(_participant_options, *_activity_options) .order_by(FieldEvent.event_date.desc(), FieldEvent.id.desc()) .limit(field_event_limit) ).all() + with _payload_stage_timer("well_details", "load_first_field_event", thing_id): + first_field_event = session.scalars( + select(FieldEvent) + .where(FieldEvent.thing_id == well.id) + .options(_participant_options) + .order_by(FieldEvent.event_date.asc(), FieldEvent.id.asc()) + .limit(1) + ).first() + return { "well": well, "contacts": contacts, @@ -139,6 +152,7 @@ def get_well_details_payload( "deployments": deployments, "well_screens": well_screens, "field_events": field_events, + "first_field_event": first_field_event, } From 02b6ad33eff1d9d4b95335613fcf8fba1c3df423 Mon Sep 17 00:00:00 2001 From: jeremyzilar <395641+jeremyzilar@users.noreply.github.com> Date: Tue, 21 Apr 2026 15:55:35 +0000 Subject: [PATCH 36/39] Formatting changes --- services/well_details_helper.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/services/well_details_helper.py b/services/well_details_helper.py index 52f61580..53d40a02 100644 --- a/services/well_details_helper.py +++ b/services/well_details_helper.py @@ -111,11 +111,9 @@ def get_well_details_payload( .order_by(WellScreen.screen_depth_top.asc(), WellScreen.id.asc()) ).all() - _participant_options = ( - selectinload(FieldEvent.field_event_participants).selectinload( - FieldEventParticipant.participant - ) - ) + _participant_options = selectinload( + FieldEvent.field_event_participants + ).selectinload(FieldEventParticipant.participant) _activity_options = [ selectinload(FieldEvent.field_activities) .selectinload(FieldActivity.samples) From 28b4c74c8d202fc611a46e1e39515bf8e8a74e30 Mon Sep 17 00:00:00 2001 From: Jeremy Zilar Date: Mon, 27 Apr 2026 14:57:26 -0400 Subject: [PATCH 37/39] query_helper: apply all Refine filter params and more 
operators order_sort_filter now walks every filter JSON in the request (optional filters list plus legacy single filter_). _apply_json_filter_clause supports contains, ncontains, startswith, endswith, eq, ne, comparison operators, null checks, and in, in addition to the old contains-only behavior. Any list or report that sends filter= more than once or uses operators other than contains will now get real SQL instead of silent no-ops. --- services/query_helper.py | 160 +++++++++++++++++++++++++++++++++------ 1 file changed, 138 insertions(+), 22 deletions(-) diff --git a/services/query_helper.py b/services/query_helper.py index 379e2791..b197045b 100644 --- a/services/query_helper.py +++ b/services/query_helper.py @@ -18,7 +18,7 @@ from fastapi import HTTPException from fastapi_pagination.ext.sqlalchemy import paginate -from sqlalchemy import select, Float, Integer, Column, Select, func, String +from sqlalchemy import Column, Float, Integer, Select, String, Text, func, not_, select from sqlalchemy.orm import DeclarativeBase, Session from sqlalchemy.sql.elements import OperatorExpression from starlette.status import HTTP_404_NOT_FOUND @@ -110,8 +110,130 @@ def simple_all_getter(session, table) -> list[object]: return session.scalars(sql).all() +def _python_type(column: Any): + try: + return column.type.python_type + except Exception: + return None + + +def _apply_json_filter_clause( + sql: Select[Any], table: DeclarativeBase, f: dict +) -> Select[Any]: + """Apply one Refine logical filter dict (field / operator / value) to a SELECT.""" + required_keys = {"field", "value", "operator"} + missing = required_keys - f.keys() + if missing: + raise HTTPException( + status_code=422, + detail=f"Missing required filter keys: {', '.join(sorted(missing))}", + ) + + field = f["field"] + value = f["value"] + operator = f["operator"] + + try: + column = getattr(table, field) + except AttributeError as exc: + raise HTTPException( + status_code=400, + detail=f"Unknown filter field 
{field!r} for {table.__name__}", + ) from exc + + py_t = _python_type(column) + is_string = py_t is str or isinstance(column.type, (String, Text)) + + if operator == "contains": + if not is_string: + raise HTTPException( + status_code=400, + detail=f"Operator contains is not supported for field {field!r}", + ) + return sql.where(column.ilike(f"%{value}%")) + + if operator == "ncontains": + if not is_string: + raise HTTPException( + status_code=400, + detail=f"Operator ncontains is not supported for field {field!r}", + ) + return sql.where(not_(column.ilike(f"%{value}%"))) + + if operator == "startswith": + if not is_string: + raise HTTPException( + status_code=400, + detail=f"Operator startswith is not supported for field {field!r}", + ) + return sql.where(column.ilike(f"{value}%")) + + if operator == "endswith": + if not is_string: + raise HTTPException( + status_code=400, + detail=f"Operator endswith is not supported for field {field!r}", + ) + return sql.where(column.ilike(f"%{value}")) + + if operator == "eq": + if py_t is float: + return sql.where(column == float(value)) + if py_t is int: + return sql.where(column == int(value)) + if is_string: + return sql.where(column == str(value)) + return sql.where(column == value) + + if operator == "ne": + if py_t is float: + return sql.where(column != float(value)) + if py_t is int: + return sql.where(column != int(value)) + if is_string: + return sql.where(column != str(value)) + return sql.where(column != value) + + if operator == "gt": + return sql.where(column > float(value) if py_t is float else column > value) + + if operator == "gte": + return sql.where(column >= float(value) if py_t is float else column >= value) + + if operator == "lt": + return sql.where(column < float(value) if py_t is float else column < value) + + if operator == "lte": + return sql.where(column <= float(value) if py_t is float else column <= value) + + if operator == "null": + return sql.where(column.is_(None)) + + if operator == "nnull": 
+ return sql.where(column.is_not(None)) + + if operator == "in": + if not isinstance(value, (list, tuple)): + raise HTTPException( + status_code=400, + detail="Operator in requires an array value", + ) + return sql.where(column.in_(list(value))) + + raise HTTPException( + status_code=400, + detail=f"Unsupported filter operator {operator!r}", + ) + + def order_sort_filter( - sql: Select[Any], table: DeclarativeBase, sort: str, order: str, filter_: str + sql: Select[Any], + table: DeclarativeBase, + sort: str | None, + order: str | None, + filter_: str | None = None, + *, + filters: list[str] | None = None, ) -> Select[Any]: if order: if not sort: @@ -132,27 +254,21 @@ def order_sort_filter( else: raise ValueError("Invalid order parameter. Use 'asc' or 'desc'.") + filter_jsons: list[str] = [] + if filters: + filter_jsons.extend([x for x in filters if x]) if filter_: - required_keys = {"field", "value", "operator"} - if filter_ is not None: - try: - f = json.loads(filter_) - except Exception: - raise HTTPException(status_code=400, detail="Invalid JSON in filter") - - missing = required_keys - f.keys() - if missing: - raise HTTPException( - status_code=422, - detail=f"Missing required filter keys: {', '.join(missing)}", - ) - - field = f["field"] - value = f["value"] - operator = f["operator"] - column = getattr(table, field) - if operator == "contains": - sql = sql.where(column.ilike(f"%{value}%")) + filter_jsons.append(filter_) + + for raw in filter_jsons: + try: + f = json.loads(raw) + except Exception as exc: + raise HTTPException( + status_code=400, detail="Invalid JSON in filter" + ) from exc + + sql = _apply_json_filter_clause(sql, table, f) return sql From 748f43f96ce8cc4243c2458fe01d2ea4fef09161 Mon Sep 17 00:00:00 2001 From: Jeremy Zilar Date: Mon, 27 Apr 2026 14:57:29 -0400 Subject: [PATCH 38/39] thing_helper: merge multiple filter blobs and optional name_contains get_db_things accepts filters as a list of JSON strings (merged with legacy single filter_) and 
passes them to order_sort_filter. Adds name_contains to narrow rows with Thing.name ILIKE for substring search without changing the full-text query path. --- services/thing_helper.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/services/thing_helper.py b/services/thing_helper.py index 652a893c..16fdd9a6 100644 --- a/services/thing_helper.py +++ b/services/thing_helper.py @@ -121,6 +121,8 @@ def get_db_things( within: Optional[str] = None, name: Optional[str] = None, include_contacts: bool = False, + filters: Optional[list[str]] = None, + name_contains: Optional[str] = None, ) -> list: if query: @@ -151,6 +153,9 @@ def get_db_things( if name: sql = sql.where(Thing.name == name) + if name_contains and name_contains.strip(): + sql = sql.where(Thing.name.ilike(f"%{name_contains.strip()}%")) + if within: latest_assoc = ( select( @@ -173,7 +178,13 @@ def get_db_things( ) sql = make_within_wkt(sql, within) - sql = order_sort_filter(sql, Thing, sort, order, filter_) + merged_filters: list[str] | None = None + if filters: + merged_filters = list(filters) + elif filter_: + merged_filters = [filter_] + + sql = order_sort_filter(sql, Thing, sort, order, filters=merged_filters) return paginate(query=sql, conn=session) From 054c4c269b2ca685ef11686192970b5bead814ef Mon Sep 17 00:00:00 2001 From: Jeremy Zilar Date: Mon, 27 Apr 2026 14:57:31 -0400 Subject: [PATCH 39/39] api thing: repeated filter query param and name_contains on thing lists GET /thing/water-well, GET /thing/spring, and GET /thing now bind filter as a list so multiple Refine filters are forwarded. Same routes accept optional name_contains for substring matching on thing name alongside existing query. 
--- api/thing.py | 33 +++++++++++++++++++++++---------- 1 file changed, 23 insertions(+), 10 deletions(-) diff --git a/api/thing.py b/api/thing.py index 6beb474e..baeed59e 100644 --- a/api/thing.py +++ b/api/thing.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== -from typing import Optional +from typing import Annotated, Optional from fastapi import APIRouter, Query, Request from fastapi_pagination.ext.sqlalchemy import paginate from sqlalchemy import select @@ -151,9 +151,10 @@ def get_water_wells( request: Request, sort: Optional[str] = None, order: Optional[str] = None, - filter_: str = Query(alias="filter", default=None), + filter_params: Annotated[list[str] | None, Query(alias="filter")] = None, query: Optional[str] = None, name: Optional[str] = None, + name_contains: Optional[str] = None, include_contacts: bool = False, ) -> CustomPage[WellResponse]: """ @@ -161,7 +162,7 @@ def get_water_wells( """ thing_type = request.url.path.split("/")[2].replace("-", " ") return get_db_things( - filter_, + None, order, query, session, @@ -169,6 +170,8 @@ def get_water_wells( name=name, thing_type=thing_type, include_contacts=include_contacts, + filters=filter_params, + name_contains=name_contains, ) @@ -293,14 +296,24 @@ def get_springs( request: Request, sort: str = None, order: str = None, - filter_: str = Query(alias="filter", default=None), + filter_params: Annotated[list[str] | None, Query(alias="filter")] = None, query: str = None, + name_contains: Optional[str] = None, ) -> CustomPage[SpringResponse]: """ Retrieve all springs from the database. 
""" thing_type = request.url.path.split("/")[2].replace("-", " ") - return get_db_things(filter_, order, query, session, sort, thing_type=thing_type) + return get_db_things( + None, + order, + query, + session, + sort, + thing_type=thing_type, + filters=filter_params, + name_contains=name_contains, + ) @router.get("/spring/{thing_id}", summary="Get spring by ID", status_code=HTTP_200_OK) @@ -359,23 +372,23 @@ def get_things( sort: Optional[str] = None, order: Optional[str] = None, include_contacts: bool = False, - filter_: str = Query( - default=None, - alias="filter", - ), + filter_params: Annotated[list[str] | None, Query(alias="filter")] = None, + name_contains: Optional[str] = None, ) -> CustomPage[ThingResponse]: """ Retrieve all things or filter by type. """ return get_db_things( - filter_, + None, order, query, session, sort, within=within, include_contacts=include_contacts, + filters=filter_params, + name_contains=name_contains, )