diff --git a/.docs/changelog.md b/.docs/changelog.md
index 256e245d5d6aac818396dae86aed655e5e4613d6..e7f12b284b67d0af3201031cd96c49eda62c2604 100644
--- a/.docs/changelog.md
+++ b/.docs/changelog.md
@@ -2,7 +2,7 @@
 author: Martin Weise
 ---
 
-## v1.4.7 (???)
+## v1.4.7 (2024-10-21)
 
 [:simple-gitlab: GitLab Release](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/tags/v1.4.7)
 
@@ -19,12 +19,19 @@ author: Martin Weise
 
 #### Changes
 
+* Allow anonymous users to create subsets for public databases
+  in [#449](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/449).
+* Show file upload progress
+  in [#448](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/448).
 * Change the Docker image of the Auth Service to Bitnami-maintained similar to Kubernetes deployment with accompanying
   Auth Database change to PostgreSQL
   in [#455](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/455)
 
 #### Fixes
 
+* Fixed multiple UI errors in [#453](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/453).
+* Fixed install script.sh
+  in [#444](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/444).
 * No hardcoded data type metadata in UI but instead added it hardcoded (associated with `image_id`) Metadata Database.
 
 ## v1.4.6 (2024-10-11)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index efc01492bf25a734553d4ed08d62fd6a4cdb1689..f571fba1d16722ed56e3152dbe634272ebcbd284 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -260,10 +260,37 @@ test-search-service:
     - build-search-service
   dependencies:
     - build-search-service
+  before_script:
+    - "cp -r ./dbrepo-search-service/init/clients ./dbrepo-search-service/clients"
+    - "cp -r ./dbrepo-search-service/init/omlib ./dbrepo-search-service/omlib"
+  script:
+    - "pip install pipenv"
+    - "pipenv install gunicorn && pipenv install --dev --system --deploy"
+    - cd ./dbrepo-search-service/ && coverage run -m pytest test/test_app.py test/test_jwt.py test/test_opensearch_client.py test/test_keycloak_client.py --junitxml=report.xml && coverage html && coverage report > ./coverage.txt
+    - "cat ./coverage.txt | grep -o 'TOTAL[^%]*%'"
+  artifacts:
+    when: always
+    paths:
+      - ./dbrepo-search-service/report.xml
+      - ./dbrepo-search-service/coverage.txt
+    expire_in: 1 days
+    reports:
+      junit: ./dbrepo-search-service/report.xml
+  coverage: '/TOTAL.*?([0-9]{1,3})%/'
+
+test-search-service-init:
+  image: docker.io/python:3.11-alpine
+  stage: test
+  variables:
+    PIPENV_PIPFILE: "./dbrepo-search-service/init/Pipfile"
+  needs:
+    - build-search-service
+  dependencies:
+    - build-search-service
   script:
     - "pip install pipenv"
     - "pipenv install gunicorn && pipenv install --dev --system --deploy"
-    - cd ./dbrepo-search-service/ && coverage run -m pytest test/test_opensearch_client.py --junitxml=report.xml && coverage html --omit="test/*,omlib/*" && coverage report --omit="test/*,omlib/*" > ./coverage.txt
+    - cd ./dbrepo-search-service/init/ && coverage run -m pytest test/test_app.py --junitxml=report.xml && coverage html && coverage report > ./coverage.txt
+    - "cat ./coverage.txt | grep -o 'TOTAL[^%]*%'"
   artifacts:
     when: always
diff --git a/dbrepo-analyse-service/Pipfile.lock b/dbrepo-analyse-service/Pipfile.lock
index 91f5b55d641405ee5efabe19d0183109a2a60b7e..bcff34a23efb2e4a660fb11c850aba569369e918 100644
--- a/dbrepo-analyse-service/Pipfile.lock
+++ b/dbrepo-analyse-service/Pipfile.lock
@@ -440,7 +440,7 @@ }, "dbrepo": { "hashes": [ -
"sha256:654d487f1c0fd99b4978f5756aec4046f3e6019aeb225ecdd449768795f6e7e0" + "sha256:84607677b0826bb9b2fa120aacdf56d16c8d9ae423f435b2bd2c22b1c965a33c" ], "markers": "python_version >= '3.11'", "path": "./lib/dbrepo-1.4.7.tar.gz" @@ -1594,11 +1594,11 @@ }, "tinydb": { "hashes": [ - "sha256:09c4c6a239da9be676b948f1f28074cffd1cf08e7af920c1df50424cc8bee8d6", - "sha256:1c7c507ef520c789f94f1f5786f0722a98a59a85031a2e81e2accc701721f07f" + "sha256:f7dfc39b8d7fda7a1ca62a8dbb449ffd340a117c1206b68c50b1a481fb95181d", + "sha256:f97030ee5cbc91eeadd1d7af07ab0e48ceb04aa63d4a983adbaca4cba16e86c3" ], "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==4.8.1" + "version": "==4.8.2" }, "tuspy": { "hashes": [ @@ -1642,107 +1642,107 @@ }, "yarl": { "hashes": [ - "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656", - "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7", - "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9", - "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685", - "sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf", - "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb", - "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5", - "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77", - "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496", - "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a", - "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60", - "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128", - "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d", - "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61", - "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758", - "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5", - "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f", - "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0", - "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e", - "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814", - "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc", - "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf", - "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117", - "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d", - "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6", - "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218", - "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9", - "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf", - "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4", - "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef", - "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a", - "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4", - "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751", - "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253", - "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c", - "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42", - 
"sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb", - "sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7", - "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e", - "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b", - "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631", - "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29", - "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234", - "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530", - "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0", - "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a", - "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2", - "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e", - "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1", - "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec", - "sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f", - "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752", - "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9", - "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452", - "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef", - "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6", - "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c", - "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1", - "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246", - "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3", - "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb", - "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb", - "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07", - "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509", - "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326", - "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f", - "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6", - "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661", - "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40", - "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2", - "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15", - "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f", - "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844", - "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a", - "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b", - "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6", - "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e", - "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced", - "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19", - "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba", - "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95", - "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c", - 
"sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2", - "sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297", - "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891", - "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b", - "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d", - "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9", - "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22", - "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4", - "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba", - "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738", - "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0", - "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53", - "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f", - "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942", - "sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7", - "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd" + "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", + "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", + "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", + "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", + "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", + "sha256:0e1af74a9529a1137c67c887ed9cde62cff53aa4d84a3adbec329f9ec47a3936", + "sha256:136f9db0f53c0206db38b8cd0c985c78ded5fd596c9a86ce5c0b92afb91c3a19", + "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", + "sha256:15c87339490100c63472a76d87fe7097a0835c705eb5ae79fd96e343473629ed", + "sha256:1695497bb2a02a6de60064c9f077a4ae9c25c73624e0d43e3aa9d16d983073c2", + "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", + "sha256:173866d9f7409c0fb514cf6e78952e65816600cb888c68b37b41147349fe0057", + "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", + "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", + "sha256:28c6cf1d92edf936ceedc7afa61b07e9d78a27b15244aa46bbcd534c7458ee1b", + "sha256:2aa738e0282be54eede1e3f36b81f1e46aee7ec7602aa563e81e0e8d7b67963f", + "sha256:2cf441c4b6e538ba0d2591574f95d3fdd33f1efafa864faa077d9636ecc0c4e9", + "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", + "sha256:31561a5b4d8dbef1559b3600b045607cf804bae040f64b5f5bca77da38084a8a", + "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", + "sha256:3433da95b51a75692dcf6cc8117a31410447c75a9a8187888f02ad45c0a86c50", + "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", + "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", + "sha256:38d0124fa992dbacd0c48b1b755d3ee0a9f924f427f95b0ef376556a24debf01", + "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", + "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", + "sha256:3e52474256a7db9dcf3c5f4ca0b300fdea6c21cca0148c8891d03a025649d935", + "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", + "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", + "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", + 
"sha256:43ebdcc120e2ca679dba01a779333a8ea76b50547b55e812b8b92818d604662c", + "sha256:458c0c65802d816a6b955cf3603186de79e8fdb46d4f19abaec4ef0a906f50a7", + "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", + "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", + "sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", + "sha256:5b48388ded01f6f2429a8c55012bdbd1c2a0c3735b3e73e221649e524c34a58d", + "sha256:5bc0df728e4def5e15a754521e8882ba5a5121bd6b5a3a0ff7efda5d6558ab3d", + "sha256:63eab904f8630aed5a68f2d0aeab565dcfc595dc1bf0b91b71d9ddd43dea3aea", + "sha256:66f629632220a4e7858b58e4857927dd01a850a4cef2fb4044c8662787165cf7", + "sha256:670eb11325ed3a6209339974b276811867defe52f4188fe18dc49855774fa9cf", + "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", + "sha256:6e840553c9c494a35e449a987ca2c4f8372668ee954a03a9a9685075228e5036", + "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", + "sha256:74abb8709ea54cc483c4fb57fb17bb66f8e0f04438cff6ded322074dbd17c7ec", + "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", + "sha256:766dcc00b943c089349d4060b935c76281f6be225e39994c2ccec3a2a36ad627", + "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", + "sha256:81dadafb3aa124f86dc267a2168f71bbd2bfb163663661ab0038f6e4b8edb810", + "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", + "sha256:833547179c31f9bec39b49601d282d6f0ea1633620701288934c5f66d88c3e50", + "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", + "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", + "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", + "sha256:954dde77c404084c2544e572f342aef384240b3e434e06cecc71597e95fd1ce7", + "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", + "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", + "sha256:9a13a07532e8e1c4a5a3afff0ca4553da23409fad65def1b71186fb867eeae8d", + "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", + "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", + "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", + "sha256:a32d58f4b521bb98b2c0aa9da407f8bd57ca81f34362bcb090e4a79e9924fefc", + "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", + "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", + "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172", + "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", + "sha256:ace4cad790f3bf872c082366c9edd7f8f8f77afe3992b134cfc810332206884f", + "sha256:af8ff8d7dc07ce873f643de6dfbcd45dc3db2c87462e5c387267197f59e6d776", + "sha256:b47a6000a7e833ebfe5886b56a31cb2ff12120b1efd4578a6fcc38df16cc77bd", + "sha256:b71862a652f50babab4a43a487f157d26b464b1dedbcc0afda02fd64f3809d04", + "sha256:b7f227ca6db5a9fda0a2b935a2ea34a7267589ffc63c8045f0e4edb8d8dcf956", + "sha256:bc8936d06cd53fddd4892677d65e98af514c8d78c79864f418bbf78a4a2edde4", + "sha256:bed1b5dbf90bad3bfc19439258c97873eab453c71d8b6869c136346acfe497e7", + "sha256:c45817e3e6972109d1a2c65091504a537e257bc3c885b4e78a95baa96df6a3f8", + "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", + "sha256:c77494a2f2282d9bbbbcab7c227a4d1b4bb829875c96251f66fb5f3bae4fb053", + "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", + 
"sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", + "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", + "sha256:ce44217ad99ffad8027d2fde0269ae368c86db66ea0571c62a000798d69401fb", + "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", + "sha256:d417a4f6943112fae3924bae2af7112562285848d9bcee737fc4ff7cbd450e6c", + "sha256:d538df442c0d9665664ab6dd5fccd0110fa3b364914f9c85b3ef9b7b2e157980", + "sha256:ded1b1803151dd0f20a8945508786d57c2f97a50289b16f2629f85433e546d47", + "sha256:e2e93b88ecc8f74074012e18d679fb2e9c746f2a56f79cd5e2b1afcf2a8a786b", + "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", + "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8", + "sha256:e52f77a0cd246086afde8815039f3e16f8d2be51786c0a39b57104c563c5cbb0", + "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", + "sha256:ed20a4bdc635f36cb19e630bfc644181dd075839b6fc84cac51c0f381ac472e2", + "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", + "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", + "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", + "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", + "sha256:f9ca0e6ce7774dc7830dc0cc4bb6b3eec769db667f230e7c770a628c1aa5681b", + "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", + "sha256:fbbb63bed5fcd70cd3dd23a087cd78e4675fb5a2963b8af53f945cbbca79ae16", + "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", + "sha256:ffd591e22b22f9cb48e472529db6a47203c41c2c5911ff0a52e85723196c0d75" ], "markers": "python_version >= '3.8'", - "version": "==1.15.0" + "version": "==1.15.2" }, "zope.event": { "hashes": [ @@ -2026,72 +2026,72 @@ }, "coverage": { "hashes": [ - "sha256:078a87519057dacb5d77e333f740708ec2a8f768655f1db07f8dfd28d7a005f0", - "sha256:087932079c065d7b8ebadd3a0160656c55954144af6439886c8bcf78bbbcde7f", - "sha256:0bbae11c138585c89fb4e991faefb174a80112e1a7557d507aaa07675c62e66b", - "sha256:0ff2ef83d6d0b527b5c9dad73819b24a2f76fdddcfd6c4e7a4d7e73ecb0656b4", - "sha256:12179eb0575b8900912711688e45474f04ab3934aaa7b624dea7b3c511ecc90f", - "sha256:1e5e92e3e84a8718d2de36cd8387459cba9a4508337b8c5f450ce42b87a9e760", - "sha256:2186369a654a15628e9c1c9921409a6b3eda833e4b91f3ca2a7d9f77abb4987c", - "sha256:21c0ea0d4db8a36b275cb6fb2437a3715697a4ba3cb7b918d3525cc75f726304", - "sha256:24500f4b0e03aab60ce575c85365beab64b44d4db837021e08339f61d1fbfe52", - "sha256:2b636a301e53964550e2f3094484fa5a96e699db318d65398cfba438c5c92171", - "sha256:343056c5e0737487a5291f5691f4dfeb25b3e3c8699b4d36b92bb0e586219d14", - "sha256:35a51598f29b2a19e26d0908bd196f771a9b1c5d9a07bf20be0adf28f1ad4f77", - "sha256:39d3b964abfe1519b9d313ab28abf1d02faea26cd14b27f5283849bf59479ff5", - "sha256:3ec528ae69f0a139690fad6deac8a7d33629fa61ccce693fdd07ddf7e9931fba", - "sha256:47ccb6e99a3031ffbbd6e7cc041e70770b4fe405370c66a54dbf26a500ded80b", - "sha256:4eea60c79d36a8f39475b1af887663bc3ae4f31289cd216f514ce18d5938df40", - "sha256:536f77f2bf5797983652d1d55f1a7272a29afcc89e3ae51caa99b2db4e89d658", - "sha256:5ed69befa9a9fc796fe015a7040c9398722d6b97df73a6b608e9e275fa0932b0", - "sha256:62ab4231c01e156ece1b3a187c87173f31cbeee83a5e1f6dff17f288dca93345", - "sha256:667952739daafe9616db19fbedbdb87917eee253ac4f31d70c7587f7ab531b4e", - "sha256:69f251804e052fc46d29d0e7348cdc5fcbfc4861dc4a1ebedef7e78d241ad39e", - 
"sha256:6c2ba1e0c24d8fae8f2cf0aeb2fc0a2a7f69b6d20bd8d3749fd6b36ecef5edf0", - "sha256:6e85830eed5b5263ffa0c62428e43cb844296f3b4461f09e4bdb0d44ec190bc2", - "sha256:7571e8bbecc6ac066256f9de40365ff833553e2e0c0c004f4482facb131820ef", - "sha256:7781f4f70c9b0b39e1b129b10c7d43a4e0c91f90c60435e6da8288efc2b73438", - "sha256:7926d8d034e06b479797c199747dd774d5e86179f2ce44294423327a88d66ca7", - "sha256:7b80fbb0da3aebde102a37ef0138aeedff45997e22f8962e5f16ae1742852676", - "sha256:7fca4a92c8a7a73dee6946471bce6d1443d94155694b893b79e19ca2a540d86e", - "sha256:84c4315577f7cd511d6250ffd0f695c825efe729f4205c0340f7004eda51191f", - "sha256:8d9c5d13927d77af4fbe453953810db766f75401e764727e73a6ee4f82527b3e", - "sha256:9681516288e3dcf0aa7c26231178cc0be6cac9705cac06709f2353c5b406cfea", - "sha256:97df87e1a20deb75ac7d920c812e9326096aa00a9a4b6d07679b4f1f14b06c90", - "sha256:9bcd51eeca35a80e76dc5794a9dd7cb04b97f0e8af620d54711793bfc1fbba4b", - "sha256:9c6b0c1cafd96213a0327cf680acb39f70e452caf8e9a25aeb05316db9c07f89", - "sha256:a5f81e68aa62bc0cfca04f7b19eaa8f9c826b53fc82ab9e2121976dc74f131f3", - "sha256:a663b180b6669c400b4630a24cc776f23a992d38ce7ae72ede2a397ce6b0f170", - "sha256:a7b2e437fbd8fae5bc7716b9c7ff97aecc95f0b4d56e4ca08b3c8d8adcaadb84", - "sha256:a867d26f06bcd047ef716175b2696b315cb7571ccb951006d61ca80bbc356e9e", - "sha256:aa68a6cdbe1bc6793a9dbfc38302c11599bbe1837392ae9b1d238b9ef3dafcf1", - "sha256:ab31fdd643f162c467cfe6a86e9cb5f1965b632e5e65c072d90854ff486d02cf", - "sha256:ad4ef1c56b47b6b9024b939d503ab487231df1f722065a48f4fc61832130b90e", - "sha256:b92f9ca04b3e719d69b02dc4a69debb795af84cb7afd09c5eb5d54b4a1ae2191", - "sha256:bb21bac7783c1bf6f4bbe68b1e0ff0d20e7e7732cfb7995bc8d96e23aa90fc7b", - "sha256:bf4eeecc9e10f5403ec06138978235af79c9a79af494eb6b1d60a50b49ed2869", - "sha256:bfde025e2793a22efe8c21f807d276bd1d6a4bcc5ba6f19dbdfc4e7a12160909", - "sha256:c37faddc8acd826cfc5e2392531aba734b229741d3daec7f4c777a8f0d4993e5", - "sha256:c71965d1ced48bf97aab79fad56df82c566b4c498ffc09c2094605727c4b7e36", - "sha256:c9192925acc33e146864b8cf037e2ed32a91fdf7644ae875f5d46cd2ef086a5f", - "sha256:c9df1950fb92d49970cce38100d7e7293c84ed3606eaa16ea0b6bc27175bb667", - "sha256:cdfcf2e914e2ba653101157458afd0ad92a16731eeba9a611b5cbb3e7124e74b", - "sha256:d03a060ac1a08e10589c27d509bbdb35b65f2d7f3f8d81cf2fa199877c7bc58a", - "sha256:d20c3d1f31f14d6962a4e2f549c21d31e670b90f777ef4171be540fb7fb70f02", - "sha256:e4ee15b267d2dad3e8759ca441ad450c334f3733304c55210c2a44516e8d5530", - "sha256:e8ea055b3ea046c0f66217af65bc193bbbeca1c8661dc5fd42698db5795d2627", - "sha256:ebabdf1c76593a09ee18c1a06cd3022919861365219ea3aca0247ededf6facd6", - "sha256:ebc94fadbd4a3f4215993326a6a00e47d79889391f5659bf310f55fe5d9f581c", - "sha256:ed5ac02126f74d190fa2cc14a9eb2a5d9837d5863920fa472b02eb1595cdc925", - "sha256:f01e53575f27097d75d42de33b1b289c74b16891ce576d767ad8c48d17aeb5e0", - "sha256:f361296ca7054f0936b02525646b2731b32c8074ba6defab524b79b2b7eeac72", - "sha256:f9035695dadfb397bee9eeaf1dc7fbeda483bf7664a7397a629846800ce6e276", - "sha256:fcad7d5d2bbfeae1026b395036a8aa5abf67e8038ae7e6a25c7d0f88b10a8e6a", - "sha256:ff797320dcbff57caa6b2301c3913784a010e13b1f6cf4ab3f563f3c5e7919db" + "sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6", + "sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2", + "sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba", + "sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb", + "sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6", + 
"sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4", + "sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0", + "sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6", + "sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990", + "sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3", + "sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43", + "sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175", + "sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a", + "sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6", + "sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97", + "sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b", + "sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e", + "sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39", + "sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd", + "sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d", + "sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f", + "sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc", + "sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976", + "sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549", + "sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c", + "sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5", + "sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4", + "sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b", + "sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e", + "sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3", + "sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6", + "sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e", + "sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929", + "sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234", + "sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13", + "sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007", + "sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3", + "sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167", + "sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d", + "sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d", + "sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40", + "sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181", + "sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054", + "sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd", + "sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2", + "sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91", + "sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3", + "sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b", + "sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38", + "sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd", + "sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f", + 
"sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2", + "sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba", + "sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f", + "sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83", + "sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce", + "sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38", + "sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c", + "sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f", + "sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21", + "sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4", + "sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==7.6.2" + "version": "==7.6.3" }, "docker": { "hashes": [ diff --git a/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz b/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz index 936ec4dac16fe8f065ac7d37a09aedca421086a6..c652dbae4a60aae708eb6fe4e751977b97b72344 100644 Binary files a/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz differ diff --git a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java index d18a3f3e4275124d8428d99d9cee37a467828b69..7413b20462e7ba1968e9df86e4a83514f3835605 100644 --- a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java +++ b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java @@ -22,10 +22,7 @@ import at.tuwien.gateway.DataDatabaseSidecarGateway; import at.tuwien.gateway.MetadataServiceGateway; import at.tuwien.test.AbstractUnitTest; import lombok.extern.log4j.Log4j2; -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.*; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; @@ -507,6 +504,7 @@ public class TableServiceIntegrationTest extends AbstractUnitTest { } @Test + @Disabled("Not stable CI/CD") public void getStatistics_succeeds() throws TableMalformedException, SQLException, TableNotFoundException { /* test */ diff --git a/dbrepo-search-service/.coveragerc b/dbrepo-search-service/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..4683a93d3748d16ab20a61f318e3016d3f4a8e09 --- /dev/null +++ b/dbrepo-search-service/.coveragerc @@ -0,0 +1,5 @@ +[report] +omit = + */test/* + */omlib/* + */init/* \ No newline at end of file diff --git a/dbrepo-search-service/.gitignore b/dbrepo-search-service/.gitignore index 12a8c1aa3630c30664b9b59f3cdff7b765275f04..78c8fdf6e56a9e006504b1f2345abdacbf6e7bb4 100644 --- a/dbrepo-search-service/.gitignore +++ b/dbrepo-search-service/.gitignore @@ -9,6 +9,8 @@ __pycache__/ # Generated coverage.txt report.xml +clients/ +omlib/ # Libraries ./lib/dbrepo-1.4.4* diff --git a/dbrepo-search-service/Dockerfile b/dbrepo-search-service/Dockerfile index 35427f81a429f8a61bd724f7bb8141b734d5013f..9586d0be307c57572de9ece9667903fa92a8a7a8 100644 --- 
a/dbrepo-search-service/Dockerfile +++ b/dbrepo-search-service/Dockerfile @@ -17,8 +17,8 @@ USER 1001 WORKDIR /app -COPY --chown=1001 ./clients ./clients -COPY --chown=1001 ./omlib ./omlib +COPY --chown=1001 ./init/clients ./clients +COPY --chown=1001 ./init/omlib ./omlib COPY --chown=1001 ./os-yml ./os-yml COPY --chown=1001 ./app.py ./app.py COPY --chown=1001 ./friendly_names_overrides.json ./friendly_names_overrides.json diff --git a/dbrepo-search-service/Pipfile b/dbrepo-search-service/Pipfile index 298815c4b21885d64092b9f736fe4fb9516c8b1b..e74391ce665a39201bd480819e31a712fe82a6c7 100644 --- a/dbrepo-search-service/Pipfile +++ b/dbrepo-search-service/Pipfile @@ -24,6 +24,7 @@ gunicorn = "*" [dev-packages] coverage = "*" pytest = "*" +requests-mock = "*" [requires] python_version = "3.11" diff --git a/dbrepo-search-service/Pipfile.lock b/dbrepo-search-service/Pipfile.lock index 0a4189531723dec8e470e010bfd71551ad0ca5ba..123e864f6dd050a838ae2c7e9137a4d7b3f1e27f 100644 --- a/dbrepo-search-service/Pipfile.lock +++ b/dbrepo-search-service/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e82f3e09ac14fafd1be12c5b57dfcbfafecfd7c8ac2d098ce101264da2d22d42" + "sha256": "491e5f6ada48e8af417dfa7d6a0b4d98ccf9b9072df53b44d8de014b687fc80c" }, "pipfile-spec": 6, "requires": { @@ -388,9 +388,8 @@ }, "dbrepo": { "hashes": [ - "sha256:654d487f1c0fd99b4978f5756aec4046f3e6019aeb225ecdd449768795f6e7e0" + "sha256:84607677b0826bb9b2fa120aacdf56d16c8d9ae423f435b2bd2c22b1c965a33c" ], - "markers": "python_version >= '3.11'", "path": "./lib/dbrepo-1.4.7.tar.gz" }, "docker": { @@ -616,7 +615,7 @@ "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79", "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f" ], - "markers": "python_version < '3.13' and platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32')))))", + "markers": "python_version < '3.13' and (platform_machine == 'aarch64' or (platform_machine == 'ppc64le' or (platform_machine == 'x86_64' or (platform_machine == 'amd64' or (platform_machine == 'AMD64' or (platform_machine == 'win32' or platform_machine == 'WIN32'))))))", "version": "==3.1.1" }, "gunicorn": { @@ -1245,11 +1244,11 @@ }, "pyparsing": { "hashes": [ - "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c", - "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032" + "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", + "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c" ], - "markers": "python_full_version >= '3.6.8'", - "version": "==3.1.4" + "markers": "python_version >= '3.9'", + "version": "==3.2.0" }, "pytest": { "hashes": [ @@ -1265,7 +1264,7 @@ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.9.0.post0" }, "python-dotenv": { @@ -1482,7 +1481,7 @@ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version 
>= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.16.0" }, "sqlalchemy": { @@ -1566,11 +1565,11 @@ }, "tinydb": { "hashes": [ - "sha256:09c4c6a239da9be676b948f1f28074cffd1cf08e7af920c1df50424cc8bee8d6", - "sha256:1c7c507ef520c789f94f1f5786f0722a98a59a85031a2e81e2accc701721f07f" + "sha256:f7dfc39b8d7fda7a1ca62a8dbb449ffd340a117c1206b68c50b1a481fb95181d", + "sha256:f97030ee5cbc91eeadd1d7af07ab0e48ceb04aa63d4a983adbaca4cba16e86c3" ], "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==4.8.1" + "version": "==4.8.2" }, "tuspy": { "hashes": [ @@ -1690,178 +1689,305 @@ }, "yarl": { "hashes": [ - "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656", - "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7", - "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9", - "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685", - "sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf", - "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb", - "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5", - "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77", - "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496", - "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a", - "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60", - "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128", - "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d", - "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61", - "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758", - "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5", - "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f", - "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0", - "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e", - "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814", - "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc", - "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf", - "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117", - "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d", - "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6", - "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218", - "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9", - "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf", - "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4", - "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef", - "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a", - "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4", - "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751", - "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253", - "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c", - "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42", - "sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb", - 
"sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7", - "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e", - "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b", - "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631", - "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29", - "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234", - "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530", - "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0", - "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a", - "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2", - "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e", - "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1", - "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec", - "sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f", - "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752", - "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9", - "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452", - "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef", - "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6", - "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c", - "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1", - "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246", - "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3", - "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb", - "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb", - "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07", - "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509", - "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326", - "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f", - "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6", - "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661", - "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40", - "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2", - "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15", - "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f", - "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844", - "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a", - "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b", - "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6", - "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e", - "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced", - "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19", - "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba", - "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95", - "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c", - "sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2", - 
"sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297", - "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891", - "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b", - "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d", - "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9", - "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22", - "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4", - "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba", - "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738", - "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0", - "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53", - "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f", - "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942", - "sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7", - "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd" + "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", + "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", + "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", + "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", + "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", + "sha256:0e1af74a9529a1137c67c887ed9cde62cff53aa4d84a3adbec329f9ec47a3936", + "sha256:136f9db0f53c0206db38b8cd0c985c78ded5fd596c9a86ce5c0b92afb91c3a19", + "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", + "sha256:15c87339490100c63472a76d87fe7097a0835c705eb5ae79fd96e343473629ed", + "sha256:1695497bb2a02a6de60064c9f077a4ae9c25c73624e0d43e3aa9d16d983073c2", + "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", + "sha256:173866d9f7409c0fb514cf6e78952e65816600cb888c68b37b41147349fe0057", + "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", + "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", + "sha256:28c6cf1d92edf936ceedc7afa61b07e9d78a27b15244aa46bbcd534c7458ee1b", + "sha256:2aa738e0282be54eede1e3f36b81f1e46aee7ec7602aa563e81e0e8d7b67963f", + "sha256:2cf441c4b6e538ba0d2591574f95d3fdd33f1efafa864faa077d9636ecc0c4e9", + "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", + "sha256:31561a5b4d8dbef1559b3600b045607cf804bae040f64b5f5bca77da38084a8a", + "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", + "sha256:3433da95b51a75692dcf6cc8117a31410447c75a9a8187888f02ad45c0a86c50", + "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", + "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", + "sha256:38d0124fa992dbacd0c48b1b755d3ee0a9f924f427f95b0ef376556a24debf01", + "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", + "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", + "sha256:3e52474256a7db9dcf3c5f4ca0b300fdea6c21cca0148c8891d03a025649d935", + "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", + "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", + "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", + "sha256:43ebdcc120e2ca679dba01a779333a8ea76b50547b55e812b8b92818d604662c", + 
"sha256:458c0c65802d816a6b955cf3603186de79e8fdb46d4f19abaec4ef0a906f50a7", + "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", + "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", + "sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", + "sha256:5b48388ded01f6f2429a8c55012bdbd1c2a0c3735b3e73e221649e524c34a58d", + "sha256:5bc0df728e4def5e15a754521e8882ba5a5121bd6b5a3a0ff7efda5d6558ab3d", + "sha256:63eab904f8630aed5a68f2d0aeab565dcfc595dc1bf0b91b71d9ddd43dea3aea", + "sha256:66f629632220a4e7858b58e4857927dd01a850a4cef2fb4044c8662787165cf7", + "sha256:670eb11325ed3a6209339974b276811867defe52f4188fe18dc49855774fa9cf", + "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", + "sha256:6e840553c9c494a35e449a987ca2c4f8372668ee954a03a9a9685075228e5036", + "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", + "sha256:74abb8709ea54cc483c4fb57fb17bb66f8e0f04438cff6ded322074dbd17c7ec", + "sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", + "sha256:766dcc00b943c089349d4060b935c76281f6be225e39994c2ccec3a2a36ad627", + "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", + "sha256:81dadafb3aa124f86dc267a2168f71bbd2bfb163663661ab0038f6e4b8edb810", + "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", + "sha256:833547179c31f9bec39b49601d282d6f0ea1633620701288934c5f66d88c3e50", + "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", + "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", + "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", + "sha256:954dde77c404084c2544e572f342aef384240b3e434e06cecc71597e95fd1ce7", + "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", + "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", + "sha256:9a13a07532e8e1c4a5a3afff0ca4553da23409fad65def1b71186fb867eeae8d", + "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", + "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", + "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", + "sha256:a32d58f4b521bb98b2c0aa9da407f8bd57ca81f34362bcb090e4a79e9924fefc", + "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", + "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", + "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172", + "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", + "sha256:ace4cad790f3bf872c082366c9edd7f8f8f77afe3992b134cfc810332206884f", + "sha256:af8ff8d7dc07ce873f643de6dfbcd45dc3db2c87462e5c387267197f59e6d776", + "sha256:b47a6000a7e833ebfe5886b56a31cb2ff12120b1efd4578a6fcc38df16cc77bd", + "sha256:b71862a652f50babab4a43a487f157d26b464b1dedbcc0afda02fd64f3809d04", + "sha256:b7f227ca6db5a9fda0a2b935a2ea34a7267589ffc63c8045f0e4edb8d8dcf956", + "sha256:bc8936d06cd53fddd4892677d65e98af514c8d78c79864f418bbf78a4a2edde4", + "sha256:bed1b5dbf90bad3bfc19439258c97873eab453c71d8b6869c136346acfe497e7", + "sha256:c45817e3e6972109d1a2c65091504a537e257bc3c885b4e78a95baa96df6a3f8", + "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", + "sha256:c77494a2f2282d9bbbbcab7c227a4d1b4bb829875c96251f66fb5f3bae4fb053", + "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", + "sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", + 
"sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", + "sha256:ce44217ad99ffad8027d2fde0269ae368c86db66ea0571c62a000798d69401fb", + "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", + "sha256:d417a4f6943112fae3924bae2af7112562285848d9bcee737fc4ff7cbd450e6c", + "sha256:d538df442c0d9665664ab6dd5fccd0110fa3b364914f9c85b3ef9b7b2e157980", + "sha256:ded1b1803151dd0f20a8945508786d57c2f97a50289b16f2629f85433e546d47", + "sha256:e2e93b88ecc8f74074012e18d679fb2e9c746f2a56f79cd5e2b1afcf2a8a786b", + "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", + "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8", + "sha256:e52f77a0cd246086afde8815039f3e16f8d2be51786c0a39b57104c563c5cbb0", + "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", + "sha256:ed20a4bdc635f36cb19e630bfc644181dd075839b6fc84cac51c0f381ac472e2", + "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", + "sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", + "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", + "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", + "sha256:f9ca0e6ce7774dc7830dc0cc4bb6b3eec769db667f230e7c770a628c1aa5681b", + "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", + "sha256:fbbb63bed5fcd70cd3dd23a087cd78e4675fb5a2963b8af53f945cbbca79ae16", + "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", + "sha256:ffd591e22b22f9cb48e472529db6a47203c41c2c5911ff0a52e85723196c0d75" ], "markers": "python_version >= '3.8'", - "version": "==1.15.0" + "version": "==1.15.2" } }, "develop": { + "certifi": { + "hashes": [ + "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", + "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9" + ], + "markers": "python_version >= '3.6'", + "version": "==2024.8.30" + }, + "charset-normalizer": { + "hashes": [ + "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621", + "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", + "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", + "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", + "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", + "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", + "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", + "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d", + "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", + "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", + "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", + "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", + "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab", + "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be", + "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", + "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", + "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0", + "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2", + "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62", + 
"sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62", + "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", + "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", + "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", + "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", + "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455", + "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858", + "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", + "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", + "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", + "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", + "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", + "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea", + "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", + "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", + "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", + "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", + "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd", + "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", + "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242", + "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee", + "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", + "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", + "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51", + "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", + "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8", + "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", + "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613", + "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742", + "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", + "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", + "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", + "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", + "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", + "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", + "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", + "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", + "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417", + "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", + "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", + "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca", + "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa", + "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", + "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149", + "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41", + "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574", + 
"sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0", + "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f", + "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", + "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654", + "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3", + "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19", + "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", + "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578", + "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", + "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", + "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51", + "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", + "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", + "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a", + "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", + "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade", + "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", + "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", + "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6", + "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", + "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", + "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6", + "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2", + "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12", + "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf", + "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", + "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7", + "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", + "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", + "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b", + "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", + "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", + "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4", + "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", + "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", + "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a", + "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748", + "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", + "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", + "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.4.0" + }, "coverage": { "hashes": [ - "sha256:078a87519057dacb5d77e333f740708ec2a8f768655f1db07f8dfd28d7a005f0", - "sha256:087932079c065d7b8ebadd3a0160656c55954144af6439886c8bcf78bbbcde7f", - "sha256:0bbae11c138585c89fb4e991faefb174a80112e1a7557d507aaa07675c62e66b", - "sha256:0ff2ef83d6d0b527b5c9dad73819b24a2f76fdddcfd6c4e7a4d7e73ecb0656b4", - 
"sha256:12179eb0575b8900912711688e45474f04ab3934aaa7b624dea7b3c511ecc90f", - "sha256:1e5e92e3e84a8718d2de36cd8387459cba9a4508337b8c5f450ce42b87a9e760", - "sha256:2186369a654a15628e9c1c9921409a6b3eda833e4b91f3ca2a7d9f77abb4987c", - "sha256:21c0ea0d4db8a36b275cb6fb2437a3715697a4ba3cb7b918d3525cc75f726304", - "sha256:24500f4b0e03aab60ce575c85365beab64b44d4db837021e08339f61d1fbfe52", - "sha256:2b636a301e53964550e2f3094484fa5a96e699db318d65398cfba438c5c92171", - "sha256:343056c5e0737487a5291f5691f4dfeb25b3e3c8699b4d36b92bb0e586219d14", - "sha256:35a51598f29b2a19e26d0908bd196f771a9b1c5d9a07bf20be0adf28f1ad4f77", - "sha256:39d3b964abfe1519b9d313ab28abf1d02faea26cd14b27f5283849bf59479ff5", - "sha256:3ec528ae69f0a139690fad6deac8a7d33629fa61ccce693fdd07ddf7e9931fba", - "sha256:47ccb6e99a3031ffbbd6e7cc041e70770b4fe405370c66a54dbf26a500ded80b", - "sha256:4eea60c79d36a8f39475b1af887663bc3ae4f31289cd216f514ce18d5938df40", - "sha256:536f77f2bf5797983652d1d55f1a7272a29afcc89e3ae51caa99b2db4e89d658", - "sha256:5ed69befa9a9fc796fe015a7040c9398722d6b97df73a6b608e9e275fa0932b0", - "sha256:62ab4231c01e156ece1b3a187c87173f31cbeee83a5e1f6dff17f288dca93345", - "sha256:667952739daafe9616db19fbedbdb87917eee253ac4f31d70c7587f7ab531b4e", - "sha256:69f251804e052fc46d29d0e7348cdc5fcbfc4861dc4a1ebedef7e78d241ad39e", - "sha256:6c2ba1e0c24d8fae8f2cf0aeb2fc0a2a7f69b6d20bd8d3749fd6b36ecef5edf0", - "sha256:6e85830eed5b5263ffa0c62428e43cb844296f3b4461f09e4bdb0d44ec190bc2", - "sha256:7571e8bbecc6ac066256f9de40365ff833553e2e0c0c004f4482facb131820ef", - "sha256:7781f4f70c9b0b39e1b129b10c7d43a4e0c91f90c60435e6da8288efc2b73438", - "sha256:7926d8d034e06b479797c199747dd774d5e86179f2ce44294423327a88d66ca7", - "sha256:7b80fbb0da3aebde102a37ef0138aeedff45997e22f8962e5f16ae1742852676", - "sha256:7fca4a92c8a7a73dee6946471bce6d1443d94155694b893b79e19ca2a540d86e", - "sha256:84c4315577f7cd511d6250ffd0f695c825efe729f4205c0340f7004eda51191f", - "sha256:8d9c5d13927d77af4fbe453953810db766f75401e764727e73a6ee4f82527b3e", - "sha256:9681516288e3dcf0aa7c26231178cc0be6cac9705cac06709f2353c5b406cfea", - "sha256:97df87e1a20deb75ac7d920c812e9326096aa00a9a4b6d07679b4f1f14b06c90", - "sha256:9bcd51eeca35a80e76dc5794a9dd7cb04b97f0e8af620d54711793bfc1fbba4b", - "sha256:9c6b0c1cafd96213a0327cf680acb39f70e452caf8e9a25aeb05316db9c07f89", - "sha256:a5f81e68aa62bc0cfca04f7b19eaa8f9c826b53fc82ab9e2121976dc74f131f3", - "sha256:a663b180b6669c400b4630a24cc776f23a992d38ce7ae72ede2a397ce6b0f170", - "sha256:a7b2e437fbd8fae5bc7716b9c7ff97aecc95f0b4d56e4ca08b3c8d8adcaadb84", - "sha256:a867d26f06bcd047ef716175b2696b315cb7571ccb951006d61ca80bbc356e9e", - "sha256:aa68a6cdbe1bc6793a9dbfc38302c11599bbe1837392ae9b1d238b9ef3dafcf1", - "sha256:ab31fdd643f162c467cfe6a86e9cb5f1965b632e5e65c072d90854ff486d02cf", - "sha256:ad4ef1c56b47b6b9024b939d503ab487231df1f722065a48f4fc61832130b90e", - "sha256:b92f9ca04b3e719d69b02dc4a69debb795af84cb7afd09c5eb5d54b4a1ae2191", - "sha256:bb21bac7783c1bf6f4bbe68b1e0ff0d20e7e7732cfb7995bc8d96e23aa90fc7b", - "sha256:bf4eeecc9e10f5403ec06138978235af79c9a79af494eb6b1d60a50b49ed2869", - "sha256:bfde025e2793a22efe8c21f807d276bd1d6a4bcc5ba6f19dbdfc4e7a12160909", - "sha256:c37faddc8acd826cfc5e2392531aba734b229741d3daec7f4c777a8f0d4993e5", - "sha256:c71965d1ced48bf97aab79fad56df82c566b4c498ffc09c2094605727c4b7e36", - "sha256:c9192925acc33e146864b8cf037e2ed32a91fdf7644ae875f5d46cd2ef086a5f", - "sha256:c9df1950fb92d49970cce38100d7e7293c84ed3606eaa16ea0b6bc27175bb667", - "sha256:cdfcf2e914e2ba653101157458afd0ad92a16731eeba9a611b5cbb3e7124e74b", - 
"sha256:d03a060ac1a08e10589c27d509bbdb35b65f2d7f3f8d81cf2fa199877c7bc58a", - "sha256:d20c3d1f31f14d6962a4e2f549c21d31e670b90f777ef4171be540fb7fb70f02", - "sha256:e4ee15b267d2dad3e8759ca441ad450c334f3733304c55210c2a44516e8d5530", - "sha256:e8ea055b3ea046c0f66217af65bc193bbbeca1c8661dc5fd42698db5795d2627", - "sha256:ebabdf1c76593a09ee18c1a06cd3022919861365219ea3aca0247ededf6facd6", - "sha256:ebc94fadbd4a3f4215993326a6a00e47d79889391f5659bf310f55fe5d9f581c", - "sha256:ed5ac02126f74d190fa2cc14a9eb2a5d9837d5863920fa472b02eb1595cdc925", - "sha256:f01e53575f27097d75d42de33b1b289c74b16891ce576d767ad8c48d17aeb5e0", - "sha256:f361296ca7054f0936b02525646b2731b32c8074ba6defab524b79b2b7eeac72", - "sha256:f9035695dadfb397bee9eeaf1dc7fbeda483bf7664a7397a629846800ce6e276", - "sha256:fcad7d5d2bbfeae1026b395036a8aa5abf67e8038ae7e6a25c7d0f88b10a8e6a", - "sha256:ff797320dcbff57caa6b2301c3913784a010e13b1f6cf4ab3f563f3c5e7919db" + "sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6", + "sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2", + "sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba", + "sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb", + "sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6", + "sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4", + "sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0", + "sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6", + "sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990", + "sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3", + "sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43", + "sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175", + "sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a", + "sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6", + "sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97", + "sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b", + "sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e", + "sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39", + "sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd", + "sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d", + "sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f", + "sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc", + "sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976", + "sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549", + "sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c", + "sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5", + "sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4", + "sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b", + "sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e", + "sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3", + "sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6", + "sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e", + "sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929", + "sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234", + 
"sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13", + "sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007", + "sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3", + "sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167", + "sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d", + "sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d", + "sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40", + "sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181", + "sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054", + "sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd", + "sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2", + "sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91", + "sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3", + "sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b", + "sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38", + "sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd", + "sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f", + "sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2", + "sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba", + "sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f", + "sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83", + "sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce", + "sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38", + "sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c", + "sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f", + "sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21", + "sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4", + "sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==7.6.2" + "version": "==7.6.3" + }, + "idna": { + "hashes": [ + "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", + "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" + ], + "markers": "python_version >= '3.6'", + "version": "==3.10" }, "iniconfig": { "hashes": [ @@ -1895,6 +2021,31 @@ "index": "pypi", "markers": "python_version >= '3.8'", "version": "==8.3.3" + }, + "requests": { + "hashes": [ + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + ], + "markers": "python_version >= '3.8'", + "version": "==2.32.3" + }, + "requests-mock": { + "hashes": [ + "sha256:b1e37054004cdd5e56c84454cc7df12b25f90f382159087f4b6915aaeef39563", + "sha256:e9e12e333b525156e82a3c852f22016b9158220d2f47454de9cae8a77d371401" + ], + "index": "pypi", + "markers": "python_version >= '3.5'", + "version": "==1.12.1" + }, + "urllib3": { + "hashes": [ + "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", + "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9" + ], + "markers": "python_version >= '3.10'", + "version": "==2.2.3" } } } diff --git a/dbrepo-search-service/app.py b/dbrepo-search-service/app.py index 
56d2c711c0b7ac0218a1bf7d65f86888e18266d4..7566178526d97cd7e21a288edd1d9881e49ad9cc 100644 --- a/dbrepo-search-service/app.py +++ b/dbrepo-search-service/app.py @@ -2,6 +2,7 @@ import math import os import logging from ast import literal_eval +from json import dumps from typing import List, Any import requests @@ -10,6 +11,7 @@ from flasgger import LazyJSONEncoder, Swagger, swag_from from flask import Flask, request from flask_cors import CORS from flask_httpauth import HTTPTokenAuth, HTTPBasicAuth, MultiAuth +from jwt.exceptions import JWTDecodeError from opensearchpy import TransportError, NotFoundError from prometheus_flask_exporter import PrometheusMetrics from pydantic import ValidationError @@ -206,9 +208,6 @@ app.config["OPENSEARCH_PASSWORD"] = os.getenv('OPENSEARCH_PASSWORD', 'admin') app.json_encoder = LazyJSONEncoder -available_types = literal_eval( - os.getenv("COLLECTION", "['database','table','column','identifier','unit','concept','user','view']")) - @token_auth.verify_token def verify_token(token: str): @@ -217,7 +216,7 @@ def verify_token(token: str): try: client = KeycloakClient() return client.verify_jwt(access_token=token) - except AssertionError: + except JWTDecodeError: return False @@ -268,8 +267,7 @@ def general_filter(index, results): "view": ["id", "name", "creator", " created"], } if index not in important_keys.keys(): - error_msg = "the keys to be returned to the user for your index aren't specified in the important Keys dict" - raise KeyError(error_msg) + raise KeyError(f"Failed to find index {index} in: {important_keys.keys()}") for result in results: result_keys_copy = tuple(result.keys()) for key in result_keys_copy: @@ -294,35 +292,37 @@ def get_index(index: str): :return: list of the results """ logging.info(f'Searching for index: {index}') - if index not in available_types: - return ApiError(status='NOT_FOUND', message='Failed to find index', - code='search.index.missing').model_dump(), 404 results = OpenSearchClient().query_index_by_term_opensearch("*", "contains") - results = general_filter(index, results) - - results_per_page = min(request.args.get("results_per_page", 50, type=int), 500) - max_pages = math.ceil(len(results) / results_per_page) - page = min(request.args.get("page", 1, type=int), max_pages) - results = results[(results_per_page * (page - 1)): (results_per_page * page)] - return dict({"results": results}), 200 + try: + results = general_filter(index, results) + + results_per_page = min(request.args.get("results_per_page", 50, type=int), 500) + max_pages = math.ceil(len(results) / results_per_page) + page = min(request.args.get("page", 1, type=int), max_pages) + results = results[(results_per_page * (page - 1)): (results_per_page * page)] + return dict({"results": results}), 200 + except KeyError: + return ApiError(status='NOT_FOUND', message=f'Failed to find index: {index}', + code='search.index.missing').model_dump(), 404 -@app.route("/api/search/<string:type>/fields", methods=["GET"], endpoint="search_get_index_fields") +@app.route("/api/search/<string:field_type>/fields", methods=["GET"], endpoint="search_get_index_fields") @metrics.gauge(name='dbrepo_search_type_list', description='Time needed to list search types') @swag_from("os-yml/get_fields.yml") -def get_fields(type: str): +def get_fields(field_type: str): """ returns a list of attributes of the data for a specific index.
- :param type: The search type + :param field_type: The search type :return: """ - logging.info(f'Searching in index database for type: {type}') - if type not in available_types: - return ApiError(status='NOT_FOUND', message='Failed to find type', + logging.info(f'Searching in index database for type: {field_type}') + try: + fields = OpenSearchClient().get_fields_for_index(field_type) + logging.debug(f'get fields for field_type {field_type} resulted in {len(fields)} field(s)') + return fields, 200 + except NotFoundError: + return ApiError(status='NOT_FOUND', message=f'Failed to find fields for search type {field_type}', code='search.type.missing').model_dump(), 404 - fields = OpenSearchClient().get_fields_for_index(type) - logging.debug(f'get fields for type {type} resulted in {len(fields)} field(s)') - return fields, 200 @app.route("/api/search", methods=["GET"], endpoint="search_fuzzy_search") @@ -344,10 +344,10 @@ def get_fuzzy_search(): return dict({"results": results}), 200 -@app.route("/api/search/<string:type>", methods=["POST"], endpoint="search_post_general_search") +@app.route("/api/search/<string:field_type>", methods=["POST"], endpoint="search_post_general_search") @metrics.gauge(name='dbrepo_search_type', description='Time needed to search by type') @swag_from("os-yml/post_general_search.yml") -def post_general_search(type): +def post_general_search(field_type): """ Main endpoint for fuzzy searching. :return: @@ -356,11 +356,7 @@ def post_general_search(type): return ApiError(status='UNSUPPORTED_MEDIA_TYPE', message='Content type needs to be application/json', code='search.general.media').model_dump(), 415 req_body = request.json - logging.info(f'Searching in index database for type: {type}') - logging.debug(f"search request body: {req_body}") - if type is not None and type not in available_types: - return ApiError(status='NOT_FOUND', message=f'Type {type} is not in collection: {available_types}', - code='search.general.missing').model_dump(), 404 + logging.info(f'Searching in index database for type: {field_type}') t1 = request.args.get("t1") if not str(t1).isdigit(): t1 = None @@ -370,9 +366,9 @@ def post_general_search(type): if t1 is not None and t2 is not None and "unit.uri" in req_body and "concept.uri" in req_body: response = OpenSearchClient().unit_independent_search(t1, t2, req_body) else: - response = OpenSearchClient().general_search(type, req_body) + response = OpenSearchClient().general_search(field_type, req_body) # filter by type - if type == 'table': + if field_type == 'table': tmp = [] for database in response: if database["tables"] is not None: @@ -380,7 +376,7 @@ def post_general_search(type): table["is_public"] = database["is_public"] tmp.append(table) response = tmp - if type == 'identifier': + if field_type == 'identifier': tmp = [] for database in response: if database["identifiers"] is not None: @@ -398,30 +394,30 @@ def post_general_search(type): if 'identifier' in view: tmp.append(view['identifier']) response = tmp - elif type == 'column': + elif field_type == 'column': response = [x for xs in response for x in xs["tables"]] for table in response: for column in table["columns"]: column["table_id"] = table["id"] column["database_id"] = table["database_id"] response = [x for xs in response for x in xs["columns"]] - elif type == 'concept': + elif field_type == 'concept': tmp = [] tables = [x for xs in response for x in xs["tables"]] for column in [x for xs in tables for x in xs["columns"]]: if 'concept' in column and column["concept"] is not None: 
tmp.append(column["concept"]) response = tmp - elif type == 'unit': + elif field_type == 'unit': tmp = [] tables = [x for xs in response for x in xs["tables"]] for column in [x for xs in tables for x in xs["columns"]]: if 'unit' in column and column["unit"] is not None: tmp.append(column["unit"]) response = tmp - elif type == 'view': + elif field_type == 'view': response = [x for xs in response for x in xs["views"]] - return dict({'results': response, 'type': type}), 200 + return dict({'results': response, 'type': field_type}), 200 @app.route("/api/search/database/<int:database_id>", methods=["PUT"], endpoint="search_put_database") @@ -436,16 +432,9 @@ def update_database(database_id: int) -> Database | ApiError: logging.error(f"Failed to validate: {e}") return ApiError(status='BAD_REQUEST', message=f'Malformed payload: {e}', code='search.general.missing').model_dump(), 400 - try: - database = OpenSearchClient().update_database(database_id, payload) - logging.info(f"Updated database with id : {database_id}") - return database.model_dump(), 202 - except NotFoundError: - return ApiError(status='NOT_FOUND', message='Failed to find database', - code='search.database.missing').model_dump(), 404 - except TransportError: - return ApiError(status='BAD_REQUEST', message='Failed to update database', - code='search.database.invalid').model_dump(), 400 + database = OpenSearchClient().update_database(database_id, payload) + logging.info(f"Updated database with id : {database_id}") + return database.model_dump(), 202 @app.route("/api/search/database/<int:database_id>", methods=["DELETE"], endpoint="database_delete_database") @@ -455,7 +444,7 @@ def update_database(database_id: int) -> Database | ApiError: def delete_database(database_id: int): try: OpenSearchClient().delete_database(database_id) - return None, 202 + return dumps({}), 202 except NotFoundError: return ApiError(status='NOT_FOUND', message='Failed to find database', code='search.database.missing').model_dump(), 404 diff --git a/dbrepo-search-service/init/Dockerfile b/dbrepo-search-service/init/Dockerfile index ebde913dbd9e013e9591fa4135e919a78fbbbd26..b0704a50470e02dde96effc7cf5e9fc6298cf428 100644 --- a/dbrepo-search-service/init/Dockerfile +++ b/dbrepo-search-service/init/Dockerfile @@ -1,6 +1,7 @@ FROM python:3.11-alpine +LABEL org.opencontainers.image.authors="martin.weise@tuwien.ac.at" -RUN apk add bash curl +RUN apk add --no-cache curl bash jq WORKDIR /home/alpine @@ -16,6 +17,8 @@ USER 1001 WORKDIR /app +COPY --chown=1001 ./clients ./clients +COPY --chown=1001 ./omlib ./omlib COPY --chown=1001 ./app.py ./app.py COPY --chown=1001 ./database.json ./database.json diff --git a/dbrepo-search-service/init/Pipfile b/dbrepo-search-service/init/Pipfile index 9647c2ca4e1866754e9b0d695023fb6ba252a61b..a6a48b1cfb4d5f8d10e398ca42e636abf6191eff 100644 --- a/dbrepo-search-service/init/Pipfile +++ b/dbrepo-search-service/init/Pipfile @@ -9,7 +9,8 @@ opensearch-py = "~=2.2" python-dotenv = "~=1.0" testcontainers-opensearch = "*" pytest = "*" -dbrepo = {path = "./lib/dbrepo-1.4.6.tar.gz"} +dbrepo = {path = "./lib/dbrepo-1.4.7.tar.gz"} +rdflib = "*" [dev-packages] coverage = "*" diff --git a/dbrepo-search-service/init/Pipfile.lock b/dbrepo-search-service/init/Pipfile.lock index 01e860ce1156a1ad10e66eb1341ebce320578d7a..75bdbc29f92ce91ebd3bca121e904cd2bc05174b 100644 --- a/dbrepo-search-service/init/Pipfile.lock +++ b/dbrepo-search-service/init/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": 
"f0cf0656eb5669fc13fa9bfcbff42b1d69e88a7926cdb61b343598ede594b38f" + "sha256": "17c83557efb9ffbe3c93e34a0df4e794f27b0fb6658915e2c68f4a9a3ef1e09b" }, "pipfile-spec": 6, "requires": { @@ -282,11 +282,9 @@ }, "dbrepo": { "hashes": [ - "sha256:273c77e6b4eb5c32cfb3b852064a2dd8a09c107e385b0a539ee87640ef3d4b75", - "sha256:409297ef9bb4687eb66c64ff185fa1d224aa393be5132d0a14d628b200de9ffa" + "sha256:84607677b0826bb9b2fa120aacdf56d16c8d9ae423f435b2bd2c22b1c965a33c" ], - "markers": "python_version >= '3.11'", - "path": "./lib/dbrepo-1.4.6.tar.gz" + "path": "./lib/dbrepo-1.4.7.tar.gz" }, "docker": { "hashes": [ @@ -410,6 +408,13 @@ "markers": "python_version >= '3.7'", "version": "==2.0.0" }, + "isodate": { + "hashes": [ + "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96", + "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9" + ], + "version": "==0.6.1" + }, "itsdangerous": { "hashes": [ "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", @@ -938,6 +943,14 @@ "markers": "python_version >= '3.8'", "version": "==2.23.4" }, + "pyparsing": { + "hashes": [ + "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", + "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c" + ], + "markers": "python_version >= '3.9'", + "version": "==3.2.0" + }, "pytest": { "hashes": [ "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181", @@ -952,7 +965,7 @@ "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==2.9.0.post0" }, "python-dotenv": { @@ -971,6 +984,15 @@ ], "version": "==2024.2" }, + "rdflib": { + "hashes": [ + "sha256:0438920912a642c866a513de6fe8a0001bd86ef975057d6962c79ce4771687cd", + "sha256:9995eb8569428059b8c1affd26b25eac510d64f5043d9ce8c84e0d0036e995ae" + ], + "index": "pypi", + "markers": "python_full_version >= '3.8.1' and python_full_version < '4.0.0'", + "version": "==7.0.0" + }, "requests": { "hashes": [ "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", @@ -984,7 +1006,7 @@ "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", "version": "==1.16.0" }, "testcontainers-core": { @@ -1004,11 +1026,11 @@ }, "tinydb": { "hashes": [ - "sha256:09c4c6a239da9be676b948f1f28074cffd1cf08e7af920c1df50424cc8bee8d6", - "sha256:1c7c507ef520c789f94f1f5786f0722a98a59a85031a2e81e2accc701721f07f" + "sha256:f7dfc39b8d7fda7a1ca62a8dbb449ffd340a117c1206b68c50b1a481fb95181d", + "sha256:f97030ee5cbc91eeadd1d7af07ab0e48ceb04aa63d4a983adbaca4cba16e86c3" ], "markers": "python_version >= '3.8' and python_version < '4.0'", - "version": "==4.8.1" + "version": "==4.8.2" }, "tuspy": { "hashes": [ @@ -1128,178 +1150,178 @@ }, "yarl": { "hashes": [ - "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656", - "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7", - "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9", - "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685", - 
"sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf", - "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb", - "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5", - "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77", - "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496", - "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a", - "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60", - "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128", - "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d", - "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61", - "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758", - "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5", - "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f", - "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0", - "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e", - "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814", - "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc", - "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf", - "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117", - "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d", - "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6", - "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218", - "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9", - "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf", - "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4", - "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef", - "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a", - "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4", - "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751", - "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253", - "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c", - "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42", - "sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb", - "sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7", - "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e", - "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b", - "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631", - "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29", - "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234", - "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530", - "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0", - "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a", - "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2", - "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e", - "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1", - "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec", - 
"sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f", - "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752", - "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9", - "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452", - "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef", - "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6", - "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c", - "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1", - "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246", - "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3", - "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb", - "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb", - "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07", - "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509", - "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326", - "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f", - "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6", - "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661", - "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40", - "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2", - "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15", - "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f", - "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844", - "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a", - "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b", - "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6", - "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e", - "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced", - "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19", - "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba", - "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95", - "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c", - "sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2", - "sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297", - "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891", - "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b", - "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d", - "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9", - "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22", - "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4", - "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba", - "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738", - "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0", - "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53", - "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f", - "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942", - 
"sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7", - "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd" + "sha256:0545de8c688fbbf3088f9e8b801157923be4bf8e7b03e97c2ecd4dfa39e48e0e", + "sha256:076b1ed2ac819933895b1a000904f62d615fe4533a5cf3e052ff9a1da560575c", + "sha256:0afad2cd484908f472c8fe2e8ef499facee54a0a6978be0e0cff67b1254fd747", + "sha256:0ccaa1bc98751fbfcf53dc8dfdb90d96e98838010fc254180dd6707a6e8bb179", + "sha256:0d3105efab7c5c091609abacad33afff33bdff0035bece164c98bcf5a85ef90a", + "sha256:0e1af74a9529a1137c67c887ed9cde62cff53aa4d84a3adbec329f9ec47a3936", + "sha256:136f9db0f53c0206db38b8cd0c985c78ded5fd596c9a86ce5c0b92afb91c3a19", + "sha256:156ececdf636143f508770bf8a3a0498de64da5abd890c7dbb42ca9e3b6c05b8", + "sha256:15c87339490100c63472a76d87fe7097a0835c705eb5ae79fd96e343473629ed", + "sha256:1695497bb2a02a6de60064c9f077a4ae9c25c73624e0d43e3aa9d16d983073c2", + "sha256:173563f3696124372831007e3d4b9821746964a95968628f7075d9231ac6bb33", + "sha256:173866d9f7409c0fb514cf6e78952e65816600cb888c68b37b41147349fe0057", + "sha256:23ec1d3c31882b2a8a69c801ef58ebf7bae2553211ebbddf04235be275a38548", + "sha256:243fbbbf003754fe41b5bdf10ce1e7f80bcc70732b5b54222c124d6b4c2ab31c", + "sha256:28c6cf1d92edf936ceedc7afa61b07e9d78a27b15244aa46bbcd534c7458ee1b", + "sha256:2aa738e0282be54eede1e3f36b81f1e46aee7ec7602aa563e81e0e8d7b67963f", + "sha256:2cf441c4b6e538ba0d2591574f95d3fdd33f1efafa864faa077d9636ecc0c4e9", + "sha256:30c3ff305f6e06650a761c4393666f77384f1cc6c5c0251965d6bfa5fbc88f7f", + "sha256:31561a5b4d8dbef1559b3600b045607cf804bae040f64b5f5bca77da38084a8a", + "sha256:32b66be100ac5739065496c74c4b7f3015cef792c3174982809274d7e51b3e04", + "sha256:3433da95b51a75692dcf6cc8117a31410447c75a9a8187888f02ad45c0a86c50", + "sha256:34a2d76a1984cac04ff8b1bfc939ec9dc0914821264d4a9c8fd0ed6aa8d4cfd2", + "sha256:353665775be69bbfc6d54c8d134bfc533e332149faeddd631b0bc79df0897f46", + "sha256:38d0124fa992dbacd0c48b1b755d3ee0a9f924f427f95b0ef376556a24debf01", + "sha256:3c56ec1eacd0a5d35b8a29f468659c47f4fe61b2cab948ca756c39b7617f0aa5", + "sha256:3db817b4e95eb05c362e3b45dafe7144b18603e1211f4a5b36eb9522ecc62bcf", + "sha256:3e52474256a7db9dcf3c5f4ca0b300fdea6c21cca0148c8891d03a025649d935", + "sha256:416f2e3beaeae81e2f7a45dc711258be5bdc79c940a9a270b266c0bec038fb84", + "sha256:435aca062444a7f0c884861d2e3ea79883bd1cd19d0a381928b69ae1b85bc51d", + "sha256:4388c72174868884f76affcdd3656544c426407e0043c89b684d22fb265e04a5", + "sha256:43ebdcc120e2ca679dba01a779333a8ea76b50547b55e812b8b92818d604662c", + "sha256:458c0c65802d816a6b955cf3603186de79e8fdb46d4f19abaec4ef0a906f50a7", + "sha256:533a28754e7f7439f217550a497bb026c54072dbe16402b183fdbca2431935a9", + "sha256:553dad9af802a9ad1a6525e7528152a015b85fb8dbf764ebfc755c695f488367", + "sha256:5838f2b79dc8f96fdc44077c9e4e2e33d7089b10788464609df788eb97d03aad", + "sha256:5b48388ded01f6f2429a8c55012bdbd1c2a0c3735b3e73e221649e524c34a58d", + "sha256:5bc0df728e4def5e15a754521e8882ba5a5121bd6b5a3a0ff7efda5d6558ab3d", + "sha256:63eab904f8630aed5a68f2d0aeab565dcfc595dc1bf0b91b71d9ddd43dea3aea", + "sha256:66f629632220a4e7858b58e4857927dd01a850a4cef2fb4044c8662787165cf7", + "sha256:670eb11325ed3a6209339974b276811867defe52f4188fe18dc49855774fa9cf", + "sha256:69d5856d526802cbda768d3e6246cd0d77450fa2a4bc2ea0ea14f0d972c2894b", + "sha256:6e840553c9c494a35e449a987ca2c4f8372668ee954a03a9a9685075228e5036", + "sha256:711bdfae4e699a6d4f371137cbe9e740dc958530cb920eb6f43ff9551e17cfbc", + "sha256:74abb8709ea54cc483c4fb57fb17bb66f8e0f04438cff6ded322074dbd17c7ec", + 
"sha256:75119badf45f7183e10e348edff5a76a94dc19ba9287d94001ff05e81475967b", + "sha256:766dcc00b943c089349d4060b935c76281f6be225e39994c2ccec3a2a36ad627", + "sha256:78e6fdc976ec966b99e4daa3812fac0274cc28cd2b24b0d92462e2e5ef90d368", + "sha256:81dadafb3aa124f86dc267a2168f71bbd2bfb163663661ab0038f6e4b8edb810", + "sha256:82d5161e8cb8f36ec778fd7ac4d740415d84030f5b9ef8fe4da54784a1f46c94", + "sha256:833547179c31f9bec39b49601d282d6f0ea1633620701288934c5f66d88c3e50", + "sha256:856b7f1a7b98a8c31823285786bd566cf06226ac4f38b3ef462f593c608a9bd6", + "sha256:8657d3f37f781d987037f9cc20bbc8b40425fa14380c87da0cb8dfce7c92d0fb", + "sha256:93bed8a8084544c6efe8856c362af08a23e959340c87a95687fdbe9c9f280c8b", + "sha256:954dde77c404084c2544e572f342aef384240b3e434e06cecc71597e95fd1ce7", + "sha256:98f68df80ec6ca3015186b2677c208c096d646ef37bbf8b49764ab4a38183931", + "sha256:99e12d2bf587b44deb74e0d6170fec37adb489964dbca656ec41a7cd8f2ff178", + "sha256:9a13a07532e8e1c4a5a3afff0ca4553da23409fad65def1b71186fb867eeae8d", + "sha256:9c1e3ff4b89cdd2e1a24c214f141e848b9e0451f08d7d4963cb4108d4d798f1f", + "sha256:9ce2e0f6123a60bd1a7f5ae3b2c49b240c12c132847f17aa990b841a417598a2", + "sha256:9fcda20b2de7042cc35cf911702fa3d8311bd40055a14446c1e62403684afdc5", + "sha256:a32d58f4b521bb98b2c0aa9da407f8bd57ca81f34362bcb090e4a79e9924fefc", + "sha256:a39c36f4218a5bb668b4f06874d676d35a035ee668e6e7e3538835c703634b84", + "sha256:a5cafb02cf097a82d74403f7e0b6b9df3ffbfe8edf9415ea816314711764a27b", + "sha256:a7cf963a357c5f00cb55b1955df8bbe68d2f2f65de065160a1c26b85a1e44172", + "sha256:a880372e2e5dbb9258a4e8ff43f13888039abb9dd6d515f28611c54361bc5644", + "sha256:ace4cad790f3bf872c082366c9edd7f8f8f77afe3992b134cfc810332206884f", + "sha256:af8ff8d7dc07ce873f643de6dfbcd45dc3db2c87462e5c387267197f59e6d776", + "sha256:b47a6000a7e833ebfe5886b56a31cb2ff12120b1efd4578a6fcc38df16cc77bd", + "sha256:b71862a652f50babab4a43a487f157d26b464b1dedbcc0afda02fd64f3809d04", + "sha256:b7f227ca6db5a9fda0a2b935a2ea34a7267589ffc63c8045f0e4edb8d8dcf956", + "sha256:bc8936d06cd53fddd4892677d65e98af514c8d78c79864f418bbf78a4a2edde4", + "sha256:bed1b5dbf90bad3bfc19439258c97873eab453c71d8b6869c136346acfe497e7", + "sha256:c45817e3e6972109d1a2c65091504a537e257bc3c885b4e78a95baa96df6a3f8", + "sha256:c68e820879ff39992c7f148113b46efcd6ec765a4865581f2902b3c43a5f4bbb", + "sha256:c77494a2f2282d9bbbbcab7c227a4d1b4bb829875c96251f66fb5f3bae4fb053", + "sha256:c998d0558805860503bc3a595994895ca0f7835e00668dadc673bbf7f5fbfcbe", + "sha256:ccad2800dfdff34392448c4bf834be124f10a5bc102f254521d931c1c53c455a", + "sha256:cd126498171f752dd85737ab1544329a4520c53eed3997f9b08aefbafb1cc53b", + "sha256:ce44217ad99ffad8027d2fde0269ae368c86db66ea0571c62a000798d69401fb", + "sha256:d1ac2bc069f4a458634c26b101c2341b18da85cb96afe0015990507efec2e417", + "sha256:d417a4f6943112fae3924bae2af7112562285848d9bcee737fc4ff7cbd450e6c", + "sha256:d538df442c0d9665664ab6dd5fccd0110fa3b364914f9c85b3ef9b7b2e157980", + "sha256:ded1b1803151dd0f20a8945508786d57c2f97a50289b16f2629f85433e546d47", + "sha256:e2e93b88ecc8f74074012e18d679fb2e9c746f2a56f79cd5e2b1afcf2a8a786b", + "sha256:e4ca3b9f370f218cc2a0309542cab8d0acdfd66667e7c37d04d617012485f904", + "sha256:e4ee8b8639070ff246ad3649294336b06db37a94bdea0d09ea491603e0be73b8", + "sha256:e52f77a0cd246086afde8815039f3e16f8d2be51786c0a39b57104c563c5cbb0", + "sha256:eaea112aed589131f73d50d570a6864728bd7c0c66ef6c9154ed7b59f24da611", + "sha256:ed20a4bdc635f36cb19e630bfc644181dd075839b6fc84cac51c0f381ac472e2", + "sha256:eedc3f247ee7b3808ea07205f3e7d7879bc19ad3e6222195cd5fbf9988853e4d", + 
"sha256:f0e1844ad47c7bd5d6fa784f1d4accc5f4168b48999303a868fe0f8597bde715", + "sha256:f4fe99ce44128c71233d0d72152db31ca119711dfc5f2c82385ad611d8d7f897", + "sha256:f8cfd847e6b9ecf9f2f2531c8427035f291ec286c0a4944b0a9fce58c6446046", + "sha256:f9ca0e6ce7774dc7830dc0cc4bb6b3eec769db667f230e7c770a628c1aa5681b", + "sha256:fa2bea05ff0a8fb4d8124498e00e02398f06d23cdadd0fe027d84a3f7afde31e", + "sha256:fbbb63bed5fcd70cd3dd23a087cd78e4675fb5a2963b8af53f945cbbca79ae16", + "sha256:fbda058a9a68bec347962595f50546a8a4a34fd7b0654a7b9697917dc2bf810d", + "sha256:ffd591e22b22f9cb48e472529db6a47203c41c2c5911ff0a52e85723196c0d75" ], "markers": "python_version >= '3.8'", - "version": "==1.15.0" + "version": "==1.15.2" } }, "develop": { "coverage": { "hashes": [ - "sha256:078a87519057dacb5d77e333f740708ec2a8f768655f1db07f8dfd28d7a005f0", - "sha256:087932079c065d7b8ebadd3a0160656c55954144af6439886c8bcf78bbbcde7f", - "sha256:0bbae11c138585c89fb4e991faefb174a80112e1a7557d507aaa07675c62e66b", - "sha256:0ff2ef83d6d0b527b5c9dad73819b24a2f76fdddcfd6c4e7a4d7e73ecb0656b4", - "sha256:12179eb0575b8900912711688e45474f04ab3934aaa7b624dea7b3c511ecc90f", - "sha256:1e5e92e3e84a8718d2de36cd8387459cba9a4508337b8c5f450ce42b87a9e760", - "sha256:2186369a654a15628e9c1c9921409a6b3eda833e4b91f3ca2a7d9f77abb4987c", - "sha256:21c0ea0d4db8a36b275cb6fb2437a3715697a4ba3cb7b918d3525cc75f726304", - "sha256:24500f4b0e03aab60ce575c85365beab64b44d4db837021e08339f61d1fbfe52", - "sha256:2b636a301e53964550e2f3094484fa5a96e699db318d65398cfba438c5c92171", - "sha256:343056c5e0737487a5291f5691f4dfeb25b3e3c8699b4d36b92bb0e586219d14", - "sha256:35a51598f29b2a19e26d0908bd196f771a9b1c5d9a07bf20be0adf28f1ad4f77", - "sha256:39d3b964abfe1519b9d313ab28abf1d02faea26cd14b27f5283849bf59479ff5", - "sha256:3ec528ae69f0a139690fad6deac8a7d33629fa61ccce693fdd07ddf7e9931fba", - "sha256:47ccb6e99a3031ffbbd6e7cc041e70770b4fe405370c66a54dbf26a500ded80b", - "sha256:4eea60c79d36a8f39475b1af887663bc3ae4f31289cd216f514ce18d5938df40", - "sha256:536f77f2bf5797983652d1d55f1a7272a29afcc89e3ae51caa99b2db4e89d658", - "sha256:5ed69befa9a9fc796fe015a7040c9398722d6b97df73a6b608e9e275fa0932b0", - "sha256:62ab4231c01e156ece1b3a187c87173f31cbeee83a5e1f6dff17f288dca93345", - "sha256:667952739daafe9616db19fbedbdb87917eee253ac4f31d70c7587f7ab531b4e", - "sha256:69f251804e052fc46d29d0e7348cdc5fcbfc4861dc4a1ebedef7e78d241ad39e", - "sha256:6c2ba1e0c24d8fae8f2cf0aeb2fc0a2a7f69b6d20bd8d3749fd6b36ecef5edf0", - "sha256:6e85830eed5b5263ffa0c62428e43cb844296f3b4461f09e4bdb0d44ec190bc2", - "sha256:7571e8bbecc6ac066256f9de40365ff833553e2e0c0c004f4482facb131820ef", - "sha256:7781f4f70c9b0b39e1b129b10c7d43a4e0c91f90c60435e6da8288efc2b73438", - "sha256:7926d8d034e06b479797c199747dd774d5e86179f2ce44294423327a88d66ca7", - "sha256:7b80fbb0da3aebde102a37ef0138aeedff45997e22f8962e5f16ae1742852676", - "sha256:7fca4a92c8a7a73dee6946471bce6d1443d94155694b893b79e19ca2a540d86e", - "sha256:84c4315577f7cd511d6250ffd0f695c825efe729f4205c0340f7004eda51191f", - "sha256:8d9c5d13927d77af4fbe453953810db766f75401e764727e73a6ee4f82527b3e", - "sha256:9681516288e3dcf0aa7c26231178cc0be6cac9705cac06709f2353c5b406cfea", - "sha256:97df87e1a20deb75ac7d920c812e9326096aa00a9a4b6d07679b4f1f14b06c90", - "sha256:9bcd51eeca35a80e76dc5794a9dd7cb04b97f0e8af620d54711793bfc1fbba4b", - "sha256:9c6b0c1cafd96213a0327cf680acb39f70e452caf8e9a25aeb05316db9c07f89", - "sha256:a5f81e68aa62bc0cfca04f7b19eaa8f9c826b53fc82ab9e2121976dc74f131f3", - "sha256:a663b180b6669c400b4630a24cc776f23a992d38ce7ae72ede2a397ce6b0f170", - 
"sha256:a7b2e437fbd8fae5bc7716b9c7ff97aecc95f0b4d56e4ca08b3c8d8adcaadb84", - "sha256:a867d26f06bcd047ef716175b2696b315cb7571ccb951006d61ca80bbc356e9e", - "sha256:aa68a6cdbe1bc6793a9dbfc38302c11599bbe1837392ae9b1d238b9ef3dafcf1", - "sha256:ab31fdd643f162c467cfe6a86e9cb5f1965b632e5e65c072d90854ff486d02cf", - "sha256:ad4ef1c56b47b6b9024b939d503ab487231df1f722065a48f4fc61832130b90e", - "sha256:b92f9ca04b3e719d69b02dc4a69debb795af84cb7afd09c5eb5d54b4a1ae2191", - "sha256:bb21bac7783c1bf6f4bbe68b1e0ff0d20e7e7732cfb7995bc8d96e23aa90fc7b", - "sha256:bf4eeecc9e10f5403ec06138978235af79c9a79af494eb6b1d60a50b49ed2869", - "sha256:bfde025e2793a22efe8c21f807d276bd1d6a4bcc5ba6f19dbdfc4e7a12160909", - "sha256:c37faddc8acd826cfc5e2392531aba734b229741d3daec7f4c777a8f0d4993e5", - "sha256:c71965d1ced48bf97aab79fad56df82c566b4c498ffc09c2094605727c4b7e36", - "sha256:c9192925acc33e146864b8cf037e2ed32a91fdf7644ae875f5d46cd2ef086a5f", - "sha256:c9df1950fb92d49970cce38100d7e7293c84ed3606eaa16ea0b6bc27175bb667", - "sha256:cdfcf2e914e2ba653101157458afd0ad92a16731eeba9a611b5cbb3e7124e74b", - "sha256:d03a060ac1a08e10589c27d509bbdb35b65f2d7f3f8d81cf2fa199877c7bc58a", - "sha256:d20c3d1f31f14d6962a4e2f549c21d31e670b90f777ef4171be540fb7fb70f02", - "sha256:e4ee15b267d2dad3e8759ca441ad450c334f3733304c55210c2a44516e8d5530", - "sha256:e8ea055b3ea046c0f66217af65bc193bbbeca1c8661dc5fd42698db5795d2627", - "sha256:ebabdf1c76593a09ee18c1a06cd3022919861365219ea3aca0247ededf6facd6", - "sha256:ebc94fadbd4a3f4215993326a6a00e47d79889391f5659bf310f55fe5d9f581c", - "sha256:ed5ac02126f74d190fa2cc14a9eb2a5d9837d5863920fa472b02eb1595cdc925", - "sha256:f01e53575f27097d75d42de33b1b289c74b16891ce576d767ad8c48d17aeb5e0", - "sha256:f361296ca7054f0936b02525646b2731b32c8074ba6defab524b79b2b7eeac72", - "sha256:f9035695dadfb397bee9eeaf1dc7fbeda483bf7664a7397a629846800ce6e276", - "sha256:fcad7d5d2bbfeae1026b395036a8aa5abf67e8038ae7e6a25c7d0f88b10a8e6a", - "sha256:ff797320dcbff57caa6b2301c3913784a010e13b1f6cf4ab3f563f3c5e7919db" + "sha256:04f2189716e85ec9192df307f7c255f90e78b6e9863a03223c3b998d24a3c6c6", + "sha256:0c6c0f4d53ef603397fc894a895b960ecd7d44c727df42a8d500031716d4e8d2", + "sha256:0ca37993206402c6c35dc717f90d4c8f53568a8b80f0bf1a1b2b334f4d488fba", + "sha256:12f9515d875859faedb4144fd38694a761cd2a61ef9603bf887b13956d0bbfbb", + "sha256:1990b1f4e2c402beb317840030bb9f1b6a363f86e14e21b4212e618acdfce7f6", + "sha256:2341a78ae3a5ed454d524206a3fcb3cec408c2a0c7c2752cd78b606a2ff15af4", + "sha256:23bb63ae3f4c645d2d82fa22697364b0046fbafb6261b258a58587441c5f7bd0", + "sha256:27bd5f18d8f2879e45724b0ce74f61811639a846ff0e5c0395b7818fae87aec6", + "sha256:2dc7d6b380ca76f5e817ac9eef0c3686e7834c8346bef30b041a4ad286449990", + "sha256:331b200ad03dbaa44151d74daeb7da2cf382db424ab923574f6ecca7d3b30de3", + "sha256:365defc257c687ce3e7d275f39738dcd230777424117a6c76043459db131dd43", + "sha256:37be7b5ea3ff5b7c4a9db16074dc94523b5f10dd1f3b362a827af66a55198175", + "sha256:3c2e6fa98032fec8282f6b27e3f3986c6e05702828380618776ad794e938f53a", + "sha256:40e8b1983080439d4802d80b951f4a93d991ef3261f69e81095a66f86cf3c3c6", + "sha256:43517e1f6b19f610a93d8227e47790722c8bf7422e46b365e0469fc3d3563d97", + "sha256:43b32a06c47539fe275106b376658638b418c7cfdfff0e0259fbf877e845f14b", + "sha256:43d6a66e33b1455b98fc7312b124296dad97a2e191c80320587234a77b1b736e", + "sha256:4c59d6a4a4633fad297f943c03d0d2569867bd5372eb5684befdff8df8522e39", + "sha256:52ac29cc72ee7e25ace7807249638f94c9b6a862c56b1df015d2b2e388e51dbd", + "sha256:54356a76b67cf8a3085818026bb556545ebb8353951923b88292556dfa9f812d", + 
"sha256:583049c63106c0555e3ae3931edab5669668bbef84c15861421b94e121878d3f", + "sha256:6d99198203f0b9cb0b5d1c0393859555bc26b548223a769baf7e321a627ed4fc", + "sha256:6da42bbcec130b188169107ecb6ee7bd7b4c849d24c9370a0c884cf728d8e976", + "sha256:6e484e479860e00da1f005cd19d1c5d4a813324e5951319ac3f3eefb497cc549", + "sha256:70a6756ce66cd6fe8486c775b30889f0dc4cb20c157aa8c35b45fd7868255c5c", + "sha256:70d24936ca6c15a3bbc91ee9c7fc661132c6f4c9d42a23b31b6686c05073bde5", + "sha256:71967c35828c9ff94e8c7d405469a1fb68257f686bca7c1ed85ed34e7c2529c4", + "sha256:79644f68a6ff23b251cae1c82b01a0b51bc40c8468ca9585c6c4b1aeee570e0b", + "sha256:87cd2e29067ea397a47e352efb13f976eb1b03e18c999270bb50589323294c6e", + "sha256:8d4c6ea0f498c7c79111033a290d060c517853a7bcb2f46516f591dab628ddd3", + "sha256:9134032f5aa445ae591c2ba6991d10136a1f533b1d2fa8f8c21126468c5025c6", + "sha256:921fbe13492caf6a69528f09d5d7c7d518c8d0e7b9f6701b7719715f29a71e6e", + "sha256:99670790f21a96665a35849990b1df447993880bb6463a0a1d757897f30da929", + "sha256:9975442f2e7a5cfcf87299c26b5a45266ab0696348420049b9b94b2ad3d40234", + "sha256:99ded130555c021d99729fabd4ddb91a6f4cc0707df4b1daf912c7850c373b13", + "sha256:a3328c3e64ea4ab12b85999eb0779e6139295bbf5485f69d42cf794309e3d007", + "sha256:a4fb91d5f72b7e06a14ff4ae5be625a81cd7e5f869d7a54578fc271d08d58ae3", + "sha256:aa23ce39661a3e90eea5f99ec59b763b7d655c2cada10729ed920a38bfc2b167", + "sha256:aac7501ae73d4a02f4b7ac8fcb9dc55342ca98ffb9ed9f2dfb8a25d53eda0e4d", + "sha256:ab84a8b698ad5a6c365b08061920138e7a7dd9a04b6feb09ba1bfae68346ce6d", + "sha256:b4adeb878a374126f1e5cf03b87f66279f479e01af0e9a654cf6d1509af46c40", + "sha256:b9853509b4bf57ba7b1f99b9d866c422c9c5248799ab20e652bbb8a184a38181", + "sha256:bb7d5fe92bd0dc235f63ebe9f8c6e0884f7360f88f3411bfed1350c872ef2054", + "sha256:bca4c8abc50d38f9773c1ec80d43f3768df2e8576807d1656016b9d3eeaa96fd", + "sha256:c222958f59b0ae091f4535851cbb24eb57fc0baea07ba675af718fb5302dddb2", + "sha256:c30e42ea11badb147f0d2e387115b15e2bd8205a5ad70d6ad79cf37f6ac08c91", + "sha256:c3a79f56dee9136084cf84a6c7c4341427ef36e05ae6415bf7d787c96ff5eaa3", + "sha256:c51ef82302386d686feea1c44dbeef744585da16fcf97deea2a8d6c1556f519b", + "sha256:c77326300b839c44c3e5a8fe26c15b7e87b2f32dfd2fc9fee1d13604347c9b38", + "sha256:d33a785ea8354c480515e781554d3be582a86297e41ccbea627a5c632647f2cd", + "sha256:d546cfa78844b8b9c1c0533de1851569a13f87449897bbc95d698d1d3cb2a30f", + "sha256:da29ceabe3025a1e5a5aeeb331c5b1af686daab4ff0fb4f83df18b1180ea83e2", + "sha256:df8c05a0f574d480947cba11b947dc41b1265d721c3777881da2fb8d3a1ddfba", + "sha256:e266af4da2c1a4cbc6135a570c64577fd3e6eb204607eaff99d8e9b710003c6f", + "sha256:e279f3db904e3b55f520f11f983cc8dc8a4ce9b65f11692d4718ed021ec58b83", + "sha256:ea52bd218d4ba260399a8ae4bb6b577d82adfc4518b93566ce1fddd4a49d1dce", + "sha256:ebec65f5068e7df2d49466aab9128510c4867e532e07cb6960075b27658dca38", + "sha256:ec1e3b40b82236d100d259854840555469fad4db64f669ab817279eb95cd535c", + "sha256:ee77c7bef0724165e795b6b7bf9c4c22a9b8468a6bdb9c6b4281293c6b22a90f", + "sha256:f263b18692f8ed52c8de7f40a0751e79015983dbd77b16906e5b310a39d3ca21", + "sha256:f7b26757b22faf88fcf232f5f0e62f6e0fd9e22a8a5d0d5016888cdfe1f6c1c4", + "sha256:f7ddb920106bbbbcaf2a274d56f46956bf56ecbde210d88061824a95bdd94e92" ], "index": "pypi", "markers": "python_version >= '3.9'", - "version": "==7.6.2" + "version": "==7.6.3" }, "iniconfig": { "hashes": [ diff --git a/dbrepo-search-service/init/README.md b/dbrepo-search-service/init/README.md index 74767ea02ada906b7f7feab0e13a7c4b1fe9a7d1..a188e561c737ddcc9bb772099dcfe51e5d8e9323 100644 --- 
a/dbrepo-search-service/init/README.md +++ b/dbrepo-search-service/init/README.md @@ -4,4 +4,8 @@ Responsible for: * Creating `database` index if not existing * Importing database(s) from the Metadata Database -* Exit \ No newline at end of file +* Exit + +## Development + +Open in the `./dbrepo-search-service` directory (it depends on the `clients` package). \ No newline at end of file diff --git a/dbrepo-search-service/init/app.py b/dbrepo-search-service/init/app.py index 24cc3f73bbc36c11943bd1069ec1a437535df2b5..9fe915f92c50d2b712058783d4eecf1b087cc8f7 100644 --- a/dbrepo-search-service/init/app.py +++ b/dbrepo-search-service/init/app.py @@ -6,7 +6,6 @@ from typing import List import opensearchpy.exceptions from dbrepo.RestClient import RestClient from logging.config import dictConfig -from pathlib import Path from dbrepo.api.dto import Database from opensearchpy import OpenSearch @@ -68,9 +67,6 @@ class App: logging.debug(f"create instance {self.search_host}:{self.search_port}") return self.search_instance - def index_exists(self): - return self._instance().indices.exists(index="database") - def database_exists(self, database_id: int): try: self._instance().get(index="database", id=database_id) @@ -78,28 +74,13 @@ except opensearchpy.exceptions.NotFoundError: return False - def index_update(self, is_created: bool) -> bool: - """ - - :param is_created: - :return: True if the index was updated - """ - if is_created: - logging.debug(f"index 'database' does not exist, creating...") - with open('./database.json', 'r') as f: - self._instance().indices.create(index="database", body=json.load(f)) - logging.info(f"Created index 'database'") - return True - mapping = dict(self._instance().indices.get_mapping(index="database")) - identifier_props = mapping["database"]["mappings"]["properties"]["identifiers"]["properties"] - if "status" in identifier_props: - logging.debug(f"found mapping database.identifiers.status: detected current mapping") - return False - logging.debug(f"index 'database' exists, updating mapping...") + def index_update(self) -> None: + if self._instance().indices.exists(index="database"): + logging.debug(f"index 'database' exists, removing...") + self._instance().indices.delete(index="database") with open('./database.json', 'r') as f: - self._instance().indices.put_mapping(index="database", body=json.load(f)) - logging.info(f"Updated index 'database'") - return True + self._instance().indices.create(index="database", body=json.load(f)) + logging.info(f"Created index 'database'") def fetch_databases(self) -> List[Database]: logging.debug(f"fetching database from endpoint: {self.metadata_service_endpoint}") @@ -126,7 +107,6 @@ if __name__ == "__main__": app = App() - create = not app.index_exists() - update = app.index_update(is_created=create) + app.index_update() app.save_databases(databases=app.fetch_databases()) logging.info("Finished.
Exiting.") diff --git a/dbrepo-search-service/clients/keycloak_client.py b/dbrepo-search-service/init/clients/keycloak_client.py similarity index 100% rename from dbrepo-search-service/clients/keycloak_client.py rename to dbrepo-search-service/init/clients/keycloak_client.py diff --git a/dbrepo-search-service/clients/opensearch_client.py b/dbrepo-search-service/init/clients/opensearch_client.py similarity index 89% rename from dbrepo-search-service/clients/opensearch_client.py rename to dbrepo-search-service/init/clients/opensearch_client.py index 3f198ac443784286c4c16912d1e2d946f1fd9080..7d25fcded5a29e87524523785133d7aaa56f314d 100644 --- a/dbrepo-search-service/clients/opensearch_client.py +++ b/dbrepo-search-service/init/clients/opensearch_client.py @@ -1,14 +1,14 @@ """ The opensearch_client.py is used by the different API endpoints in routes.py to handle requests to the opensearch db """ +import os from json import dumps, load import logging from dbrepo.api.dto import Database -from flask import current_app from collections.abc import MutableMapping -from opensearchpy import OpenSearch, TransportError, RequestError +from opensearchpy import OpenSearch, TransportError, RequestError, NotFoundError from omlib.measure import om from omlib.constants import OM_IDS @@ -26,11 +26,11 @@ class OpenSearchClient: password: str = None instance: OpenSearch = None - def __init__(self): - self.host = current_app.config["OPENSEARCH_HOST"] - self.port = int(current_app.config["OPENSEARCH_PORT"]) - self.username = current_app.config["OPENSEARCH_USERNAME"] - self.password = current_app.config["OPENSEARCH_PASSWORD"] + def __init__(self, host: str = None, port: int = None, username: str = None, password: str = None): + self.host = os.getenv('OPENSEARCH_HOST', host) + self.port = int(os.getenv('OPENSEARCH_PORT', port)) + self.username = os.getenv('OPENSEARCH_USERNAME', username) + self.password = os.getenv('OPENSEARCH_PASSWORD', password) def _instance(self) -> OpenSearch: """ @@ -42,7 +42,6 @@ class OpenSearchClient: self.instance = OpenSearch(hosts=[{"host": self.host, "port": self.port}], http_compress=True, http_auth=(self.username, self.password)) - logging.debug(f"create instance {self.host}:{self.port}") return self.instance def get_database(self, database_id: int) -> Database: @@ -68,16 +67,8 @@ class OpenSearchClient: @throws: opensearchpy.exceptions.NotFoundError If the database was not found in the Search Database. """ logging.debug(f"updating database with id: {database_id} in search database") - try: - self._instance().index(index="database", id=database_id, body=dumps(data.model_dump())) - except RequestError as e: - logging.error(f"Failed to update in search database: {e.info}") - raise e - try: - response: dict = self._instance().get(index="database", id=database_id) - except TransportError as e: - logging.error(f"Failed to get updated database in search database: {e.status_code}") - raise e + self._instance().index(index="database", id=database_id, body=dumps(data.model_dump())) + response: dict = self._instance().get(index="database", id=database_id) database = Database.parse_obj(response["_source"]) logging.info(f"Updated database with id {database_id} in index 'database'") return database @@ -119,10 +110,10 @@ class OpenSearchClient: results = [hit["_source"] for hit in response["hits"]["hits"]] return results - def get_fields_for_index(self, type: str): + def get_fields_for_index(self, field_type: str): """ returns a list of attributes of the data for a specific index. 
- :param type: The search type + :param field_type: The search type :return: list of fields """ fields = { @@ -135,8 +126,10 @@ class OpenSearchClient: "view": "views.*", "user": "creator.*", } - logging.debug(f'requesting field(s) {fields[type]} for filter: {type}') - fields = self._instance().indices.get_field_mapping(fields[type]) + if field_type not in fields.keys(): + raise NotFoundError(f"Failed to find field type: {field_type}") + logging.debug(f'requesting field(s) {fields[field_type]} for filter: {field_type}') + fields = self._instance().indices.get_field_mapping(fields[field_type]) fields_list = [] fd = flatten_dict(fields) for key in fd.keys(): @@ -170,13 +163,13 @@ class OpenSearchClient: logging.info(f"Found {len(response['hits']['hits'])} result(s)") return response - def general_search(self, type: str = None, field_value_pairs: dict = None): + def general_search(self, field_type: str = None, field_value_pairs: dict = None): """ Main method for searching stuff in the opensearch db all parameters are optional - :param type: The index to be searched. Optional. + :param field_type: The index to be searched. Optional. :param field_value_pairs: The key-value pair of properties that need to match. Optional. :return: The object of results and HTTP status code. e.g. { "hits": { "hits": [] } }, 200 """ @@ -205,7 +198,7 @@ class OpenSearchClient: body = { "query": {"bool": {"must": musts}} } - logging.debug(f'search in index database for type: {type}') + logging.debug(f'search in index database for type: {field_type}') logging.debug(f'search body: {dumps(body)}') response = self._instance().search( index="database", @@ -214,12 +207,10 @@ class OpenSearchClient: results = [hit["_source"] for hit in response["hits"]["hits"]] return results - def unit_independent_search(self, t1=None, t2=None, field_value_pairs=None): + def unit_independent_search(self, t1: float, t2: float, field_value_pairs): """ Main method for searching stuff in the opensearch db - all parameters are optional - :param t1: start value :param t2: end value :param field_value_pairs: the key-value pairs @@ -241,6 +232,8 @@ class OpenSearchClient: ) unit_uris = [hit["key"] for hit in response["aggregations"]["units"]["buckets"]] logging.debug(f"found {len(unit_uris)} unit(s) in column index") + if len(unit_uris) == 0: + raise NotFoundError("Failed to search: no unit assigned") base_unit = unit_uri_to_unit(field_value_pairs["unit.uri"]) for unit_uri in unit_uris: gte = t1 diff --git a/dbrepo-search-service/init/database.json b/dbrepo-search-service/init/database.json index d87d33b5e29abae3ffbb9beab8bad45a00d0ff56..1e6bdd0c4c4d3f2302bafc4c7a79bed2ec84224d 100644 --- a/dbrepo-search-service/init/database.json +++ b/dbrepo-search-service/init/database.json @@ -125,40 +125,6 @@ }, "image": { "properties": { - "date_formats": { - "properties": { - "created_at": { - "type": "date" - }, - "database_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "has_time": { - "type": "boolean" - }, - "id": { - "type": "long" - }, - "unix_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - }, - "default_port": { - "type": "long" - }, "dialect": { "type": "text", "fields": { @@ -639,37 +605,6 @@ "database_id": { "type": "long" }, - "date_format": { - "properties": { - "created_at": { - "type": "date" - }, - "database_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 
256 - } - } - }, - "has_time": { - "type": "boolean" - }, - "id": { - "type": "long" - }, - "unix_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - }, "id": { "type": "long" }, @@ -959,37 +894,6 @@ "database_id": { "type": "long" }, - "date_format": { - "properties": { - "created_at": { - "type": "date" - }, - "database_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - }, - "has_time": { - "type": "boolean" - }, - "id": { - "type": "long" - }, - "unix_format": { - "type": "text", - "fields": { - "keyword": { - "type": "keyword", - "ignore_above": 256 - } - } - } - } - }, "id": { "type": "long" }, diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.4-py3-none-any.whl b/dbrepo-search-service/init/lib/dbrepo-1.4.4-py3-none-any.whl deleted file mode 100644 index 617969c3eb15926d932b7c0180bed51b9ef7052d..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.4-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.4.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.4.tar.gz deleted file mode 100644 index 9d1d5ae238baba6bc51db4d219a0d09b5aca1c51..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.4.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.5-py3-none-any.whl b/dbrepo-search-service/init/lib/dbrepo-1.4.5-py3-none-any.whl deleted file mode 100644 index 249fd5dc181271a3069745f5a6ef8a26de398037..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.5-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.5.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.5.tar.gz deleted file mode 100644 index 2f21496bd2280550f4242bbc0fff4a47116d6ad5..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.5.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.6.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.6.tar.gz deleted file mode 100644 index 958287917e7f2b38a0286c3e91c2d471462bceee..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.6.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz index 936ec4dac16fe8f065ac7d37a09aedca421086a6..c652dbae4a60aae708eb6fe4e751977b97b72344 100644 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0-py3-none-any.whl b/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0-py3-none-any.whl deleted file mode 100644 index 84353af6a26fdb3e281e10ab90d21130a0701258..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0.tar.gz deleted file mode 100644 index 735c2e9d3699303d119033e184e0cfb9a571317f..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.7rc0.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/omlib/__init__.py b/dbrepo-search-service/init/omlib/__init__.py similarity index 100% rename from 
dbrepo-search-service/omlib/__init__.py rename to dbrepo-search-service/init/omlib/__init__.py diff --git a/dbrepo-search-service/omlib/constants.py b/dbrepo-search-service/init/omlib/constants.py similarity index 100% rename from dbrepo-search-service/omlib/constants.py rename to dbrepo-search-service/init/omlib/constants.py diff --git a/dbrepo-search-service/omlib/dimension.py b/dbrepo-search-service/init/omlib/dimension.py similarity index 100% rename from dbrepo-search-service/omlib/dimension.py rename to dbrepo-search-service/init/omlib/dimension.py diff --git a/dbrepo-search-service/omlib/exceptions/__init__.py b/dbrepo-search-service/init/omlib/exceptions/__init__.py similarity index 100% rename from dbrepo-search-service/omlib/exceptions/__init__.py rename to dbrepo-search-service/init/omlib/exceptions/__init__.py diff --git a/dbrepo-search-service/omlib/exceptions/dimensionexception.py b/dbrepo-search-service/init/omlib/exceptions/dimensionexception.py similarity index 100% rename from dbrepo-search-service/omlib/exceptions/dimensionexception.py rename to dbrepo-search-service/init/omlib/exceptions/dimensionexception.py diff --git a/dbrepo-search-service/omlib/exceptions/unitconversionexception.py b/dbrepo-search-service/init/omlib/exceptions/unitconversionexception.py similarity index 100% rename from dbrepo-search-service/omlib/exceptions/unitconversionexception.py rename to dbrepo-search-service/init/omlib/exceptions/unitconversionexception.py diff --git a/dbrepo-search-service/omlib/exceptions/unitidentityexception.py b/dbrepo-search-service/init/omlib/exceptions/unitidentityexception.py similarity index 100% rename from dbrepo-search-service/omlib/exceptions/unitidentityexception.py rename to dbrepo-search-service/init/omlib/exceptions/unitidentityexception.py diff --git a/dbrepo-search-service/omlib/measure.py b/dbrepo-search-service/init/omlib/measure.py similarity index 100% rename from dbrepo-search-service/omlib/measure.py rename to dbrepo-search-service/init/omlib/measure.py diff --git a/dbrepo-search-service/omlib/omconstants.py b/dbrepo-search-service/init/omlib/omconstants.py similarity index 100% rename from dbrepo-search-service/omlib/omconstants.py rename to dbrepo-search-service/init/omlib/omconstants.py diff --git a/dbrepo-search-service/omlib/rdf/__init__.py b/dbrepo-search-service/init/omlib/rdf/__init__.py similarity index 100% rename from dbrepo-search-service/omlib/rdf/__init__.py rename to dbrepo-search-service/init/omlib/rdf/__init__.py diff --git a/dbrepo-search-service/omlib/rdf/om-2.0.rdf b/dbrepo-search-service/init/omlib/rdf/om-2.0.rdf similarity index 100% rename from dbrepo-search-service/omlib/rdf/om-2.0.rdf rename to dbrepo-search-service/init/omlib/rdf/om-2.0.rdf diff --git a/dbrepo-search-service/omlib/scale.py b/dbrepo-search-service/init/omlib/scale.py similarity index 100% rename from dbrepo-search-service/omlib/scale.py rename to dbrepo-search-service/init/omlib/scale.py diff --git a/dbrepo-search-service/omlib/thing.py b/dbrepo-search-service/init/omlib/thing.py similarity index 100% rename from dbrepo-search-service/omlib/thing.py rename to dbrepo-search-service/init/omlib/thing.py diff --git a/dbrepo-search-service/omlib/unit.py b/dbrepo-search-service/init/omlib/unit.py similarity index 100% rename from dbrepo-search-service/omlib/unit.py rename to dbrepo-search-service/init/omlib/unit.py diff --git a/dbrepo-search-service/init/test/conftest.py b/dbrepo-search-service/init/test/conftest.py index 
2a21f689702d7f78e14e73b6170715753e32b49c..e2a00b1d86a7129935c7dbd42acb4a51254d3dbc 100644 --- a/dbrepo-search-service/init/test/conftest.py +++ b/dbrepo-search-service/init/test/conftest.py @@ -1,8 +1,8 @@ import logging +import os import pytest -from app import app -from flask import current_app +import json from testcontainers.opensearch import OpenSearchContainer @@ -19,9 +19,10 @@ def session(request): logging.debug("[fixture] starting opensearch container") container.start() - with app.app_context(): - current_app.config['OPENSEARCH_HOST'] = container.get_container_host_ip() - current_app.config['OPENSEARCH_PORT'] = container.get_exposed_port(9200) + os.environ['OPENSEARCH_HOST'] = container.get_container_host_ip() + os.environ['OPENSEARCH_PORT'] = container.get_exposed_port(9200) + os.environ['OPENSEARCH_USERNAME'] = 'admin' + os.environ['OPENSEARCH_PASSWORD'] = 'admin' # destructor def stop_opensearch(): @@ -30,20 +31,17 @@ def session(request): request.addfinalizer(stop_opensearch) return container -# @pytest.fixture(scope="function", autouse=True) -# def cleanup(request, session): -# """ -# Clean up after each test by removing the buckets and re-adding them (=so they are empty again) -# :param request: / -# :param session: / -# :return: -# """ -# logging.info("[fixture] truncate buckets") -# for bucket in ["dbrepo-upload", "dbrepo-download"]: -# objects = [] -# for obj in session.get_client().list_objects(bucket): -# objects.append(DeleteObject(obj.object_name)) -# logging.info(f'request to remove objects {objects}') -# errors = session.get_client().remove_objects(bucket, objects) -# for error in errors: -# raise ConnectionError(f'Failed to delete object with key {error.object_name} of bucket {bucket}') + +@pytest.fixture(scope="function", autouse=True) +def cleanup(request, session): + """ + Clean up after each test by removing the index and re-adding it (=so it's empty again) + :param request: / + :param session: / + :return: + """ + logging.info("[fixture] clean schema") + with open('./database.json', 'r') as f: + if session.get_client().indices.exists(index="database"): + session.get_client().indices.delete(index="database") + session.get_client().indices.create(index="database", body=json.load(f)) diff --git a/dbrepo-search-service/init/test/test_app.py b/dbrepo-search-service/init/test/test_app.py index 0df10e51c74b59b995148a350998566a30a4a52e..118ccf99c16586e4305967ab22a12e6317fb8ae2 100644 --- a/dbrepo-search-service/init/test/test_app.py +++ b/dbrepo-search-service/init/test/test_app.py @@ -1,93 +1,21 @@ -import datetime import unittest -from app import app - +from app import App from clients.opensearch_client import OpenSearchClient -class OpenSearchClientTest(unittest.TestCase): +class AppTest(unittest.TestCase): + + def test_index_update_succeeds(self): + # test + app = App() + app.index_update() - def test_index_exists_succeeds(self): - with app.app_context(): - client = RestClient(endpoint=self.metadata_service_endpoint) - # mock - client.update_database(database_id=1, data=req) + def test_index_update_not_exists_succeeds(self): + # mock + client = OpenSearchClient() + client._instance().indices.delete(index="database") - # test - req.tables = [Table(id=1, - name="Test Table", - internal_name="test_table", - queue_name="dbrepo", - routing_key="dbrepo.test_tuw1.test_table", - is_public=True, - database_id=req.id, - constraints=Constraints(uniques=[], foreign_keys=[], checks=[], - primary_key=[PrimaryKey(id=1, - table=TableMinimal(id=1, database_id=1), - 
column=ColumnMinimal(id=1, table_id=1, - database_id=1))]), - is_versioned=True, - created_by="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", - creator=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", - username="foo", - attributes=UserAttributes(theme="dark")), - owner=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", - username="foo", - attributes=UserAttributes(theme="dark")), - created=datetime.datetime(2024, 4, 25, 17, 44, tzinfo=datetime.timezone.utc), - columns=[Column(id=1, - name="ID", - internal_name="id", - database_id=req.id, - table_id=1, - column_type=ColumnType.BIGINT, - is_public=True, - is_null_allowed=False)])] - database = client.update_database(database_id=1, data=req) - self.assertEqual(1, database.id) - self.assertEqual("Test", database.name) - self.assertEqual("test_tuw1", database.internal_name) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.creator.id) - self.assertEqual("foo", database.creator.username) - self.assertEqual("dark", database.creator.attributes.theme) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.owner.id) - self.assertEqual("foo", database.owner.username) - self.assertEqual("dark", database.owner.attributes.theme) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.contact.id) - self.assertEqual("foo", database.contact.username) - self.assertEqual("dark", database.contact.attributes.theme) - self.assertEqual(datetime.datetime(2024, 3, 25, 16, tzinfo=datetime.timezone.utc), database.created) - self.assertEqual("dbrepo", database.exchange_name) - self.assertEqual(True, database.is_public) - self.assertEqual(1, database.container.id) - # ... - self.assertEqual(1, database.container.image.id) - # ... - self.assertEqual(1, len(database.tables)) - self.assertEqual(1, database.tables[0].id) - self.assertEqual("Test Table", database.tables[0].name) - self.assertEqual("test_table", database.tables[0].internal_name) - self.assertEqual("dbrepo", database.tables[0].queue_name) - self.assertEqual("dbrepo.test_tuw1.test_table", database.tables[0].routing_key) - self.assertEqual(True, database.tables[0].is_public) - self.assertEqual(1, database.tables[0].database_id) - self.assertEqual(True, database.tables[0].is_versioned) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.tables[0].created_by) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.tables[0].creator.id) - self.assertEqual("foo", database.tables[0].creator.username) - self.assertEqual("dark", database.tables[0].creator.attributes.theme) - self.assertEqual("c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", database.tables[0].owner.id) - self.assertEqual("foo", database.tables[0].owner.username) - self.assertEqual("dark", database.tables[0].owner.attributes.theme) - self.assertEqual(datetime.datetime(2024, 4, 25, 17, 44, tzinfo=datetime.timezone.utc), - database.tables[0].created) - self.assertEqual(1, len(database.tables[0].columns)) - self.assertEqual(1, database.tables[0].columns[0].id) - self.assertEqual("ID", database.tables[0].columns[0].name) - self.assertEqual("id", database.tables[0].columns[0].internal_name) - self.assertEqual(ColumnType.BIGINT, database.tables[0].columns[0].column_type) - self.assertEqual(1, database.tables[0].columns[0].database_id) - self.assertEqual(1, database.tables[0].columns[0].table_id) - self.assertEqual(True, database.tables[0].columns[0].is_public) - self.assertEqual(False, database.tables[0].columns[0].is_null_allowed) + # test + app = App() + app.index_update() diff --git 
a/dbrepo-search-service/lib/dbrepo-1.4.4-py3-none-any.whl b/dbrepo-search-service/lib/dbrepo-1.4.4-py3-none-any.whl deleted file mode 100644 index 617969c3eb15926d932b7c0180bed51b9ef7052d..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.4-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.4.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.4.tar.gz deleted file mode 100644 index 9d1d5ae238baba6bc51db4d219a0d09b5aca1c51..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.4.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.5-py3-none-any.whl b/dbrepo-search-service/lib/dbrepo-1.4.5-py3-none-any.whl deleted file mode 100644 index 249fd5dc181271a3069745f5a6ef8a26de398037..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.5-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.5.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.5.tar.gz deleted file mode 100644 index 2f21496bd2280550f4242bbc0fff4a47116d6ad5..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.5.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.6.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.6.tar.gz deleted file mode 100644 index 958287917e7f2b38a0286c3e91c2d471462bceee..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.6.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.6rc1-py3-none-any.whl b/dbrepo-search-service/lib/dbrepo-1.4.6rc1-py3-none-any.whl deleted file mode 100644 index 83944ce88d8aec5a3b767aa09caf9a8700323104..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.6rc1-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.6rc1.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.6rc1.tar.gz deleted file mode 100644 index a0c8432134f3c21359cd7fb8ee1a341812a6c034..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.6rc1.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz index 936ec4dac16fe8f065ac7d37a09aedca421086a6..c652dbae4a60aae708eb6fe4e751977b97b72344 100644 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.7rc0-py3-none-any.whl b/dbrepo-search-service/lib/dbrepo-1.4.7rc0-py3-none-any.whl deleted file mode 100644 index 84353af6a26fdb3e281e10ab90d21130a0701258..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.7rc0-py3-none-any.whl and /dev/null differ diff --git a/dbrepo-search-service/lib/dbrepo-1.4.7rc0.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.7rc0.tar.gz deleted file mode 100644 index 735c2e9d3699303d119033e184e0cfb9a571317f..0000000000000000000000000000000000000000 Binary files a/dbrepo-search-service/lib/dbrepo-1.4.7rc0.tar.gz and /dev/null differ diff --git a/dbrepo-search-service/test/conftest.py b/dbrepo-search-service/test/conftest.py index 2a21f689702d7f78e14e73b6170715753e32b49c..1d603685d63e464a4ffe2f4aac005c2af319fc5a 100644 --- a/dbrepo-search-service/test/conftest.py +++ b/dbrepo-search-service/test/conftest.py @@ -1,8 +1,8 @@ import logging +import os import pytest -from app import app 
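
Both `conftest.py` rewrites (the `init/test` one above and the `test` one whose hunk starts here) follow the same shape: a session-scoped fixture boots an OpenSearch testcontainer and publishes its coordinates plus default credentials through the `OPENSEARCH_*` environment variables, and an autouse function-scoped fixture drops and recreates the `database` index from `database.json` so each test starts from an empty schema. A condensed sketch of that pattern, with names and calls taken from the diff (only the JSON path differs between the two services):

```python
import json
import logging
import os

import pytest
from testcontainers.opensearch import OpenSearchContainer


@pytest.fixture(scope="session")
def session(request):
    # start one disposable OpenSearch instance for the whole test session
    container = OpenSearchContainer()
    logging.debug("[fixture] starting opensearch container")
    container.start()
    # the client under test reads its connection settings from the environment
    os.environ["OPENSEARCH_HOST"] = container.get_container_host_ip()
    os.environ["OPENSEARCH_PORT"] = container.get_exposed_port(9200)
    os.environ["OPENSEARCH_USERNAME"] = "admin"
    os.environ["OPENSEARCH_PASSWORD"] = "admin"
    request.addfinalizer(container.stop)
    return container


@pytest.fixture(scope="function", autouse=True)
def cleanup(session):
    # recreate the index before each test so it is empty again;
    # the search-service variant reads './init/database.json' instead
    with open("./database.json") as f:
        if session.get_client().indices.exists(index="database"):
            session.get_client().indices.delete(index="database")
        session.get_client().indices.create(index="database", body=json.load(f))
```
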
-from flask import current_app +import json from testcontainers.opensearch import OpenSearchContainer @@ -19,9 +19,10 @@ def session(request): logging.debug("[fixture] starting opensearch container") container.start() - with app.app_context(): - current_app.config['OPENSEARCH_HOST'] = container.get_container_host_ip() - current_app.config['OPENSEARCH_PORT'] = container.get_exposed_port(9200) + os.environ['OPENSEARCH_HOST'] = container.get_container_host_ip() + os.environ['OPENSEARCH_PORT'] = container.get_exposed_port(9200) + os.environ['OPENSEARCH_USERNAME'] = 'admin' + os.environ['OPENSEARCH_PASSWORD'] = 'admin' # destructor def stop_opensearch(): @@ -30,20 +31,17 @@ def session(request): request.addfinalizer(stop_opensearch) return container -# @pytest.fixture(scope="function", autouse=True) -# def cleanup(request, session): -# """ -# Clean up after each test by removing the buckets and re-adding them (=so they are empty again) -# :param request: / -# :param session: / -# :return: -# """ -# logging.info("[fixture] truncate buckets") -# for bucket in ["dbrepo-upload", "dbrepo-download"]: -# objects = [] -# for obj in session.get_client().list_objects(bucket): -# objects.append(DeleteObject(obj.object_name)) -# logging.info(f'request to remove objects {objects}') -# errors = session.get_client().remove_objects(bucket, objects) -# for error in errors: -# raise ConnectionError(f'Failed to delete object with key {error.object_name} of bucket {bucket}') + +@pytest.fixture(scope="function", autouse=True) +def cleanup(request, session): + """ + Clean up after each test by removing the index and re-adding it (=so it's empty again) + :param request: / + :param session: / + :return: + """ + logging.info("[fixture] clean schema") + with open('./init/database.json', 'r') as f: + if session.get_client().indices.exists(index="database"): + session.get_client().indices.delete(index="database") + session.get_client().indices.create(index="database", body=json.load(f)) diff --git a/dbrepo-search-service/test/rsa/rs256.key b/dbrepo-search-service/test/rsa/rs256.key new file mode 100644 index 0000000000000000000000000000000000000000..86b3eaf5c6c4c6b83071b6d1e9d69cb22bcd4085 --- /dev/null +++ b/dbrepo-search-service/test/rsa/rs256.key @@ -0,0 +1,3 @@ +-----BEGIN RSA PRIVATE KEY----- 
+MIIEpAIBAAKCAQEAqqnHQ2BWWW9vDNLRCcxD++xZg/16oqMo/c1l+lcFEjjAIJjJp/HqrPYU/U9GvquGE6PbVFtTzW1KcKawOW+FJNOA3CGo8Q1TFEfz43B8rZpKsFbJKvQGVv1Z4HaKPvLUm7iMm8Hv91cLduuoWx6Q3DPe2vg13GKKEZe7UFghF+0T9u8EKzA/XqQ0OiICmsmYPbwvf9N3bCKsB/Y10EYmZRb8IhCoV9mmO5TxgWgiuNeCTtNCv2ePYqL/U0WvyGFW0reasIK8eg3KrAUj8DpyOgPOVBn3lBGf+3KFSYi+0bwZbJZWqbC/Xlk20Go1YfeJPRIt7ImxD27R/lNjgDO/MwIDAQABAoIBADNcMt6hAHub4JTAYS6Mra0EPRBO2XhWmACBrv3+8ETClXd5475KPLDewgRVtlmtbwU8G8awUXESQgPS9lfiqvQhPreA3cHlm6oP2WMKOEtakr2s8I+frsTBLCo0Ini9RaSzjoVVgS0zofyhASKi+T970MafSj5P3XNb8YBFdXgoYDiA7FXLH6a/+m7LScL+wGcFMAAeYESxZbMQLfH3v8L+4EcTraiwjLG17ZdlF3dpybMyUSse6ZQ/PdlyvBuzzLXhN6Ce2gd9ATfS+YWTzo7Yf+GU+ex5bIpVOfHqtuM/hyq7YGKENClsXwNZIAoFnvGCbvECAfgyapVrD30IfykCgYEA0rgsSZ82pxT40NxwgBD1g9lbNVBKXphRB/3S078qusUzJjT7AldEj4imGPhAbI7bI8gAeWJsp1XJWkjM8ktaVrh+NQl7p8e9OPh0pQF/5Bdg8ajbjXESpjnaU66pVYRQy/d+jNli/YRAHX5RUfsBl+6W4+WSVMGmKBiqJsur+ecCgYEAz1YVXClcmUnyZem5B+2E9noIzjF6ROE+jIb6rawM85P3Xd0lXtECQavtxw+Qk7I32qOwrxl1UpK2foVel3pazi+4OpMfmqtYGenRP1Zk1cZwrDo0cIemTDGjj3kJ8tYn12CGolFQpJZgK6OHzvG0tOxI5VZgjIViWNPe1PGWXtUCgYEAxXGNDe8BZs1f11S2lUlOw5yGug3hoYFXbAWJ5p7Ziuf8ZXB/QlJDC7se54a11wKEk6Jzz0lKRgE8CjzszJuOqnN0zn10QGIIC7nCklo1W6QMUmPGVWH994N976tZP6gbjQL6sT+AYcvpx7j0ubxYYeRNvnz+ACzzY964kGGHY0ECgYEAumlwPPNnMN7+VEjGNm2D7UMdJZ3wi3tkjF5ThdA5uMohTsAk+FG80KSu3RmOaGyEsUwY7+VYyYvlDm4E9PZqLBVVczyR3rMNPAcwPd0EPfvzk7WlLkOX7ct3fehaXH3VRlyfz9KCSeh1wOZ/lT1VtpD2nVOC7PSDzs92+kfXZZ0CgYAnrD1y4skgXkdwolZ3unn3EFyGm2d+X5aMTHwQPdWxqoNIAl/9wdghlzihwnPhhsxq1WzlxuC3V2IMrNPtRx70Mi+FbSmR5m4Xx5RptgMtMlwno+L40PzNJgMjHGjt0wcx3Vel8wuohDtnqMyS7P5nG1/TQx0Cyzwn7QOXlNpgbQ== +-----END RSA PRIVATE KEY----- \ No newline at end of file diff --git a/dbrepo-search-service/test/rsa/rsa256.pkey b/dbrepo-search-service/test/rsa/rsa256.pkey new file mode 100644 index 0000000000000000000000000000000000000000..857dfb22beeac202c2955d7cc4f782b787492beb --- /dev/null +++ b/dbrepo-search-service/test/rsa/rsa256.pkey @@ -0,0 +1,3 @@ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqqnHQ2BWWW9vDNLRCcxD++xZg/16oqMo/c1l+lcFEjjAIJjJp/HqrPYU/U9GvquGE6PbVFtTzW1KcKawOW+FJNOA3CGo8Q1TFEfz43B8rZpKsFbJKvQGVv1Z4HaKPvLUm7iMm8Hv91cLduuoWx6Q3DPe2vg13GKKEZe7UFghF+0T9u8EKzA/XqQ0OiICmsmYPbwvf9N3bCKsB/Y10EYmZRb8IhCoV9mmO5TxgWgiuNeCTtNCv2ePYqL/U0WvyGFW0reasIK8eg3KrAUj8DpyOgPOVBn3lBGf+3KFSYi+0bwZbJZWqbC/Xlk20Go1YfeJPRIt7ImxD27R/lNjgDO/MwIDAQAB +-----END PUBLIC KEY----- diff --git a/dbrepo-search-service/test/test_app.py b/dbrepo-search-service/test/test_app.py new file mode 100644 index 0000000000000000000000000000000000000000..1b1af020987175fcd8894e61fd2e85519f42c998 --- /dev/null +++ b/dbrepo-search-service/test/test_app.py @@ -0,0 +1,300 @@ +import json +import time +import unittest +import datetime + +import jwt +from dbrepo.api.dto import Database, User, UserAttributes, Container, Image, Table, Constraints, Column, ColumnType, \ + Concept, Unit + +from app import app + +req = Database(id=1, + name="Test", + internal_name="test_tuw1", + creator=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + username="foo", + attributes=UserAttributes(theme="dark")), + owner=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + username="foo", + attributes=UserAttributes(theme="dark")), + contact=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + username="foo", + attributes=UserAttributes(theme="dark")), + created=datetime.datetime(2024, 3, 25, 16, tzinfo=datetime.timezone.utc), + exchange_name="dbrepo", + is_public=True, + container=Container(id=1, + name="MariaDB", + internal_name="mariadb", + host="data-db", + 
port="3306", + created=datetime.datetime(2024, 3, 1, 10, tzinfo=datetime.timezone.utc), + sidecar_host="data-db-sidecar", + sidecar_port=3305, + image=Image(id=1, + registry="docker.io", + name="mariadb", + version="11.1.3", + dialect="org.hibernate.dialect.MariaDBDialect", + driver_class="org.mariadb.jdbc.Driver", + jdbc_method="mariadb", + default_port=3306)), + tables=[Table(id=1, database_id=1, name="Data", internal_name="data", + creator=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + username="foo", + attributes=UserAttributes(theme="dark")), + owner=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + username="foo", + attributes=UserAttributes(theme="dark")), + created=datetime.datetime(2024, 3, 1, 10, tzinfo=datetime.timezone.utc), + constraints=Constraints(uniques=[], foreign_keys=[], checks=[], primary_key=[]), + is_versioned=False, + created_by="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", + queue_name="dbrepo", + routing_key="dbrepo.1.1", + is_public=True, + columns=[Column(id=1, database_id=1, table_id=1, name="ID", internal_name="id", + column_type=ColumnType.BIGINT, is_public=True, is_null_allowed=False, + size=20, d=0, + concept=Concept(id=1, uri="http://www.wikidata.org/entity/Q2221906", + created=datetime.datetime(2024, 3, 1, 10, + tzinfo=datetime.timezone.utc)), + unit=Unit(id=1, + uri="http://www.ontology-of-units-of-measure.org/resource/om-2/degreeCelsius", + created=datetime.datetime(2024, 3, 1, 10, + tzinfo=datetime.timezone.utc)), + val_min=0, + val_max=10)] + )]) + + +class JwtTest(unittest.TestCase): + + def token(self, roles: [str], iat: int = int(time.time())): + claims = { + 'iat': iat, + 'realm_access': { + 'roles': roles + } + } + with open('test/rsa/rs256.key', 'rb') as fh: + return jwt.JWT().encode(claims, jwt.jwk_from_pem(fh.read()), alg='RS256') + + def test_update_database_media_type_fails(self): + with app.test_client() as test_client: + # test + response = test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}'}) + self.assertEqual(415, response.status_code) + + def test_health_succeeds(self): + with app.test_client() as test_client: + # test + response = test_client.get('/health') + self.assertEqual(200, response.status_code) + + def test_update_database_no_auth_fails(self): + with app.test_client() as test_client: + # test + response = test_client.put('/api/search/database/1') + self.assertEqual(401, response.status_code) + + def test_update_database_no_body_fails(self): + with app.test_client() as test_client: + # test + response = test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}) + self.assertEqual(400, response.status_code) + + def test_update_database_empty_body_fails(self): + with app.test_client() as test_client: + # test + response = test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data={}) + self.assertEqual(400, response.status_code) + + def test_update_database_malformed_body_fails(self): + with app.test_client() as test_client: + # test + response = test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=dict({"id": 1})) + self.assertEqual(400, response.status_code) + + def test_update_database_succeeds(self): + with app.test_client() as test_client: + # test + 
response = test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + self.assertEqual(202, response.status_code) + + def test_get_fields_succeeds(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search/database/fields', headers={'Content-Type': 'application/json'}) + self.assertEqual(200, response.status_code) + + def test_get_fields_fails(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search/unknown/fields', headers={'Content-Type': 'application/json'}) + self.assertEqual(404, response.status_code) + + def test_delete_database_no_auth_fails(self): + with app.test_client() as test_client: + # test + response = test_client.delete('/api/search/database/1') + self.assertEqual(401, response.status_code) + + def test_delete_database_no_role_fails(self): + with app.test_client() as test_client: + # test + response = test_client.delete('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token([])}'}) + self.assertEqual(403, response.status_code) + + def test_delete_database_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.delete('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["admin"])}'}) + self.assertEqual(202, response.status_code) + + def test_delete_database_not_found_fails(self): + with app.test_client() as test_client: + # test + response = test_client.delete('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["admin"])}'}) + self.assertEqual(404, response.status_code) + + def test_get_fuzzy_search_succeeds(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search?q=test') + self.assertEqual(200, response.status_code) + + def test_get_fuzzy_search_no_query_fails(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search') + self.assertEqual(400, response.status_code) + + def test_get_index_succeeds(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search/table') + self.assertEqual(200, response.status_code) + + def test_get_index_fails(self): + with app.test_client() as test_client: + # test + response = test_client.get('/api/search/unknown') + self.assertEqual(404, response.status_code) + + def test_post_general_search_media_type_fails(self): + with app.test_client() as test_client: + # test + response = test_client.post('/api/search/database') + self.assertEqual(415, response.status_code) + + def test_post_general_search_no_body_fails(self): + with app.test_client() as test_client: + # test + response = test_client.post('/api/search/database', headers={'Content-Type': 'application/json'}) + self.assertEqual(400, response.status_code) + + def test_post_general_search_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/database', headers={'Content-Type': 'application/json'}, + 
data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_table_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/table', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_column_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/column', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_identifier_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/identifier', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_concept_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/concept', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_unit_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/unit', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) + + def test_post_general_search_view_succeeds(self): + with app.test_client() as test_client: + # mock + test_client.put('/api/search/database/1', + headers={'Authorization': f'Bearer {self.token(["update-search-index"])}', + 'Content-Type': 'application/json'}, + data=req.model_dump_json()) + # test + response = test_client.post('/api/search/view', headers={'Content-Type': 'application/json'}, + data=json.dumps({'id': 1})) + self.assertEqual(200, response.status_code) diff --git a/dbrepo-search-service/test/test_jwt.py b/dbrepo-search-service/test/test_jwt.py new file mode 100644 index 0000000000000000000000000000000000000000..59cd4ee1168117d0aeb6bf3549fe5088edc379b9 --- /dev/null +++ b/dbrepo-search-service/test/test_jwt.py @@ -0,0 +1,97 @@ +import time +import unittest + +import jwt +import requests_mock + +from app import verify_token, app, verify_password, get_user_roles +from clients.keycloak_client import User + + +class JwtTest(unittest.TestCase): + + def response(self, roles: [str]) -> dict: + return dict({ + "client_id": "username", + "realm_access": { + 
"roles": roles + } + }) + + def token(self, roles: [str], iat: int = int(time.time())) -> str: + claims = { + 'iat': iat, + 'realm_access': { + 'roles': roles + } + } + with open('test/rsa/rs256.key', 'rb') as fh: + return jwt.JWT().encode(claims, jwt.jwk_from_pem(fh.read()), alg='RS256') + + def test_verify_token_no_token_fails(self): + with app.app_context(): + # test + user = verify_token(None) + self.assertFalse(user) + + def test_verify_token_empty_token_fails(self): + with app.app_context(): + # test + user = verify_token("") + self.assertFalse(user) + + def test_verify_token_malformed_token_fails(self): + with app.app_context(): + # test + user = verify_token("eyEYEY12345") + self.assertFalse(user) + + def test_verify_token_succeeds(self): + with app.app_context(): + with requests_mock.Mocker() as mock: + # mock + mock.post('http://auth-service:8080/api/auth/realms/dbrepo/protocol/openid-connect/token', + json=self.response([])) + # test + user = verify_token(self.token([])) + self.assertEqual([], user.roles) + + def test_verify_password_no_username_fails(self): + with app.app_context(): + # test + user = verify_password(None, "pass") + self.assertFalse(user) + + def test_verify_password_empty_username_fails(self): + with app.app_context(): + # test + user = verify_password("", "pass") + self.assertFalse(user) + + def test_verify_password_no_password_fails(self): + with app.app_context(): + # test + user = verify_password("username", None) + self.assertFalse(user) + + def test_verify_password_empty_password_fails(self): + with app.app_context(): + # test + user = verify_password("username", "") + self.assertFalse(user) + + def test_verify_password_succeeds(self): + with app.app_context(): + with requests_mock.Mocker() as mock: + # mock + mock.post('http://auth-service:8080/api/auth/realms/dbrepo/protocol/openid-connect/token', + json=self.response([])) + # test + user = verify_password("username", "password") + self.assertIsNotNone(user) + + def test_get_user_roles_succeeds(self): + with app.app_context(): + # test + roles: [str] = get_user_roles(User(username="username", roles=[])) + self.assertEqual([], roles) diff --git a/dbrepo-search-service/test/test_keycloak_client.py b/dbrepo-search-service/test/test_keycloak_client.py new file mode 100644 index 0000000000000000000000000000000000000000..453a9b802be9885daa8e87afe265c272ee1ca211 --- /dev/null +++ b/dbrepo-search-service/test/test_keycloak_client.py @@ -0,0 +1,57 @@ +import time +import unittest + +import jwt +import requests_mock + +from app import app +from clients.keycloak_client import KeycloakClient + + +class JwtTest(unittest.TestCase): + + def response(self, username) -> dict: + return dict({ + "client_id": username, + "access_token": "eyEY1234" + }) + + def token(self, username: str, roles: [str], iat: int = int(time.time())) -> str: + claims = { + 'iat': iat, + 'client_id': username, + 'realm_access': { + 'roles': roles + } + } + with open('test/rsa/rs256.key', 'rb') as fh: + return jwt.JWT().encode(claims, jwt.jwk_from_pem(fh.read()), alg='RS256') + + def test_obtain_user_token_succeeds(self): + with app.app_context(): + with requests_mock.Mocker() as mock: + # mock + mock.post('http://auth-service:8080/api/auth/realms/dbrepo/protocol/openid-connect/token', + json=self.response("username")) + # test + token = KeycloakClient().obtain_user_token("username", "password") + self.assertEqual("eyEY1234", token) + + def test_obtain_user_token_malformed_fails(self): + with app.app_context(): + with requests_mock.Mocker() as 
mock: + # mock + mock.post('http://auth-service:8080/api/auth/realms/dbrepo/protocol/openid-connect/token', + json={"client_id": "username"}) + # test + try: + KeycloakClient().obtain_user_token("username", "password") + self.fail() + except AssertionError: + pass + + def test_verify_jwt_succeeds(self): + with app.app_context(): + # test + user = KeycloakClient().verify_jwt(self.token("username", [])) + self.assertEqual("username", user.username) diff --git a/dbrepo-search-service/test/test_opensearch_client.py b/dbrepo-search-service/test/test_opensearch_client.py index adf421af74dee1690ffe631b5fc33c975655300b..581e5f8c5d94435c4344b4e8478ec09b116fd735 100644 --- a/dbrepo-search-service/test/test_opensearch_client.py +++ b/dbrepo-search-service/test/test_opensearch_client.py @@ -4,10 +4,11 @@ import unittest import opensearchpy from dbrepo.api.dto import Database, User, UserAttributes, Container, Image, Table, Column, ColumnType, Constraints, \ PrimaryKey, TableMinimal, ColumnMinimal, Concept, Unit +from opensearchpy import NotFoundError from app import app -from clients.opensearch_client import OpenSearchClient +from init.clients.opensearch_client import OpenSearchClient req = Database(id=1, name="Test", @@ -73,9 +74,8 @@ class OpenSearchClientTest(unittest.TestCase): def test_update_database_succeeds(self): with app.app_context(): - client = OpenSearchClient() # mock - client.update_database(database_id=1, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test req.tables = [Table(id=1, @@ -87,9 +87,10 @@ class OpenSearchClientTest(unittest.TestCase): database_id=req.id, constraints=Constraints(uniques=[], foreign_keys=[], checks=[], primary_key=[PrimaryKey(id=1, - table=TableMinimal(id=1, database_id=1), + table=TableMinimal(id=1, + database_id=req.id), column=ColumnMinimal(id=1, table_id=1, - database_id=1))]), + database_id=req.id))]), is_versioned=True, created_by="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", creator=User(id="c6b71ef5-2d2f-48b2-9d79-b8f23a3a0502", @@ -107,7 +108,7 @@ class OpenSearchClientTest(unittest.TestCase): column_type=ColumnType.BIGINT, is_public=True, is_null_allowed=False)])] - database = client.update_database(database_id=1, data=req) + database = OpenSearchClient().update_database(database_id=req.id, data=req) self.assertEqual(1, database.id) self.assertEqual("Test", database.name) self.assertEqual("test_tuw1", database.internal_name) @@ -157,10 +158,8 @@ class OpenSearchClientTest(unittest.TestCase): def test_update_database_create_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # test - database = client.update_database(database_id=1, data=req) + database = OpenSearchClient().update_database(database_id=req.id, data=req) self.assertEqual(1, database.id) self.assertEqual("Test", database.name) self.assertEqual("test_tuw1", database.internal_name) @@ -185,124 +184,87 @@ class OpenSearchClientTest(unittest.TestCase): def test_update_database_malformed_fails(self): with app.app_context(): app.config['OPENSEARCH_USERNAME'] = 'i_do_not_exist' - client = OpenSearchClient() # test try: - database = client.update_database(database_id=1, data=req) + database = OpenSearchClient().update_database(database_id=req.id, data=req) except opensearchpy.exceptions.TransportError: pass def test_delete_database_fails(self): with app.app_context(): - client = OpenSearchClient() # test try: - client.delete_database(database_id=9999) + OpenSearchClient().delete_database(database_id=9999) except opensearchpy.exceptions.NotFoundError: 
pass def test_delete_database_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # mock - client.update_database(database_id=req.id, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test - client.delete_database(database_id=req.id) + OpenSearchClient().delete_database(database_id=req.id) - def test_find_database_succeeds(self): + def test_get_database_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # mock - client.update_database(database_id=req.id, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test - client.get_database(database_id=req.id) + database = OpenSearchClient().get_database(database_id=req.id) + self.assertEqual(req.id, database.id) - def test_find_database_fails(self): + def test_get_database_fails(self): with app.app_context(): - client = OpenSearchClient() # mock - client.update_database(database_id=1, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test try: - client.get_database(database_id=1) + OpenSearchClient().get_database(database_id=req.id) except opensearchpy.exceptions.NotFoundError: pass - # def test_query_index_by_term_opensearch_contains_succeeds(self): - # with app.app_context(): - # client = OpenSearchClient() - # - # # mock - # client.update_database(database_id=1, data=req) - # - # # test - # response = client.query_index_by_term_opensearch(term="test", mode="contains") - # self.assertEqual(1, len(response)) - # self.assertEqual(1, response[0]['id']) - # self.assertEqual('Test', response[0]['name']) - - # def test_query_index_by_term_opensearch_exact_succeeds(self): - # with app.app_context(): - # client = OpenSearchClient() - # - # # mock - # client.update_database(database_id=1, data=req) - # - # # test - # response = client.query_index_by_term_opensearch(term="test", mode="exact") - # self.assertEqual(1, len(response)) - # self.assertEqual(1, response[0]['id']) - # self.assertEqual('Test', response[0]['name']) - def test_get_fields_for_index_database_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # mock - client.update_database(database_id=1, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test - response = client.get_fields_for_index(type="database") + response = OpenSearchClient().get_fields_for_index(field_type="database") self.assertTrue(len(response) > 0) def test_get_fields_for_index_user_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # mock - client.update_database(database_id=1, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test - response = client.get_fields_for_index(type="user") + response = OpenSearchClient().get_fields_for_index(field_type="user") self.assertTrue(len(response) > 0) def test_fuzzy_search_succeeds(self): with app.app_context(): - client = OpenSearchClient() - # mock - client.update_database(database_id=1, data=req) + OpenSearchClient().update_database(database_id=req.id, data=req) # test - response = client.fuzzy_search(search_term="test") + response = OpenSearchClient().fuzzy_search(search_term="test") self.assertTrue(len(response) > 0) - # def test_general_search_succeeds(self): - # with app.app_context(): - # client = OpenSearchClient() - # - # # mock - # client.update_database(database_id=1, data=req) - # - # # test - # response = client.general_search(type="database", field_value_pairs={"name": "Test", - # "id": None}) - # self.assertTrue(len(response) > 0) + def 
test_unit_independent_search_fails(self): + with app.app_context(): + # mock + OpenSearchClient().update_database(database_id=req.id, data=req) + + # test + try: + OpenSearchClient().unit_independent_search(0, 100, { + "unit.uri": "http://www.ontology-of-units-of-measure.org/resource/om-2/degreeCelsius"}) + self.fail() + except NotFoundError: + pass diff --git a/helm/dbrepo-mariadb-galera/Chart.yaml b/helm/dbrepo-mariadb-galera/Chart.yaml index 4be4337885fe446add1835f3a5296bc2a42f9583..f39aa491579df5df8e147e1cdeb9a05441af624b 100644 --- a/helm/dbrepo-mariadb-galera/Chart.yaml +++ b/helm/dbrepo-mariadb-galera/Chart.yaml @@ -6,14 +6,14 @@ description: Helm Chart for installing DBRepo Data Database sources: - https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services type: application -version: "1.4.6" -appVersion: "1.4.6" +version: "1.4.7" +appVersion: "1.4.7" keywords: - dbrepo maintainers: - name: Martin Weise email: martin.weise@tuwien.ac.at -home: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/ +home: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.7/ icon: https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/master/dbrepo-ui/public/favicon.png dependencies: - name: mariadb-galera diff --git a/helm/dbrepo-mariadb-galera/templates/configmap.yaml b/helm/dbrepo-mariadb-galera/templates/configmap.yaml index 066a0d1afbbc5d46cd00a1f5315195365aef0c66..db67a0f76a18f8452fc26597763bd1041ce0ebd5 100644 --- a/helm/dbrepo-mariadb-galera/templates/configmap.yaml +++ b/helm/dbrepo-mariadb-galera/templates/configmap.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: ConfigMap metadata: name: database-setup - namespace: {{ .Values.namespace }} + namespace: {{ include "common.names.namespace" . | quote }} data: {{- with .Values.database.extraInitDbScripts }} {{ toYaml . | nindent 2 }} diff --git a/helm/dbrepo-mariadb-galera/templates/secret.yaml b/helm/dbrepo-mariadb-galera/templates/secret.yaml index fc5a2a7c5ddaa757c9fc29dc75b24d787a3c0656..78dad5e3afa45da011caebe51af6c56a68b57593 100644 --- a/helm/dbrepo-mariadb-galera/templates/secret.yaml +++ b/helm/dbrepo-mariadb-galera/templates/secret.yaml @@ -2,7 +2,7 @@ apiVersion: v1 kind: Secret metadata: name: sidecar-secret - namespace: {{ .Values.namespace }} + namespace: {{ include "common.names.namespace" . | quote }} stringData: S3_ACCESS_KEY_ID: "{{ .Values.s3.auth.adminAccessKeyId }}" S3_SECRET_ACCESS_KEY: "{{ .Values.s3.auth.adminSecretAccessKey }}" diff --git a/helm/dbrepo-mariadb-galera/values.yaml b/helm/dbrepo-mariadb-galera/values.yaml index f5026a3b5bdeb9f1ce83e1b4b79cb5e87bef6122..1dd16b367b3c8c4bbe048ad6a0ff78a0cb8e1fec 100644 --- a/helm/dbrepo-mariadb-galera/values.yaml +++ b/helm/dbrepo-mariadb-galera/values.yaml @@ -45,7 +45,7 @@ database: # VALUES ('MariaDB Galera TEST', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db', 80, 'root', 'dbrepo'); # COMMIT; ## @param database.replicaCount The number of cluster nodes, should be uneven i.e. 2n+1 - replicaCount: 3 + replicaCount: 1 persistence: ## @param database.persistence.enabled Enable persistent storage. 
enabled: true @@ -59,7 +59,7 @@ database: ## @skip database.sidecars sidecars: - name: sidecar - image: registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.5 + image: registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.6 imagePullPolicy: Always securityContext: runAsUser: 1001 diff --git a/helm/dbrepo/Chart.lock b/helm/dbrepo/Chart.lock index 0937dd527af328b079b719db542efa264c410368..da97fe0213905453bbfa64ff175ac380a3962a97 100644 --- a/helm/dbrepo/Chart.lock +++ b/helm/dbrepo/Chart.lock @@ -29,5 +29,5 @@ dependencies: - name: prometheus repository: https://charts.bitnami.com/bitnami version: 1.3.22 -digest: sha256:840d2ea4b1e36fe8fa399fc4170b6274a3de161c13a4a1a3b18ce3107ab71f79 -generated: "2024-10-01T07:48:55.174297756+02:00" +digest: sha256:10e81a48c99d3ff8bd04b735f3039d8ac0cb5eefe7b45c4096242ca988afad64 +generated: "2024-10-20T19:37:42.898952802+02:00" diff --git a/helm/dbrepo/Chart.yaml b/helm/dbrepo/Chart.yaml index 9e09e7f8e37033cc9c8ff2fa4f1776f43212c59c..a08ce5859e92b86f97cc795a92bac9a19799764b 100644 --- a/helm/dbrepo/Chart.yaml +++ b/helm/dbrepo/Chart.yaml @@ -13,7 +13,7 @@ keywords: maintainers: - name: Martin Weise email: martin.weise@tuwien.ac.at -home: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/ +home: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.7/ icon: https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/master/dbrepo-ui/public/favicon.png dependencies: - name: opensearch diff --git a/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.6.tgz b/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.6.tgz deleted file mode 100644 index 4af22ff6e998199f69a8e1ff43fd96c3f55aa8ec..0000000000000000000000000000000000000000 Binary files a/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.6.tgz and /dev/null differ diff --git a/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.7.tgz b/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.7.tgz new file mode 100644 index 0000000000000000000000000000000000000000..506d0c1563ecb1ee9b94b404e295860b1711d573 Binary files /dev/null and b/helm/dbrepo/charts/dbrepo-mariadb-galera-1.4.7.tgz differ diff --git a/helm/dbrepo/tmpcharts-529887/opensearch-1.2.2.tgz b/helm/dbrepo/tmpcharts-529887/opensearch-1.2.2.tgz new file mode 100644 index 0000000000000000000000000000000000000000..0393bfc1aa2fa964c68e66af6da6f356ea84e29f Binary files /dev/null and b/helm/dbrepo/tmpcharts-529887/opensearch-1.2.2.tgz differ diff --git a/helm/dbrepo/values.schema.json b/helm/dbrepo/values.schema.json index a8e7bbbf743a45c32d8a3a43c8785f4c279fc0ba..21928f6a3ae2102ff8c61a2c36f3aaa98fcb89ec 100644 --- a/helm/dbrepo/values.schema.json +++ b/helm/dbrepo/values.schema.json @@ -530,6 +530,21 @@ }, "type": "object" }, + "extraVolumes": { + "items": { + "properties": { + "emptyDir": { + "properties": {}, + "type": "object" + }, + "name": { + "type": "string" + } + }, + "type": "object" + }, + "type": "array" + }, "image": { "properties": { "debug": { diff --git a/helm/dbrepo/values.yaml b/helm/dbrepo/values.yaml index 68c2e4e06fed820f6c5858efdce9bcf97550fada..cb60ce9de5fd89e1c2533aabe77a05200bc56fe6 100644 --- a/helm/dbrepo/values.yaml +++ b/helm/dbrepo/values.yaml @@ -857,11 +857,11 @@ metricdb: - job_name: 'actuator scrape' metrics_path: '/actuator/prometheus' static_configs: - - targets: [ 'data-service', 'metadata-service', 'ui' ] + - targets: [ 'data-service', 'metadata-service' ] - job_name: 'metrics scrape' metrics_path: '/metrics' static_configs: - - targets: [ 'auth-service-metrics:8080', 'analyse-service', 'search-service', 'data-db:8080', 
'data-db-metrics:9104', 'broker-service:9419', 'metadata-db-metrics:9104', 'storage-service-master-metrics:9327', 'upload-service' ] + - targets: [ 'auth-service-metrics:8080', 'ui', 'analyse-service', 'search-service', 'data-db:8080', 'data-db-metrics:9104', 'broker-service:9419', 'metadata-db-metrics:9104', 'storage-service-master-metrics:9327', 'upload-service' ] - job_name: 'dashboard scrape' metrics_path: '/dashboard/metrics' static_configs: diff --git a/lib/python/.coveragerc b/lib/python/.coveragerc new file mode 100644 index 0000000000000000000000000000000000000000..2301243c8400da8046858a8cbfc8a5abfad9b5d7 --- /dev/null +++ b/lib/python/.coveragerc @@ -0,0 +1,3 @@ +[report] +omit = + */tests/* diff --git a/lib/python/.gitignore b/lib/python/.gitignore index 46916e3e91c948d297fa8fda068bc1b123d9aced..c954a774008e444617bea9ec16df30b7dda3183b 100644 --- a/lib/python/.gitignore +++ b/lib/python/.gitignore @@ -5,6 +5,7 @@ dist/ dbrepo.egg-info/ build/ +htmlcov/ # debug debug.py diff --git a/lib/python/dbrepo/api/dto.py b/lib/python/dbrepo/api/dto.py index 9bfb775f0ce7f04e2d2ede8024867e022aab25eb..fc97c2b8d731b064e2d8ff7a77e458e727f95f3c 100644 --- a/lib/python/dbrepo/api/dto.py +++ b/lib/python/dbrepo/api/dto.py @@ -11,14 +11,6 @@ Timestamp = Annotated[ ] -class ImageDate(BaseModel): - id: int - database_format: str - unix_format: str - has_time: bool - created_at: Timestamp - - class JwtAuth(BaseModel): access_token: str refresh_token: str @@ -40,7 +32,7 @@ class Image(BaseModel): driver_class: str jdbc_method: str default_port: int - date_formats: Optional[List[ImageDate]] = field(default_factory=list) + data_types: List[DataType] = field(default_factory=list) class ImageBrief(BaseModel): @@ -520,7 +512,6 @@ class CreateTableColumn(BaseModel): index_length: Optional[int] = None size: Optional[int] = None d: Optional[int] = None - dfid: Optional[int] = None enums: Optional[List[str]] = None sets: Optional[List[str]] = None @@ -883,6 +874,24 @@ class UpdateQuery(BaseModel): persist: bool +class DataType(BaseModel): + display_name: str + value: str + documentation: str + is_quoted: bool + is_buildable: bool + size_min: Optional[int] = None + size_max: Optional[int] = None + size_default: Optional[int] = None + size_required: Optional[bool] = None + d_min: Optional[int] = None + d_max: Optional[int] = None + d_default: Optional[int] = None + d_required: Optional[bool] = None + data_hint: Optional[str] = None + type_hint: Optional[str] = None + + class Column(BaseModel): id: int name: str @@ -902,7 +911,6 @@ class Column(BaseModel): unit: Optional[Unit] = None enums: Optional[List[str]] = field(default_factory=list) sets: Optional[List[str]] = field(default_factory=list) - date_format: Optional[ImageDate] = None index_length: Optional[int] = None length: Optional[int] = None data_length: Optional[int] = None @@ -928,7 +936,6 @@ class ViewColumn(BaseModel): median: Optional[float] = None concept: Optional[Concept] = None unit: Optional[Unit] = None - date_format: Optional[ImageDate] = None index_length: Optional[int] = None length: Optional[int] = None diff --git a/lib/python/test.sh b/lib/python/test.sh index 532d9a58d1a981cbd070f371ebf6dfaea7757c20..cd0129654a468e4aa0d9bec0b1ba3b04f193fd24 100644 --- a/lib/python/test.sh +++ b/lib/python/test.sh @@ -1,3 +1,3 @@ #!/bin/bash source ./lib/python/venv/bin/activate -cd ./lib/python/ && coverage run -m pytest tests/*.py --junitxml=report.xml && coverage html --omit="test/*" && coverage report --omit="test/*" > ./coverage.txt \ No newline at end 
of file +cd ./lib/python/ && coverage run -m pytest tests/*.py --junitxml=report.xml && coverage html && coverage report > ./coverage.txt \ No newline at end of file diff --git a/lib/python/tests/test_unit_container.py b/lib/python/tests/test_unit_container.py index 8f3297879ada5fcf416cc8afc508d059cf28c95d..0e1d93faa0e9b1c598afcede5ad71c7e08ef9767 100644 --- a/lib/python/tests/test_unit_container.py +++ b/lib/python/tests/test_unit_container.py @@ -4,11 +4,9 @@ import requests_mock import datetime from dbrepo.RestClient import RestClient -from dbrepo.api.dto import Container, Image, ContainerBrief, ImageBrief +from dbrepo.api.dto import Container, Image, ContainerBrief, ImageBrief, DataType from dbrepo.api.exceptions import ResponseCodeError, NotExistsError -from dbrepo.api.dto import ImageDate - class ContainerUnitTest(unittest.TestCase): @@ -69,29 +67,10 @@ class ContainerUnitTest(unittest.TestCase): dialect="org.hibernate.dialect.MariaDBDialect", driver_class="org.mariadb.jdbc.Driver", jdbc_method="mariadb", - date_formats=[ - ImageDate(id=1, - example="2024-03-26 10:26:00", - database_format="%Y-%c-%d %H:%i:%S", - unix_format="yyyy-MM-dd HH:mm:ss", - has_time=True, - created_at=datetime.datetime(2024, 3, 26, 10, 26, 0, 0, - datetime.timezone.utc)), - ImageDate(id=2, - example="2024-03-26", - database_format="%Y-%c-%d", - unix_format="yyyy-MM-dd", - has_time=False, - created_at=datetime.datetime(2024, 3, 26, 0, 0, 0, 0, - datetime.timezone.utc)), - ImageDate(id=3, - example="10:25:01", - database_format="%Y-%c-%d", - unix_format="yyyy-MM-dd", - has_time=False, - created_at=datetime.datetime(2024, 3, 26, 0, 0, 0, 0, - datetime.timezone.utc)), - ]), + data_types=[ + DataType(display_name="SERIAL", value="serial", + documentation="https://mariadb.com/kb/en/bigint/", + is_quoted=False, is_buildable=True)]), hash="f829dd8a884182d0da846f365dee1221fd16610a14c81b8f9f295ff162749e50") # mock mock.get('/api/container/1', json=exp.model_dump()) diff --git a/lib/python/tests/test_unit_database.py b/lib/python/tests/test_unit_database.py index dea15691e22a990434772c433683eb6eefc0b253..f72c80c93c0497eb0d2d6f68e2ab8029cccb2aec 100644 --- a/lib/python/tests/test_unit_database.py +++ b/lib/python/tests/test_unit_database.py @@ -7,11 +7,9 @@ from pydantic_core import ValidationError from dbrepo.RestClient import RestClient from dbrepo.api.dto import Database, User, Container, Image, UserAttributes, DatabaseAccess, AccessType, DatabaseBrief, \ - UserBrief + UserBrief, DataType from dbrepo.api.exceptions import ResponseCodeError, NotExistsError, ForbiddenError, MalformedError, AuthenticationError -from dbrepo.api.dto import ImageDate - class DatabaseUnitTest(unittest.TestCase): @@ -72,7 +70,11 @@ class DatabaseUnitTest(unittest.TestCase): dialect='org.hibernate.dialect.MariaDBDialect', driver_class='org.mariadb.jdbc.Driver', jdbc_method='mariadb', - default_port=3306 + default_port=3306, + data_types=[ + DataType(display_name="SERIAL", value="serial", + documentation="https://mariadb.com/kb/en/bigint/", + is_quoted=False, is_buildable=True)] ) ) ) diff --git a/make/test.mk b/make/test.mk index b3ad94f9ba405a39a921b02cb1cd227535d350b5..36d44d42f56f1592b8cd5364efb3111617e72d30 100644 --- a/make/test.mk +++ b/make/test.mk @@ -12,6 +12,10 @@ test-metadata-service: ## Test the Metadata Service. test-analyse-service: ## Test the Analyse Service. 
bash ./dbrepo-analyse-service/test.sh +.PHONY: test-search-service +test-search-service: ## Test the Search Service + bash ./dbrepo-search-service/test.sh + .PHONY: test-lib test-lib: ## Test the Python Library. bash ./lib/python/test.sh diff --git a/yq.1 b/yq.1 deleted file mode 100644 index 5e4a37a5d0d9bbc8f4060fa8c4c6a4f3af9c313d..0000000000000000000000000000000000000000 --- a/yq.1 +++ /dev/null @@ -1,17196 +0,0 @@ -'\" t -.\" Automatically generated by Pandoc 2.14.2 -.\" -.TH "YQ" "1" "" "" "yq (https://github.com/mikefarah/yq/) version v4.44.3" -.hy -.SH NAME -.PP -\f[I]yq\f[R] is a portable command-line data file processor -.SH SYNOPSIS -.PP -yq [eval/eval-all] [expression] files.. -.PP -eval/e - (default) Apply the expression to each document in each yaml -file in sequence -.PP -eval-all/ea - Loads all yaml documents of all yaml files and runs -expression once -.SH DESCRIPTION -.PP -a lightweight and portable command-line data file processor. -\f[C]yq\f[R] uses jq (https://github.com/stedolan/jq) like syntax but -works with yaml, json, xml, csv, properties and TOML files. -It doesn\[cq]t yet support everything \f[C]jq\f[R] does - but it does -support the most common operations and functions, and more is being -added continuously. -.PP -This documentation is also available at https://mikefarah.gitbook.io/yq/ -# QUICK GUIDE -.SS Read a value: -.IP -.nf -\f[C] -yq \[aq].a.b[0].c\[aq] file.yaml -\f[R] -.fi -.SS Pipe from STDIN: -.IP -.nf -\f[C] -cat file.yaml | yq \[aq].a.b[0].c\[aq] -\f[R] -.fi -.SS Update a yaml file, in place -.IP -.nf -\f[C] -yq -i \[aq].a.b[0].c = \[dq]cool\[dq]\[aq] file.yaml -\f[R] -.fi -.SS Update using environment variables -.IP -.nf -\f[C] -NAME=mike yq -i \[aq].a.b[0].c = strenv(NAME)\[aq] file.yaml -\f[R] -.fi -.SS Merge multiple files -.IP -.nf -\f[C] -yq ea \[aq]. as $item ireduce ({}; . * $item )\[aq] path/to/*.yml -\f[R] -.fi -.PP -Note the use of \f[C]ea\f[R] to evaluate all files at once (instead of -in sequence.) -.SS Multiple updates to a yaml file -.IP -.nf -\f[C] -yq -i \[aq] - .a.b[0].c = \[dq]cool\[dq] | - .x.y.z = \[dq]foobar\[dq] | - .person.name = strenv(NAME) -\[aq] file.yaml -\f[R] -.fi -.PP -See the documentation (https://mikefarah.gitbook.io/yq/) for more. -.SH KNOWN ISSUES / MISSING FEATURES -.IP \[bu] 2 -\f[C]yq\f[R] attempts to preserve comment positions and whitespace as -much as possible, but it does not handle all scenarios (see -https://github.com/go-yaml/yaml/tree/v3 for details) -.IP \[bu] 2 -Powershell has its own\&...opinions: -https://mikefarah.gitbook.io/yq/usage/tips-and-tricks#quotes-in-windows-powershell -.SH BUGS / ISSUES / FEATURE REQUESTS -.PP -Please visit the GitHub page https://github.com/mikefarah/yq/. -.SH HOW IT WORKS -.PP -In \f[C]yq\f[R] expressions are made up of operators and pipes. -A context of nodes is passed through the expression and each operation -takes the context as input and returns a new context as output. -That output is piped in as input for the next operation in the -expression. -To begin with, the context is set to the first yaml document of the -first yaml file (if processing in sequence using eval). -.PP -Lets look at a couple of examples. -.SS Simple assignment example -.PP -Given a document like: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -with an expression: -.IP -.nf -\f[C] -\&.a = .b -\f[R] -.fi -.PP -Like math expressions - operator precedence is important. 
-.PP -The \f[C]=\f[R] operator takes two arguments, a \f[C]lhs\f[R] -expression, which in this case is \f[C].a\f[R] and \f[C]rhs\f[R] -expression which is \f[C].b\f[R]. -.PP -It pipes the current, lets call it `root' context through the -\f[C]lhs\f[R] expression of \f[C].a\f[R] to return the node -.IP -.nf -\f[C] -cat -\f[R] -.fi -.PP -Side note: this node holds not only its value `cat', but comments and -metadata too, including path and parent information. -.PP -The \f[C]=\f[R] operator then pipes the `root' context through the -\f[C]rhs\f[R] expression of \f[C].b\f[R] to return the node -.IP -.nf -\f[C] -dog -\f[R] -.fi -.PP -Both sides have now been evaluated, so now the operator copies across -the value from the RHS (\f[C].b\f[R]) to the LHS (\f[C].a\f[R]), and it -returns the now updated context: -.IP -.nf -\f[C] -a: dog -b: dog -\f[R] -.fi -.SS Complex assignment, operator precedence rules -.PP -Just like math expressions - \f[C]yq\f[R] expressions have an order of -precedence. -The pipe \f[C]|\f[R] operator has a low order of precedence, so -operators with higher precedence will get evaluated first. -.PP -Most of the time, this is intuitively what you\[cq]d want, for instance -\f[C].a = \[dq]cat\[dq] | .b = \[dq]dog\[dq]\f[R] is effectively: -\f[C](.a = \[dq]cat\[dq]) | (.b = \[dq]dog\[dq])\f[R]. -.PP -However, this is not always the case, particularly if you have a complex -LHS or RHS expression, for instance if you want to select particular -nodes to update. -.PP -Lets say you had: -.IP -.nf -\f[C] -- name: bob - fruit: apple -- name: sally - fruit: orange -\f[R] -.fi -.PP -Lets say you wanted to update the \f[C]sally\f[R] entry to have fruit: -`mango'. -The \f[I]incorrect\f[R] way to do that is: -\f[C].[] | select(.name == \[dq]sally\[dq]) | .fruit = \[dq]mango\[dq]\f[R]. -.PP -Because \f[C]|\f[R] has a low operator precedence, this will be -evaluated (\f[I]incorrectly\f[R]) as : -\f[C](.[]) | (select(.name == \[dq]sally\[dq])) | (.fruit = \[dq]mango\[dq])\f[R]. -What you\[cq]ll see is only the updated segment returned: -.IP -.nf -\f[C] -name: sally -fruit: mango -\f[R] -.fi -.PP -To properly update this yaml, you will need to use brackets (think -BODMAS from maths) and wrap the entire LHS: -\f[C](.[] | select(.name == \[dq]sally\[dq]) | .fruit) = \[dq]mango\[dq]\f[R] -.PP -Now that entire LHS expression is passed to the `assign' (\f[C]=\f[R]) -operator, and the yaml is correctly updated and returned: -.IP -.nf -\f[C] -- name: bob - fruit: apple -- name: sally - fruit: mango -\f[R] -.fi -.SS Relative update (e.g.\ \f[C]|=\f[R]) -.PP -There is another form of the \f[C]=\f[R] operator which we call the -relative form. -It\[cq]s very similar to \f[C]=\f[R] but with one key difference when -evaluating the RHS expression. -.PP -In the plain form, we pass in the `root' level context to the RHS -expression. -In relative form, we pass in \f[I]each result of the LHS\f[R] to the RHS -expression. -Let\[cq]s go through an example. -.PP -Given a document like: -.IP -.nf -\f[C] -a: 1 -b: thing -\f[R] -.fi -.PP -with an expression: -.IP -.nf -\f[C] -\&.a |= . + 1 -\f[R] -.fi -.PP -Similar to the \f[C]=\f[R] operator, \f[C]|=\f[R] takes two operands, -the LHS and RHS. -.PP -It pipes the current context (the whole document) through the LHS -expression of \f[C].a\f[R] to get the node value: -.IP -.nf -\f[C] -1 -\f[R] -.fi -.PP -Now it pipes \f[I]that LHS context\f[R] into the RHS expression -\f[C]. 
+ 1\f[R] (whereas in the \f[C]=\f[R] plain form it piped the -original document context into the RHS) to yield: -.IP -.nf -\f[C] -2 -\f[R] -.fi -.PP -The assignment operator then copies across the value from the RHS to the -value on the LHS, and it returns the now updated `root' context: -.IP -.nf -\f[C] -a: 2 -b: thing -\f[R] -.fi -.SH Add -.PP -Add behaves differently according to the type of the LHS: * arrays: -concatenate * number scalars: arithmetic addition * string scalars: -concatenate * maps: shallow merge (use the multiply operator -(\f[C]*\f[R]) to deeply merge) -.PP -Use \f[C]+=\f[R] as a relative append assign for things like increment. -Note that \f[C].a += .x\f[R] is equivalent to running -\f[C].a = .a + .x\f[R]. -.SS Concatenate arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - 1 - - 2 -b: - - 3 - - 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a + .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -- 4 -\f[R] -.fi -.SS Concatenate to existing array -.PP -Note that the styling of \f[C]a\f[R] is kept. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: [1,2] -b: - - 3 - - 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: [1, 2, 3, 4] -b: - - 3 - - 4 -\f[R] -.fi -.SS Concatenate null to array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - 1 - - 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a + null\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -\f[R] -.fi -.SS Append to existing array -.PP -Note that the styling is copied from existing array elements -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: [\[aq]dog\[aq]] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += \[dq]cat\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: [\[aq]dog\[aq], \[aq]cat\[aq]] -\f[R] -.fi -.SS Prepend to existing array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = [\[dq]cat\[dq]] + .a\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - cat - - dog -\f[R] -.fi -.SS Add new object to array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - dog: woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a + {\[dq]cat\[dq]: \[dq]meow\[dq]}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- dog: woof -- cat: meow -\f[R] -.fi -.SS Relative append -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - a1: - b: - - cat - a2: - b: - - dog - a3: {} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a[].b += [\[dq]mouse\[dq]]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - a1: - b: - - cat - - mouse - a2: - b: - - dog - - mouse - a3: - b: - - mouse -\f[R] -.fi -.SS String concatenation -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: catmeow -b: meow -\f[R] -.fi -.SS Number addition - float -.PP -If the lhs or rhs are floats then the expression will be calculated with -floats. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 4.9 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a + .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 7.9 -b: 4.9 -\f[R] -.fi -.SS Number addition - int -.PP -If both the lhs and rhs are ints then the expression will be calculated -with ints. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a + .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 7 -b: 4 -\f[R] -.fi -.SS Increment numbers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] += 1\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 4 -b: 6 -\f[R] -.fi -.SS Date addition -.PP -You can add durations to dates. -Assumes RFC3339 date time format, see date-time -operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators) -for more information. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2021-01-01T00:00:00Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += \[dq]3h10m\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2021-01-01T03:10:00Z -\f[R] -.fi -.SS Date addition - custom format -.PP -You can add durations to dates. -See date-time -operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators) -for more information. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq], .a += \[dq]3h1m\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 6:00AM GMT -\f[R] -.fi -.SS Add to null -.PP -Adding to null simply returns the rhs -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]null + \[dq]cat\[dq]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Add maps to shallow merge -.PP -Adding objects together shallow merges them. -Use \f[C]*\f[R] to deeply merge. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - thing: - name: Astuff - value: x - a1: cool -b: - thing: - name: Bstuff - legs: 3 - b1: neat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - thing: - name: Bstuff - legs: 3 - a1: cool - b1: neat -b: - thing: - name: Bstuff - legs: 3 - b1: neat -\f[R] -.fi -.SS Custom types: that are really strings -.PP -When custom tags are encountered, yq will try to decode the underlying -type. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse cat -b: !goat _meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse cat_meow -b: !goat _meow -\f[R] -.fi -.SS Custom types: that are really numbers -.PP -When custom tags are encountered, yq will try to decode the underlying -type. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse 1.2 -b: !goat 2.3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse 3.5 -b: !goat 2.3 -\f[R] -.fi -.SH Alternative (Default value) -.PP -This operator is used to provide alternative (or default) values when a -particular expression is either null or false. 
-.SS LHS is defined -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: bridge -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bridge -\f[R] -.fi -.SS LHS is not defined -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -hello -\f[R] -.fi -.SS LHS is null -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: \[ti] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -hello -\f[R] -.fi -.SS LHS is false -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: false -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -hello -\f[R] -.fi -.SS RHS is an expression -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: false -b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a // .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Update or create - entity exists -.PP -This initialises \f[C]a\f[R] if it\[cq]s not present -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a // (.a = 0)) += 1\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2 -\f[R] -.fi -.SS Update or create - entity does not exist -.PP -This initialises \f[C]a\f[R] if it\[cq]s not present -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: camel -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a // (.a = 0)) += 1\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: camel -a: 1 -\f[R] -.fi -.SH Anchor and Alias Operators -.PP -Use the \f[C]alias\f[R] and \f[C]anchor\f[R] operators to read and write -yaml aliases and anchors. -The \f[C]explode\f[R] operator normalises a yaml file (dereference (or -expands) aliases and remove anchor names). -.PP -\f[C]yq\f[R] supports merge aliases (like \f[C]<<: *blah\f[R]) however -this is no longer in the standard yaml spec (1.2) and so \f[C]yq\f[R] -will automatically add the \f[C]!!merge\f[R] tag to these nodes as it is -effectively a custom tag. 
-.SS Merge one map -.PP -see https://yaml.org/type/merge.html -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- &CENTER - x: 1 - y: 2 -- &LEFT - x: 0 - y: 2 -- &BIG - r: 10 -- &SMALL - r: 1 -- !!merge <<: *CENTER - r: 10 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[4] | explode(.)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -x: 1 -y: 2 -r: 10 -\f[R] -.fi -.SS Merge multiple maps -.PP -see https://yaml.org/type/merge.html -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- &CENTER - x: 1 - y: 2 -- &LEFT - x: 0 - y: 2 -- &BIG - r: 10 -- &SMALL - r: 1 -- !!merge <<: - - *CENTER - - *BIG -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[4] | explode(.)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -r: 10 -x: 1 -y: 2 -\f[R] -.fi -.SS Override -.PP -see https://yaml.org/type/merge.html -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- &CENTER - x: 1 - y: 2 -- &LEFT - x: 0 - y: 2 -- &BIG - r: 10 -- &SMALL - r: 1 -- !!merge <<: - - *BIG - - *LEFT - - *SMALL - x: 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[4] | explode(.)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -r: 10 -x: 1 -y: 2 -\f[R] -.fi -.SS Get anchor -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &billyBob cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | anchor\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -billyBob -\f[R] -.fi -.SS Set anchor -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a anchor = \[dq]foobar\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: &foobar cat -\f[R] -.fi -.SS Set anchor relatively using assign-update -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a anchor |= .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: &cat - b: cat -\f[R] -.fi -.SS Get alias -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: &billyBob meow -a: *billyBob -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | alias\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -billyBob -\f[R] -.fi -.SS Set alias -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: &meow purr -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a alias = \[dq]meow\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: &meow purr -a: *meow -\f[R] -.fi -.SS Set alias to blank does nothing -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: &meow purr -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a alias = \[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: &meow purr -a: cat -\f[R] -.fi -.SS Set alias relatively using assign-update -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: &meow purr -a: - f: meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a alias |= .f\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: &meow purr -a: *meow -\f[R] -.fi -.SS Explode alias and anchor -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -f: - a: &a cat - b: *a -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]explode(.f)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -f: - a: cat - b: cat -\f[R] -.fi -.SS Explode with no aliases or anchors -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: mike -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]explode(.a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: mike -\f[R] -.fi -.SS Explode with alias keys -.PP 
-Given a sample.yml file of: -.IP -.nf -\f[C] -f: - a: &a cat - *a: b -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]explode(.f)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -f: - a: cat - cat: b -\f[R] -.fi -.SS Explode with merge anchors -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]explode(.)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo: - a: foo_a - thing: foo_thing - c: foo_c -bar: - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: bar_b - thing: foo_thing - c: foobarList_c - a: foo_a -foobar: - c: foo_c - a: foo_a - thing: foobar_thing -\f[R] -.fi -.SS Dereference and update a field -.PP -Use explode with multiply to dereference an object -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -item_value: &item_value - value: true -thingOne: - name: item_1 - !!merge <<: *item_value -thingTwo: - name: item_2 - !!merge <<: *item_value -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].thingOne |= explode(.) * {\[dq]value\[dq]: false}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -item_value: &item_value - value: true -thingOne: - name: item_1 - value: false -thingTwo: - name: item_2 - !!merge <<: *item_value -\f[R] -.fi -.SH Array to Map -.PP -Use this operator to convert an array to..a map. -The indices are used as map keys, null values in the array are skipped -over. -.PP -Behind the scenes, this is implemented using reduce: -.IP -.nf -\f[C] -(.[] | select(. != null) ) as $i ireduce({}; .[$i | key] = $i) -\f[R] -.fi -.SS Simple example -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: - - null - - null - - hello -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].cool |= array_to_map\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: - 2: hello -\f[R] -.fi -.SH Assign (Update) -.PP -This operator is used to update node values. -It can be used in either the: -.SS plain form: \f[C]=\f[R] -.PP -Which will set the LHS node values equal to the RHS node values. -The RHS expression is run against the matching nodes in the pipeline. -.SS relative form: \f[C]|=\f[R] -.PP -This will do a similar thing to the plain form, but the RHS expression -is run with \f[I]each LHS node as context\f[R]. -This is useful for updating values based on old values, e.g.\ increment. -.SS Flags -.IP \[bu] 2 -\f[C]c\f[R] clobber custom tags -.SS Create yaml file -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq].a.b = \[dq]cat\[dq] | .x = \[dq]frog\[dq]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: cat -x: frog -\f[R] -.fi -.SS Update node to be the child value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - g: foof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - g: foof -\f[R] -.fi -.SS Double elements in an array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= . 
* 2\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 2 -- 4 -- 6 -\f[R] -.fi -.SS Update node from another file -.PP -Note this will also work when the second file is a scalar -(string/number) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: apples -\f[R] -.fi -.PP -And another sample another.yml file of: -.IP -.nf -\f[C] -b: bob -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq eval-all \[aq]select(fileIndex==0).a = select(fileIndex==1) | select(fileIndex==0)\[aq] sample.yml another.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: bob -\f[R] -.fi -.SS Update node to be the sibling value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: child -b: sibling -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: sibling -b: sibling -\f[R] -.fi -.SS Updated multiple paths -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: fieldA -b: fieldB -c: fieldC -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a, .c) = \[dq]potato\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: potato -b: fieldB -c: potato -\f[R] -.fi -.SS Update string value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b = \[dq]frog\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: frog -\f[R] -.fi -.SS Update string value via |= -.PP -Note there is no difference between \f[C]=\f[R] and \f[C]|=\f[R] when -the RHS is a scalar -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b |= \[dq]frog\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: frog -\f[R] -.fi -.SS Update deeply selected results -.PP -Note that the LHS is wrapped in brackets! This is to ensure we don\[cq]t -first filter out the yaml and then update the snippet. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: apple - c: cactus -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a[] | select(. == \[dq]apple\[dq])) = \[dq]frog\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: frog - c: cactus -\f[R] -.fi -.SS Update array values -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- candy -- apple -- sandy -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.[] | select(. 
== \[dq]*andy\[dq])) = \[dq]bogs\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- bogs -- apple -- bogs -\f[R] -.fi -.SS Update empty object -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b |= \[dq]bogs\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: bogs -\f[R] -.fi -.SS Update node value that has an anchor -.PP -Anchor will remain -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cool cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = \[dq]dog\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: &cool dog -\f[R] -.fi -.SS Update empty object and array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b.[0] |= \[dq]bogs\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - - bogs -\f[R] -.fi -.SS Custom types are maintained by default -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !cat meow -b: !dog woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !cat woof -b: !dog woof -\f[R] -.fi -.SS Custom types: clobber -.PP -Use the \f[C]c\f[R] option to clobber custom tags -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !cat meow -b: !dog woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a =c .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !dog woof -b: !dog woof -\f[R] -.fi -.SH Boolean Operators -.PP -The \f[C]or\f[R] and \f[C]and\f[R] operators take two parameters and -return a boolean result. -.PP -\f[C]not\f[R] flips a boolean from true to false, or vice versa. -.PP -\f[C]any\f[R] will return \f[C]true\f[R] if there are any \f[C]true\f[R] -values in an array sequence, and \f[C]all\f[R] will return true if -\f[I]all\f[R] elements in an array are true. -.PP -\f[C]any_c(condition)\f[R] and \f[C]all_c(condition)\f[R] are like -\f[C]any\f[R] and \f[C]all\f[R] but they take a condition expression -that is used against each element to determine if it\[cq]s -\f[C]true\f[R]. -Note: in \f[C]jq\f[R] you can simply pass a condition to \f[C]any\f[R] -or \f[C]all\f[R] and it simply works - \f[C]yq\f[R] isn\[cq]t that -clever..yet -.PP -These are most commonly used with the \f[C]select\f[R] operator to -filter particular nodes. -.SS Related Operators -.IP \[bu] 2 -equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators -here (https://mikefarah.gitbook.io/yq/operators/equals) -.IP \[bu] 2 -comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators -here (https://mikefarah.gitbook.io/yq/operators/compare) -.IP \[bu] 2 -select operator here (https://mikefarah.gitbook.io/yq/operators/select) -.SS \f[C]or\f[R] example -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]true or false\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS \[lq]yes\[rq] and \[lq]no\[rq] are strings -.PP -In the yaml 1.2 standard, support for yes/no as booleans was dropped - -they are now considered strings. -See `10.2.1.2. 
Boolean' in https://yaml.org/spec/1.2.2/ -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- yes -- no -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | tag\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -!!str -!!str -\f[R] -.fi -.SS \f[C]and\f[R] example -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]true and false\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Matching nodes with select, equals and or -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: bird - b: dog -- a: frog - b: bird -- a: cat - b: fly -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][.[] | select(.a == \[dq]cat\[dq] or .b == \[dq]dog\[dq])]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: bird - b: dog -- a: cat - b: fly -\f[R] -.fi -.SS \f[C]any\f[R] returns true if any boolean in a given array is true -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- false -- true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]any\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS \f[C]any\f[R] returns false for an empty array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -[] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]any\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS \f[C]any_c\f[R] returns true if any element in the array is true for the given condition. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - rad - - awesome -b: - - meh - - whatever -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= any_c(. == \[dq]awesome\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: true -b: false -\f[R] -.fi -.SS \f[C]all\f[R] returns true if all booleans in a given array are true -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- true -- true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]all\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS \f[C]all\f[R] returns true for an empty array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -[] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]all\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS \f[C]all_c\f[R] returns true if all elements in the array are true for the given condition. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - rad - - awesome -b: - - meh - - 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= all_c(tag == \[dq]!!str\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: true -b: false -\f[R] -.fi -.SS Not true is false -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]true | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Not false is true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]false | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS String values considered to be true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]cat\[dq] | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Empty string value considered to be true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]\[dq] | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Numbers are considered to be true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]1 | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Zero is considered to be true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]0 | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Null is considered to be false -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[ti] | not\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SH Collect into Array -.PP -This creates an array using the expression between the square brackets. -.SS Collect empty -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq][]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -[] -\f[R] -.fi -.SS Collect single -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq][\[dq]cat\[dq]]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- cat -\f[R] -.fi -.SS Collect many -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][.a, .b]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- cat -- dog -\f[R] -.fi -.SH Column -.PP -Returns the column of the matching node. -Starts from 1, 0 indicates there was no column data. -.PP -Column is the number of characters that precede that node on the line it -starts. -.SS Returns column of \f[I]value\f[R] node -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: bob -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b | column\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -4 -\f[R] -.fi -.SS Returns column of \f[I]key\f[R] node -.PP -Pipe through the key operator to get the column of the key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: bob -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b | key | column\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS First column is 1 -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | key | column\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS No column data is 0 -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]{\[dq]a\[dq]: \[dq]new entry\[dq]} | column\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -\f[R] -.fi -.SH Comment Operators -.PP -Use these comment operators to set or retrieve comments. -Note that line comments on maps/arrays are actually set on the -\f[I]key\f[R] node as opposed to the \f[I]value\f[R] (map/array). -See below for examples. 
-.PP -Like the \f[C]=\f[R] and \f[C]|=\f[R] assign operators, the same syntax -applies when updating comments: -.SS plain form: \f[C]=\f[R] -.PP -This will set the LHS nodes\[cq] comments equal to the expression on the -RHS. -The RHS is run against the matching nodes in the pipeline -.SS relative form: \f[C]|=\f[R] -.PP -This is similar to the plain form, but it evaluates the RHS with -\f[I]each matching LHS node as context\f[R]. -This is useful if you want to set the comments as a relative expression -of the node, for instance its value or path. -.SS Set line comment -.PP -Set the comment on the key node for more reliability (see below). -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a line_comment=\[dq]single\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat # single -\f[R] -.fi -.SS Set line comment of a maps/arrays -.PP -For maps and arrays, you need to set the line comment on the -\f[I]key\f[R] node. -This will also work for scalars. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a | key) line_comment=\[dq]single\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: # single - b: things -\f[R] -.fi -.SS Use update assign to perform relative updates -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. line_comment |= .\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat # cat -b: dog # dog -\f[R] -.fi -.SS Where is the comment - map key example -.PP -The underlying yaml parser can assign comments in a document to -surprising nodes. -Use an expression like this to find where you comment is. -`p' indicates the path, `isKey' is if the node is a map key (as opposed -to a map value). -From this, you can see the `hello-world-comment' is actually on the -`hello' key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -hello: # hello-world-comment - message: world -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][... | {\[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]hc\[dq]: headComment, \[dq]lc\[dq]: lineComment, \[dq]fc\[dq]: footComment}]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- p: \[dq]\[dq] - isKey: false - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: hello - isKey: true - hc: \[dq]\[dq] - lc: hello-world-comment - fc: \[dq]\[dq] -- p: hello - isKey: false - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: hello.message - isKey: true - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: hello.message - isKey: false - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -\f[R] -.fi -.SS Retrieve comment - map key example -.PP -From the previous example, we know that the comment is on the `hello' -\f[I]key\f[R] as a lineComment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -hello: # hello-world-comment - message: world -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].hello | key | line_comment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -hello-world-comment -\f[R] -.fi -.SS Where is the comment - array example -.PP -The underlying yaml parser can assign comments in a document to -surprising nodes. -Use an expression like this to find where you comment is. -`p' indicates the path, `isKey' is if the node is a map key (as opposed -to a map value). 
-From this, you can see the `under-name-comment' is actually on the first -child -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: - # under-name-comment - - first-array-child -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][... | {\[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]hc\[dq]: headComment, \[dq]lc\[dq]: lineComment, \[dq]fc\[dq]: footComment}]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- p: \[dq]\[dq] - isKey: false - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: name - isKey: true - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: name - isKey: false - hc: \[dq]\[dq] - lc: \[dq]\[dq] - fc: \[dq]\[dq] -- p: name.0 - isKey: false - hc: under-name-comment - lc: \[dq]\[dq] - fc: \[dq]\[dq] -\f[R] -.fi -.SS Retrieve comment - array example -.PP -From the previous example, we know that the comment is on the first -child as a headComment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: - # under-name-comment - - first-array-child -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].name[0] | headComment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -under-name-comment -\f[R] -.fi -.SS Set head comment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. head_comment=\[dq]single\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# single -a: cat -\f[R] -.fi -.SS Set head comment of a map entry -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -f: foo -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a | key) head_comment=\[dq]single\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -f: foo -# single -a: - b: cat -\f[R] -.fi -.SS Set foot comment, using an expression -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. foot_comment=.a\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -# cat -\f[R] -.fi -.SS Remove comment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat # comment -b: dog # leave this -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a line_comment=\[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: dog # leave this -\f[R] -.fi -.SS Remove (strip) all comments -.PP -Note the use of \f[C]...\f[R] to ensure key nodes are included. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# hi - -a: cat # comment -# great -b: # key comment -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]... comments=\[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: -\f[R] -.fi -.SS Get line comment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# welcome! - -a: cat # meow -# have a great day -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | line_comment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -meow -\f[R] -.fi -.SS Get head comment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# welcome! - -a: cat # meow - -# have a great day -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. | head_comment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -welcome! -\f[R] -.fi -.SS Head comment with document split -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# welcome! ---- -# bob -a: cat # meow - -# have a great day -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]head_comment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -welcome! 
-bob -\f[R] -.fi -.SS Get foot comment -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# welcome! - -a: cat # meow - -# have a great day -# no really -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. | foot_comment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -have a great day -no really -\f[R] -.fi -.SH Compare Operators -.PP -Comparison operators (\f[C]>\f[R], \f[C]>=\f[R], \f[C]<\f[R], -\f[C]<=\f[R]) can be used for comparing scalar values of the same time. -.PP -The following types are currently supported: -.IP \[bu] 2 -numbers -.IP \[bu] 2 -strings -.IP \[bu] 2 -datetimes -.SS Related Operators -.IP \[bu] 2 -equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators -here (https://mikefarah.gitbook.io/yq/operators/equals) -.IP \[bu] 2 -boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc) -here (https://mikefarah.gitbook.io/yq/operators/boolean-operators) -.IP \[bu] 2 -select operator here (https://mikefarah.gitbook.io/yq/operators/select) -.SS Compare numbers (>) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 5 -b: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a > .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Compare equal numbers (>=) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 5 -b: 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a >= .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Compare strings -.PP -Compares strings by their bytecode. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: zoo -b: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a > .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Compare date times -.PP -You can compare date times. -Assumes RFC3339 date time format, see date-time -operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators) -for more information. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2021-01-01T03:10:00Z -b: 2020-01-01T03:10:00Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a > .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Both sides are null: > is false -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq].a > .b\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Both sides are null: >= is true -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq].a >= .b\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SH Contains -.PP -This returns \f[C]true\f[R] if the context contains the passed in -parameter, and false otherwise. -For arrays, this will return true if the passed in array is contained -within the array. -For strings, it will return true if the string is a substring. -.PP -{% hint style=\[lq]warning\[rq] %} -.PP -\f[I]Note\f[R] that, just like jq, when checking if an array of strings -\f[C]contains\f[R] another, this will use \f[C]contains\f[R] and -\f[I]not\f[R] equals to check each string. -This means an expression like \f[C]contains([\[dq]cat\[dq]])\f[R] will -return true for an array \f[C][\[dq]cats\[dq]]\f[R]. -.PP -See the \[lq]Array has a subset array\[rq] example below on how to check -for a subset. 
-.PP -{% endhint %} -.SS Array contains array -.PP -Array is equal or subset of -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foobar -- foobaz -- blarp -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]contains([\[dq]baz\[dq], \[dq]bar\[dq]])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Array has a subset array -.PP -Subtract the superset array from the subset, if there\[cq]s anything -left, it\[cq]s not a subset -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foobar -- foobaz -- blarp -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][\[dq]baz\[dq], \[dq]bar\[dq]] - . | length == 0\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS Object included in array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\[dq]foo\[dq]: 12 -\[dq]bar\[dq]: - - 1 - - 2 - - \[dq]barp\[dq]: 12 - \[dq]blip\[dq]: 13 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]contains({\[dq]bar\[dq]: [{\[dq]barp\[dq]: 12}]})\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Object not included in array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\[dq]foo\[dq]: 12 -\[dq]bar\[dq]: - - 1 - - 2 - - \[dq]barp\[dq]: 12 - \[dq]blip\[dq]: 13 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]contains({\[dq]foo\[dq]: 12, \[dq]bar\[dq]: [{\[dq]barp\[dq]: 15}]})\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -\f[R] -.fi -.SS String contains substring -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foobar -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]contains(\[dq]bar\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS String equals string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]contains(\[dq]meow\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SH Create, Collect into Object -.PP -This is used to construct objects (or maps). -This can be used against existing yaml, or to create fresh yaml -documents. 
-.SS Collect empty object -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]{}\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{} -\f[R] -.fi -.SS Wrap (prefix) existing object -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: Mike -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]{\[dq]wrap\[dq]: .}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -wrap: - name: Mike -\f[R] -.fi -.SS Using splat to create multiple objects -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: Mike -pets: - - cat - - dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]{.name: .pets.[]}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Mike: cat -Mike: dog -\f[R] -.fi -.SS Working with multiple documents -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: Mike -pets: - - cat - - dog ---- -name: Rosey -pets: - - monkey - - sheep -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]{.name: .pets.[]}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Mike: cat -Mike: dog ---- -Rosey: monkey -Rosey: sheep -\f[R] -.fi -.SS Creating yaml from scratch -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]{\[dq]wrap\[dq]: \[dq]frog\[dq]}\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -wrap: frog -\f[R] -.fi -.SS Creating yaml from scratch with multiple objects -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq](.a.b = \[dq]foo\[dq]) | (.d.e = \[dq]bar\[dq])\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: foo -d: - e: bar -\f[R] -.fi -.SH Date Time -.PP -Various operators for parsing and manipulating dates. -.SS Date time formattings -.PP -This uses Golang\[cq]s built in time library for parsing and formatting -date times. -.PP -When not specified, the RFC3339 standard is assumed -\f[C]2006-01-02T15:04:05Z07:00\f[R] for parsing. -.PP -To specify a custom parsing format, use the \f[C]with_dtf\f[R] operator. -The first parameter sets the datetime parsing format for the expression -in the second parameter. -The expression can be any valid \f[C]yq\f[R] expression tree. -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]myformat\[dq]; .a + \[dq]3h\[dq] | tz(\[dq]Australia/Melbourne\[dq]))\[aq] -\f[R] -.fi -.PP -See the library docs (https://pkg.go.dev/time#pkg-constants) for -examples of formatting options. -.SS Timezones -.PP -This uses Golang\[cq]s built in LoadLocation function to parse timezones -strings. -See the library docs (https://pkg.go.dev/time#LoadLocation) for more -details. -.SS Durations -.PP -Durations are parsed using Golang\[cq]s built in -ParseDuration (https://pkg.go.dev/time#ParseDuration) function. -.PP -You can add durations to time using the \f[C]+\f[R] operator. -.SS Format: from standard RFC3339 format -.PP -Providing a single parameter assumes a standard RFC3339 datetime format. -If the target format is not a valid yaml datetime format, the result -will be a string tagged node. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2001-12-15T02:59:43.1Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= format_datetime(\[dq]Monday, 02-Jan-06 at 3:04PM\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM -\f[R] -.fi -.SS Format: from custom date time -.PP -Use with_dtf to set a custom datetime format for parsing. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM\[dq]; format_datetime(\[dq]2006-01-02\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2001-12-15 -\f[R] -.fi -.SS Format: get the day of the week -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2001-12-15 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | format_datetime(\[dq]Monday\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Saturday -\f[R] -.fi -.SS Now -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].updated = now\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cool -updated: 2021-05-19T01:02:03Z -\f[R] -.fi -.SS From Unix -.PP -Converts from unix time. -Note, you don\[cq]t have to pipe through the tz operator :) -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]1675301929 | from_unix | tz(\[dq]UTC\[dq])\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -2023-02-02T01:38:49Z -\f[R] -.fi -.SS To Unix -.PP -Converts to unix time -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]now | to_unix\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1621386123 -\f[R] -.fi -.SS Timezone: from standard RFC3339 format -.PP -Returns a new datetime in the specified timezone. -Specify standard IANA Time Zone format or `utc', `local'. -When given a single parameter, this assumes the datetime is in RFC3339 -format. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].updated = (now | tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cool -updated: 2021-05-19T11:02:03+10:00 -\f[R] -.fi -.SS Timezone: with custom format -.PP -Specify standard IANA Time Zone format or `utc', `local' -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 1:59PM AEDT -\f[R] -.fi -.SS Add and tz custom format -.PP -Specify standard IANA Time Zone format or `utc', `local' -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 1:59PM AEDT -\f[R] -.fi -.SS Date addition -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2021-01-01T00:00:00Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a += \[dq]3h10m\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2021-01-01T03:10:00Z -\f[R] -.fi -.SS Date subtraction -.PP -You can subtract durations from dates. -Assumes RFC3339 date time format, see date-time -operators (https://mikefarah.gitbook.io/yq/operators/datetime#date-time-formattings) -for more information. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2021-01-01T03:10:00Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a -= \[dq]3h10m\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2021-01-01T00:00:00Z -\f[R] -.fi -.SS Date addition - custom format -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; .a += \[dq]3h1m\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 6:00AM GMT -\f[R] -.fi -.SS Date script with custom format -.PP -You can embed full expressions in with_dtf if needed. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; .a = (.a + \[dq]3h1m\[dq] | tz(\[dq]Australia/Perth\[dq])))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:00PM AWST -\f[R] -.fi -.SH Delete -.PP -Deletes matching entries in maps or arrays. -.SS Delete entry in map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.b)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.SS Delete nested entry in map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - a1: fred - a2: frood -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.a.a1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - a2: frood -\f[R] -.fi -.SS Delete entry in array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.[1])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 3 -\f[R] -.fi -.SS Delete nested entry in array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: cat - b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.[0].a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- b: dog -\f[R] -.fi -.SS Delete no matches -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.c)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.SS Delete matching entries -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -c: bat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del( .[] | select(. == \[dq]*at\[dq]) )\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: dog -\f[R] -.fi -.SS Recursively delete matching keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - name: frog - b: - name: blog - age: 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]del(.. 
| select(has(\[dq]name\[dq])).name)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - age: 12 -\f[R] -.fi -.SH Divide -.PP -Divide behaves differently according to the type of the LHS: * strings: -split by the divider * number: arithmetic division -.SS String split -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat_meow -b: _ -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].c = .a / .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat_meow -b: _ -c: - - cat - - meow -\f[R] -.fi -.SS Number division -.PP -The result during division is calculated as a float -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 12 -b: 2.5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a / .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 4.8 -b: 2.5 -\f[R] -.fi -.SS Number division by zero -.PP -Dividing by zero results in +Inf or -Inf -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: -1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a / 0 | .b = .b / 0\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !!float +Inf -b: !!float -Inf -\f[R] -.fi -.SH Document Index -.PP -Use the \f[C]documentIndex\f[R] operator (or the \f[C]di\f[R] shorthand) -to select nodes of a particular document. -.SS Retrieve a document index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat ---- -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | document_index\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 ---- -1 -\f[R] -.fi -.SS Retrieve a document index, shorthand -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat ---- -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | di\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 ---- -1 -\f[R] -.fi -.SS Filter by document index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat ---- -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]select(document_index == 1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.SS Filter by document index shorthand -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat ---- -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]select(di == 1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.SS Print Document Index with matches -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat ---- -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | ({\[dq]match\[dq]: ., \[dq]doc\[dq]: document_index})\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -match: cat -doc: 0 ---- -match: frog -doc: 1 -\f[R] -.fi -.SH Encoder / Decoder -.PP -Encode operators will take the piped in object structure and encode it -as a string in the desired format. -The decode operators do the opposite, they take a formatted string and -decode it into the relevant object structure. -.PP -Note that you can optionally pass an indent value to the encode -functions (see below). -.PP -These operators are useful to process yaml documents that have -stringified embedded yaml/json/props in them. -.PP -.TS -tab(@); -l l l. 
-T{ -Format -T}@T{ -Decode (from string) -T}@T{ -Encode (to string) -T} -_ -T{ -Yaml -T}@T{ -from_yaml/\[at]yamld -T}@T{ -to_yaml(i)/\[at]yaml -T} -T{ -JSON -T}@T{ -from_json/\[at]jsond -T}@T{ -to_json(i)/\[at]json -T} -T{ -Properties -T}@T{ -from_props/\[at]propsd -T}@T{ -to_props/\[at]props -T} -T{ -CSV -T}@T{ -from_csv/\[at]csvd -T}@T{ -to_csv/\[at]csv -T} -T{ -TSV -T}@T{ -from_tsv/\[at]tsvd -T}@T{ -to_tsv/\[at]tsv -T} -T{ -XML -T}@T{ -from_xml/\[at]xmld -T}@T{ -to_xml(i)/\[at]xml -T} -T{ -Base64 -T}@T{ -\[at]base64d -T}@T{ -\[at]base64 -T} -T{ -URI -T}@T{ -\[at]urid -T}@T{ -\[at]uri -T} -T{ -Shell -T}@T{ -T}@T{ -\[at]sh -T} -.TE -.PP -See CSV and TSV -documentation (https://mikefarah.gitbook.io/yq/usage/csv-tsv) for -accepted formats. -.PP -XML uses the \f[C]--xml-attribute-prefix\f[R] and -\f[C]xml-content-name\f[R] flags to identify attributes and content -fields. -.PP -Base64 assumes rfc4648 (https://rfc-editor.org/rfc/rfc4648.html) -encoding. -Encoding and decoding both assume that the content is a utf-8 string and -not binary content. -.SS Encode value as json string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | to_json)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: thing -b: | - { - \[dq]cool\[dq]: \[dq]thing\[dq] - } -\f[R] -.fi -.SS Encode value as json string, on one line -.PP -Pass in a 0 indent to print json on a single line. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | to_json(0))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: thing -b: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq] -\f[R] -.fi -.SS Encode value as json string, on one line shorthand -.PP -Pass in a 0 indent to print json on a single line. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | \[at]json)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: thing -b: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq] -\f[R] -.fi -.SS Decode a json encoded string -.PP -Keep in mind JSON is a subset of YAML. -If you want idiomatic yaml, pipe through the style operator to clear out -the JSON styling. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | from_json | ... 
style=\[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: thing -\f[R] -.fi -.SS Encode value as props string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | \[at]props)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: thing -b: | - cool = thing -\f[R] -.fi -.SS Decode props encoded string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: |- - cats=great - dogs=cool as well -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= \[at]propsd\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cats: great - dogs: cool as well -\f[R] -.fi -.SS Decode csv encoded string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: |- - cats,dogs - great,cool as well -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= \[at]csvd\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - cats: great - dogs: cool as well -\f[R] -.fi -.SS Decode tsv encoded string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: |- - cats dogs - great cool as well -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= \[at]tsvd\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - cats: great - dogs: cool as well -\f[R] -.fi -.SS Encode value as yaml string -.PP -Indent defaults to 2 -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: - bob: dylan -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | to_yaml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: - bob: dylan -b: | - cool: - bob: dylan -\f[R] -.fi -.SS Encode value as yaml string, with custom indentation -.PP -You can specify the indentation level as the first parameter. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: - bob: dylan -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | to_yaml(8))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - cool: - bob: dylan -b: | - cool: - bob: dylan -\f[R] -.fi -.SS Decode a yaml encoded string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: \[aq]foo: bar\[aq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | from_yaml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[aq]foo: bar\[aq] -b: - foo: bar -\f[R] -.fi -.SS Update a multiline encoded yaml string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: | - foo: bar - baz: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= (from_yaml | .foo = \[dq]cat\[dq] | to_yaml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: | - foo: cat - baz: dog -\f[R] -.fi -.SS Update a single line encoded yaml string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: \[aq]foo: bar\[aq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= (from_yaml | .foo = \[dq]cat\[dq] | to_yaml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[aq]foo: cat\[aq] -\f[R] -.fi -.SS Encode array of scalars as csv string -.PP -Scalars are strings, numbers and booleans. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- thing1,thing2 -- true -- 3.40 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[at]csv\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat,\[dq]thing1,thing2\[dq],true,3.40 -\f[R] -.fi -.SS Encode array of arrays as csv string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- - cat - - thing1,thing2 - - true - - 3.40 -- - dog - - thing3 - - false - - 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[at]csv\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat,\[dq]thing1,thing2\[dq],true,3.40 -dog,thing3,false,12 -\f[R] -.fi -.SS Encode array of arrays as tsv string -.PP -Scalars are strings, numbers and booleans. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- - cat - - thing1,thing2 - - true - - 3.40 -- - dog - - thing3 - - false - - 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[at]tsv\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat thing1,thing2 true 3.40 -dog thing3 false 12 -\f[R] -.fi -.SS Encode value as xml string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: - foo: bar - +\[at]id: hi -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | to_xml\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<cool id=\[dq]hi\[dq]> - <foo>bar</foo> -</cool> -\f[R] -.fi -.SS Encode value as xml string on a single line -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: - foo: bar - +\[at]id: hi -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | \[at]xml\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<cool id=\[dq]hi\[dq]><foo>bar</foo></cool> -\f[R] -.fi -.SS Encode value as xml string with custom indentation -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cool: - foo: bar - +\[at]id: hi -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]{\[dq]cat\[dq]: .a | to_xml(1)}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat: | - <cool id=\[dq]hi\[dq]> - <foo>bar</foo> - </cool> -\f[R] -.fi -.SS Decode a xml encoded string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: <foo>bar</foo> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b = (.a | from_xml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: <foo>bar</foo> -b: - foo: bar -\f[R] -.fi -.SS Encode a string to base64 -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -coolData: a special string -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].coolData | \[at]base64\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -YSBzcGVjaWFsIHN0cmluZw== -\f[R] -.fi -.SS Encode a yaml document to base64 -.PP -Pipe through \[at]yaml first to convert to a string, then use -\[at]base64 to encode it. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[at]yaml | \[at]base64\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -YTogYXBwbGUK -\f[R] -.fi -.SS Encode a string to uri -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -coolData: this has & special () characters * -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].coolData | \[at]uri\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -this+has+%26+special+%28%29+characters+%2A -\f[R] -.fi -.SS Decode a URI to a string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -this+has+%26+special+%28%29+characters+%2A -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[at]urid\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -this has & special () characters * -\f[R] -.fi -.SS Encode a string to sh -.PP -Sh/Bash friendly string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -coolData: strings with spaces and a \[aq]quote\[aq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].coolData | \[at]sh\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -strings\[aq] with spaces and a \[aq]\[rs]\[aq]quote\[rs]\[aq] -\f[R] -.fi -.SS Decode a base64 encoded string -.PP -Decoded data is assumed to be a string. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -coolData: V29ya3Mgd2l0aCBVVEYtMTYg8J+Yig== -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].coolData | \[at]base64d\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Works with UTF-16 \[u1F60A] -\f[R] -.fi -.SS Decode a base64 encoded yaml document -.PP -Pipe through \f[C]from_yaml\f[R] to parse the decoded base64 string as a -yaml document. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -coolData: YTogYXBwbGUK -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].coolData |= (\[at]base64d | from_yaml)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -coolData: - a: apple -\f[R] -.fi -.SH Entries -.PP -Similar to the same named functions in \f[C]jq\f[R] these functions -convert to/from an object and an array of key-value pairs. -This is most useful for performing operations on keys of maps. 
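-.PP
-As an illustrative sketch of working on keys (hypothetical input, and
-assuming the \f[C]upcase\f[R] string operator is available in your yq
-version), every top-level key of a map can be upper-cased by
-round-tripping through entries:
-.IP
-.nf
-\f[C]
-yq \[aq]with_entries(.key |= upcase)\[aq] sample.yml
-\f[R]
-.fi
-.PP
-See the examples below for the individual entry operators.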
-.SS to_entries Map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- key: a - value: 1 -- key: b - value: 2 -\f[R] -.fi -.SS to_entries Array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a -- b -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- key: 0 - value: a -- key: 1 - value: b -\f[R] -.fi -.SS to_entries null -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SS from_entries map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries | from_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 1 -b: 2 -\f[R] -.fi -.SS from_entries with numeric key indices -.PP -from_entries always creates a map, even for numeric keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a -- b -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries | from_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0: a -1: b -\f[R] -.fi -.SS Use with_entries to update keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_entries(.key |= \[dq]KEY_\[dq] + .)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -KEY_a: 1 -KEY_b: 2 -\f[R] -.fi -.SS Custom sort map keys -.PP -Use to_entries to convert to an array of key/value pairs, sort the array -using sort/sort_by/etc, and convert it back. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -c: 3 -b: 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]to_entries | sort_by(.key) | reverse | from_entries\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -c: 3 -b: 2 -a: 1 -\f[R] -.fi -.SS Use with_entries to filter the map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: bird -c: - d: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_entries(select(.value | has(\[dq]b\[dq])))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: bird -\f[R] -.fi -.SH Env Variable Operators -.PP -These operators are used to handle environment variables usage in -expressions and documents. -While environment variables can, of course, be passed in via your CLI -with string interpolation, this often comes with complex quote escaping -and can be tricky to write and read. -.PP -There are three operators: -.IP \[bu] 2 -\f[C]env\f[R] which takes a single environment variable name and parse -the variable as a yaml node (be it a map, array, string, number of -boolean) -.IP \[bu] 2 -\f[C]strenv\f[R] which also takes a single environment variable name, -and always parses the variable as a string. -.IP \[bu] 2 -\f[C]envsubst\f[R] which you pipe strings into and it interpolates -environment variables in strings using -envsubst (https://github.com/a8m/envsubst). 
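-.PP
-As a further hypothetical example (variable name made up), \f[C]env\f[R]
-will also parse a flow-style sequence:
-.IP
-.nf
-\f[C]
-myenv=\[dq][cat, dog]\[dq] yq --null-input \[aq].a = env(myenv)\[aq]
-\f[R]
-.fi
-.PP
-which should output something like \f[C]a: [cat, dog]\f[R].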
-.SS EnvSubst Options -.PP -You can optionally pass envsubst any of the following options: -.IP \[bu] 2 -nu: NoUnset, this will fail if there are any referenced variables that -are not set -.IP \[bu] 2 -ne: NoEmpty, this will fail if there are any referenced variables that -are empty -.IP \[bu] 2 -ff: FailFast, this will abort on the first failure (rather than collect -all the errors) -.PP -E.g: \f[C]envsubst(ne, ff)\f[R] will fail on the first empty variable. -.PP -See Imposing -Restrictions (https://github.com/a8m/envsubst#imposing-restrictions) in -the \f[C]envsubst\f[R] documentation for more information, and below for -examples. -.SS Tip -.PP -To replace environment variables across all values in a document, -\f[C]envsubst\f[R] can be used with the recursive descent operator as -follows: -.IP -.nf -\f[C] -yq \[aq](.. | select(tag == \[dq]!!str\[dq])) |= envsubst\[aq] file.yaml -\f[R] -.fi -.SS Read string environment variable -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]cat meow\[dq] yq --null-input \[aq].a = env(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat meow -\f[R] -.fi -.SS Read boolean environment variable -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]true\[dq] yq --null-input \[aq].a = env(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: true -\f[R] -.fi -.SS Read numeric environment variable -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]12\[dq] yq --null-input \[aq].a = env(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 12 -\f[R] -.fi -.SS Read yaml environment variable -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]{b: fish}\[dq] yq --null-input \[aq].a = env(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: {b: fish} -\f[R] -.fi -.SS Read boolean environment variable as a string -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]true\[dq] yq --null-input \[aq].a = strenv(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[dq]true\[dq] -\f[R] -.fi -.SS Read numeric environment variable as a string -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]12\[dq] yq --null-input \[aq].a = strenv(myenv)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[dq]12\[dq] -\f[R] -.fi -.SS Dynamically update a path from an environment variable -.PP -The env variable can be any valid yq expression. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - - name: dog - - name: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -pathEnv=\[dq].a.b[0].name\[dq] valueEnv=\[dq]moo\[dq] yq \[aq]eval(strenv(pathEnv)) = strenv(valueEnv)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - - name: moo - - name: cat -\f[R] -.fi -.SS Dynamic key lookup with environment variable -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: meow -dog: woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -myenv=\[dq]cat\[dq] yq \[aq].[env(myenv)]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -meow -\f[R] -.fi -.SS Replace strings with envsubst -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]cat\[dq] yq --null-input \[aq]\[dq]the ${myenv} meows\[dq] | envsubst\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -the cat meows -\f[R] -.fi -.SS Replace strings with envsubst, missing variables -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -the meows -\f[R] -.fi -.SS Replace strings with envsubst(nu), missing variables -.PP -(nu) not unset, will fail if there are unset (missing) variables -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst(nu)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: variable ${myenvnonexisting} not set -\f[R] -.fi -.SS Replace strings with envsubst(ne), missing variables -.PP -(ne) not empty, only validates set variables -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst(ne)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -the meows -\f[R] -.fi -.SS Replace strings with envsubst(ne), empty variable -.PP -(ne) not empty, will fail if a references variable is empty -.PP -Running -.IP -.nf -\f[C] -myenv=\[dq]\[dq] yq --null-input \[aq]\[dq]the ${myenv} meows\[dq] | envsubst(ne)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: variable ${myenv} set but empty -\f[R] -.fi -.SS Replace strings with envsubst, missing variables with defaults -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${myenvnonexisting-dog} meows\[dq] | envsubst\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -the dog meows -\f[R] -.fi -.SS Replace strings with envsubst(nu), missing variables with defaults -.PP -Having a default specified skips over the missing variable. -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${myenvnonexisting-dog} meows\[dq] | envsubst(nu)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -the dog meows -\f[R] -.fi -.SS Replace strings with envsubst(ne), missing variables with defaults -.PP -Fails, because the variable is explicitly set to blank. -.PP -Running -.IP -.nf -\f[C] -myEmptyEnv=\[dq]\[dq] yq --null-input \[aq]\[dq]the ${myEmptyEnv-dog} meows\[dq] | envsubst(ne)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: variable ${myEmptyEnv} set but empty -\f[R] -.fi -.SS Replace string environment variable in document -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -v: ${myenv} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -myenv=\[dq]cat meow\[dq] yq \[aq].v |= envsubst\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -v: cat meow -\f[R] -.fi -.SS (Default) Return all envsubst errors -.PP -By default, all errors are returned at once. 
-.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${notThere} ${alsoNotThere}\[dq] | envsubst(nu)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: variable ${notThere} not set -variable ${alsoNotThere} not set -\f[R] -.fi -.SS Fail fast, return the first envsubst error (and abort) -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]\[dq]the ${notThere} ${alsoNotThere}\[dq] | envsubst(nu,ff)\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: variable ${notThere} not set -\f[R] -.fi -.SH Equals / Not Equals -.PP -This is a boolean operator that will return \f[C]true\f[R] if the LHS is -equal to the RHS and \f[C]false\f[R] otherwise. -.IP -.nf -\f[C] -\&.a == .b -\f[R] -.fi -.PP -It is most often used with the select operator to find particular nodes: -.IP -.nf -\f[C] -select(.a == .b) -\f[R] -.fi -.PP -The not equals \f[C]!=\f[R] operator returns \f[C]false\f[R] if the LHS -is equal to the RHS. -.SS Related Operators -.IP \[bu] 2 -comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators -here (https://mikefarah.gitbook.io/yq/operators/compare) -.IP \[bu] 2 -boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc) -here (https://mikefarah.gitbook.io/yq/operators/boolean-operators) -.IP \[bu] 2 -select operator here (https://mikefarah.gitbook.io/yq/operators/select) -.SS Match string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- goat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | (. == \[dq]*at\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -true -false -\f[R] -.fi -.SS Don\[cq]t match string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- goat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | (. != \[dq]*at\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -false -true -\f[R] -.fi -.SS Match number -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 3 -- 4 -- 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | (. == 4)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -true -false -\f[R] -.fi -.SS Don\[cq]t match number -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 3 -- 4 -- 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | (. != 4)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -false -true -\f[R] -.fi -.SS Match nulls -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]null == \[ti]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -\f[R] -.fi -.SS Non existent key doesn\[cq]t equal a value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]select(.b != \[dq]thing\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.SS Two non existent keys are equal -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]select(.b == .c)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.SH Error -.PP -Use this operation to short-circuit expressions. -Useful for validation. -.SS Validate a particular value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: hello -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]select(.a == \[dq]howdy\[dq]) or error(\[dq].a [\[dq] + .a + \[dq]] is not howdy!\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: .a [hello] is not howdy! 
-\f[R] -.fi -.SS Validate the environment variable is a number - invalid -.PP -Running -.IP -.nf -\f[C] -numberOfCats=\[dq]please\[dq] yq --null-input \[aq]env(numberOfCats) | select(tag == \[dq]!!int\[dq]) or error(\[dq]numberOfCats is not a number :(\[dq])\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: numberOfCats is not a number :( -\f[R] -.fi -.SS Validate the environment variable is a number - valid -.PP -\f[C]with\f[R] can be a convenient way of encapsulating validation. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: Bob -favouriteAnimal: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -numberOfCats=\[dq]3\[dq] yq \[aq] - with(env(numberOfCats); select(tag == \[dq]!!int\[dq]) or error(\[dq]numberOfCats is not a number :(\[dq])) | - .numPets = env(numberOfCats) -\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name: Bob -favouriteAnimal: cat -numPets: 3 -\f[R] -.fi -.SH Eval -.PP -Use \f[C]eval\f[R] to dynamically process an expression - for instance -from an environment variable. -.PP -\f[C]eval\f[R] takes a single argument, and evaluates that as a -\f[C]yq\f[R] expression. -Any valid expression can be used, be it a path -\f[C].a.b.c | select(. == \[dq]cat\[dq])\f[R], or an update -\f[C].a.b.c = \[dq]gogo\[dq]\f[R]. -.PP -Tip: This can be a useful way to parameterise complex scripts. -.SS Dynamically evaluate a path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -pathExp: .a.b[] | select(.name == \[dq]cat\[dq]) -a: - b: - - name: dog - - name: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]eval(.pathExp)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name: cat -\f[R] -.fi -.SS Dynamically update a path from an environment variable -.PP -The env variable can be any valid yq expression. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - - name: dog - - name: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -pathEnv=\[dq].a.b[0].name\[dq] valueEnv=\[dq]moo\[dq] yq \[aq]eval(strenv(pathEnv)) = strenv(valueEnv)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - - name: moo - - name: cat -\f[R] -.fi -.SH File Operators -.PP -File operators are most often used with merge when needing to merge -specific files together. -Note that when doing this, you will need to use \f[C]eval-all\f[R] to -ensure all yaml documents are loaded into memory before performing the -merge (as opposed to \f[C]eval\f[R] which runs the expression once per -document). -.PP -Note that the \f[C]fileIndex\f[R] operator has a short alias of -\f[C]fi\f[R]. -.SS Merging files -.PP -Note the use of eval-all to ensure all documents are loaded into memory. 
-.IP -.nf -\f[C] -yq eval-all \[aq]select(fi == 0) * select(filename == \[dq]file2.yaml\[dq])\[aq] file1.yaml file2.yaml -\f[R] -.fi -.SS Get filename -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]filename\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -sample.yml -\f[R] -.fi -.SS Get file index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]file_index\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -\f[R] -.fi -.SS Get file indices of multiple documents -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -And another sample another.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq eval-all \[aq]file_index\[aq] sample.yml another.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -1 -\f[R] -.fi -.SS Get file index alias -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]fi\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -\f[R] -.fi -.SH Filter -.PP -Filters an array (or map values) by the expression given. -Equivalent to doing \f[C]map(select(exp))\f[R]. -.SS Filter array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]filter(. < 3)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -\f[R] -.fi -.SS Filter map values -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -c: - things: cool - frog: yes -d: - things: hot - frog: false -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]filter(.things == \[dq]cool\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- things: cool - frog: yes -\f[R] -.fi -.SH Flatten -.PP -This recursively flattens arrays. -.SS Flatten -.PP -Recursively flattens all arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- - 2 -- - - 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]flatten\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.SS Flatten with depth of one -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- - 2 -- - - 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]flatten(1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -- - 3 -\f[R] -.fi -.SS Flatten empty array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]flatten\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -[] -\f[R] -.fi -.SS Flatten array of objects -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo: bar -- - foo: baz -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]flatten\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- foo: bar -- foo: baz -\f[R] -.fi -.SH Group By -.PP -This is used to group items in an array by an expression. 
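-.PP
-A grouped result can be post-processed like any other sequence; as an
-illustrative sketch (hypothetical data with a \f[C]foo\f[R] field),
-counting the items per group might look like:
-.IP
-.nf
-\f[C]
-yq \[aq]group_by(.foo) | map({\[dq]foo\[dq]: .[0].foo, \[dq]count\[dq]: length})\[aq] sample.yml
-\f[R]
-.fi
-.PP
-which should return one entry per distinct \f[C]foo\f[R] value along
-with how many items carried it.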
-.SS Group by field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo: 1 - bar: 10 -- foo: 3 - bar: 100 -- foo: 1 - bar: 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]group_by(.foo)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- - foo: 1 - bar: 10 - - foo: 1 - bar: 1 -- - foo: 3 - bar: 100 -\f[R] -.fi -.SS Group by field, with nulls -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat: dog -- foo: 1 - bar: 10 -- foo: 3 - bar: 100 -- no: foo for you -- foo: 1 - bar: 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]group_by(.foo)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- - cat: dog - - no: foo for you -- - foo: 1 - bar: 10 - - foo: 1 - bar: 1 -- - foo: 3 - bar: 100 -\f[R] -.fi -.SH Has -.PP -This operation returns true if the key exists in a map (or index in an -array), false otherwise. -.SS Has map key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: yes -- a: \[ti] -- a: -- b: nope -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | has(\[dq]a\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -true -true -false -\f[R] -.fi -.SS Select, checking for existence of deep paths -.PP -Simply pipe in parent expressions into \f[C]has\f[R] -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: - b: - c: cat -- a: - b: - d: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(.a.b | has(\[dq]c\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - c: cat -\f[R] -.fi -.SS Has array index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- [] -- [1] -- [1, 2] -- [1, null] -- [1, 2, 3] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | has(1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -false -false -true -true -true -\f[R] -.fi -.SH Keys -.PP -Use the \f[C]keys\f[R] operator to return map keys or array indices. 
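-.PP
-The result is an ordinary sequence, so it can be piped into other
-operators; for instance (an illustrative example):
-.IP
-.nf
-\f[C]
-yq \[aq]keys | sort\[aq] sample.yml
-\f[R]
-.fi
-.PP
-should return the map keys in sorted order.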
-.SS Map keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -dog: woof -cat: meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]keys\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- dog -- cat -\f[R] -.fi -.SS Array keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- apple -- banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]keys\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 0 -- 1 -\f[R] -.fi -.SS Retrieve array key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[1] | key\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS Retrieve map key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | key\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a -\f[R] -.fi -.SS No key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]key\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SS Update map key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - x: 3 - y: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a.x | key) = \[dq]meow\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - meow: 3 - y: 4 -\f[R] -.fi -.SS Get comment from map key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - # comment on key - x: 3 - y: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.x | key | headComment\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -comment on key -\f[R] -.fi -.SS Check node is a key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - - cat - c: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][... | { \[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]tag\[dq]: tag }]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- p: \[dq]\[dq] - isKey: false - tag: \[aq]!!map\[aq] -- p: a - isKey: true - tag: \[aq]!!str\[aq] -- p: a - isKey: false - tag: \[aq]!!map\[aq] -- p: a.b - isKey: true - tag: \[aq]!!str\[aq] -- p: a.b - isKey: false - tag: \[aq]!!seq\[aq] -- p: a.b.0 - isKey: false - tag: \[aq]!!str\[aq] -- p: a.c - isKey: true - tag: \[aq]!!str\[aq] -- p: a.c - isKey: false - tag: \[aq]!!str\[aq] -\f[R] -.fi -.SS Get kind -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: [] -g: {} -h: null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | kind\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -map -scalar -scalar -scalar -scalar -seq -map -scalar -\f[R] -.fi -.SS Get kind, ignores custom tags -.PP -Unlike tag, kind is not affected by custom tags. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !!thing cat -b: !!foo {} -c: !!bar [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | kind\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -map -scalar -map -seq -\f[R] -.fi -.SS Add comments only to scalars -.PP -An example of how you can use kind -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: 5 - c: 3.2 -e: true -f: [] -g: {} -h: null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.. 
| select(kind == \[dq]scalar\[dq])) line_comment = \[dq]this is a scalar\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: 5 # this is a scalar - c: 3.2 # this is a scalar -e: true # this is a scalar -f: [] -g: {} -h: null # this is a scalar -\f[R] -.fi -.SH Length -.PP -Returns the lengths of the nodes. -Length is defined according to the type of the node. -.SS String length -.PP -returns length of string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | length\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -3 -\f[R] -.fi -.SS null length -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | length\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -\f[R] -.fi -.SS Map length -.PP -returns number of entries -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -c: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]length\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -2 -\f[R] -.fi -.SS Array length -.PP -returns number of elements -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 2 -- 4 -- 6 -- 8 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]length\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -4 -\f[R] -.fi -.SH Line -.PP -Returns the line of the matching node. -Starts from 1, 0 indicates there was no line data. -.SS Returns line of \f[I]value\f[R] node -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: - c: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b | line\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -3 -\f[R] -.fi -.SS Returns line of \f[I]key\f[R] node -.PP -Pipe through the key operator to get the line of the key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: - c: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b | key | line\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -2 -\f[R] -.fi -.SS First line is 1 -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | line\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS No line data is 0 -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]{\[dq]a\[dq]: \[dq]new entry\[dq]} | line\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -0 -\f[R] -.fi -.SH Load -.PP -The load operators allows you to load in content from another file. -.PP -Note that you can use string operators like \f[C]+\f[R] and -\f[C]sub\f[R] to modify the value in the yaml file to a path that exists -in your system. -.PP -You can load files of the following supported types: -.PP -.TS -tab(@); -l l. -T{ -Format -T}@T{ -Load Operator -T} -_ -T{ -Yaml -T}@T{ -load -T} -T{ -XML -T}@T{ -load_xml -T} -T{ -Properties -T}@T{ -load_props -T} -T{ -Plain String -T}@T{ -load_str -T} -T{ -Base64 -T}@T{ -load_base64 -T} -.TE -.PP -Note that load_base64 only works for base64 encoded utf-8 strings. 
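-.PP
-As noted above, string operators can be used to derive the path that is
-loaded; a hypothetical sketch that rewrites a file extension before
-loading (assuming the resulting yaml file actually exists) could be:
-.IP
-.nf
-\f[C]
-yq \[aq].something |= load(\[dq]../../examples/\[dq] + (.file | sub(\[dq]\[rs].json$\[dq], \[dq].yml\[dq])))\[aq] sample.yml
-\f[R]
-.fi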
-.SS Samples files for tests: -.SS yaml -.PP -\f[C]../../examples/thing.yml\f[R]: -.IP -.nf -\f[C] -a: apple is included -b: cool -\f[R] -.fi -.SS xml -.PP -\f[C]small.xml\f[R]: -.IP -.nf -\f[C] -<this>is some xml</this> -\f[R] -.fi -.SS properties -.PP -\f[C]small.properties\f[R]: -.IP -.nf -\f[C] -this.is = a properties file -\f[R] -.fi -.SS base64 -.PP -\f[C]base64.txt\f[R]: -.IP -.nf -\f[C] -bXkgc2VjcmV0IGNoaWxsaSByZWNpcGUgaXMuLi4u -\f[R] -.fi -.SS Simple example -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myFile: ../../examples/thing.yml -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]load(.myFile)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: apple is included -b: cool. -\f[R] -.fi -.SS Replace node with referenced file -.PP -Note that you can modify the filename in the load operator if needed. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -something: - file: thing.yml -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].something |= load(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -something: - a: apple is included - b: cool. -\f[R] -.fi -.SS Replace \f[I]all\f[R] nodes with referenced file -.PP -Recursively match all the nodes (\f[C]..\f[R]) and then filter the ones -that have a `file' attribute. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -something: - file: thing.yml -over: - here: - - file: thing.yml -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.. | select(has(\[dq]file\[dq]))) |= load(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -something: - a: apple is included - b: cool. -over: - here: - - a: apple is included - b: cool. -\f[R] -.fi -.SS Replace node with referenced file as string -.PP -This will work for any text based file -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -something: - file: thing.yml -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].something |= load_str(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -something: |- - a: apple is included - b: cool. -\f[R] -.fi -.SS Load from XML -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].more_stuff = load_xml(\[dq]../../examples/small.xml\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: things -more_stuff: - this: is some xml -\f[R] -.fi -.SS Load from Properties -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].more_stuff = load_props(\[dq]../../examples/small.properties\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: things -more_stuff: - this: - is: a properties file -\f[R] -.fi -.SS Merge from properties -.PP -This can be used as a convenient way to update a yaml document -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -this: - is: from yaml - cool: ay -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. *= load_props(\[dq]../../examples/small.properties\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -this: - is: a properties file - cool: ay -\f[R] -.fi -.SS Load from base64 encoded file -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].more_stuff = load_base64(\[dq]../../examples/base64.txt\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: things -more_stuff: my secret chilli recipe is.... 
-\f[R] -.fi -.SH Map -.PP -Maps values of an array. -Use \f[C]map_values\f[R] to map values of an object. -.SS Map array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]map(. + 1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 2 -- 3 -- 4 -\f[R] -.fi -.SS Map object values -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: 2 -c: 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]map_values(. + 1)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2 -b: 3 -c: 4 -\f[R] -.fi -.SH Max -.PP -Computes the maximum among an incoming sequence of scalar values. -.SS Maximum int -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 99 -- 16 -- 12 -- 6 -- 66 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]max\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -99 -\f[R] -.fi -.SS Maximum string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo -- bar -- baz -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]max\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo -\f[R] -.fi -.SS Maximum of empty -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -[] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]max\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SH Min -.PP -Computes the minimum among an incoming sequence of scalar values. -.SS Minimum int -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 99 -- 16 -- 12 -- 6 -- 66 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]min\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -6 -\f[R] -.fi -.SS Minimum string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo -- bar -- baz -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]min\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bar -\f[R] -.fi -.SS Minimum of empty -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -[] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]min\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SH Modulo -.PP -Arithmetic modulo operator, returns the remainder from dividing two -numbers. -.SS Number modulo - int -.PP -If the lhs and rhs are ints then the expression will be calculated with -ints. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 13 -b: 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a % .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 1 -b: 2 -\f[R] -.fi -.SS Number modulo - float -.PP -If the lhs or rhs are floats then the expression will be calculated with -floats. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 12 -b: 2.5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a % .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !!float 2 -b: 2.5 -\f[R] -.fi -.SS Number modulo - int by zero -.PP -If the lhs is an int and rhs is a 0 the result is an error. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1 -b: 0 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a % .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: cannot modulo by 0 -\f[R] -.fi -.SS Number modulo - float by zero -.PP -If the lhs is a float and rhs is a 0 the result is NaN. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 1.1 -b: 0 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a % .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !!float NaN -b: 0 -\f[R] -.fi -.SH Multiply (Merge) -.PP -Like the multiple operator in jq, depending on the operands, this -multiply operator will do different things. -Currently numbers, arrays and objects are supported. -.SS Objects and arrays - merging -.PP -Objects are merged \f[I]deeply\f[R] matching on matching keys. -By default, array values override and are not deeply merged. -.PP -You can use the add operator \f[C]+\f[R], to shallow merge objects, see -more info here (https://mikefarah.gitbook.io/yq/operators/add). -.PP -Note that when merging objects, this operator returns the merged object -(not the parent). -This will be clearer in the examples below. -.SS Merge Flags -.PP -You can control how objects are merged by using one or more of the -following flags. -Multiple flags can be used together, e.g.\ \f[C].a *+? .b\f[R]. -See examples below -.IP \[bu] 2 -\f[C]+\f[R] append arrays -.IP \[bu] 2 -\f[C]d\f[R] deeply merge arrays -.IP \[bu] 2 -\f[C]?\f[R] only merge \f[I]existing\f[R] fields -.IP \[bu] 2 -\f[C]n\f[R] only merge \f[I]new\f[R] fields -.IP \[bu] 2 -\f[C]c\f[R] clobber custom tags -.PP -To perform a shallow merge only, use the add operator \f[C]+\f[R], see -more info here (https://mikefarah.gitbook.io/yq/operators/add). -.SS Merge two files together -.PP -This uses the load operator to merge file2 into file1. -.IP -.nf -\f[C] -yq \[aq]. *= load(\[dq]file2.yml\[dq])\[aq] file1.yml -\f[R] -.fi -.SS Merging all files -.PP -Note the use of \f[C]eval-all\f[R] to ensure all documents are loaded -into memory. -.IP -.nf -\f[C] -yq eval-all \[aq]. as $item ireduce ({}; . * $item )\[aq] *.yml -\f[R] -.fi -.SH Merging complex arrays together by a key field -.PP -By default - \f[C]yq\f[R] merge is naive. -It merges maps when they match the key name, and arrays are merged -either by appending them together, or merging the entries by their -position in the array. 
-.PP -For more complex array merging (e.g.\ merging items that match on a -certain key) please see the example -here (https://mikefarah.gitbook.io/yq/operators/multiply-merge#merge-arrays-of-objects-together-matching-on-a-key) -.SS Multiply integers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *= .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 12 -b: 4 -\f[R] -.fi -.SS Multiply string node X int -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b * 4\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bananabananabananabanana -\f[R] -.fi -.SS Multiply int X string node -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]4 * .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bananabananabananabanana -\f[R] -.fi -.SS Multiply string X int node -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -n: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]\[dq]banana\[dq] * .n\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bananabananabananabanana -\f[R] -.fi -.SS Multiply int node X string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -n: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].n * \[dq]banana\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bananabananabananabanana -\f[R] -.fi -.SS Merge objects together, returning merged result only -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - field: me - fieldA: cat -b: - field: - g: wizz - fieldB: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a * .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -field: - g: wizz -fieldA: cat -fieldB: dog -\f[R] -.fi -.SS Merge objects together, returning parent object -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - field: me - fieldA: cat -b: - field: - g: wizz - fieldB: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - field: - g: wizz - fieldA: cat - fieldB: dog -b: - field: - g: wizz - fieldB: dog -\f[R] -.fi -.SS Merge keeps style of LHS -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: {things: great} -b: - also: \[dq]me\[dq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: {things: great, also: \[dq]me\[dq]} -b: - also: \[dq]me\[dq] -\f[R] -.fi -.SS Merge arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - 1 - - 2 - - 3 -b: - - 3 - - 4 - - 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - 3 - - 4 - - 5 -b: - - 3 - - 4 - - 5 -\f[R] -.fi -.SS Merge, only existing fields -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - thing: one - cat: frog -b: - missing: two - thing: two -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *? 
.b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -thing: two -cat: frog -\f[R] -.fi -.SS Merge, only new fields -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - thing: one - cat: frog -b: - missing: two - thing: two -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *n .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -thing: one -cat: frog -missing: two -\f[R] -.fi -.SS Merge, appending arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - array: - - 1 - - 2 - - animal: dog - value: coconut -b: - array: - - 3 - - 4 - - animal: cat - value: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *+ .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -array: - - 1 - - 2 - - animal: dog - - 3 - - 4 - - animal: cat -value: banana -\f[R] -.fi -.SS Merge, only existing fields, appending arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - thing: - - 1 - - 2 -b: - thing: - - 3 - - 4 - another: - - 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *?+ .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -thing: - - 1 - - 2 - - 3 - - 4 -\f[R] -.fi -.SS Merge, deeply merging arrays -.PP -Merging arrays deeply means arrays are merged like objects, with indices -as their key. -In this case, we merge the first item in the array and do nothing with -the second. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - name: fred - age: 12 - - name: bob - age: 32 -b: - - name: fred - age: 34 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *d .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: fred - age: 34 -- name: bob - age: 32 -\f[R] -.fi -.SS Merge arrays of objects together, matching on a key -.PP -This is a fairly complex expression - you can use it as is by providing -the environment variables as seen in the example below. -.PP -It merges in the array provided in the second file into the first - -matching on equal keys. -.PP -Explanation: -.PP -The approach, at a high level, is to reduce into a merged map (keyed by -the unique key) and then convert that back into an array. -.PP -First the expression will create a map from the arrays keyed by the -idPath, the unique field we want to merge by. -The reduce operator is merging `({}; . * $item )', so array elements -with the matching key will be merged together. -.PP -Next, we convert the map back to an array, using reduce again, -concatenating all the map values together. -.PP -Finally, we set the result of the merged array back into the first doc. -.PP -Thanks Kev from -stackoverflow (https://stackoverflow.com/a/70109529/1168223) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myArray: - - a: apple - b: appleB - - a: kiwi - b: kiwiB - - a: banana - b: bananaB -something: else -\f[R] -.fi -.PP -And another sample another.yml file of: -.IP -.nf -\f[C] -newArray: - - a: banana - c: bananaC - - a: apple - b: appleB2 - - a: dingo - c: dingoC -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -idPath=\[dq].a\[dq] originalPath=\[dq].myArray\[dq] otherPath=\[dq].newArray\[dq] yq eval-all \[aq] -( - (( (eval(strenv(originalPath)) + eval(strenv(otherPath))) | .[] | {(eval(strenv(idPath))): .}) as $item ireduce ({}; . * $item )) as $uniqueMap - | ( $uniqueMap | to_entries | .[]) as $item ireduce([]; . 
+ $item.value) -) as $mergedArray -| select(fi == 0) | (eval(strenv(originalPath))) = $mergedArray -\[aq] sample.yml another.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myArray: - - a: apple - b: appleB2 - - a: kiwi - b: kiwiB - - a: banana - b: bananaB - c: bananaC - - a: dingo - c: dingoC -something: else -\f[R] -.fi -.SS Merge to prefix an element -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. * {\[dq]a\[dq]: {\[dq]c\[dq]: .a}}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - c: cat -b: dog -\f[R] -.fi -.SS Merge with simple aliases -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cat - c: frog -b: - f: *cat -c: - g: thongs -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].c * .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -g: thongs -f: *cat -\f[R] -.fi -.SS Merge copies anchor names -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - c: &cat frog -b: - f: *cat -c: - g: thongs -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].c * .a\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -g: thongs -c: &cat frog -\f[R] -.fi -.SS Merge with merge anchors -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar * .foobarList\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -c: foobarList_c -!!merge <<: - - *foo - - *bar -thing: foobar_thing -b: foobarList_b -\f[R] -.fi -.SS Custom types: that are really numbers -.PP -When custom tags are encountered, yq will try to decode the underlying -type. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse 2 -b: !goat 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a * .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse 6 -b: !goat 3 -\f[R] -.fi -.SS Custom types: that are really maps -.PP -Custom tags will be maintained. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse - cat: meow -b: !goat - dog: woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a * .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse - cat: meow - dog: woof -b: !goat - dog: woof -\f[R] -.fi -.SS Custom types: clobber tags -.PP -Use the \f[C]c\f[R] option to clobber custom tags. -Note that the second tag is now used. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse - cat: meow -b: !goat - dog: woof -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a *=c .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !goat - cat: meow - dog: woof -b: !goat - dog: woof -\f[R] -.fi -.SS Merging a null with a map -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]null * {\[dq]some\[dq]: \[dq]thing\[dq]}\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -some: thing -\f[R] -.fi -.SS Merging a map with null -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]{\[dq]some\[dq]: \[dq]thing\[dq]} * null\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -some: thing -\f[R] -.fi -.SS Merging a null with an array -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]null * [\[dq]some\[dq]]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- some -\f[R] -.fi -.SS Merging an array with null -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq][\[dq]some\[dq]] * null\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- some -\f[R] -.fi -.SH Omit -.PP -Works like \f[C]pick\f[R], but instead you specify the keys/indices that -you \f[I]don\[cq]t\f[R] want included. -.SS Omit keys from map -.PP -Note that non existent keys are skipped. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myMap: - cat: meow - dog: bark - thing: hamster - hamster: squeak -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].myMap |= omit([\[dq]hamster\[dq], \[dq]cat\[dq], \[dq]goat\[dq]])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myMap: - dog: bark - thing: hamster -\f[R] -.fi -.SS Omit indices from array -.PP -Note that non existent indices are skipped. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- leopard -- lion -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]omit([2, 0, 734, -5])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- leopard -\f[R] -.fi -.SH Parent -.PP -Parent simply returns the parent nodes of the matching nodes. -.SS Simple example -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - nested: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.nested | parent\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -nested: cat -\f[R] -.fi -.SS Parent of nested matches -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - fruit: apple - name: bob -b: - fruit: banana - name: sam -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | select(. == \[dq]banana\[dq]) | parent\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -fruit: banana -name: sam -\f[R] -.fi -.SS N-th parent -.PP -You can optionally supply the number of levels to go up for the parent, -the default being 1. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - c: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b.c | parent(2)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: - c: cat -\f[R] -.fi -.SS N-th parent - another level -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: - c: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b.c | parent(3)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: - c: cat -\f[R] -.fi -.SS No parent -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]parent\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SH Path -.PP -The \f[C]path\f[R] operator can be used to get the traversal paths of -matching nodes in an expression. 
-The path is returned as an array, which if traversed in order will lead -to the matching node. -.PP -You can get the key/index of matching nodes by using the \f[C]path\f[R] -operator to return the path array then piping that through -\f[C].[-1]\f[R] to get the last element of that array, the key. -.PP -Use \f[C]setpath\f[R] to set a value to the path array returned by -\f[C]path\f[R], and similarly \f[C]delpaths\f[R] for an array of path -arrays. -.SS Map path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b | path\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a -- b -\f[R] -.fi -.SS Get map key -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b | path | .[-1]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b -\f[R] -.fi -.SS Array path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.[] | select(. == \[dq]dog\[dq]) | path\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a -- 1 -\f[R] -.fi -.SS Get array index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.[] | select(. == \[dq]dog\[dq]) | path | .[-1]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS Print path and value -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - dog - - frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a[] | select(. == \[dq]*og\[dq]) | [{\[dq]path\[dq]:path, \[dq]value\[dq]:.}]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- path: - - a - - 1 - value: dog -- path: - - a - - 2 - value: frog -\f[R] -.fi -.SS Set path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]setpath([\[dq]a\[dq], \[dq]b\[dq]]; \[dq]things\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: things -\f[R] -.fi -.SS Set on empty document -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]setpath([\[dq]a\[dq], \[dq]b\[dq]]; \[dq]things\[dq])\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: things -\f[R] -.fi -.SS Set path to prune deep paths -.PP -Like pick but recursive. -This uses \f[C]ireduce\f[R] to deeply set the selected paths into an -empty object. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -parentA: bob -parentB: - child1: i am child1 - child2: i am child2 -parentC: - child1: me child1 - child2: me child2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.parentB.child2, .parentC.child1) as $i - ireduce({}; setpath($i | path; $i))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -parentB: - child2: i am child2 -parentC: - child1: me child1 -\f[R] -.fi -.SS Set array path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]setpath([\[dq]a\[dq], 0]; \[dq]things\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - things - - frog -\f[R] -.fi -.SS Set array path empty -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]setpath([\[dq]a\[dq], 0]; \[dq]things\[dq])\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - things -\f[R] -.fi -.SS Delete path -.PP -Notice delpaths takes an \f[I]array\f[R] of paths. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat - c: dog - d: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]delpaths([[\[dq]a\[dq], \[dq]c\[dq]], [\[dq]a\[dq], \[dq]d\[dq]]])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.SS Delete array path -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]delpaths([[\[dq]a\[dq], 0]])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - - frog -\f[R] -.fi -.SS Delete - wrong parameter -.PP -delpaths does not work with a single path array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - cat - - frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]delpaths([\[dq]a\[dq], 0])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: DELPATHS: expected entry [0] to be a sequence, but its a !!str. Note that delpaths takes an array of path arrays, e.g. [[\[dq]a\[dq], \[dq]b\[dq]]] -\f[R] -.fi -.SH Pick -.PP -Filter a map by the specified list of keys. -Map is returned with the key in the order of the pick list. -.PP -Similarly, filter an array by the specified list of indices. -.SS Pick keys from map -.PP -Note that the order of the keys matches the pick order and non existent -keys are skipped. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myMap: - cat: meow - dog: bark - thing: hamster - hamster: squeak -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].myMap |= pick([\[dq]hamster\[dq], \[dq]cat\[dq], \[dq]goat\[dq]])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myMap: - hamster: squeak - cat: meow -\f[R] -.fi -.SS Pick indices from array -.PP -Note that the order of the indices matches the pick order and non -existent indices are skipped. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- leopard -- lion -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]pick([2, 0, 734, -5])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- lion -- cat -\f[R] -.fi -.SH Pipe -.PP -Pipe the results of an expression into another. -Like the bash operator. -.SS Simple Pipe -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a | .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Multiple updates -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cow -b: sheep -c: same -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = \[dq]cat\[dq] | .b = \[dq]dog\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: dog -c: same -\f[R] -.fi -.SH Pivot -.PP -Emulates the \f[C]PIVOT\f[R] function supported by several popular RDBMS -systems. -.SS Pivot a sequence of sequences -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- - foo - - bar - - baz -- - sis - - boom - - bah -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]pivot\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- - foo - - sis -- - bar - - boom -- - baz - - bah -\f[R] -.fi -.SS Pivot sequence of heterogeneous sequences -.PP -Missing values are \[lq]padded\[rq] to null. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- - foo - - bar - - baz -- - sis - - boom - - bah - - blah -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]pivot\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- - foo - - sis -- - bar - - boom -- - baz - - bah -- - - - blah -\f[R] -.fi -.SS Pivot sequence of maps -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo: a - bar: b - baz: c -- foo: x - bar: y - baz: z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]pivot\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo: - - a - - x -bar: - - b - - y -baz: - - c - - z -\f[R] -.fi -.SS Pivot sequence of heterogeneous maps -.PP -Missing values are \[lq]padded\[rq] to null. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- foo: a - bar: b - baz: c -- foo: x - bar: y - baz: z - what: ever -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]pivot\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo: - - a - - x -bar: - - b - - y -baz: - - c - - z -what: - - - - ever -\f[R] -.fi -.SH Recursive Descent (Glob) -.PP -This operator recursively matches (or globs) all child nodes of -a given element, including that node itself. -This is most often used to apply a filter recursively against all -matches. -.SS match values form \f[C]..\f[R] -.PP -This will, like the \f[C]jq\f[R] equivalent, recursively match all -\f[I]value\f[R] nodes. -Use it to find/manipulate particular values. -.PP -For instance, to set the \f[C]style\f[R] of all \f[I]value\f[R] nodes in -a yaml doc, excluding map keys: -.IP -.nf -\f[C] -yq \[aq].. style= \[dq]flow\[dq]\[aq] file.yaml -\f[R] -.fi -.SS match values and map keys form \f[C]...\f[R] -.PP -This form also includes map keys in the result set. -This is particularly useful in YAML as unlike JSON, map keys can have -their own styling and tags and also use anchors and aliases. -.PP -For instance, to set the \f[C]style\f[R] of all nodes in a yaml doc, -including the map keys: -.IP -.nf -\f[C] -yq \[aq]... style= \[dq]flow\[dq]\[aq] file.yaml -\f[R] -.fi -.SS Recurse map (values only) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]..\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -frog -\f[R] -.fi -.SS Recursively find nodes with keys -.PP -Note that this example has wrapped the expression in \f[C][]\f[R] to -show that there are two matches returned. -You do not have to wrap in \f[C][]\f[R] in your path expression. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - name: frog - b: - name: blog - age: 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][.. | select(has(\[dq]name\[dq]))]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: frog - b: - name: blog - age: 12 -- name: blog - age: 12 -\f[R] -.fi -.SS Recursively find nodes with values -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - nameA: frog - b: - nameB: frog - age: 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | select(. 
== \[dq]frog\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -frog -frog -\f[R] -.fi -.SS Recurse map (values and keys) -.PP -Note that the map key appears in the results. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]...\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: frog -a -frog -\f[R] -.fi -.SS Aliases are not traversed -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cat - c: frog -b: *cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][..]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: &cat - c: frog - b: *cat -- &cat - c: frog -- frog -- *cat -\f[R] -.fi -.SS Merge docs are not traversed -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar | [..]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- c: foobar_c - !!merge <<: *foo - thing: foobar_thing -- foobar_c -- *foo -- foobar_thing -\f[R] -.fi -.SH Reduce -.PP -Reduce is a powerful way to process a collection of data into a new -form. -.IP -.nf -\f[C] -<exp> as $<name> ireduce (<init>; <block>) -\f[R] -.fi -.PP -e.g. -.IP -.nf -\f[C] -\&.[] as $item ireduce (0; . + $item) -\f[R] -.fi -.PP -On the LHS we are configuring the collection of items that will be -reduced \f[C]<exp>\f[R] as well as what each element will be called -\f[C]$<name>\f[R]. -Note that the array has been splatted into its individual elements. -.PP -On the RHS there is \f[C]<init>\f[R], the starting value of the -accumulator and \f[C]<block>\f[R], the expression that will update the -accumulator for each element in the collection. -Note that within the block expression, \f[C].\f[R] will evaluate to the -current value of the accumulator. -.SS yq vs jq syntax -.PP -Reduce syntax in \f[C]yq\f[R] is a little different from \f[C]jq\f[R] - -as \f[C]yq\f[R] (currently) isn\[cq]t as sophisticated as \f[C]jq\f[R] -and it only supports infix notation (e.g.\ a + b, where the operator is -in the middle of the two parameters) - whereas \f[C]jq\f[R] uses a mix -of infix notation with \f[I]prefix\f[R] notation -(e.g.\ \f[C]reduce a b\f[R] is like writing \f[C]+ a b\f[R]). -.PP -To that end, the reduce operator is called \f[C]ireduce\f[R] for -backwards compatibility if a \f[C]jq\f[R]-like prefix version of -\f[C]reduce\f[R] is ever added. -.SS Sum numbers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 10 -- 2 -- 5 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] as $item ireduce (0; . + $item)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -20 -\f[R] -.fi -.SS Merge all yaml files together -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -And another sample another.yml file of: -.IP -.nf -\f[C] -b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq eval-all \[aq]. as $item ireduce ({}; . 
* $item )\[aq] sample.yml another.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: dog -\f[R] -.fi -.SS Convert an array to an object -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Cathy - has: apples -- name: Bob - has: bananas -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] as $item ireduce ({}; .[$item | .name] = ($item | .has) )\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Cathy: apples -Bob: bananas -\f[R] -.fi -.SH Reverse -.PP -Reverses the order of the items in an array -.SS Reverse -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]reverse\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 3 -- 2 -- 1 -\f[R] -.fi -.SS Sort descending by string field -.PP -Use sort with reverse to sort in descending order. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: banana -- a: cat -- a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a) | reverse\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: cat -- a: banana -- a: apple -\f[R] -.fi -.SH Select -.PP -Select is used to filter arrays and maps by a boolean expression. -.SS Related Operators -.IP \[bu] 2 -equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators -here (https://mikefarah.gitbook.io/yq/operators/equals) -.IP \[bu] 2 -comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators -here (https://mikefarah.gitbook.io/yq/operators/compare) -.IP \[bu] 2 -boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc) -here (https://mikefarah.gitbook.io/yq/operators/boolean-operators) -.SS Select elements from array using wildcard prefix -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- goat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(. == \[dq]*at\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -goat -\f[R] -.fi -.SS Select elements from array using wildcard suffix -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- go-kart -- goat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(. == \[dq]go*\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -go-kart -goat -\f[R] -.fi -.SS Select elements from array using wildcard prefix and suffix -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- ago -- go -- meow -- going -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(. == \[dq]*go*\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -ago -go -going -\f[R] -.fi -.SS Select elements from array with regular expression -.PP -See more regular expression examples under the \f[C]string\f[R] operator -docs (https://mikefarah.gitbook.io/yq/operators/string-operators). -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- this_0 -- not_this -- nor_0_this -- thisTo_4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(test(\[dq][a-zA-Z]+_[0-9]$\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -this_0 -thisTo_4 -\f[R] -.fi -.SS Select items from a map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -things: cat -bob: goat -horse: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(. 
== \[dq]cat\[dq] or test(\[dq]og$\[dq]))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -dog -\f[R] -.fi -.SS Use select and with_entries to filter map keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: bob -legs: 2 -game: poker -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_entries(select(.key | test(\[dq]ame$\[dq])))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name: bob -game: poker -\f[R] -.fi -.SS Select multiple items in a map and update -.PP -Note the brackets around the entire LHS. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - things: cat - bob: goat - horse: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.a.[] | select(. == \[dq]cat\[dq] or . == \[dq]goat\[dq])) |= \[dq]rabbit\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - things: rabbit - bob: rabbit - horse: dog -\f[R] -.fi -.SH Shuffle -.PP -Shuffles an array. -Note that this command does \f[I]not\f[R] use a cryptographically secure -random number generator to randomise the array order. -.SS Shuffle array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -- 4 -- 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]shuffle\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 5 -- 2 -- 4 -- 1 -- 3 -\f[R] -.fi -.SS Shuffle array in place -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: - - 1 - - 2 - - 3 - - 4 - - 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].cool |= shuffle\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: - - 5 - - 2 - - 4 - - 1 - - 3 -\f[R] -.fi -.SH Slice/Splice Array -.PP -The slice array operator takes an array as input and returns a subarray. -Like the \f[C]jq\f[R] equivalent, \f[C].[10:15]\f[R] will return an -array of length 5, starting from index 10 inclusive, up to index 15 -exclusive. -Negative numbers count backwards from the end of the array. -.PP -You may leave out the first or second number, which will refer to the -start or end of the array respectively. -.SS Slicing arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -- frog -- cow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[1:3]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- dog -- frog -\f[R] -.fi -.SS Slicing arrays - without the first number -.PP -Starts from the start of the array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -- frog -- cow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[:2]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- cat -- dog -\f[R] -.fi -.SS Slicing arrays - without the second number -.PP -Finishes at the end of the array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -- frog -- cow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[2:]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- frog -- cow -\f[R] -.fi -.SS Slicing arrays - use negative numbers to count backwards from the end -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -- frog -- cow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[1:-1]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- dog -- frog -\f[R] -.fi -.SS Inserting into the middle of an array -.PP -using an expression to find the index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -- frog -- cow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.[] | select(. 
== \[dq]dog\[dq]) | key + 1) as $pos | .[0:($pos)] + [\[dq]rabbit\[dq]] + .[$pos:]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- cat -- dog -- rabbit -- frog -- cow -\f[R] -.fi -.SH Sort Keys -.PP -The Sort Keys operator sorts maps by their keys (based on their string -value). -This operator does not do anything to arrays or scalars (so you can -easily recursively apply it to all maps). -.PP -Sort is particularly useful for diffing two different yaml documents: -.IP -.nf -\f[C] -yq -i -P \[aq]sort_keys(..)\[aq] file1.yml -yq -i -P \[aq]sort_keys(..)\[aq] file2.yml -diff file1.yml file2.yml -\f[R] -.fi -.PP -Note that \f[C]yq\f[R] does not yet consider anchors when sorting by -keys - this may result in invalid yaml documents if you are using merge -anchors. -.PP -For more advanced sorting, using \f[C]to_entries\f[R] to convert the map -to an array, then sort/process the array as you like (e.g.\ using -\f[C]sort_by\f[R]) and convert back to a map using -\f[C]from_entries\f[R]. -See -here (https://mikefarah.gitbook.io/yq/operators/entries#custom-sort-map-keys) -for an example. -.SS Sort keys of map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -c: frog -a: blah -b: bing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_keys(.)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: blah -b: bing -c: frog -\f[R] -.fi -.SS Sort keys recursively -.PP -Note the array elements are left unsorted, but maps inside arrays are -sorted -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -bParent: - c: dog - array: - - 3 - - 1 - - 2 -aParent: - z: donkey - x: - - c: yum - b: delish - - b: ew - a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_keys(..)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -aParent: - x: - - b: delish - c: yum - - a: apple - b: ew - z: donkey -bParent: - array: - - 3 - - 1 - - 2 - c: dog -\f[R] -.fi -.SH Sort -.PP -Sorts an array. -Use \f[C]sort\f[R] to sort an array as is, or \f[C]sort_by(exp)\f[R] to -sort by a particular expression (e.g.\ subfield). -.PP -To sort by descending order, pipe the results through the -\f[C]reverse\f[R] operator after sorting. -.PP -Note that at this stage, \f[C]yq\f[R] only sorts scalar fields. -.SS Sort by string field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: banana -- a: cat -- a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: apple -- a: banana -- a: cat -\f[R] -.fi -.SS Sort by multiple fields -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: dog -- a: cat - b: banana -- a: cat - b: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a, .b)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: cat - b: apple -- a: cat - b: banana -- a: dog -\f[R] -.fi -.SS Sort descending by string field -.PP -Use sort with reverse to sort in descending order. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: banana -- a: cat -- a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a) | reverse\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: cat -- a: banana -- a: apple -\f[R] -.fi -.SS Sort array in place -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: - - a: banana - - a: cat - - a: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].cool |= sort_by(.a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: - - a: apple - - a: banana - - a: cat -\f[R] -.fi -.SS Sort array of objects by key -.PP -Note that you can give sort_by complex expressions, not just paths. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cool: - - b: banana - - a: banana - - c: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].cool |= sort_by(keys | .[0])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cool: - - a: banana - - b: banana - - c: banana -\f[R] -.fi -.SS Sort is stable -.PP -Note that the order of the elements is unchanged when they are equal in sorting. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: banana - b: 1 -- a: banana - b: 2 -- a: banana - b: 3 -- a: banana - b: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: banana - b: 1 -- a: banana - b: 2 -- a: banana - b: 3 -- a: banana - b: 4 -\f[R] -.fi -.SS Sort by numeric field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: 10 -- a: 100 -- a: 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort_by(.a)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: 1 -- a: 10 -- a: 100 -\f[R] -.fi -.SS Sort by custom date field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: 12-Jun-2011 -- a: 23-Dec-2010 -- a: 10-Aug-2011 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]02-Jan-2006\[dq]; sort_by(.a))\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: 23-Dec-2010 -- a: 12-Jun-2011 -- a: 10-Aug-2011 -\f[R] -.fi -.SS Sort, nulls come first -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 8 -- 3 -- null -- 6 -- true -- false -- cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]sort\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- null -- false -- true -- 3 -- 6 -- 8 -- cat -\f[R] -.fi -.SH Split into Documents -.PP -This operator splits all matches into separate documents. -.SS Split empty -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]split_doc\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SS Split array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: cat -- b: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | split_doc\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat ---- -b: dog -\f[R] -.fi -.SH String Operators -.SS RegEx -.PP -This uses Golang\[cq]s native regex functions under the hood - see their -docs (https://github.com/google/re2/wiki/Syntax) for the supported -syntax. -.PP -Case insensitive tip: prefix the regex with \f[C](?i)\f[R] - -e.g.\ \f[C]test(\[dq](?i)cats\[dq])\f[R]. -.SS match(regEx) -.PP -This operator returns the substring match details of the given regEx. -.SS capture(regEx) -.PP -Capture returns named RegEx capture groups in a map. -Can be more convenient than \f[C]match\f[R] depending on what you are -doing. -.SS test(regEx) -.PP -Returns true if the string matches the RegEx, false otherwise. -.SS sub(regEx, replacement) -.PP -Substitutes matched substrings. 
-The first parameter is the regEx to match substrings within the original -string. -The second parameter specifies what to replace those matches with. -This can refer to capture groups from the first RegEx. -.SS String blocks, bash and newlines -.PP -Bash is notorious for chomping on precious trailing newline characters, -making it tricky to set strings with newlines properly. -In particular, the \f[C]$( exp )\f[R] \f[I]will trim trailing -newlines\f[R]. -.PP -For instance to get this yaml: -.IP -.nf -\f[C] -a: | - cat -\f[R] -.fi -.PP -Using \f[C]$( exp )\f[R] wont work, as it will trim the trailing -newline. -.IP -.nf -\f[C] -m=$(echo \[dq]cat\[rs]n\[dq]) yq -n \[aq].a = strenv(m)\[aq] -a: cat -\f[R] -.fi -.PP -However, using printf works: -.IP -.nf -\f[C] -printf -v m \[dq]cat\[rs]n\[dq] ; m=\[dq]$m\[dq] yq -n \[aq].a = strenv(m)\[aq] -a: | - cat -\f[R] -.fi -.PP -As well as having multiline expressions: -.IP -.nf -\f[C] -m=\[dq]cat -\[dq] yq -n \[aq].a = strenv(m)\[aq] -a: | - cat -\f[R] -.fi -.PP -Similarly, if you\[cq]re trying to set the content from a file, and want -a trailing newline: -.IP -.nf -\f[C] -IFS= read -rd \[aq]\[aq] output < <(cat my_file) -output=$output ./yq \[aq].data.values = strenv(output)\[aq] first.yml -\f[R] -.fi -.SS Interpolation -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -value: things -another: stuff -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].message = \[dq]I like \[rs](.value) and \[rs](.another)\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -value: things -another: stuff -message: I like things and stuff -\f[R] -.fi -.SS Interpolation - not a string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -value: - an: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].message = \[dq]I like \[rs](.value)\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -value: - an: apple -message: \[aq]I like an: apple\[aq] -\f[R] -.fi -.SS To up (upper) case -.PP -Works with unicode characters -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\['a]gua -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]upcase\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\['A]GUA -\f[R] -.fi -.SS To down (lower) case -.PP -Works with unicode characters -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\['A]gUA -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]downcase\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\['a]gua -\f[R] -.fi -.SS Join strings -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- meow -- 1 -- null -- true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]join(\[dq]; \[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat; meow; 1; ; true -\f[R] -.fi -.SS Trim strings -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- \[aq] cat\[aq] -- \[aq]dog \[aq] -- \[aq] cow cow \[aq] -- horse -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | trim\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -dog -cow cow -horse -\f[R] -.fi -.SS Match string -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo bar foo -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]match(\[dq]foo\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -string: foo -offset: 0 -length: 3 -captures: [] -\f[R] -.fi -.SS Match string, case insensitive -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo bar FOO -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][match(\[dq](?i)foo\[dq]; \[dq]g\[dq])]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP 
-.nf -\f[C] -- string: foo - offset: 0 - length: 3 - captures: [] -- string: FOO - offset: 8 - length: 3 - captures: [] -\f[R] -.fi -.SS Match with global capture group -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -abc abc -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][match(\[dq](ab)(c)\[dq]; \[dq]g\[dq])]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- string: abc - offset: 0 - length: 3 - captures: - - string: ab - offset: 0 - length: 2 - - string: c - offset: 2 - length: 1 -- string: abc - offset: 4 - length: 3 - captures: - - string: ab - offset: 4 - length: 2 - - string: c - offset: 6 - length: 1 -\f[R] -.fi -.SS Match with named capture groups -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo bar foo foo foo -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][match(\[dq]foo (?P<bar123>bar)? foo\[dq]; \[dq]g\[dq])]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- string: foo bar foo - offset: 0 - length: 11 - captures: - - string: bar - offset: 4 - length: 3 - name: bar123 -- string: foo foo - offset: 12 - length: 8 - captures: - - string: null - offset: -1 - length: 0 - name: bar123 -\f[R] -.fi -.SS Capture named groups into a map -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -xyzzy-14 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]capture(\[dq](?P<a>[a-z]+)-(?P<n>[0-9]+)\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: xyzzy -n: \[dq]14\[dq] -\f[R] -.fi -.SS Match without global flag -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]match(\[dq]cat\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -string: cat -offset: 0 -length: 3 -captures: [] -\f[R] -.fi -.SS Match with global flag -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][match(\[dq]cat\[dq]; \[dq]g\[dq])]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- string: cat - offset: 0 - length: 3 - captures: [] -- string: cat - offset: 4 - length: 3 - captures: [] -\f[R] -.fi -.SS Test using regex -.PP -Like jq\[cq]s equivalent, this works like match but only returns -true/false instead of full match details -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | test(\[dq]at\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -true -false -\f[R] -.fi -.SS Substitute / Replace string -.PP -This uses Golang\[cq]s regex, described -here (https://github.com/google/re2/wiki/Syntax). -Note the use of \f[C]|=\f[R] to run in context of the current string -value. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: dogs are great -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a |= sub(\[dq]dogs\[dq], \[dq]cats\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cats are great -\f[R] -.fi -.SS Substitute / Replace string with regex -.PP -This uses Golang\[cq]s regex, described -here (https://github.com/google/re2/wiki/Syntax). -Note the use of \f[C]|=\f[R] to run in context of the current string -value. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: heat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= sub(\[dq](a)\[dq], \[dq]${1}r\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cart -b: heart -\f[R] -.fi -.SS Custom types: that are really strings -.PP -When custom tags are encountered, yq will try to decode the underlying -type. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse cat -b: !goat heat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= sub(\[dq](a)\[dq], \[dq]${1}r\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse cart -b: !goat heart -\f[R] -.fi -.SS Split strings -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat; meow; 1; ; true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]split(\[dq]; \[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- cat -- meow -- \[dq]1\[dq] -- \[dq]\[dq] -- \[dq]true\[dq] -\f[R] -.fi -.SS Split strings one match -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -word -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]split(\[dq]; \[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- word -\f[R] -.fi -.SS To string -.PP -Note that you may want to force \f[C]yq\f[R] to leave scalar values -wrapped by passing in \f[C]--unwrapScalar=false\f[R] or \f[C]-r=f\f[R] -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- true -- null -- \[ti] -- cat -- an: object -- - array - - 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |= to_string\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- \[dq]1\[dq] -- \[dq]true\[dq] -- \[dq]null\[dq] -- \[dq]\[ti]\[dq] -- cat -- \[dq]an: object\[dq] -- \[dq]- array\[rs]n- 2\[dq] -\f[R] -.fi -.SH Style -.PP -The style operator can be used to get or set the style of nodes -(e.g.\ string style, yaml style). -Use this to control the formatting of the document in yaml. -.SS Update and set style of a particular node (simple) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: thing - c: something -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b = \[dq]new\[dq] | .a.b style=\[dq]double\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: \[dq]new\[dq] - c: something -\f[R] -.fi -.SS Update and set style of a particular node using path variables -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: thing - c: something -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with(.a.b ; . = \[dq]new\[dq] | . style=\[dq]double\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: \[dq]new\[dq] - c: something -\f[R] -.fi -.SS Set tagged style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]tagged\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -!!map -a: !!str cat -b: !!int 5 -c: !!float 3.2 -e: !!bool true -f: !!seq - - !!int 1 - - !!int 2 - - !!int 3 -g: !!map - something: !!str cool -\f[R] -.fi -.SS Set double quote style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]double\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[dq]cat\[dq] -b: \[dq]5\[dq] -c: \[dq]3.2\[dq] -e: \[dq]true\[dq] -f: - - \[dq]1\[dq] - - \[dq]2\[dq] - - \[dq]3\[dq] -g: - something: \[dq]cool\[dq] -\f[R] -.fi -.SS Set double quote style on map keys too -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]... 
style=\[dq]double\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\[dq]a\[dq]: \[dq]cat\[dq] -\[dq]b\[dq]: \[dq]5\[dq] -\[dq]c\[dq]: \[dq]3.2\[dq] -\[dq]e\[dq]: \[dq]true\[dq] -\[dq]f\[dq]: - - \[dq]1\[dq] - - \[dq]2\[dq] - - \[dq]3\[dq] -\[dq]g\[dq]: - \[dq]something\[dq]: \[dq]cool\[dq] -\f[R] -.fi -.SS Set single quote style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]single\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[aq]cat\[aq] -b: \[aq]5\[aq] -c: \[aq]3.2\[aq] -e: \[aq]true\[aq] -f: - - \[aq]1\[aq] - - \[aq]2\[aq] - - \[aq]3\[aq] -g: - something: \[aq]cool\[aq] -\f[R] -.fi -.SS Set literal quote style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]literal\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: |- - cat -b: |- - 5 -c: |- - 3.2 -e: |- - true -f: - - |- - 1 - - |- - 2 - - |- - 3 -g: - something: |- - cool -\f[R] -.fi -.SS Set folded quote style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]folded\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: >- - cat -b: >- - 5 -c: >- - 3.2 -e: >- - true -f: - - >- - 1 - - >- - 2 - - >- - 3 -g: - something: >- - cool -\f[R] -.fi -.SS Set flow quote style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. style=\[dq]flow\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{a: cat, b: 5, c: 3.2, e: true, f: [1, 2, 3], g: {something: cool}} -\f[R] -.fi -.SS Reset style - or pretty print -.PP -Set empty (default) quote style, note the usage of \f[C]...\f[R] to -match keys too. -Note that there is a \f[C]--prettyPrint/-P\f[R] short flag for this. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{a: cat, \[dq]b\[dq]: 5, \[aq]c\[aq]: 3.2, \[dq]e\[dq]: true, f: [1,2,3], \[dq]g\[dq]: { something: \[dq]cool\[dq]} } -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]... style=\[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: - - 1 - - 2 - - 3 -g: - something: cool -\f[R] -.fi -.SS Set style relatively with assign-update -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: single -b: double -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] style |= .\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: \[aq]single\[aq] -b: \[dq]double\[dq] -\f[R] -.fi -.SS Read style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -{a: \[dq]cat\[dq], b: \[aq]thing\[aq]} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | style\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -flow -double -single -\f[R] -.fi -.SH Subtract -.PP -You can use subtract to subtract numbers as well as remove elements from -an array. 
-.SS Array subtraction -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq][1,2] - [2,3]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -\f[R] -.fi -.SS Array subtraction with nested array -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq][[1], 1, 2] - [[1], 3]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 1 -- 2 -\f[R] -.fi -.SS Array subtraction with nested object -.PP -Note that order of the keys does not matter -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- a: b - c: d -- a: b -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]. - [{\[dq]c\[dq]: \[dq]d\[dq], \[dq]a\[dq]: \[dq]b\[dq]}]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- a: b -\f[R] -.fi -.SS Number subtraction - float -.PP -If the lhs or rhs are floats then the expression will be calculated with -floats. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 4.5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a - .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: -1.5 -b: 4.5 -\f[R] -.fi -.SS Number subtraction - int -.PP -If both the lhs and rhs are ints then the expression will be calculated -with ints. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 4 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a = .a - .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: -1 -b: 4 -\f[R] -.fi -.SS Decrement numbers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 3 -b: 5 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] -= 1\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2 -b: 4 -\f[R] -.fi -.SS Date subtraction -.PP -You can subtract durations from dates. -Assumes RFC3339 date time format, see date-time -operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators) -for more information. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: 2021-01-01T03:10:00Z -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a -= \[dq]3h10m\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: 2021-01-01T00:00:00Z -\f[R] -.fi -.SS Date subtraction - custom format -.PP -Use with_dtf to specify your datetime format. -See date-time -operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators) -for more information. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 6:00AM GMT -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq], .a -= \[dq]3h1m\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Saturday, 15-Dec-01 at 2:59AM GMT -\f[R] -.fi -.SS Custom types: that are really numbers -.PP -When custom tags are encountered, yq will try to decode the underlying -type. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: !horse 2 -b: !goat 1 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a -= .b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !horse 1 -b: !goat 1 -\f[R] -.fi -.SH Tag -.PP -The tag operator can be used to get or set the tag of nodes -(e.g.\ \f[C]!!str\f[R], \f[C]!!int\f[R], \f[C]!!bool\f[R]). -.SS Get tag -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. | tag\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -!!map -!!str -!!int -!!float -!!bool -!!seq -\f[R] -.fi -.SS type is an alias for tag -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -f: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. 
| type\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -!!map -!!str -!!int -!!float -!!bool -!!seq -\f[R] -.fi -.SS Set custom tag -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: str -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a tag = \[dq]!!mikefarah\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: !!mikefarah str -\f[R] -.fi -.SS Find numbers and convert them to strings -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -b: 5 -c: 3.2 -e: true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.. | select(tag == \[dq]!!int\[dq])) tag= \[dq]!!str\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: cat -b: \[dq]5\[dq] -c: 3.2 -e: true -\f[R] -.fi -.SH To Number -.PP -Parses the input as a number. -yq will try to parse values as an int first, failing that, it will try -float. -Values that are already ints or floats will be left alone. -.SS Converts strings to numbers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- \[dq]3\[dq] -- \[dq]3.1\[dq] -- \[dq]-1e3\[dq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | to_number\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -3 -3.1 --1e3 -\f[R] -.fi -.SS Doesn\[cq]t change numbers -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 3 -- 3.1 -- -1e3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | to_number\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -3 -3.1 --1e3 -\f[R] -.fi -.SS Cannot convert null -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq].a.b | to_number\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Error: cannot convert node value [null] at path a.b of tag !!null to number -\f[R] -.fi -.SH Traverse (Read) -.PP -This is the simplest (and perhaps most used) operator. -It is used to navigate deeply into yaml structures. 
-.SS Simple map navigation -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: apple -\f[R] -.fi -.SS Splat -.PP -Often used to pipe children into other operators -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- b: apple -- c: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -b: apple -c: banana -\f[R] -.fi -.SS Optional Splat -.PP -Just like splat, but won\[cq]t error if you run it against scalars -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SS Special characters -.PP -Use quotes with square brackets around path elements with special -characters -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\[dq]{}\[dq]: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[\[dq]{}\[dq]]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -frog -\f[R] -.fi -.SS Nested special characters -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - \[dq]key.withdots\[dq]: - \[dq]another.key\[dq]: apple -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a[\[dq]key.withdots\[dq]][\[dq]another.key\[dq]]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -apple -\f[R] -.fi -.SS Keys with spaces -.PP -Use quotes with square brackets around path elements with special -characters -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\[dq]red rabbit\[dq]: frog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[\[dq]red rabbit\[dq]]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -frog -\f[R] -.fi -.SS Dynamic keys -.PP -Expressions within [] can be used to dynamically lookup / calculate keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -b: apple -apple: crispy yum -banana: soft yum -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[.b]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -crispy yum -\f[R] -.fi -.SS Children don\[cq]t exist -.PP -Nodes are added dynamically while traversing -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -c: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -null -\f[R] -.fi -.SS Optional identifier -.PP -Like jq, does not output an error when the yaml is not an array or -object as expected -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a?\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -\f[R] -.fi -.SS Wildcard matching -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - cat: apple - mad: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.\[dq]*a*\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -apple -things -\f[R] -.fi -.SS Aliases -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cat - c: frog -b: *cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -*cat -\f[R] -.fi -.SS Traversing aliases with splat -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cat - c: frog -b: *cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -frog -\f[R] -.fi -.SS Traversing aliases explicitly -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: &cat - c: frog 
-b: *cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].b.c\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -frog -\f[R] -.fi -.SS Traversing arrays by index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 1 -- 2 -- 3 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[0]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -\f[R] -.fi -.SS Traversing nested arrays by index -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -[[], [cat]] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[1][0]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Maps with numeric keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -2: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[2]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Maps with non existing numeric keys -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: b -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[0]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -null -\f[R] -.fi -.SS Traversing merge anchors -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar.a\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo_a -\f[R] -.fi -.SS Traversing merge anchors with override -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar.c\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo_c -\f[R] -.fi -.SS Traversing merge anchors with local override -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar.thing\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foobar_thing -\f[R] -.fi -.SS Splatting merge anchors -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobar[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -foo_c -foo_a -foobar_thing -\f[R] -.fi -.SS Traversing merge anchor lists -.PP -Note that the later merge anchors override previous -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobarList.thing\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bar_thing 
-\f[R] -.fi -.SS Splatting merge anchor lists -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -foo: &foo - a: foo_a - thing: foo_thing - c: foo_c -bar: &bar - b: bar_b - thing: bar_thing - c: bar_c -foobarList: - b: foobarList_b - !!merge <<: - - *foo - - *bar - c: foobarList_c -foobar: - c: foobar_c - !!merge <<: *foo - thing: foobar_thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].foobarList[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -bar_b -foo_a -bar_thing -foobarList_c -\f[R] -.fi -.SS Select multiple indices -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - - a - - b - - c -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a[0, 2]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a -c -\f[R] -.fi -.SH Union -.PP -This operator is used to combine different results together. -.SS Combine scalars -.PP -Running -.IP -.nf -\f[C] -yq --null-input \[aq]1, true, \[dq]cat\[dq]\[aq] -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -1 -true -cat -\f[R] -.fi -.SS Combine selected paths -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: fieldA -b: fieldB -c: fieldC -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a, .c\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -fieldA -fieldC -\f[R] -.fi -.SH Unique -.PP -This is used to filter out duplicated items in an array. -Note that the original order of the array is maintained. -.SS Unique array of scalars (string/numbers) -.PP -Note that unique maintains the original order of the array. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- 2 -- 1 -- 3 -- 2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- 2 -- 1 -- 3 -\f[R] -.fi -.SS Unique nulls -.PP -Unique works on the node value, so it considers different -representations of nulls to be different -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- \[ti] -- null -- \[ti] -- null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- \[ti] -- null -\f[R] -.fi -.SS Unique all nulls -.PP -Run against the node tag to unique all the nulls -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- \[ti] -- null -- \[ti] -- null -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique_by(tag)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- \[ti] -\f[R] -.fi -.SS Unique array objects -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: harry - pet: cat -- name: billy - pet: dog -- name: harry - pet: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: harry - pet: cat -- name: billy - pet: dog -\f[R] -.fi -.SS Unique array of objects by a field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: harry - pet: cat -- name: billy - pet: dog -- name: harry - pet: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique_by(.name)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: harry - pet: cat -- name: billy - pet: dog -\f[R] -.fi -.SS Unique array of arrays -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- - cat - - dog -- - cat - - sheep -- - cat - - dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]unique\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- - cat - - dog -- - cat - - sheep -\f[R] -.fi -.SH Variable Operators -.PP -Like the \f[C]jq\f[R] equivalents, variables are sometimes required for -the more complex expressions (or swapping 
values between fields). -.PP -Note that there is also an additional \f[C]ref\f[R] operator that holds -a reference (instead of a copy) of the path, allowing you to make -multiple changes to the same path. -.SS Single value variable -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a as $foo | $foo\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -\f[R] -.fi -.SS Multi value variable -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- cat -- dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] as $foo | $foo\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat -dog -\f[R] -.fi -.SS Using variables as a lookup -.PP -Example taken from -jq (https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier%7C...) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -\[dq]posts\[dq]: - - \[dq]title\[dq]: First post - \[dq]author\[dq]: anon - - \[dq]title\[dq]: A well-written article - \[dq]author\[dq]: person1 -\[dq]realnames\[dq]: - \[dq]anon\[dq]: Anonymous Coward - \[dq]person1\[dq]: Person McPherson -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].realnames as $names | .posts[] | {\[dq]title\[dq]:.title, \[dq]author\[dq]: $names[.author]}\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -title: First post -author: Anonymous Coward -title: A well-written article -author: Person McPherson -\f[R] -.fi -.SS Using variables to swap values -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: a_value -b: b_value -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a as $x | .b as $y | .b = $x | .a = $y\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: b_value -b: a_value -\f[R] -.fi -.SS Use ref to reference a path repeatedly -.PP -Note: You may find the \f[C]with\f[R] operator more useful. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - b: thing - c: something -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].a.b ref $x | $x = \[dq]new\[dq] | $x style=\[dq]double\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: \[dq]new\[dq] - c: something -\f[R] -.fi -.SH With -.PP -Use the \f[C]with\f[R] operator to conveniently make multiple updates to -a deeply nested path, or to update array elements relatively to each -other. -The first argument expression sets the root context, and the second -expression runs against that root context. -.SS Update and style -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - deeply: - nested: value -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with(.a.deeply.nested; . = \[dq]newValue\[dq] | . style=\[dq]single\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - deeply: - nested: \[aq]newValue\[aq] -\f[R] -.fi -.SS Update multiple deeply nested properties -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -a: - deeply: - nested: value - other: thing -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with(.a.deeply; .nested = \[dq]newValue\[dq] | .other= \[dq]newThing\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - deeply: - nested: newValue - other: newThing -\f[R] -.fi -.SS Update array elements relatively -.PP -The second expression runs with each element of the array as it\[cq]s -contextual root. -This allows you to make updates relative to the element. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myArray: - - a: apple - - a: banana -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with(.myArray[]; .b = .a + \[dq] yum\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myArray: - - a: apple - b: apple yum - - a: banana - b: banana yum -\f[R] -.fi -.SH JSON -.PP -Encode and decode to and from JSON. -Supports multiple JSON documents in a single file (e.g.\ NDJSON). -.PP -Note that YAML is a superset of (single document) JSON - so you -don\[cq]t have to use the JSON parser to read JSON when there is only -one JSON document in the input. -You will probably want to pretty print the result in this case, to get -idiomatic YAML styling. -.SS Parse json: simple -.PP -JSON is a subset of yaml, so all you need to do is prettify the output -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]cat\[dq]: \[dq]meow\[dq]} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -cat: meow -\f[R] -.fi -.SS Parse json: complex -.PP -JSON is a subset of yaml, so all you need to do is prettify the output -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]a\[dq]:\[dq]Easy! as one two three\[dq],\[dq]b\[dq]:{\[dq]c\[dq]:2,\[dq]d\[dq]:[3,4]}} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: Easy! as one two three -b: - c: 2 - d: - - 3 - - 4 -\f[R] -.fi -.SS Encode json: simple -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: meow -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=json \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{ - \[dq]cat\[dq]: \[dq]meow\[dq] -} -\f[R] -.fi -.SS Encode json: simple - in one line -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: meow # this is a comment, and it will be dropped. -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=json -I=0 \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{\[dq]cat\[dq]:\[dq]meow\[dq]} -\f[R] -.fi -.SS Encode json: comments -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: meow # this is a comment, and it will be dropped. -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=json \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{ - \[dq]cat\[dq]: \[dq]meow\[dq] -} -\f[R] -.fi -.SS Encode json: anchors -.PP -Anchors are dereferenced -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: &ref meow -anotherCat: *ref -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=json \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{ - \[dq]cat\[dq]: \[dq]meow\[dq], - \[dq]anotherCat\[dq]: \[dq]meow\[dq] -} -\f[R] -.fi -.SS Encode json: multiple results -.PP -Each matching node is converted into a json doc. 
-This is best used with 0 indent (json document per line) -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -things: [{stuff: cool}, {whatever: cat}] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=json -I=0 \[aq].things[]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{\[dq]stuff\[dq]:\[dq]cool\[dq]} -{\[dq]whatever\[dq]:\[dq]cat\[dq]} -\f[R] -.fi -.SS Roundtrip JSON Lines / NDJSON -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]} -{\[dq]a number\[dq]: 4} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json -o=json -I=0 sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq]]} -{\[dq]a number\[dq]:4} -\f[R] -.fi -.SS Roundtrip multi-document JSON -.PP -The parser can also handle multiple multi-line json documents in a -single file (despite this not being in the JSON Lines / NDJSON spec). -Typically you would have one entire JSON document per line, but the -parser also supports multiple multi-line json documents -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{ - \[dq]this\[dq]: \[dq]is a multidoc json file\[dq] -} -{ - \[dq]it\[dq]: [ - \[dq]has\[dq], - \[dq]consecutive\[dq], - \[dq]json documents\[dq] - ] -} -{ - \[dq]a number\[dq]: 4 -} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json -o=json -I=2 sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{ - \[dq]this\[dq]: \[dq]is a multidoc json file\[dq] -} -{ - \[dq]it\[dq]: [ - \[dq]has\[dq], - \[dq]consecutive\[dq], - \[dq]json documents\[dq] - ] -} -{ - \[dq]a number\[dq]: 4 -} -\f[R] -.fi -.SS Update a specific document in a multi-document json -.PP -Documents are indexed by the \f[C]documentIndex\f[R] or \f[C]di\f[R] -operator. -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]} -{\[dq]a number\[dq]: 4} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json -o=json -I=0 \[aq](select(di == 1) | .each ) += \[dq]cool\[dq]\[aq] sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq],\[dq]cool\[dq]]} -{\[dq]a number\[dq]:4} -\f[R] -.fi -.SS Find and update a specific document in a multi-document json -.PP -Use expressions as you normally would. 
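-.PP
-A plain \f[C]select\f[R] (with no update) can likewise be used to
-extract only the matching document; a minimal sketch, assuming the same
-sample.json as below:
-.IP
-.nf
-\f[C]
-yq -p=json -o=json -I=0 \[aq]select(has(\[dq]each\[dq]))\[aq] sample.json
-\f[R]
-.fi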
-.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]} -{\[dq]a number\[dq]: 4} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json -o=json -I=0 \[aq](select(has(\[dq]each\[dq])) | .each ) += \[dq]cool\[dq]\[aq] sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq],\[dq]cool\[dq]]} -{\[dq]a number\[dq]:4} -\f[R] -.fi -.SS Decode JSON Lines / NDJSON -.PP -Given a sample.json file of: -.IP -.nf -\f[C] -{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]} -{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]} -{\[dq]a number\[dq]: 4} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=json sample.json -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -this: is a multidoc json file ---- -each: - - line is a valid json document ---- -a number: 4 -\f[R] -.fi -.SH CSV -.PP -Encode/Decode/Roundtrip CSV and TSV files. -.SS Encode -.PP -Currently supports arrays of homogeneous flat objects, that is: no -nesting and it assumes the \f[I]first\f[R] object has all the keys -required: -.IP -.nf -\f[C] -- name: Bobo - type: dog -- name: Fifi - type: cat -\f[R] -.fi -.PP -As well as arrays of arrays of scalars (strings/numbers/booleans): -.IP -.nf -\f[C] -- [Bobo, dog] -- [Fifi, cat] -\f[R] -.fi -.SS Decode -.PP -Decode assumes the first CSV/TSV row is the header row, and all rows -beneath are the entries. -The data will be coded into an array of objects, using the header rows -as keys. -.IP -.nf -\f[C] -name,type -Bobo,dog -Fifi,cat -\f[R] -.fi -.SS Encode CSV simple -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- [i, like, csv] -- [because, excel, is, cool] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=csv sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -i,like,csv -because,excel,is,cool -\f[R] -.fi -.SS Encode TSV simple -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- [i, like, csv] -- [because, excel, is, cool] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=tsv sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -i like csv -because excel is cool -\f[R] -.fi -.SS Encode array of objects to csv -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - likesApples: true - height: 168.8 -- name: Samantha\[aq]s Rabbit - numberOfCats: 2 - likesApples: false - height: -188.8 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=csv sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name,numberOfCats,likesApples,height -Gary,1,true,168.8 -Samantha\[aq]s Rabbit,2,false,-188.8 -\f[R] -.fi -.SS Encode array of objects to custom csv format -.PP -Add the header row manually, then the we convert each object into an -array of values - resulting in an array of arrays. -Pick the columns and call the header whatever you like. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - likesApples: true - height: 168.8 -- name: Samantha\[aq]s Rabbit - numberOfCats: 2 - likesApples: false - height: -188.8 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=csv \[aq][[\[dq]Name\[dq], \[dq]Number of Cats\[dq]]] + [.[] | [.name, .numberOfCats ]]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -Name,Number of Cats -Gary,1 -Samantha\[aq]s Rabbit,2 -\f[R] -.fi -.SS Encode array of objects to csv - missing fields behaviour -.PP -First entry is used to determine the headers, and it is missing -`likesApples', so it is not included in the csv. -Second entry does not have `numberOfCats' so that is blank -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - height: 168.8 -- name: Samantha\[aq]s Rabbit - height: -188.8 - likesApples: false -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=csv sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name,numberOfCats,height -Gary,1,168.8 -Samantha\[aq]s Rabbit,,-188.8 -\f[R] -.fi -.SS Parse CSV into an array of objects -.PP -First row is assumed to be the header row. -By default, entries with YAML/JSON formatting will be parsed! -.PP -Given a sample.csv file of: -.IP -.nf -\f[C] -name,numberOfCats,likesApples,height,facts -Gary,1,true,168.8,cool: true -Samantha\[aq]s Rabbit,2,false,-188.8,tall: indeed -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=csv sample.csv -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - likesApples: true - height: 168.8 - facts: - cool: true -- name: Samantha\[aq]s Rabbit - numberOfCats: 2 - likesApples: false - height: -188.8 - facts: - tall: indeed -\f[R] -.fi -.SS Parse CSV into an array of objects, no auto-parsing -.PP -First row is assumed to be the header row. -Entries with YAML/JSON will be left as strings. -.PP -Given a sample.csv file of: -.IP -.nf -\f[C] -name,numberOfCats,likesApples,height,facts -Gary,1,true,168.8,cool: true -Samantha\[aq]s Rabbit,2,false,-188.8,tall: indeed -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=csv --csv-auto-parse=f sample.csv -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - likesApples: true - height: 168.8 - facts: \[aq]cool: true\[aq] -- name: Samantha\[aq]s Rabbit - numberOfCats: 2 - likesApples: false - height: -188.8 - facts: \[aq]tall: indeed\[aq] -\f[R] -.fi -.SS Parse TSV into an array of objects -.PP -First row is assumed to be the header row. 
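-.PP
-Because the input and output formats are independent flags, a TSV file
-can also be converted straight to another tabular format; a minimal
-sketch, assuming the sample.tsv below:
-.IP
-.nf
-\f[C]
-yq -p=tsv -o=csv sample.tsv
-\f[R]
-.fi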
-.PP -Given a sample.tsv file of: -.IP -.nf -\f[C] -name numberOfCats likesApples height -Gary 1 true 168.8 -Samantha\[aq]s Rabbit 2 false -188.8 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=tsv sample.tsv -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: Gary - numberOfCats: 1 - likesApples: true - height: 168.8 -- name: Samantha\[aq]s Rabbit - numberOfCats: 2 - likesApples: false - height: -188.8 -\f[R] -.fi -.SS Round trip -.PP -Given a sample.csv file of: -.IP -.nf -\f[C] -name,numberOfCats,likesApples,height -Gary,1,true,168.8 -Samantha\[aq]s Rabbit,2,false,-188.8 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=csv -o=csv \[aq](.[] | select(.name == \[dq]Gary\[dq]) | .numberOfCats) = 3\[aq] sample.csv -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name,numberOfCats,likesApples,height -Gary,3,true,168.8 -Samantha\[aq]s Rabbit,2,false,-188.8 -\f[R] -.fi -.SH Formatting Expressions -.PP -\f[C]From version v4.41+\f[R] -.PP -You can put expressions into \f[C].yq\f[R] files, use whitespace and -comments to break up complex expressions and explain what\[cq]s going -on. -.SS Using expression files and comments -.PP -Note that you can execute the file directly - but make sure you make the -expression file executable. -.PP -Given a sample.yaml file of: -.IP -.nf -\f[C] -a: - b: old -\f[R] -.fi -.PP -And an `update.yq' expression file of: -.IP -.nf -\f[C] -#! yq - -# This is a yq expression that updates the map -# for several great reasons outlined here. - -\&.a.b = \[dq]new\[dq] # line comment here -| .a.c = \[dq]frog\[dq] - -# Now good things will happen. -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -\&./update.yq sample.yaml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: new - c: frog -\f[R] -.fi -.SS Flags in expression files -.PP -You can specify flags on the shebang line, this only works when -executing the file directly. -.PP -Given a sample.yaml file of: -.IP -.nf -\f[C] -a: - b: old -\f[R] -.fi -.PP -And an `update.yq' expression file of: -.IP -.nf -\f[C] -#! yq -oj - -# This is a yq expression that updates the map -# for several great reasons outlined here. - -\&.a.b = \[dq]new\[dq] # line comment here -| .a.c = \[dq]frog\[dq] - -# Now good things will happen. -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -\&./update.yq sample.yaml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -{ - \[dq]a\[dq]: { - \[dq]b\[dq]: \[dq]new\[dq], - \[dq]c\[dq]: \[dq]frog\[dq] - } -} -\f[R] -.fi -.SS Commenting out yq expressions -.PP -Note that \f[C]c\f[R] is no longer set to `frog'. -In this example we\[cq]re calling yq directly and passing the expression -file into \f[C]--from-file\f[R], this is no different from executing the -expression file directly. -.PP -Given a sample.yaml file of: -.IP -.nf -\f[C] -a: - b: old -\f[R] -.fi -.PP -And an `update.yq' expression file of: -.IP -.nf -\f[C] -#! yq -# This is a yq expression that updates the map -# for several great reasons outlined here. - -\&.a.b = \[dq]new\[dq] # line comment here -# | .a.c = \[dq]frog\[dq] - -# Now good things will happen. 
-\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq --from-file update.yq sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: new -\f[R] -.fi -.SS Basic input example -.PP -Given a sample.lua file of: -.IP -.nf -\f[C] -return { - [\[dq]country\[dq]] = \[dq]Australia\[dq]; -- this place - [\[dq]cities\[dq]] = { - \[dq]Sydney\[dq], - \[dq]Melbourne\[dq], - \[dq]Brisbane\[dq], - \[dq]Perth\[dq], - }; -}; -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.lua -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -country: Australia -cities: - - Sydney - - Melbourne - - Brisbane - - Perth -\f[R] -.fi -.SS Basic output example -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] ---- -country: Australia # this place -cities: -- Sydney -- Melbourne -- Brisbane -- Perth -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=lua \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -return { - [\[dq]country\[dq]] = \[dq]Australia\[dq]; -- this place - [\[dq]cities\[dq]] = { - \[dq]Sydney\[dq], - \[dq]Melbourne\[dq], - \[dq]Brisbane\[dq], - \[dq]Perth\[dq], - }; -}; -\f[R] -.fi -.SS Unquoted keys -.PP -Uses the \f[C]--lua-unquoted\f[R] option to produce a nicer-looking -output. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] ---- -country: Australia # this place -cities: -- Sydney -- Melbourne -- Brisbane -- Perth -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=lua --lua-unquoted \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -return { - country = \[dq]Australia\[dq]; -- this place - cities = { - \[dq]Sydney\[dq], - \[dq]Melbourne\[dq], - \[dq]Brisbane\[dq], - \[dq]Perth\[dq], - }; -}; -\f[R] -.fi -.SS Globals -.PP -Uses the \f[C]--lua-globals\f[R] option to export the values into the -global scope. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] ---- -country: Australia # this place -cities: -- Sydney -- Melbourne -- Brisbane -- Perth -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=lua --lua-globals \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -country = \[dq]Australia\[dq]; -- this place -cities = { - \[dq]Sydney\[dq], - \[dq]Melbourne\[dq], - \[dq]Brisbane\[dq], - \[dq]Perth\[dq], -}; -\f[R] -.fi -.SS Elaborate example -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] ---- -hello: world -tables: - like: this - keys: values - ? look: non-string keys - : True -numbers: - - decimal: 12345 - - hex: 0x7fabc123 - - octal: 0o30 - - float: 123.45 - - infinity: .inf - plus_infinity: +.inf - minus_infinity: -.inf - - not: .nan -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=lua \[aq].\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -return { - [\[dq]hello\[dq]] = \[dq]world\[dq]; - [\[dq]tables\[dq]] = { - [\[dq]like\[dq]] = \[dq]this\[dq]; - [\[dq]keys\[dq]] = \[dq]values\[dq]; - [{ - [\[dq]look\[dq]] = \[dq]non-string keys\[dq]; - }] = true; - }; - [\[dq]numbers\[dq]] = { - { - [\[dq]decimal\[dq]] = 12345; - }, - { - [\[dq]hex\[dq]] = 0x7fabc123; - }, - { - [\[dq]octal\[dq]] = 24; - }, - { - [\[dq]float\[dq]] = 123.45; - }, - { - [\[dq]infinity\[dq]] = (1/0); - [\[dq]plus_infinity\[dq]] = (1/0); - [\[dq]minus_infinity\[dq]] = (-1/0); - }, - { - [\[dq]not\[dq]] = (0/0); - }, - }; -}; -\f[R] -.fi -.SH Properties -.PP -Encode/Decode/Roundtrip to/from a property file. -Line comments on value nodes will be copied across. -.PP -By default, empty maps and arrays are not encoded - see below for an -example on how to encode a value for these. 
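-.PP
-Properties input can also be converted straight to other output formats;
-a minimal sketch (assuming a sample.properties file like the ones used
-below) that emits JSON instead of YAML:
-.IP
-.nf
-\f[C]
-yq -p=props -o=json sample.properties
-\f[R]
-.fi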
-.SS Encode properties -.PP -Note that empty arrays and maps are not encoded by default. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry - food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets.0 = cat -person.pets.1.nested.0 = list entry -person.food.0 = pizza -\f[R] -.fi -.SS Encode properties with array brackets -.PP -Declare the \[en]properties-array-brackets flag to give array paths in -brackets (e.g.\ SpringBoot). -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry - food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props --properties-array-brackets sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets[0] = cat -person.pets[1].nested[0] = list entry -person.food[0] = pizza -\f[R] -.fi -.SS Encode properties - custom separator -.PP -Use the \[en]properties-customer-separator flag to specify your own -key/value separator. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry - food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props --properties-customer-separator=\[dq] :\[at] \[dq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name :\[at] Mike Wazowski - -# comments on array values appear -person.pets.0 :\[at] cat -person.pets.1.nested.0 :\[at] list entry -person.food.0 :\[at] pizza -\f[R] -.fi -.SS Encode properties: scalar encapsulation -.PP -Note that string values with blank characters in them are encapsulated -with double quotes -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry - food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props --unwrapScalar=false sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = \[dq]Mike Wazowski\[dq] - -# comments on array values appear -person.pets.0 = cat -person.pets.1.nested.0 = \[dq]list entry\[dq] -person.food.0 = pizza -\f[R] -.fi -.SS Encode properties: no comments -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry 
- food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props \[aq]... comments = \[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -person.name = Mike Wazowski -person.pets.0 = cat -person.pets.1.nested.0 = list entry -person.food.0 = pizza -\f[R] -.fi -.SS Encode properties: include empty maps and arrays -.PP -Use a yq expression to set the empty maps and sequences to your desired -value. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# block comments come through -person: # neither do comments on maps - name: Mike Wazowski # comments on values appear - pets: - - cat # comments on array values appear - - nested: - - list entry - food: [pizza] # comments on arrays do not -emptyArray: [] -emptyMap: [] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=props \[aq](.. | select( (tag == \[dq]!!map\[dq] or tag ==\[dq]!!seq\[dq]) and length == 0)) = \[dq]\[dq]\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets.0 = cat -person.pets.1.nested.0 = list entry -person.food.0 = pizza -emptyArray = -emptyMap = -\f[R] -.fi -.SS Decode properties -.PP -Given a sample.properties file of: -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets.0 = cat -person.pets.1.nested.0 = list entry -person.food.0 = pizza -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=props sample.properties -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -person: - # block comments come through - # comments on values appear - name: Mike Wazowski - pets: - # comments on array values appear - - cat - - nested: - - list entry - food: - - pizza -\f[R] -.fi -.SS Decode properties: numbers -.PP -All values are assumed to be strings when parsing properties, but you -can use the \f[C]from_yaml\f[R] operator on all the strings values to -autoparse into the correct type. -.PP -Given a sample.properties file of: -.IP -.nf -\f[C] -a.b = 10 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=props \[aq] (.. | select(tag == \[dq]!!str\[dq])) |= from_yaml\[aq] sample.properties -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -a: - b: 10 -\f[R] -.fi -.SS Decode properties - array should be a map -.PP -If you have a numeric map key in your property files, use array_to_map -to convert them to maps. 
-.PP -Given a sample.properties file of: -.IP -.nf -\f[C] -things.10 = mike -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=props \[aq].things |= array_to_map\[aq] sample.properties -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -things: - 10: mike -\f[R] -.fi -.SS Roundtrip -.PP -Given a sample.properties file of: -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets.0 = cat -person.pets.1.nested.0 = list entry -person.food.0 = pizza -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -p=props -o=props \[aq].person.pets.0 = \[dq]dog\[dq]\[aq] sample.properties -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# block comments come through -# comments on values appear -person.name = Mike Wazowski - -# comments on array values appear -person.pets.0 = dog -person.pets.1.nested.0 = list entry -person.food.0 = pizza -\f[R] -.fi -.SH Recipes -.PP -These examples are intended to show how you can use multiple operators -together so you get an idea of how you can perform complex data -manipulation. -.PP -Please see the details operator -docs (https://mikefarah.gitbook.io/yq/operators) for details on each -individual operator. -.SS Find items in an array -.PP -We have an array and we want to find the elements with a particular -name. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Foo - numBuckets: 0 -- name: Bar - numBuckets: 0 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] | select(.name == \[dq]Foo\[dq])\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name: Foo -numBuckets: 0 -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -\f[C].[]\f[R] splats the array, and puts all the items in the context. -.IP \[bu] 2 -These items are then piped (\f[C]|\f[R]) into -\f[C]select(.name == \[dq]Foo\[dq])\f[R] which will select all the nodes -that have a name property set to `Foo'. -.IP \[bu] 2 -See the select (https://mikefarah.gitbook.io/yq/operators/select) -operator for more information. -.SS Find and update items in an array -.PP -We have an array and we want to \f[I]update\f[R] the elements with a -particular name. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- name: Foo - numBuckets: 0 -- name: Bar - numBuckets: 0 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq](.[] | select(.name == \[dq]Foo\[dq]) | .numBuckets) |= . + 1\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- name: Foo - numBuckets: 1 -- name: Bar - numBuckets: 0 -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -Following from the example above\f[C].[]\f[R] splats the array, selects -filters the items. -.IP \[bu] 2 -We then pipe (\f[C]|\f[R]) that into \f[C].numBuckets\f[R], which will -select that field from all the matching items -.IP \[bu] 2 -Splat, select and the field are all in brackets, that whole expression -is passed to the \f[C]|=\f[R] operator as the left hand side expression, -with \f[C]. + 1\f[R] as the right hand side expression. -.IP \[bu] 2 -\f[C]|=\f[R] is the operator that updates fields relative to their own -value, which is referenced as dot (\f[C].\f[R]). -.IP \[bu] 2 -The expression \f[C]. + 1\f[R] increments the numBuckets counter. -.IP \[bu] 2 -See the assign (https://mikefarah.gitbook.io/yq/operators/assign-update) -and add (https://mikefarah.gitbook.io/yq/operators/add) operators for -more information. -.SS Deeply prune a tree -.PP -Say we are only interested in child1 and child2, and want to filter -everything else out. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -parentA: - - bob -parentB: - child1: i am child1 - child3: hiya -parentC: - childX: cool - child2: me child2 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]( - .. | # recurse through all the nodes - select(has(\[dq]child1\[dq]) or has(\[dq]child2\[dq])) | # match parents that have either child1 or child2 - (.child1, .child2) | # select those children - select(.) # filter out nulls -) as $i ireduce({}; # using that set of nodes, create a new result map - setpath($i | path; $i) # and put in each node, using its original path -)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -parentB: - child1: i am child1 -parentC: - child2: me child2 -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -Find all the matching child1 and child2 nodes -.IP \[bu] 2 -Using ireduce, create a new map using just those nodes -.IP \[bu] 2 -Set each node into the new map using its original path -.SS Multiple or complex updates to items in an array -.PP -We have an array and we want to \f[I]update\f[R] the elements with a -particular name in reference to its type. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myArray: - - name: Foo - type: cat - - name: Bar - type: dog -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq]with(.myArray[]; .name = .name + \[dq] - \[dq] + .type)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myArray: - - name: Foo - cat - type: cat - - name: Bar - dog - type: dog -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -The with operator will effectively loop through each given item in the -first given expression, and run the second expression against it. -.IP \[bu] 2 -\f[C].myArray[]\f[R] splats the array in \f[C]myArray\f[R]. -So \f[C]with\f[R] will run against each item in that array -.IP \[bu] 2 -\f[C].name = .name + \[dq] - \[dq] + .type\f[R] this expression is run -against every item, updating the name to be a concatenation of the -original name as well as the type. -.IP \[bu] 2 -See the with (https://mikefarah.gitbook.io/yq/operators/with) operator -for more information and examples. -.SS Sort an array by a field -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -myArray: - - name: Foo - numBuckets: 1 - - name: Bar - numBuckets: 0 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].myArray |= sort_by(.numBuckets)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -myArray: - - name: Bar - numBuckets: 0 - - name: Foo - numBuckets: 1 -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -We want to resort \f[C].myArray\f[R]. -.IP \[bu] 2 -\f[C]sort_by\f[R] works by piping an array into it, and it pipes out a -sorted array. -.IP \[bu] 2 -So, we use \f[C]|=\f[R] to update \f[C].myArray\f[R]. -This is the same as doing -\f[C].myArray = (.myArray | sort_by(.numBuckets))\f[R] -.SS Filter, flatten, sort and unique -.PP -Lets find the unique set of names from the document. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -- type: foo - names: - - Fred - - Catherine -- type: bar - names: - - Zelda -- type: foo - names: Fred -- type: foo - names: Ava -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq][.[] | select(.type == \[dq]foo\[dq]) | .names] | flatten | sort | unique\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- Ava -- Catherine -- Fred -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -\f[C].[] | select(.type == \[dq]foo\[dq]) | .names\f[R] will select the -array elements of type \[lq]foo\[rq] -.IP \[bu] 2 -Splat \f[C].[]\f[R] will unwrap the array and match all the items. 
-We need to do this so we can work on the child items, for instance, -filter items out using the \f[C]select\f[R] operator. -.IP \[bu] 2 -But we still want the final results back into an array. -So after we\[cq]re doing working on the children, we wrap everything -back into an array using square brackets around the expression. -\f[C][.[] | select(.type == \[dq]foo\[dq]) | .names]\f[R] -.IP \[bu] 2 -Now have have an array of all the `names' values. -Which includes arrays of strings as well as strings on their own. -.IP \[bu] 2 -Pipe \f[C]|\f[R] this array through \f[C]flatten\f[R]. -This will flatten nested arrays. -So now we have a flat list of all the name value strings -.IP \[bu] 2 -Next we pipe \f[C]|\f[R] that through \f[C]sort\f[R] and then -\f[C]unique\f[R] to get a sorted, unique list of the names! -.IP \[bu] 2 -See the flatten (https://mikefarah.gitbook.io/yq/operators/flatten), -sort (https://mikefarah.gitbook.io/yq/operators/sort) and -unique (https://mikefarah.gitbook.io/yq/operators/unique) for more -information and examples. -.SS Export as environment variables (script), or any custom format -.PP -Given a yaml document, lets output a script that will configure -environment variables with that data. -This same approach can be used for exporting into custom formats. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -var0: string0 -var1: string1 -fruit: - - apple - - banana - - peach -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].[] |( - ( select(kind == \[dq]scalar\[dq]) | key + \[dq]=\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]), - ( select(kind == \[dq]seq\[dq]) | key + \[dq]=(\[dq] + (map(\[dq]\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]) | join(\[dq],\[dq])) + \[dq])\[dq]) -)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -var0=\[aq]string0\[aq] -var1=\[aq]string1\[aq] -fruit=(\[aq]apple\[aq],\[aq]banana\[aq],\[aq]peach\[aq]) -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -\f[C].[]\f[R] matches all top level elements -.IP \[bu] 2 -We need a string expression for each of the different types that will -produce the bash syntax, we\[cq]ll use the union operator, to join them -together -.IP \[bu] 2 -Scalars, we just need the key and quoted value: -\f[C]( select(kind == \[dq]scalar\[dq]) | key + \[dq]=\[aq]\[dq] + . + \[dq]\[aq]\[dq])\f[R] -.IP \[bu] 2 -Sequences (or arrays) are trickier, we need to quote each value and -\f[C]join\f[R] them with \f[C],\f[R]: -\f[C]map(\[dq]\[aq]\[dq] + . + \[dq]\[aq]\[dq]) | join(\[dq],\[dq])\f[R] -.SS Custom format with nested data -.PP -Like the previous example, but lets handle nested data structures. -In this custom example, we\[cq]re going to join the property paths with -_. -The important thing to keep in mind is that our expression is not -recursive (despite the data structure being so). -Instead we match \f[I]all\f[R] elements on the tree and operate on them. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -simple: string0 -simpleArray: - - apple - - banana - - peach -deep: - property: value - array: - - cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].. |( - ( select(kind == \[dq]scalar\[dq] and parent | kind != \[dq]seq\[dq]) | (path | join(\[dq]_\[dq])) + \[dq]=\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]), - ( select(kind == \[dq]seq\[dq]) | (path | join(\[dq]_\[dq])) + \[dq]=(\[dq] + (map(\[dq]\[aq]\[rs]\[aq]\[aq]\[dq] + . 
+ \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]) | join(\[dq],\[dq])) + \[dq])\[dq]) -)\[aq] sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -simple=\[aq]string0\[aq] -deep_property=\[aq]value\[aq] -simpleArray=(\[aq]apple\[aq],\[aq]banana\[aq],\[aq]peach\[aq]) -deep_array=(\[aq]cat\[aq]) -\f[R] -.fi -.SS Explanation: -.IP \[bu] 2 -You\[cq]ll need to understand how the previous example works to -understand this extension. -.IP \[bu] 2 -\f[C]..\f[R] matches \f[I]all\f[R] elements, instead of \f[C].[]\f[R] -from the previous example that just matches top level elements. -.IP \[bu] 2 -Like before, we need a string expression for each of the different types -that will produce the bash syntax, we\[cq]ll use the union operator, to -join them together -.IP \[bu] 2 -This time, however, our expression matches every node in the data -structure. -.IP \[bu] 2 -We only want to print scalars that are not in arrays (because we handle -the separately), so well add -\f[C]and parent | kind != \[dq]seq\[dq]\f[R] to the select operator -expression for scalars -.IP \[bu] 2 -We don\[cq]t just want the key any more, we want the full path. -So instead of \f[C]key\f[R] we have \f[C]path | join(\[dq]_\[dq])\f[R] -.IP \[bu] 2 -The expression for sequences follows the same logic -.SS Encode shell variables -.PP -Note that comments are dropped and values will be enclosed in single -quotes as needed. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# comment -name: Mike Wazowski -eyes: - color: turquoise - number: 1 -friends: - - James P. Sullivan - - Celia Mae -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=shell sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name=\[aq]Mike Wazowski\[aq] -eyes_color=turquoise -eyes_number=1 -friends_0=\[aq]James P. Sullivan\[aq] -friends_1=\[aq]Celia Mae\[aq] -\f[R] -.fi -.SS Encode shell variables: illegal variable names as key. -.PP -Keys that would be illegal as variable keys are adapted. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -ascii_=_symbols: replaced with _ -\[dq]ascii_ _controls\[dq]: dropped (this example uses \[rs]t) -nonascii_\[u05D0]_characters: dropped -effort_expe\[~n]ded_t\[`o]_preserve_accented_latin_letters: moderate (via unicode NFKD) -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=shell sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -ascii___symbols=\[aq]replaced with _\[aq] -ascii__controls=\[aq]dropped (this example uses \[rs]t)\[aq] -nonascii__characters=dropped -effort_expended_to_preserve_accented_latin_letters=\[aq]moderate (via unicode NFKD)\[aq] -\f[R] -.fi -.SS Encode shell variables: empty values, arrays and maps -.PP -Empty values are encoded to empty variables, but empty arrays and maps -are skipped. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -empty: - value: - array: [] - map: {} -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=shell sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -empty_value= -\f[R] -.fi -.SS Encode shell variables: single quotes in values -.PP -Single quotes in values are encoded as `\[lq]'\[lq]\[cq] (close single -quote, double-quoted single quote, open single quote). -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -name: Miles O\[aq]Brien -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=shell sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name=\[aq]Miles O\[aq]\[dq]\[aq]\[dq]\[aq]Brien\[aq] -\f[R] -.fi -.SH TOML -.PP -Decode from TOML. 
-Note that \f[C]yq\f[R] does not yet support outputting in TOML format -(and therefore it cannot roundtrip) -.SS Parse: Simple -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -A = \[dq]hello\[dq] -B = 12 -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -A: hello -B: 12 -\f[R] -.fi -.SS Parse: Deep paths -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -person.name = \[dq]hello\[dq] -person.address = \[dq]12 cat st\[dq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -person: - name: hello - address: 12 cat st -\f[R] -.fi -.SS Encode: Scalar -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -person.name = \[dq]hello\[dq] -person.address = \[dq]12 cat st\[dq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].person.name\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -hello -\f[R] -.fi -.SS Parse: inline table -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -name = { first = \[dq]Tom\[dq], last = \[dq]Preston-Werner\[dq] } -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -name: - first: Tom - last: Preston-Werner -\f[R] -.fi -.SS Parse: Array Table -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -[owner.contact] -name = \[dq]Tom Preston-Werner\[dq] -age = 36 - -[[owner.addresses]] -street = \[dq]first street\[dq] -suburb = \[dq]ok\[dq] - -[[owner.addresses]] -street = \[dq]second street\[dq] -suburb = \[dq]nice\[dq] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -owner: - contact: - name: Tom Preston-Werner - age: 36 - addresses: - - street: first street - suburb: ok - - street: second street - suburb: nice -\f[R] -.fi -.SS Parse: Empty Table -.PP -Given a sample.toml file of: -.IP -.nf -\f[C] -[dependencies] -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.toml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -dependencies: {} -\f[R] -.fi -.SH XML -.PP -Encode and decode to and from XML. -Whitespace is not conserved for round trips - but the order of the -fields are. -.PP -Consecutive xml nodes with the same name are assumed to be arrays. -.PP -XML content data, attributes processing instructions and directives are -all created as plain fields. -.PP -This can be controlled by: -.PP -.TS -tab(@); -l l l. -T{ -Flag -T}@T{ -Default -T}@T{ -Sample XML -T} -_ -T{ -\f[C]--xml-attribute-prefix\f[R] -T}@T{ -\f[C]+\f[R] (changing to \f[C]+\[at]\f[R] soon) -T}@T{ -Legs in \f[C]<cat legs=\[dq]4\[dq]/>\f[R] -T} -T{ -\f[C]--xml-content-name\f[R] -T}@T{ -\f[C]+content\f[R] -T}@T{ -Meow in \f[C]<cat>Meow <fur>true</true></cat>\f[R] -T} -T{ -\f[C]--xml-directive-name\f[R] -T}@T{ -\f[C]+directive\f[R] -T}@T{ -\f[C]<!DOCTYPE config system \[dq]blah\[dq]>\f[R] -T} -T{ -\f[C]--xml-proc-inst-prefix\f[R] -T}@T{ -\f[C]+p_\f[R] -T}@T{ -\f[C]<?xml version=\[dq]1\[dq]?>\f[R] -T} -.TE -.PP -{% hint style=\[lq]warning\[rq] %} Default Attribute Prefix will be -changing in v4.30! In order to avoid name conflicts (e.g.\ having an -attribute named \[lq]content\[rq] will create a field that clashes with -the default content name of \[lq]+content\[rq]) the attribute prefix -will be changing to \[lq]+\[at]\[rq]. -.PP -This will affect users that have not set their own prefix and are not -roundtripping XML changes. 
-.PP -{% endhint %} -.SS Encoder / Decoder flag options -.PP -In addition to the above flags, there are the following xml -encoder/decoder options controlled by flags: -.PP -.TS -tab(@); -lw(23.3n) lw(23.3n) lw(23.3n). -T{ -Flag -T}@T{ -Default -T}@T{ -Description -T} -_ -T{ -\f[C]--xml-strict-mode\f[R] -T}@T{ -false -T}@T{ -Strict mode enforces the requirements of the XML specification. -When switched off the parser allows input containing common mistakes. -See the Golang xml decoder (https://pkg.go.dev/encoding/xml#Decoder) for -more details. -T} -T{ -\f[C]--xml-keep-namespace\f[R] -T}@T{ -true -T}@T{ -Keeps the namespace of attributes -T} -T{ -\f[C]--xml-raw-token\f[R] -T}@T{ -true -T}@T{ -Does not verify that start and end elements match and does not translate -name space prefixes to their corresponding URLs. -T} -T{ -\f[C]--xml-skip-proc-inst\f[R] -T}@T{ -false -T}@T{ -Skips over processing instructions, -e.g.\ \f[C]<?xml version=\[dq]1\[dq]?>\f[R] -T} -T{ -\f[C]--xml-skip-directives\f[R] -T}@T{ -false -T}@T{ -Skips over directives, -e.g.\ \f[C]<!DOCTYPE config system \[dq]blah\[dq]>\f[R] -T} -.TE -.PP -See below for examples -.SS Parse xml: simple -.PP -Notice how all the values are strings, see the next example on how you -can fix that. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?> -<cat> - <says>meow</says> - <legs>4</legs> - <cute>true</cute> -</cat> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq] -cat: - says: meow - legs: \[dq]4\[dq] - cute: \[dq]true\[dq] -\f[R] -.fi -.SS Parse xml: number -.PP -All values are assumed to be strings when parsing XML, but you can use -the \f[C]from_yaml\f[R] operator on all the strings values to autoparse -into the correct type. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?> -<cat> - <says>meow</says> - <legs>4</legs> - <cute>true</cute> -</cat> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq] (.. | select(tag == \[dq]!!str\[dq])) |= from_yaml\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq] -cat: - says: meow - legs: 4 - cute: true -\f[R] -.fi -.SS Parse xml: array -.PP -Consecutive nodes with identical xml names are assumed to be arrays. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?> -<animal>cat</animal> -<animal>goat</animal> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq] -animal: - - cat - - goat -\f[R] -.fi -.SS Parse xml: force as an array -.PP -In XML, if your array has a single item, then yq doesn\[cq]t know its an -array. -This is how you can consistently force it to be an array. -This handles the 3 scenarios of having nothing in the array, having a -single item and having multiple. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<zoo><animal>cat</animal></zoo> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].zoo.animal |= ([] + .)\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -zoo: - animal: - - cat -\f[R] -.fi -.SS Parse xml: force all as an array -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<zoo><thing><frog>boing</frog></thing></zoo> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].. 
|= [] + .\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -- zoo: - - thing: - - frog: - - boing -\f[R] -.fi -.SS Parse xml: attributes -.PP -Attributes are converted to fields, with the default attribute prefix -`+'. -Use \[cq]\[en]xml-attribute-prefix\[ga] to set your own. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?> -<cat legs=\[dq]4\[dq]> - <legs>7</legs> -</cat> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq] -cat: - +\[at]legs: \[dq]4\[dq] - legs: \[dq]7\[dq] -\f[R] -.fi -.SS Parse xml: attributes with content -.PP -Content is added as a field, using the default content name of -\f[C]+content\f[R]. -Use \f[C]--xml-content-name\f[R] to set your own. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?> -<cat legs=\[dq]4\[dq]>meow</cat> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq] -cat: - +content: meow - +\[at]legs: \[dq]4\[dq] -\f[R] -.fi -.SS Parse xml: content split between comments/children -.PP -Multiple content texts are collected into a sequence. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<root> value <!-- comment-->anotherValue <a>frog</a> cool!</root> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -root: - +content: # comment - - value - - anotherValue - - cool! - a: frog -\f[R] -.fi -.SS Parse xml: custom dtd -.PP -DTD entities are processed as directives. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE root [ -<!ENTITY writer \[dq]Blah.\[dq]> -<!ENTITY copyright \[dq]Blah\[dq]> -]> -<root> - <item>&writer;©right;</item> -</root> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE root [ -<!ENTITY writer \[dq]Blah.\[dq]> -<!ENTITY copyright \[dq]Blah\[dq]> -]> -<root> - <item>&writer;&copyright;</item> -</root> -\f[R] -.fi -.SS Parse xml: skip custom dtd -.PP -DTDs are directives, skip over directives to skip DTDs. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE root [ -<!ENTITY writer \[dq]Blah.\[dq]> -<!ENTITY copyright \[dq]Blah\[dq]> -]> -<root> - <item>&writer;©right;</item> -</root> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq --xml-skip-directives \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<root> - <item>&writer;&copyright;</item> -</root> -\f[R] -.fi -.SS Parse xml: with comments -.PP -A best attempt is made to preserve comments. 
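-.PP
-If you would rather discard the comments, clear them after decoding with
-the \f[C]... comments = \[dq]\[dq]\f[R] idiom shown in the Properties
-section; a minimal sketch, assuming the sample.xml below:
-.IP
-.nf
-\f[C]
-yq -oy \[aq]... comments = \[dq]\[dq]\[aq] sample.xml
-\f[R]
-.fi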
-.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<!-- before cat --> -<cat> - <!-- in cat before --> - <x>3<!-- multi -line comment -for x --></x> - <!-- before y --> - <y> - <!-- in y before --> - <d><!-- in d before -->z<!-- in d after --></d> - - <!-- in y after --> - </y> - <!-- in_cat_after --> -</cat> -<!-- after cat --> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -oy \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -# before cat -cat: - # in cat before - x: \[dq]3\[dq] # multi - # line comment - # for x - # before y - - y: - # in y before - # in d before - d: z # in d after - # in y after - # in_cat_after -# after cat -\f[R] -.fi -.SS Parse xml: keep attribute namespace -.PP -Defaults to true -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq --xml-keep-namespace=false \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xsi=\[dq]some-instance\[dq] schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.PP -instead of -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.SS Parse xml: keep raw attribute namespace -.PP -Defaults to true -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq --xml-raw-token=false \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] some-instance:schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.PP -instead of -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map> -\f[R] -.fi -.SS Encode xml: simple -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: purrs -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<cat>purrs</cat> -\f[R] -.fi -.SS Encode xml: array -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -pets: - cat: - - purrs - - meows -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<pets> - <cat>purrs</cat> - <cat>meows</cat> -</pets> -\f[R] -.fi -.SS Encode xml: attributes -.PP -Fields with the matching xml-attribute-prefix are assumed to be -attributes. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: - +\[at]name: tiger - meows: true -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<cat name=\[dq]tiger\[dq]> - <meows>true</meows> -</cat> -\f[R] -.fi -.SS Encode xml: attributes with content -.PP -Fields with the matching xml-content-name is assumed to be content. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -cat: - +\[at]name: tiger - +content: cool -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<cat name=\[dq]tiger\[dq]>cool</cat> -\f[R] -.fi -.SS Encode xml: comments -.PP -A best attempt is made to copy comments to xml. 
-.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -# -# header comment -# above_cat -# -cat: # inline_cat - # above_array - array: # inline_array - - val1 # inline_val1 - # above_val2 - - val2 # inline_val2 -# below_cat -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<!-- -header comment -above_cat ---> -<!-- inline_cat --> -<cat><!-- above_array inline_array --> - <array>val1<!-- inline_val1 --></array> - <array><!-- above_val2 -->val2<!-- inline_val2 --></array> -</cat><!-- below_cat --> -\f[R] -.fi -.SS Encode: doctype and xml declaration -.PP -Use the special xml names to add/modify proc instructions and -directives. -.PP -Given a sample.yml file of: -.IP -.nf -\f[C] -+p_xml: version=\[dq]1.0\[dq] -+directive: \[aq]DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] \[aq] -apple: - +p_coolioo: version=\[dq]1.0\[dq] - +directive: \[aq]CATYPE meow purr puss \[aq] - b: things -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq -o=xml sample.yml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] > -<apple><?coolioo version=\[dq]1.0\[dq]?><!CATYPE meow purr puss > - <b>things</b> -</apple> -\f[R] -.fi -.SS Round trip: with comments -.PP -A best effort is made, but comment positions and white space are not -preserved perfectly. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<!-- before cat --> -<cat> - <!-- in cat before --> - <x>3<!-- multi -line comment -for x --></x> - <!-- before y --> - <y> - <!-- in y before --> - <d><!-- in d before -->z<!-- in d after --></d> - - <!-- in y after --> - </y> - <!-- in_cat_after --> -</cat> -<!-- after cat --> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<!-- before cat --> -<cat><!-- in cat before --> - <x>3<!-- multi -line comment -for x --></x><!-- before y --> - <y><!-- in y before -in d before --> - <d>z<!-- in d after --></d><!-- in y after --> - </y><!-- in_cat_after --> -</cat><!-- after cat --> -\f[R] -.fi -.SS Roundtrip: with doctype and declaration -.PP -yq parses XML proc instructions and directives into nodes. -Unfortunately the underlying XML parser loses whitespace information. -.PP -Given a sample.xml file of: -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] > -<apple> - <?coolioo version=\[dq]1.0\[dq]?> - <!CATYPE meow purr puss > - <b>things</b> -</apple> -\f[R] -.fi -.PP -then -.IP -.nf -\f[C] -yq \[aq].\[aq] sample.xml -\f[R] -.fi -.PP -will output -.IP -.nf -\f[C] -<?xml version=\[dq]1.0\[dq]?> -<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] > -<apple><?coolioo version=\[dq]1.0\[dq]?><!CATYPE meow purr puss > - <b>things</b> -</apple> -\f[R] -.fi -.SH AUTHORS -Mike Farah.