diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml
index 2d16bb1b5096385a0c8d8e1578cbb16b556bcbeb..1b826fedfdaa47fe087d255dff50723d1dc9e25d 100644
--- a/.docker/docker-compose.yml
+++ b/.docker/docker-compose.yml
@@ -85,7 +85,6 @@ services:
       - ./config/master-realm.json:/opt/keycloak/data/import/master-realm.json
       - ./config/dbrepo-realm.json:/opt/keycloak/data/import/dbrepo-realm.json
     environment:
-      BITNAMI_DEBUG: "true"
       KEYCLOAK_ENABLE_HTTPS: "false"
       KEYCLOAK_ENABLE_STATISTICS: "true"
       KEYCLOAK_ENABLE_HEALTH_ENDPOINTS: "true"
diff --git a/.docs/.swagger/api.base.yaml b/.docs/.swagger/api.base.yaml
index c7b01fab0ebd3aebaa2f61dd68afdec362ccf236..b7bd0570eea5eeee9cb656bd55d88b25edd361a9 100644
--- a/.docs/.swagger/api.base.yaml
+++ b/.docs/.swagger/api.base.yaml
@@ -11,7 +11,7 @@ components:
       type: http
 externalDocs:
   description: Project Website
-  url: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/
+  url: https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.7/
 info:
   contact:
     email: andreas.rauber@tuwien.ac.at
@@ -24,7 +24,7 @@ info:
     name: Apache 2.0
     url: https://www.apache.org/licenses/LICENSE-2.0
   title: DBRepo REST API
-  version: 1.4.4
+  version: 1.4.7
 openapi: 3.1.0
 servers:
   - description: Test Instance
diff --git a/.docs/.swagger/api.yaml b/.docs/.swagger/api.yaml
index 1495e398e358d0a78bc7306b71726bd2e092efe4..a3f7eea7f2d4a34a3108ed41e5be9c0500615565 100644
--- a/.docs/.swagger/api.yaml
+++ b/.docs/.swagger/api.yaml
@@ -16,7 +16,7 @@ info:
     name: Apache 2.0
     url: 'https://www.apache.org/licenses/LICENSE-2.0'
   title: DBRepo REST API
-  version: 1.4.4
+  version: 1.4.7
 servers:
   - description: Test Instance
     url: 'https://test.dbrepo.tuwien.ac.at'
@@ -929,7 +929,7 @@ paths:
         content:
           application/json:
             schema:
-              $ref: '#/components/schemas/ImportCsvDto'
+              $ref: '#/components/schemas/ImportDto'
         required: true
       responses:
         '202':
@@ -5605,7 +5605,7 @@ components:
           type: object
           additionalProperties:
             type: object
-    ImportCsvDto:
+    ImportDto:
       required:
         - location
         - separator
@@ -5624,13 +5624,6 @@ components:
           minimum: 0
           type: integer
           format: int64
-        false_element:
-          type: string
-        true_element:
-          type: string
-        null_element:
-          type: string
-          example: NA
         line_termination:
           type: string
           example: \r\n
@@ -5760,6 +5753,7 @@ components:
             - longblob
             - enum
             - set
+            - serial
             - bit
             - tinyint
             - bool
@@ -5777,7 +5771,6 @@ components:
             - year
     ColumnDto:
       required:
-        - auto_generated
         - column_type
         - database_id
         - id
@@ -5845,11 +5838,6 @@ components:
           minLength: 0
           type: string
           example: mdb_date
-        date_format:
-          $ref: '#/components/schemas/ImageDateDto'
-        auto_generated:
-          type: boolean
-          example: false
         index_length:
           type: integer
           format: int64
@@ -5874,6 +5862,7 @@ components:
             - longblob
             - enum
             - set
+            - serial
             - bit
             - tinyint
             - bool
@@ -6014,6 +6003,64 @@ components:
         ui_port:
           type: integer
           format: int32
+    DataTypeDto:
+      required:
+        - display_name
+        - documentation
+        - is_buildable
+        - is_quoted
+        - value
+      type: object
+      properties:
+        value:
+          type: string
+          example: time
+        documentation:
+          type: string
+          example: 'https://mariadb.com/kb/en/time/'
+        display_name:
+          type: string
+          example: TIME(fsp)
+        size_min:
+          type: integer
+          format: int32
+          example: 0
+        size_max:
+          type: integer
+          format: int32
+          example: 6
+        size_default:
+          type: integer
+          format: int32
+          example: 0
+        size_required:
+          type: boolean
+          example: false
+        d_min:
+          type: integer
+          format: int32
+        d_max:
+          type: integer
+          format: int32
+        d_default:
+          type: integer
+          format: int32
+        d_required:
+          type: boolean
+        data_hint:
+          type: string
+          example: 'e.g. HH:MM:SS, HH:MM, HHMMSS, H:M:S'
+        type_hint:
+          type: string
+          example: 'fsp=microsecond precision, min. 0, max. 6'
+        is_quoted:
+          type: boolean
+          description: frontend needs to quote this data type
+          example: false
+        is_buildable:
+          type: boolean
+          description: frontend can build this data type
+          example: true
     DatabaseAccessDto:
       required:
         - created
@@ -6164,33 +6211,9 @@ components:
           $ref: '#/components/schemas/ForeignKeyBriefDto'
         referenced_column:
           $ref: '#/components/schemas/ColumnBriefDto'
-    ImageDateDto:
-      required:
-        - created_at
-        - database_format
-        - has_time
-        - id
-        - unix_format
-      type: object
-      properties:
-        id:
-          type: integer
-          format: int64
-        database_format:
-          type: string
-          example: '%d.%c.%Y'
-        unix_format:
-          type: string
-          example: dd.MM.YYYY
-        has_time:
-          type: boolean
-          example: false
-        created_at:
-          type: string
-          format: date-time
-          example: '2021-03-12T15:26:21.000Z'
     ImageDto:
       required:
+        - data_types
         - default
         - default_port
         - dialect
@@ -6220,10 +6243,6 @@ components:
         driver_class:
           type: string
           example: org.mariadb.jdbc.Driver
-        date_formats:
-          type: array
-          items:
-            $ref: '#/components/schemas/ImageDateDto'
         jdbc_method:
           type: string
           example: mariadb
@@ -6234,6 +6253,10 @@ components:
           type: integer
           format: int32
           example: 3306
+        data_types:
+          type: array
+          items:
+            $ref: '#/components/schemas/DataTypeDto'
     PrimaryKeyDto:
       required:
         - column
@@ -6494,8 +6517,6 @@ components:
           minLength: 0
           type: string
           example: mdb_date
-        date_format:
-          $ref: '#/components/schemas/ImageDateDto'
         auto_generated:
           type: boolean
           example: false
@@ -6523,6 +6544,7 @@ components:
             - longblob
             - enum
             - set
+            - serial
             - bit
             - tinyint
             - bool
@@ -8169,6 +8191,7 @@ components:
             - longblob
             - enum
             - set
+            - serial
             - bit
             - tinyint
             - bool
@@ -8197,10 +8220,6 @@ components:
           minLength: 0
           type: string
           example: Formatted as YYYY-MM-dd
-        dfid:
-          type: integer
-          description: date format id
-          format: int64
         enums:
           type: array
           description: 'enum values, only considered when type = ENUM'
@@ -8409,14 +8428,14 @@ components:
           type: string
         resumptionToken:
           type: string
+        parametersString:
+          type: string
         fromDate:
           type: string
           format: date-time
         untilDate:
           type: string
           format: date-time
-        parametersString:
-          type: string
     BannerMessageDto:
       required:
         - id
@@ -8692,10 +8711,6 @@ components:
           format: int32
         isDefault:
           type: boolean
-        dateFormats:
-          type: array
-          items:
-            $ref: '#/components/schemas/ContainerImageDate'
         containers:
           type: array
           items:
@@ -8706,28 +8721,10 @@ components:
         lastModified:
           type: string
           format: date-time
-    ContainerImageDate:
-      type: object
-      properties:
-        id:
-          type: integer
-          format: int64
-        iid:
-          type: integer
-          format: int64
-        image:
-          $ref: '#/components/schemas/ContainerImage'
-        example:
-          type: string
-        hasTime:
-          type: boolean
-        databaseFormat:
-          type: string
-        unixFormat:
-          type: string
-        createdAt:
-          type: string
-          format: date-time
+        dataTypes:
+          type: array
+          items:
+            $ref: '#/components/schemas/DataType'
     Creator:
       type: object
       properties:
@@ -8776,6 +8773,50 @@ components:
           type: string
         ieeeName:
           type: string
+    DataType:
+      type: object
+      properties:
+        id:
+          type: integer
+          format: int64
+        displayName:
+          type: string
+        value:
+          type: string
+        sizeMin:
+          type: integer
+          format: int32
+        sizeMax:
+          type: integer
+          format: int32
+        sizeDefault:
+          type: integer
+          format: int32
+        sizeRequired:
+          type: boolean
+        documentation:
+          type: string
+        typeHint:
+          type: string
+        dataHint:
+          type: string
+        quoted:
+          type: boolean
+        buildable:
+          type: boolean
+        image:
+          $ref: '#/components/schemas/ContainerImage'
+        dmin:
+          type: integer
+          format: int32
+        dmax:
+          type: integer
+          format: int32
+        ddefault:
+          type: integer
+          format: int32
+        drequired:
+          type: boolean
     Database:
       type: object
       properties:
@@ -9775,14 +9816,10 @@ components:
         id:
           type: integer
           format: int64
-        dateFormat:
-          $ref: '#/components/schemas/ContainerImageDate'
         table:
           $ref: '#/components/schemas/Table'
         name:
           type: string
-        autoGenerated:
-          type: boolean
         internalName:
           type: string
         description:
@@ -9809,6 +9846,7 @@ components:
             - TableColumnType.LONGBLOB
             - TableColumnType.ENUM
             - TableColumnType.SET
+            - TableColumnType.SERIAL
             - TableColumnType.BIT
             - TableColumnType.TINYINT
             - TableColumnType.BOOL
@@ -9994,8 +10032,6 @@ components:
         id:
           type: integer
           format: int64
-        dateFormat:
-          $ref: '#/components/schemas/ContainerImageDate'
         view:
           $ref: '#/components/schemas/View'
         name:
@@ -10021,6 +10057,7 @@ components:
             - TableColumnType.LONGBLOB
             - TableColumnType.ENUM
             - TableColumnType.SET
+            - TableColumnType.SERIAL
             - TableColumnType.BIT
             - TableColumnType.TINYINT
             - TableColumnType.BOOL
diff --git a/.docs/.swagger/swagger-generate.sh b/.docs/.swagger/swagger-generate.sh
index c293e6c5cf19a3b78b75de21e06faadbadeefdd4..8ea2981243601c51fa17f75570f38656cb301ff5 100644
--- a/.docs/.swagger/swagger-generate.sh
+++ b/.docs/.swagger/swagger-generate.sh
@@ -6,6 +6,8 @@ services[9093]=data
 services[9099]=metadata
 services[3305]=sidecar
 
+# requires https://github.com/mikefarah/yq/ -> v4.44.3
+
 function retrieve () {
   if [[ "$2" == analyse ]] || [[ "$2" == search ]] || [[ "$2" == sidecar ]]; then
     echo "... retrieve json api from localhost:$1"
diff --git a/.docs/api/gateway-service.md b/.docs/api/gateway-service.md
index 26ad76f092c599fbb865648577eba7b1a283ba72..9a44f9635b32ecc0d238ed1c1a0e918157ecd046 100644
--- a/.docs/api/gateway-service.md
+++ b/.docs/api/gateway-service.md
@@ -60,6 +60,40 @@ services:
   ...
 ```
 
+## Monitoring (Optional)
+
+By default, the Gateway Service is not monitored. You need to add the following to the `docker-compose.yml` file.
+
+```yaml title="docker-compose.yml"
+services:
+  ...
+  dbrepo-gateway-service-sidecar:
+    restart: "no"
+    container_name: dbrepo-gateway-service-sidecar
+    hostname: dbrepo-gateway-service-sidecar
+    image: docker.io/nginx/nginx-prometheus-exporter:1.3.0
+    command:
+      - "-nginx.scrape-uri=http://gateway-service/basic_status"
+    ports:
+      - "9113:9113"
+    depends_on:
+      dbrepo-gateway-service:
+        condition: service_started
+    logging:
+      driver: json-file
+```
+
+Then, uncomment the scrape config from the `prometheus.yml` file.
+
+```yaml title="prometheus.yml"
+scrape_configs:
+  ...
+  - job_name: 'gateway scrape'
+    metrics_path: '/metrics'
+    static_configs:
+      - targets: ['dbrepo-gateway-service-sidecar:9113']
+```
+
 ## Limitations
 
 (none relevant to DBRepo)
diff --git a/.docs/changelog.md b/.docs/changelog.md
new file mode 100644
index 0000000000000000000000000000000000000000..256e245d5d6aac818396dae86aed655e5e4613d6
--- /dev/null
+++ b/.docs/changelog.md
@@ -0,0 +1,56 @@
+---
+author: Martin Weise
+---
+
+## v1.4.7 (???)
+
+[:simple-gitlab: GitLab Release](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/tags/v1.4.7)
+
+!!! warning "Contains Breaking Changes"
+
+    This release updates the Metadata Database schema, which is incompatible with v1.4.6!
+
+### What's Changed
+
+#### Features
+
+* Added `SERIAL` data type to create incrementing key
+  in [#454](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/454)
+
+#### Changes
+
+* Changed the Docker image of the Auth Service to the Bitnami-maintained image (matching the Kubernetes deployment), with an
+  accompanying change of the Auth Database to PostgreSQL
+  in [#455](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/455)
+
+#### Fixes
+
+* Data type metadata is no longer hardcoded in the UI; it is now stored in the Metadata Database (associated with `image_id`).
+
+## v1.4.6 (2024-10-11)
+
+[:simple-gitlab: GitLab Release](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/tags/v1.4.6)
+
+!!! warning "Contains Breaking Changes"
+
+    This release updates the Metadata Database schema, which is incompatible with v1.4.5!
+
+### What's Changed
+
+#### Features
+
+* Added [Dashboard Service](../api/dashboard-service/) and monitoring in default setup.
+
+#### Changes
+
+* Show the progress of dataset uploads in the UI
+  in [#448](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/448)
+* Anonymous users are allowed to create (non-persistent) subsets
+  in [#449](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/449)
+* Removed logic that maps `True`, `False` and `null`
+
+#### Fixes
+
+* Import of datasets stabilized in the UI
+  in [#442](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/442)
+* Install script in [#444](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/issues/444)
\ No newline at end of file
diff --git a/.docs/help.md b/.docs/help.md
index bde92d1b13f10fb4d40c70ca5a9438ae69f54ad5..63ede84f973867d044c8438a7dd774fb60b5a39c 100644
--- a/.docs/help.md
+++ b/.docs/help.md
@@ -10,14 +10,6 @@ The [concepts documentation](../concepts/) is the most complete guide on how to
 
 The [API documentation](../api/) present reference docs for all APIs.
 
-## Troubleshooting
-
-**The Dashboard Service reloads recurrently**
-
-:   *Origin*:   Your Ad-Blocker blocks client-side requests of Grafana to its endpoints, resulting in authorization
-                errors, causing full page reloads.
-:   *Solution*: Disable uBlock Origin, AdBlock Plus, etc. for `/dashboard/*`.
-
 !!! info "Additional Help"
 
     [Contact us](../contact) via e-mail.
\ No newline at end of file
diff --git a/.docs/images/architecture.drawio b/.docs/images/architecture.drawio
index a6512707b1b695126832f16c8fa1f271ad81cead..8e44f6c84bbcc9c0d0e1197f512d6748fb128d68 100644
--- a/.docs/images/architecture.drawio
+++ b/.docs/images/architecture.drawio
@@ -1,13 +1,13 @@
-<mxfile host="Electron" agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/24.7.8 Chrome/128.0.6613.36 Electron/32.0.1 Safari/537.36" version="24.7.8" pages="8">
+<mxfile host="Electron" agent="Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/24.7.17 Chrome/128.0.6613.36 Electron/32.0.1 Safari/537.36" version="24.7.17" pages="8">
   <diagram id="mvBsv1rP8O80Qe3yGnn_" name="docker-compose">
-    <mxGraphModel dx="2390" dy="1370" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="1169" pageHeight="827" math="0" shadow="0">
+    <mxGraphModel dx="683" dy="391" grid="1" gridSize="10" guides="1" tooltips="1" connect="1" arrows="1" fold="1" page="1" pageScale="1" pageWidth="1169" pageHeight="827" math="0" shadow="0">
       <root>
         <mxCell id="0" />
         <mxCell id="1" parent="0" />
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-76" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=2;" vertex="1" parent="1">
-          <mxGeometry x="320" y="160" width="530" height="397" as="geometry" />
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-76" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=2;" parent="1" vertex="1">
+          <mxGeometry x="320" y="160" width="640" height="397" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-77" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;endArrow=classic;endFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-108">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-77" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;endArrow=classic;endFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-108" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="668" y="206" />
@@ -16,55 +16,55 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-78" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-77">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-78" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-77" vertex="1" connectable="0">
           <mxGeometry x="-0.2051" y="1" relative="1" as="geometry">
             <mxPoint x="61" y="40" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-79" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-77">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-79" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-77" vertex="1" connectable="0">
           <mxGeometry x="-0.3724" relative="1" as="geometry">
             <mxPoint x="-2" y="11" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-80" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=6;" vertex="1" parent="1">
-          <mxGeometry x="540" y="557" width="310" height="123" as="geometry" />
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-80" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=6;" parent="1" vertex="1">
+          <mxGeometry x="540" y="557" width="420" height="123" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-81" value="LDAP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-104">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-81" value="LDAP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-104" edge="1">
           <mxGeometry x="-0.2381" relative="1" as="geometry">
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-82" value="data-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-82" value="data-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" parent="1" vertex="1">
           <mxGeometry x="352.5" y="658" width="85" height="20" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-83" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=0;exitDx=0;exitDy=0;startArrow=classic;startFill=1;endArrow=none;endFill=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-92">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-83" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=0;exitDx=0;exitDy=0;startArrow=classic;startFill=1;endArrow=none;endFill=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-92" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-84" value="AMQP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-83">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-84" value="AMQP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-83" vertex="1" connectable="0">
           <mxGeometry x="-0.0476" y="-1" relative="1" as="geometry">
             <mxPoint x="-1" y="-4" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-85" value="Data Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-85" value="Data Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" parent="1" vertex="1">
           <mxGeometry x="330" y="504" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-86" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-137">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-86" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-137" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-87" value="JDBC" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-86">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-87" value="JDBC" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-86" vertex="1" connectable="0">
           <mxGeometry x="0.3566" relative="1" as="geometry">
             <mxPoint y="-11" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-88" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-92" target="FWEJ_FGA9GBXbfwohBE8-96">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-88" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-92" target="FWEJ_FGA9GBXbfwohBE8-96" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-89" value="LDAP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-88">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-89" value="LDAP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-88" vertex="1" connectable="0">
           <mxGeometry x="-0.1051" y="-1" relative="1" as="geometry">
             <mxPoint x="3" y="-1" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-90" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-92" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-90" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-92" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="395" y="400" />
@@ -73,31 +73,31 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-91" value="HTTP,&lt;div&gt;AMQP&lt;/div&gt;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-90">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-91" value="HTTP,&lt;div&gt;AMQP&lt;/div&gt;" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-90" vertex="1" connectable="0">
           <mxGeometry x="-0.1797" y="2" relative="1" as="geometry">
             <mxPoint x="2" y="-77" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-92" value="&lt;b&gt;Broker Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;rabbitmq&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-92" value="&lt;b&gt;Broker Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;rabbitmq&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="330" y="422" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-93" value="LDAP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-108">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-93" value="LDAP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-108" edge="1">
           <mxGeometry relative="1" as="geometry">
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-94" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-94" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-96" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-95" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-94">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-95" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-94" vertex="1" connectable="0">
           <mxGeometry x="0.125" relative="1" as="geometry">
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-96" value="&lt;b&gt;Identity Service *&lt;/b&gt;&lt;div&gt;&lt;i&gt;openldap&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-96" value="&lt;b&gt;Identity Service *&lt;/b&gt;&lt;div&gt;&lt;i&gt;openldap&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="520" y="422" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-97" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.75;entryY=1;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-97" value="" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.75;entryY=1;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="775" y="480" />
@@ -107,12 +107,12 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-98" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-97">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-98" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-97" vertex="1" connectable="0">
           <mxGeometry x="0.7012" y="1" relative="1" as="geometry">
             <mxPoint x="1" y="2" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-99" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=0;exitDx=0;exitDy=0;entryX=0.75;entryY=0;entryDx=0;entryDy=0;jumpStyle=arc;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-85">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-99" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.25;exitY=0;exitDx=0;exitDy=0;entryX=0.75;entryY=0;entryDx=0;entryDy=0;jumpStyle=arc;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-85" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="743" y="490" />
@@ -120,63 +120,63 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-100" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-99">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-100" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-99" vertex="1" connectable="0">
           <mxGeometry x="0.3494" relative="1" as="geometry">
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-101" value="Metadata Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-101" value="Metadata Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" parent="1" vertex="1">
           <mxGeometry x="710" y="504" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-102" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-104" target="FWEJ_FGA9GBXbfwohBE8-101">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-102" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.75;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-104" target="FWEJ_FGA9GBXbfwohBE8-101" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-103" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-102">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-103" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-102" vertex="1" connectable="0">
           <mxGeometry x="-0.1111" relative="1" as="geometry">
             <mxPoint x="3" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-104" value="&lt;b&gt;Auth Service&lt;/b&gt;&lt;br&gt;&lt;i&gt;keycloak&lt;/i&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-104" value="&lt;b&gt;Auth Service&lt;/b&gt;&lt;br&gt;&lt;i&gt;keycloak&lt;/i&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="520" y="504" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-105" value="Search Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-105" value="Search Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" parent="1" vertex="1">
           <mxGeometry x="710" y="176" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-106" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-106" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="750" y="256" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-107" value="search-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-107" value="search-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" parent="1" vertex="1">
           <mxGeometry x="732.5" y="320.5" width="85" height="17" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-108" value="&lt;b&gt;Dashboard Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;grafana&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-108" value="&lt;b&gt;Dashboard Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;grafana&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="710" y="422" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-109" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-124" target="FWEJ_FGA9GBXbfwohBE8-120">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-109" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-124" target="FWEJ_FGA9GBXbfwohBE8-120" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-110" value="S3" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-109">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-110" value="S3" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-109" vertex="1" connectable="0">
           <mxGeometry x="0.2961" y="-3" relative="1" as="geometry">
             <mxPoint x="3" y="-9" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-111" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-111" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="560" y="584" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-112" value="auth-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-112" value="auth-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" parent="1" vertex="1">
           <mxGeometry x="542.5" y="648" width="85" height="20" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-113" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-113" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="750" y="584" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-114" value="metadata-db" style="text;html=1;strokeColor=none;fillColor=default;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-114" value="metadata-db" style="text;html=1;strokeColor=none;fillColor=default;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" parent="1" vertex="1">
           <mxGeometry x="732.5" y="649" width="85" height="17" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-115" value="HTTP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;entryX=0;entryY=0.25;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-105">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-115" value="HTTP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.25;exitDx=0;exitDy=0;entryX=0;entryY=0.25;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-105" edge="1">
           <mxGeometry relative="1" as="geometry">
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-116" value="HTTP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.25;exitDx=0;exitDy=0;entryX=1;entryY=0.25;entryDx=0;entryDy=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-124">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-116" value="HTTP" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.25;exitDx=0;exitDy=0;entryX=1;entryY=0.25;entryDx=0;entryDy=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-124" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="490" y="186" />
@@ -185,7 +185,7 @@
             <mxPoint as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-117" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-123">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-117" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.5;exitDx=0;exitDy=0;entryX=1;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-123" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="480" y="196" />
@@ -193,102 +193,102 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-118" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-117">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-118" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-117" vertex="1" connectable="0">
           <mxGeometry x="0.5551" y="-1" relative="1" as="geometry">
             <mxPoint x="1" y="-64" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-119" value="&lt;b&gt;Gateway Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;nginx&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-119" value="&lt;b&gt;Gateway Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;nginx&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="520" y="176" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-120" value="&lt;b&gt;Storage Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;seaweedfs&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-120" value="&lt;b&gt;Storage Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;seaweedfs&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="329.5" y="258" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-121" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-123" target="FWEJ_FGA9GBXbfwohBE8-120">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-121" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.5;entryY=1;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-123" target="FWEJ_FGA9GBXbfwohBE8-120" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-122" value="S3" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-121">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-122" value="S3" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-121" vertex="1" connectable="0">
           <mxGeometry x="0.0536" relative="1" as="geometry">
             <mxPoint y="1" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-123" value="Analyse Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-123" value="Analyse Service" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" parent="1" vertex="1">
           <mxGeometry x="330" y="340" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-124" value="&lt;b&gt;Upload Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;tusd&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-124" value="&lt;b&gt;Upload Service&lt;/b&gt;&lt;div&gt;&lt;i&gt;tusd&lt;/i&gt;&lt;/div&gt;" style="rounded=1;whiteSpace=wrap;html=1;fillColor=#E6E6E6;fontColor=#000000;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="330" y="176" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-125" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-105" target="FWEJ_FGA9GBXbfwohBE8-106">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-125" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-105" target="FWEJ_FGA9GBXbfwohBE8-106" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-126" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-125">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-126" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-125" vertex="1" connectable="0">
           <mxGeometry x="-0.0782" y="-1" relative="1" as="geometry">
             <mxPoint x="2" y="1" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-127" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-128" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-127" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-128" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="585" y="110" />
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-128" value="Researcher" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-128" value="Researcher" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" parent="1" vertex="1">
           <mxGeometry x="520" y="69" width="30" height="60" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-129" value="Database&lt;div&gt;Engineer&lt;/div&gt;" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-129" value="Database&lt;div&gt;Engineer&lt;/div&gt;" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" parent="1" vertex="1">
           <mxGeometry x="490" y="586" width="30" height="60" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-130" value="" style="rounded=0;whiteSpace=wrap;html=1;strokeColor=none;" vertex="1" parent="1">
-          <mxGeometry x="540" y="550" width="310" height="14" as="geometry" />
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-130" value="" style="rounded=0;whiteSpace=wrap;html=1;strokeColor=none;" parent="1" vertex="1">
+          <mxGeometry x="540" y="550" width="420" height="14" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-131" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-113">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-131" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;entryX=0.5;entryY=0;entryDx=0;entryDy=0;entryPerimeter=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-101" target="FWEJ_FGA9GBXbfwohBE8-113" edge="1">
           <mxGeometry relative="1" as="geometry">
             <mxPoint x="840" y="524" as="sourcePoint" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-132" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-131">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-132" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-131" vertex="1" connectable="0">
           <mxGeometry x="-0.0169" y="-1" relative="1" as="geometry">
             <mxPoint x="1" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-133" value="JDBC" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-104" target="FWEJ_FGA9GBXbfwohBE8-111">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-133" value="JDBC" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=1;exitDx=0;exitDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-104" target="FWEJ_FGA9GBXbfwohBE8-111" edge="1">
           <mxGeometry relative="1" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-134" value="System&lt;div&gt;Engineer&lt;/div&gt;" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-134" value="System&lt;div&gt;Engineer&lt;/div&gt;" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" parent="1" vertex="1">
           <mxGeometry x="662" y="571" width="30" height="60" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-135" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-135" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="339.5" y="584" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-136" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-136" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="399.5" y="584" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-137" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-137" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" parent="1" vertex="1">
           <mxGeometry x="369.5" y="594" width="50" height="64" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-138" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=6;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-138" value="" style="rounded=1;whiteSpace=wrap;html=1;fillColor=none;dashed=1;arcSize=6;" parent="1" vertex="1">
           <mxGeometry x="320" y="575" width="150" height="105" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-139" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;dashed=1;entryX=1.004;entryY=0.397;entryDx=0;entryDy=0;entryPerimeter=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-129" target="FWEJ_FGA9GBXbfwohBE8-138">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-139" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;dashed=1;entryX=1.004;entryY=0.397;entryDx=0;entryDy=0;entryPerimeter=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-129" target="FWEJ_FGA9GBXbfwohBE8-138" edge="1">
           <mxGeometry relative="1" as="geometry">
             <mxPoint x="460" y="616" as="targetPoint" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-140" value="User Interface" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-140" value="User Interface" style="rounded=1;whiteSpace=wrap;html=1;fontStyle=1" parent="1" vertex="1">
           <mxGeometry x="710" y="340" width="130" height="40" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-141" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-142" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-141" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=0.5;entryY=0;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-142" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="585" y="110" />
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-142" value="Machine" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" vertex="1" parent="1">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-142" value="Machine" style="shape=umlActor;verticalLabelPosition=bottom;verticalAlign=top;html=1;outlineConnect=0;" parent="1" vertex="1">
           <mxGeometry x="620" y="69" width="30" height="60" as="geometry" />
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-143" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-119">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-143" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0.5;exitY=0;exitDx=0;exitDy=0;entryX=0.25;entryY=1;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-85" target="FWEJ_FGA9GBXbfwohBE8-119" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="395" y="480" />
@@ -298,24 +298,24 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-144" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-143">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-144" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-143" vertex="1" connectable="0">
           <mxGeometry x="0.6707" relative="1" as="geometry">
             <mxPoint x="-1" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-145" value="" style="endArrow=none;dashed=1;html=1;rounded=0;entryX=1;entryY=0.976;entryDx=0;entryDy=0;entryPerimeter=0;exitX=1;exitY=0.076;exitDx=0;exitDy=0;exitPerimeter=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-80" target="FWEJ_FGA9GBXbfwohBE8-76">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-145" value="" style="endArrow=none;dashed=1;html=1;rounded=0;entryX=1;entryY=0.976;entryDx=0;entryDy=0;entryPerimeter=0;exitX=1;exitY=0.076;exitDx=0;exitDy=0;exitPerimeter=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-80" target="FWEJ_FGA9GBXbfwohBE8-76" edge="1">
           <mxGeometry width="50" height="50" relative="1" as="geometry">
             <mxPoint x="810" y="570" as="sourcePoint" />
             <mxPoint x="860" y="520" as="targetPoint" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-146" value="" style="endArrow=none;dashed=1;html=1;rounded=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;exitX=0;exitY=0.121;exitDx=0;exitDy=0;exitPerimeter=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-80" target="FWEJ_FGA9GBXbfwohBE8-130">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-146" value="" style="endArrow=none;dashed=1;html=1;rounded=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;exitX=0;exitY=0.121;exitDx=0;exitDy=0;exitPerimeter=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-80" target="FWEJ_FGA9GBXbfwohBE8-130" edge="1">
           <mxGeometry width="50" height="50" relative="1" as="geometry">
             <mxPoint x="540" y="590" as="sourcePoint" />
             <mxPoint x="590" y="540" as="targetPoint" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-147" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-140">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-147" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=1;exitY=0.5;exitDx=0;exitDy=0;entryX=0;entryY=0.5;entryDx=0;entryDy=0;startArrow=classic;startFill=1;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-119" target="FWEJ_FGA9GBXbfwohBE8-140" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="683" y="196" />
@@ -323,12 +323,12 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-148" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-147">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-148" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-147" vertex="1" connectable="0">
           <mxGeometry x="0.0179" relative="1" as="geometry">
             <mxPoint y="8" as="offset" />
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-149" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.25;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;endArrow=none;endFill=0;" edge="1" parent="1" source="FWEJ_FGA9GBXbfwohBE8-105" target="FWEJ_FGA9GBXbfwohBE8-101">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-149" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;exitX=0;exitY=0.75;exitDx=0;exitDy=0;entryX=0;entryY=0.25;entryDx=0;entryDy=0;jumpStyle=arc;startArrow=classic;startFill=1;endArrow=none;endFill=0;" parent="1" source="FWEJ_FGA9GBXbfwohBE8-105" target="FWEJ_FGA9GBXbfwohBE8-101" edge="1">
           <mxGeometry relative="1" as="geometry">
             <Array as="points">
               <mxPoint x="697" y="206" />
@@ -336,11 +336,28 @@
             </Array>
           </mxGeometry>
         </mxCell>
-        <mxCell id="FWEJ_FGA9GBXbfwohBE8-150" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="FWEJ_FGA9GBXbfwohBE8-149">
+        <mxCell id="FWEJ_FGA9GBXbfwohBE8-150" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" parent="FWEJ_FGA9GBXbfwohBE8-149" vertex="1" connectable="0">
           <mxGeometry x="-0.5289" y="-1" relative="1" as="geometry">
             <mxPoint x="4" y="34" as="offset" />
           </mxGeometry>
         </mxCell>
+        <mxCell id="a_cl7nsyDpLQFaXOHeFD-5" style="edgeStyle=orthogonalEdgeStyle;rounded=0;orthogonalLoop=1;jettySize=auto;html=1;entryX=1;entryY=0.5;entryDx=0;entryDy=0;" edge="1" parent="1" source="a_cl7nsyDpLQFaXOHeFD-3" target="FWEJ_FGA9GBXbfwohBE8-108">
+          <mxGeometry relative="1" as="geometry" />
+        </mxCell>
+        <mxCell id="a_cl7nsyDpLQFaXOHeFD-6" value="HTTP" style="edgeLabel;html=1;align=center;verticalAlign=middle;resizable=0;points=[];" vertex="1" connectable="0" parent="a_cl7nsyDpLQFaXOHeFD-5">
+          <mxGeometry x="-0.1222" relative="1" as="geometry">
+            <mxPoint as="offset" />
+          </mxGeometry>
+        </mxCell>
+        <mxCell id="a_cl7nsyDpLQFaXOHeFD-3" value="" style="shape=cylinder3;whiteSpace=wrap;html=1;boundedLbl=1;backgroundOutline=1;size=8.600000000000023;fillColor=#E6E6E6;strokeColor=#000000;" vertex="1" parent="1">
+          <mxGeometry x="900" y="410" width="50" height="64" as="geometry" />
+        </mxCell>
+        <mxCell id="a_cl7nsyDpLQFaXOHeFD-4" value="metric-db" style="text;html=1;strokeColor=none;fillColor=none;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;spacing=-1;" vertex="1" parent="1">
+          <mxGeometry x="882.5" y="474" width="85" height="17" as="geometry" />
+        </mxCell>
+        <mxCell id="a_cl7nsyDpLQFaXOHeFD-7" value="* omitted edges for all services to metric-db" style="text;html=1;align=center;verticalAlign=middle;whiteSpace=wrap;rounded=0;" vertex="1" parent="1">
+          <mxGeometry x="727" y="683" width="233" height="11" as="geometry" />
+        </mxCell>
       </root>
     </mxGraphModel>
   </diagram>
diff --git a/.docs/kubernetes.md b/.docs/kubernetes.md
index 0539dc542ed1f96af8fdc142a0df93432a6c47d4..2384f61d0451435b0515e09b128f71d942039bee 100644
--- a/.docs/kubernetes.md
+++ b/.docs/kubernetes.md
@@ -20,36 +20,6 @@ helm upgrade --install dbrepo \
   --cleanup-on-fail
 ```
 
-This chart is also on [Artifact Hub](https://artifacthub.io/packages/helm/dbrepo/dbrepo) with a full documentation
-about values, etc. Before installing, you need to change credentials, e.g. the Broker Service administrator user
-password:
-
-```yaml title="values.yaml"
-brokerservice:
-  ...
-  auth:
-    ...
-    username: broker
-    password: broker
-    passwordHash: 1gwjNNTBPKLgyzbsUykfR0JIFC6nNqbNJaxzZ14uPT8JGcTZ
-```
-
-The `brokerservice.auth.passwordHash` field is the RabbitMQ SHA512-hash of the `brokerservice.auth.password` field and
-can be obtained with
-the [`generate-rabbitmq-pw.sh`](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/blob/release-1.4.7/helm/dbrepo/hack/generate-rabbitmq-pw.sh)
-script:
-
-```console
-$ ./generate-rabbitmq-pw.sh my_password
-klPdmv4dgnRH64czHolIHAfXvc0G9hc24FQmPlI6eeI1NOf9
-```
-
-The script needs the package `xxd` for generation of the random salt. If you don't have `xxd` installed, install it:
-
-* Debian/Ubuntu: `apt install xxd`
-* Windows: `choco install xxd`
-* MacOS: `brew install coreutils`
-
 ## Prerequisites
 
 * Kubernetes 1.24+
@@ -58,9 +28,7 @@ The script needs the package `xxd` for generation of the random salt. If you don
 
 ## Limitations
 
-1. MariaDB Galera does not (yet) support XA-transactions required by the authentication service (=Keycloak). Therefore
-   only a single MariaDB pod can be deployed at once for the Auth database.
-2. The entire Helm deployment is rootless (=`runAsNonRoot=true`) except for
+1. The entire Helm deployment is rootless (=`runAsNonRoot=true`) except for
    the [Storage Service](../api/storage-service) which still requires a root user.
 
 !!! question "Do you miss functionality? Do these limitations affect you?"
diff --git a/dbrepo-analyse-service/.gitignore b/dbrepo-analyse-service/.gitignore
index 4ae9f6930d70a7da1f7b06d28fc7dcecf7ce24c4..d339f8575ccfbafdb8eef6431cce6c1add7aa92e 100644
--- a/dbrepo-analyse-service/.gitignore
+++ b/dbrepo-analyse-service/.gitignore
@@ -17,6 +17,12 @@ venv/
 .venv/
 env*
 
+# Libraries
+./lib/dbrepo-1.4.4*
+./lib/dbrepo-1.4.5*
+./lib/dbrepo-1.4.6*
+./lib/dbrepo-1.4.7rc*
+
 # LLM
 *.bin
 
diff --git a/dbrepo-analyse-service/Pipfile.lock b/dbrepo-analyse-service/Pipfile.lock
index 681bdc8a9b2742f7a137fe6b2a25183071a6480c..91f5b55d641405ee5efabe19d0183109a2a60b7e 100644
--- a/dbrepo-analyse-service/Pipfile.lock
+++ b/dbrepo-analyse-service/Pipfile.lock
@@ -190,20 +190,20 @@
         },
         "boto3": {
             "hashes": [
-                "sha256:234a475fe56b65e99b4f5cfff50adaac6b23d39558d6b55137bbf1e50dd0ef08",
-                "sha256:90c8cddc4a08c8040057ad44c7468ff82fea9fe8b6517db5ff01a9b2900299cc"
+                "sha256:5970b62c1ec8177501e02520f0d41839ca5fc549b30bac4e8c0c0882ae776217",
+                "sha256:670f811c65e3c5fe4ed8c8d69be0b44b1d649e992c0fc16de43816d1188f88f1"
             ],
             "index": "pypi",
             "markers": "python_version >= '3.8'",
-            "version": "==1.35.38"
+            "version": "==1.35.39"
         },
         "botocore": {
             "hashes": [
-                "sha256:2eb17d32fa2d3bb5d475132a83564d28e3acc2161534f24b75a54418a1d51359",
-                "sha256:55d9305c44e5ba29476df456120fa4fb919f03f066afa82f2ae400485e7465f4"
+                "sha256:781c547eb6a79c0e4b0bedd87b81fbfed957816b4841d33e20c8f1989c7c19ce",
+                "sha256:cb7f851933b5ccc2fba4f0a8b846252410aa0efac5bfbe93b82d10801f5f8e90"
             ],
             "markers": "python_version >= '3.8'",
-            "version": "==1.35.38"
+            "version": "==1.35.39"
         },
         "certifi": {
             "hashes": [
@@ -440,7 +440,7 @@
         },
         "dbrepo": {
             "hashes": [
-                "sha256:5aa92850231c25a57ffa58395e0f6bbda2818b1f0d4edd83f51fd8143d909451"
+                "sha256:654d487f1c0fd99b4978f5756aec4046f3e6019aeb225ecdd449768795f6e7e0"
             ],
             "markers": "python_version >= '3.11'",
             "path": "./lib/dbrepo-1.4.7.tar.gz"
@@ -1613,7 +1613,7 @@
                 "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d",
                 "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"
             ],
-            "markers": "python_version < '3.13'",
+            "markers": "python_version >= '3.8'",
             "version": "==4.12.2"
         },
         "tzdata": {
@@ -1629,7 +1629,7 @@
                 "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac",
                 "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"
             ],
-            "markers": "python_version >= '3.10'",
+            "markers": "python_version >= '3.8'",
             "version": "==2.2.3"
         },
         "werkzeug": {
@@ -1642,101 +1642,107 @@
         },
         "yarl": {
             "hashes": [
-                "sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd",
-                "sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a",
-                "sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d",
-                "sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d",
-                "sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae",
-                "sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664",
-                "sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87",
-                "sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114",
-                "sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f",
-                "sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55",
-                "sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439",
-                "sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547",
-                "sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de",
-                "sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269",
-                "sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8",
-                "sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e",
-                "sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b",
-                "sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59",
-                "sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97",
-                "sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21",
-                "sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132",
-                "sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92",
-                "sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9",
-                "sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b",
-                "sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d",
-                "sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607",
-                "sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0",
-                "sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2",
-                "sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d",
-                "sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f",
-                "sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6",
-                "sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72",
-                "sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3",
-                "sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f",
-                "sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4",
-                "sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4",
-                "sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561",
-                "sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd",
-                "sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892",
-                "sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a",
-                "sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482",
-                "sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049",
-                "sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1",
-                "sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17",
-                "sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348",
-                "sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96",
-                "sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0",
-                "sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c",
-                "sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1",
-                "sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2",
-                "sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3",
-                "sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d",
-                "sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8",
-                "sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22",
-                "sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393",
-                "sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab",
-                "sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835",
-                "sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1",
-                "sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9",
-                "sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13",
-                "sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9",
-                "sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2",
-                "sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373",
-                "sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a",
-                "sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e",
-                "sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457",
-                "sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20",
-                "sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8",
-                "sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511",
-                "sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f",
-                "sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce",
-                "sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519",
-                "sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76",
-                "sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634",
-                "sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069",
-                "sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50",
-                "sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075",
-                "sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f",
-                "sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c",
-                "sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1",
-                "sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf",
-                "sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9",
-                "sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a",
-                "sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07",
-                "sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e",
-                "sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f",
-                "sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9",
-                "sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69",
-                "sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d",
-                "sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8",
-                "sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2",
-                "sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a"
+                "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656",
+                "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7",
+                "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9",
+                "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685",
+                "sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf",
+                "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb",
+                "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5",
+                "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77",
+                "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496",
+                "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a",
+                "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60",
+                "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128",
+                "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d",
+                "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61",
+                "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758",
+                "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5",
+                "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f",
+                "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0",
+                "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e",
+                "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814",
+                "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc",
+                "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf",
+                "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117",
+                "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d",
+                "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6",
+                "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218",
+                "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9",
+                "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf",
+                "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4",
+                "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef",
+                "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a",
+                "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4",
+                "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751",
+                "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253",
+                "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c",
+                "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42",
+                "sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb",
+                "sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7",
+                "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e",
+                "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b",
+                "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631",
+                "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29",
+                "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234",
+                "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530",
+                "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0",
+                "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a",
+                "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2",
+                "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e",
+                "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1",
+                "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec",
+                "sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f",
+                "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752",
+                "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9",
+                "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452",
+                "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef",
+                "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6",
+                "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c",
+                "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1",
+                "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246",
+                "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3",
+                "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb",
+                "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb",
+                "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07",
+                "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509",
+                "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326",
+                "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f",
+                "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6",
+                "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661",
+                "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40",
+                "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2",
+                "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15",
+                "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f",
+                "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844",
+                "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a",
+                "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b",
+                "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6",
+                "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e",
+                "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced",
+                "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19",
+                "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba",
+                "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95",
+                "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c",
+                "sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2",
+                "sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297",
+                "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891",
+                "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b",
+                "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d",
+                "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9",
+                "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22",
+                "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4",
+                "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba",
+                "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738",
+                "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0",
+                "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53",
+                "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f",
+                "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942",
+                "sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7",
+                "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd"
             ],
             "markers": "python_version >= '3.8'",
-            "version": "==1.14.0"
+            "version": "==1.15.0"
         },
         "zope.event": {
             "hashes": [
@@ -2268,7 +2274,7 @@
                 "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d",
                 "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"
             ],
-            "markers": "python_version < '3.13'",
+            "markers": "python_version >= '3.8'",
             "version": "==4.12.2"
         },
         "urllib3": {
@@ -2276,7 +2282,7 @@
                 "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac",
                 "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"
             ],
-            "markers": "python_version >= '3.10'",
+            "markers": "python_version >= '3.8'",
             "version": "==2.2.3"
         },
         "wrapt": {
diff --git a/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz b/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz
index 5774a413a678f3179e5e215737c5e39f5a773ef8..936ec4dac16fe8f065ac7d37a09aedca421086a6 100644
Binary files a/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-analyse-service/lib/dbrepo-1.4.7.tar.gz differ
diff --git a/dbrepo-dashboard-service/dashboards/system.json b/dbrepo-dashboard-service/dashboards/system.json
index 52bf6d067122e2e803d2858256bfb274351d1f49..d0234fe47706400ed5e75b72906b8a61461a09bf 100644
--- a/dbrepo-dashboard-service/dashboards/system.json
+++ b/dbrepo-dashboard-service/dashboards/system.json
@@ -1797,6 +1797,368 @@
       ],
       "title": "Successful API Requests",
       "type": "timeseries"
+    },
+    {
+      "collapsed": true,
+      "gridPos": {
+        "h": 1,
+        "w": 24,
+        "x": 0,
+        "y": 40
+      },
+      "id": 31,
+      "panels": [
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "P18F45E9DC7E75912"
+          },
+          "description": "",
+          "fieldConfig": {
+            "defaults": {
+              "mappings": [],
+              "max": 100,
+              "min": 0,
+              "thresholds": {
+                "mode": "absolute",
+                "steps": [
+                  {
+                    "color": "blue",
+                    "value": null
+                  }
+                ]
+              },
+              "unit": "none"
+            },
+            "overrides": []
+          },
+          "gridPos": {
+            "h": 3,
+            "w": 4,
+            "x": 0,
+            "y": 41
+          },
+          "id": 29,
+          "options": {
+            "colorMode": "background",
+            "graphMode": "none",
+            "justifyMode": "auto",
+            "orientation": "auto",
+            "percentChangeColorMode": "standard",
+            "reduceOptions": {
+              "calcs": [
+                "lastNotNull"
+              ],
+              "fields": "",
+              "values": false
+            },
+            "showPercentChange": false,
+            "textMode": "auto",
+            "wideLayout": true
+          },
+          "pluginVersion": "10.4.9",
+          "targets": [
+            {
+              "datasource": {
+                "type": "prometheus",
+                "uid": "P18F45E9DC7E75912"
+              },
+              "disableTextWrap": false,
+              "editorMode": "code",
+              "expr": "nginx_connections_active",
+              "fullMetaSearch": false,
+              "includeNullMetadata": true,
+              "instant": false,
+              "legendFormat": "__auto",
+              "range": true,
+              "refId": "A",
+              "useBackend": false
+            }
+          ],
+          "title": "Connections (Active)",
+          "type": "stat"
+        },
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "P18F45E9DC7E75912"
+          },
+          "description": "",
+          "fieldConfig": {
+            "defaults": {
+              "mappings": [],
+              "max": 100,
+              "min": 0,
+              "thresholds": {
+                "mode": "absolute",
+                "steps": [
+                  {
+                    "color": "green",
+                    "value": null
+                  },
+                  {
+                    "color": "yellow",
+                    "value": 1
+                  },
+                  {
+                    "color": "orange",
+                    "value": 3
+                  },
+                  {
+                    "color": "red",
+                    "value": 5
+                  }
+                ]
+              },
+              "unit": "none"
+            },
+            "overrides": []
+          },
+          "gridPos": {
+            "h": 3,
+            "w": 4,
+            "x": 4,
+            "y": 41
+          },
+          "id": 30,
+          "options": {
+            "colorMode": "background",
+            "graphMode": "none",
+            "justifyMode": "auto",
+            "orientation": "auto",
+            "percentChangeColorMode": "standard",
+            "reduceOptions": {
+              "calcs": [
+                "lastNotNull"
+              ],
+              "fields": "",
+              "values": false
+            },
+            "showPercentChange": false,
+            "textMode": "auto",
+            "wideLayout": true
+          },
+          "pluginVersion": "10.4.9",
+          "targets": [
+            {
+              "datasource": {
+                "type": "prometheus",
+                "uid": "P18F45E9DC7E75912"
+              },
+              "disableTextWrap": false,
+              "editorMode": "code",
+              "expr": "nginx_connections_waiting",
+              "fullMetaSearch": false,
+              "includeNullMetadata": true,
+              "instant": false,
+              "legendFormat": "__auto",
+              "range": true,
+              "refId": "A",
+              "useBackend": false
+            }
+          ],
+          "title": "Connections (Waiting)",
+          "type": "stat"
+        },
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "P18F45E9DC7E75912"
+          },
+          "description": "",
+          "fieldConfig": {
+            "defaults": {
+              "mappings": [],
+              "max": 100,
+              "min": 0,
+              "thresholds": {
+                "mode": "absolute",
+                "steps": [
+                  {
+                    "color": "green",
+                    "value": null
+                  },
+                  {
+                    "color": "yellow",
+                    "value": 0.0001
+                  },
+                  {
+                    "color": "orange",
+                    "value": 0.001
+                  },
+                  {
+                    "color": "red",
+                    "value": 0.01
+                  }
+                ]
+              },
+              "unit": "none"
+            },
+            "overrides": []
+          },
+          "gridPos": {
+            "h": 3,
+            "w": 4,
+            "x": 8,
+            "y": 41
+          },
+          "id": 33,
+          "options": {
+            "colorMode": "background",
+            "graphMode": "none",
+            "justifyMode": "auto",
+            "orientation": "auto",
+            "percentChangeColorMode": "standard",
+            "reduceOptions": {
+              "calcs": [
+                "lastNotNull"
+              ],
+              "fields": "",
+              "values": false
+            },
+            "showPercentChange": false,
+            "textMode": "auto",
+            "wideLayout": true
+          },
+          "pluginVersion": "10.4.9",
+          "targets": [
+            {
+              "datasource": {
+                "type": "prometheus",
+                "uid": "P18F45E9DC7E75912"
+              },
+              "disableTextWrap": false,
+              "editorMode": "code",
+              "expr": "max(rate(promhttp_metric_handler_requests_total{job=\"gateway scrape\", code!=\"200\"}[24h]))",
+              "fullMetaSearch": false,
+              "includeNullMetadata": true,
+              "instant": false,
+              "legendFormat": "__auto",
+              "range": true,
+              "refId": "A",
+              "useBackend": false
+            }
+          ],
+          "title": "Failed Requests (24h)",
+          "type": "stat"
+        },
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "P18F45E9DC7E75912"
+          },
+          "fieldConfig": {
+            "defaults": {
+              "color": {
+                "mode": "thresholds"
+              },
+              "mappings": [],
+              "thresholds": {
+                "mode": "absolute",
+                "steps": [
+                  {
+                    "color": "green",
+                    "value": null
+                  },
+                  {
+                    "color": "#EAB839",
+                    "value": 0.0001
+                  },
+                  {
+                    "color": "orange",
+                    "value": 0.001
+                  },
+                  {
+                    "color": "red",
+                    "value": 0.01
+                  }
+                ]
+              }
+            },
+            "overrides": []
+          },
+          "gridPos": {
+            "h": 7,
+            "w": 12,
+            "x": 12,
+            "y": 41
+          },
+          "id": 32,
+          "options": {
+            "displayMode": "basic",
+            "maxVizHeight": 300,
+            "minVizHeight": 16,
+            "minVizWidth": 8,
+            "namePlacement": "auto",
+            "orientation": "horizontal",
+            "reduceOptions": {
+              "calcs": [
+                "lastNotNull"
+              ],
+              "fields": "",
+              "values": false
+            },
+            "showUnfilled": true,
+            "sizing": "auto",
+            "valueMode": "color"
+          },
+          "pluginVersion": "10.4.9",
+          "targets": [
+            {
+              "datasource": {
+                "type": "prometheus",
+                "uid": "P18F45E9DC7E75912"
+              },
+              "disableTextWrap": false,
+              "editorMode": "builder",
+              "expr": "rate(promhttp_metric_handler_requests_total{job=\"gateway scrape\", code!=\"200\"}[24h])",
+              "fullMetaSearch": false,
+              "includeNullMetadata": false,
+              "instant": false,
+              "legendFormat": "Code {{code}}",
+              "range": true,
+              "refId": "A",
+              "useBackend": false
+            }
+          ],
+          "title": "Failed Requests (24h)",
+          "type": "bargauge"
+        },
+        {
+          "datasource": {
+            "type": "prometheus",
+            "uid": "P18F45E9DC7E75912"
+          },
+          "gridPos": {
+            "h": 3,
+            "w": 12,
+            "x": 0,
+            "y": 44
+          },
+          "id": 34,
+          "links": [
+            {
+              "targetBlank": true,
+              "title": "Documentation",
+              "url": "https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.7/api/gateway-service/#monitoring-optional"
+            }
+          ],
+          "options": {
+            "code": {
+              "language": "plaintext",
+              "showLineNumbers": false,
+              "showMiniMap": false
+            },
+            "content": "## Optional Panel\n\nActivate this panel by setting-up the Gateway Service Monitoring by clicking the link above.",
+            "mode": "markdown"
+          },
+          "pluginVersion": "10.4.9",
+          "transparent": true,
+          "type": "text"
+        }
+      ],
+      "title": "Gateway",
+      "type": "row"
     }
   ],
   "refresh": "1m",
diff --git a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
index 548558cafd8ccba785bda4c52dde303932c24a84..06afef015df02c8077ce312691e140916ea4e2f0 100644
--- a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
+++ b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
@@ -555,7 +555,7 @@ public class TableEndpoint {
             throws DatabaseUnavailableException, RemoteUnavailableException, TableNotFoundException,
             NotAllowedException, StorageUnavailableException, QueryMalformedException, SidecarExportException,
             StorageNotFoundException, MetadataServiceException {
-        log.debug("endpoint find table history, databaseId={}, tableId={}, timestamp={}", databaseId, tableId, timestamp);
+        log.debug("endpoint export table data, databaseId={}, tableId={}, timestamp={}", databaseId, tableId, timestamp);
         /* parameters */
         if (timestamp == null) {
             timestamp = Instant.now();
diff --git a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/ViewEndpoint.java b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/ViewEndpoint.java
index a4c07c3f55ff5ea2fab84b6293c7cc6a424c278c..9d0bd3b6adec78c34690073a46f35ab220386504 100644
--- a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/ViewEndpoint.java
+++ b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/endpoints/ViewEndpoint.java
@@ -1,9 +1,12 @@
 package at.tuwien.endpoints;
 
-import at.tuwien.api.database.*;
+import at.tuwien.ExportResourceDto;
+import at.tuwien.api.database.ViewCreateDto;
+import at.tuwien.api.database.ViewDto;
 import at.tuwien.api.database.internal.PrivilegedDatabaseDto;
 import at.tuwien.api.database.internal.PrivilegedViewDto;
 import at.tuwien.api.database.query.QueryResultDto;
+import at.tuwien.api.database.table.internal.PrivilegedTableDto;
 import at.tuwien.api.error.ApiErrorDto;
 import at.tuwien.exception.*;
 import at.tuwien.gateway.MetadataServiceGateway;
@@ -24,6 +27,7 @@ import jakarta.validation.constraints.NotBlank;
 import jakarta.validation.constraints.NotNull;
 import lombok.extern.log4j.Log4j2;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.core.io.InputStreamResource;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.HttpStatus;
 import org.springframework.http.ResponseEntity;
@@ -282,4 +286,67 @@ public class ViewEndpoint {
         }
     }
 
+    @GetMapping("/{viewId}/export")
+    @Observed(name = "dbrepo_view_data_export")
+    @Operation(summary = "Get view data",
+            description = "Gets data from view with id as downloadable file. For tables in private databases, the user needs to have at least *READ* access to the associated database.",
+            security = {@SecurityRequirement(name = "basicAuth"), @SecurityRequirement(name = "bearerAuth")})
+    @ApiResponses(value = {
+            @ApiResponse(responseCode = "200",
+                    description = "Exported view data",
+                    content = {@Content(
+                            mediaType = "application/json",
+                            schema = @Schema(implementation = InputStreamResource.class))}),
+            @ApiResponse(responseCode = "400",
+                    description = "Request pagination or view data select query is malformed",
+                    content = {@Content(
+                            mediaType = "application/json",
+                            schema = @Schema(implementation = ApiErrorDto.class))}),
+            @ApiResponse(responseCode = "403",
+                    description = "Export view data not allowed",
+                    content = {@Content(
+                            mediaType = "application/json",
+                            schema = @Schema(implementation = ApiErrorDto.class))}),
+            @ApiResponse(responseCode = "404",
+                    description = "Failed to find view in metadata database or export dataset",
+                    content = {@Content(
+                            mediaType = "application/json",
+                            schema = @Schema(implementation = ApiErrorDto.class))}),
+            @ApiResponse(responseCode = "503",
+                    description = "Failed to establish connection with the metadata service",
+                    content = {@Content(
+                            mediaType = "application/json",
+                            schema = @Schema(implementation = ApiErrorDto.class))}),
+    })
+    public ResponseEntity<InputStreamResource> exportDataset(@NotBlank @PathVariable("databaseId") Long databaseId,
+                                                             @NotBlank @PathVariable("viewId") Long viewId,
+                                                             Principal principal)
+            throws DatabaseUnavailableException, RemoteUnavailableException, ViewNotFoundException,
+            NotAllowedException, MetadataServiceException, StorageUnavailableException, QueryMalformedException,
+            SidecarExportException, StorageNotFoundException {
+        log.debug("endpoint export view data, databaseId={}, viewId={}", databaseId, viewId);
+        /* parameters */
+        final PrivilegedViewDto view = metadataServiceGateway.getViewById(databaseId, viewId);
+        if (!view.getIsPublic()) {
+            if (principal == null) {
+                log.error("Failed to export private view: principal is null");
+                throw new NotAllowedException("Failed to export private view: principal is null");
+            }
+            metadataServiceGateway.getAccess(databaseId, UserUtil.getId(principal));
+        }
+        try {
+            final HttpHeaders headers = new HttpHeaders();
+            final ExportResourceDto resource = viewService.exportDataset(view);
+            headers.add("Content-Disposition", "attachment; filename=\"" + resource.getFilename() + "\"");
+            log.trace("export table resulted in resource {}", resource);
+            return ResponseEntity.ok()
+                    .headers(headers)
+                    .body(resource.getResource());
+
+        } catch (SQLException e) {
+            log.error("Failed to establish connection to database: {}", e.getMessage());
+            throw new DatabaseUnavailableException("Failed to establish connection to database", e);
+        }
+    }
+
 }
diff --git a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
index 3cbb865293553e91a6730cfd731c62bbfc79c915..1c6adfd6a5b87355e76f9b40147fe91f5de4d4e2 100644
--- a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
+++ b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
@@ -1,8 +1,6 @@
 package at.tuwien.validation;
 
 import at.tuwien.api.database.AccessTypeDto;
-import at.tuwien.api.database.DatabaseAccessDto;
-import at.tuwien.api.database.DatabaseDto;
 import at.tuwien.config.QueryConfig;
 import at.tuwien.exception.NotAllowedException;
 import at.tuwien.exception.PaginationException;
diff --git a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/SchemaServiceIntegrationTest.java b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/SchemaServiceIntegrationTest.java
index 85e2b8071174adf3bb401a286cf1e595713e279c..23503384b617ec7e066d9a0aeebe6c04c5f22a13 100644
--- a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/SchemaServiceIntegrationTest.java
+++ b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/SchemaServiceIntegrationTest.java
@@ -167,7 +167,7 @@ public class SchemaServiceIntegrationTest extends AbstractUnitTest {
         final List<ColumnDto> columns = response.getColumns();
         assertNotNull(columns);
         assertEquals(5, columns.size());
-        assertColumn(columns.get(0), null, null, DATABASE_1_ID, "id", "id", ColumnTypeDto.BIGINT, 19L, 0L, false, null);
+        assertColumn(columns.get(0), null, null, DATABASE_1_ID, "id", "id", ColumnTypeDto.BIGINT, 20L, 0L, false, null);
         assertColumn(columns.get(1), null, null, DATABASE_1_ID, "date", "date", ColumnTypeDto.DATE, null, null, false, null);
         assertColumn(columns.get(2), null, null, DATABASE_1_ID, "location", "location", ColumnTypeDto.VARCHAR, 255L, null, true, "Closest city");
         assertColumn(columns.get(3), null, null, DATABASE_1_ID, "mintemp", "mintemp", ColumnTypeDto.DOUBLE, 22L, null, true, null);
diff --git a/dbrepo-data-service/rest-service/src/test/resources/init/weather.sql b/dbrepo-data-service/rest-service/src/test/resources/init/weather.sql
index 10cbf855655b57d02b4ed561d0c3cf50f0272dc4..322e67cc07397105bb7c763efe4d37c905cc1b18 100644
--- a/dbrepo-data-service/rest-service/src/test/resources/init/weather.sql
+++ b/dbrepo-data-service/rest-service/src/test/resources/init/weather.sql
@@ -11,7 +11,7 @@ CREATE TABLE weather_location
 
 CREATE TABLE weather_aus
 (
-    id       BIGINT           NOT NULL PRIMARY KEY,
+    id       SERIAL PRIMARY KEY,
     `date`   DATE             NOT NULL,
     location VARCHAR(255)     NULL COMMENT 'Closest city',
     mintemp  DOUBLE PRECISION NULL,
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/DataMapper.java b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/DataMapper.java
index 01854e0cb7ac4dfccadd6faa6004c88f1e4d3903..62b529976d029c0612e3849f032e0a1093270562 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/DataMapper.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/DataMapper.java
@@ -140,7 +140,6 @@ public interface DataMapper {
     default TableDto resultSetToTable(ResultSet resultSet, TableDto table, QueryConfig queryConfig) throws SQLException {
         final ColumnDto column = ColumnDto.builder()
                 .ordinalPosition(resultSet.getInt(1) - 1) /* start at zero */
-                .autoGenerated(resultSet.getString(2) != null && resultSet.getString(2).startsWith("nextval"))
                 .isNullAllowed(resultSet.getString(3).equals("YES"))
                 .columnType(ColumnTypeDto.valueOf(resultSet.getString(4).toUpperCase()))
                 .d(resultSet.getString(7) != null ? resultSet.getLong(7) : null)
@@ -551,7 +550,7 @@ public interface DataMapper {
             case TEXT, CHAR, VARCHAR, TINYTEXT, MEDIUMTEXT, LONGTEXT, ENUM, SET -> {
                 return String.valueOf(data);
             }
-            case BIGINT -> {
+            case BIGINT, SERIAL -> {
                 return new BigInteger(String.valueOf(data));
             }
             case INT, SMALLINT, MEDIUMINT, TINYINT -> {
@@ -643,7 +642,7 @@ public interface DataMapper {
                 }
                 ps.setString(idx, String.valueOf(value));
                 break;
-            case BIGINT:
+            case BIGINT, SERIAL:
                 if (value == null) {
                     ps.setNull(idx, Types.BIGINT);
                     break;
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
index bf0483496d0125c9eb5faf665d027353e949bd09..bfca2d923fc550a6420f1140a9a9fe6d3aae7e59 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
@@ -664,9 +664,6 @@ public interface MariaDbMapper {
                         log.error("Failed to find table column {}", key);
                         throw new IllegalArgumentException("Failed to find table column");
                     }
-                    if (optional.get().getAutoGenerated()) {
-                        return;
-                    }
                     statement.append(idx[0]++ == 0 ? "" : ", ")
                             .append("`")
                             .append(key)
@@ -683,9 +680,6 @@ public interface MariaDbMapper {
                         log.error("Failed to find table column {}", key);
                         throw new IllegalArgumentException("Failed to find table column");
                     }
-                    if (optional.get().getAutoGenerated()) {
-                        return;
-                    }
                     statement.append(jdx[0]++ == 0 ? "" : ", ")
                             .append("?");
                 });
@@ -717,9 +711,9 @@ public interface MariaDbMapper {
                     statement.setNull(idx, Types.DATE);
                     break;
                 }
-                statement.setDate(idx, Date.valueOf(String.valueOf(value)));
+                statement.setString(idx, String.valueOf(value));
                 break;
-            case BIGINT:
+            case BIGINT, SERIAL:
                 if (value == null) {
                     statement.setNull(idx, Types.BIGINT);
                     break;
@@ -731,21 +725,21 @@ public interface MariaDbMapper {
                     statement.setNull(idx, Types.INTEGER);
                     break;
                 }
-                statement.setLong(idx, Long.parseLong(String.valueOf(value)));
+                statement.setLong(idx, Integer.parseInt(String.valueOf(value)));
                 break;
             case TINYINT:
                 if (value == null) {
                     statement.setNull(idx, Types.TINYINT);
                     break;
                 }
-                statement.setLong(idx, Long.parseLong(String.valueOf(value)));
+                statement.setLong(idx, Integer.parseInt(String.valueOf(value)));
                 break;
             case SMALLINT:
                 if (value == null) {
                     statement.setNull(idx, Types.SMALLINT);
                     break;
                 }
-                statement.setLong(idx, Long.parseLong(String.valueOf(value)));
+                statement.setInt(idx, Integer.parseInt(String.valueOf(value)));
                 break;
             case DECIMAL:
                 if (value == null) {
@@ -787,16 +781,9 @@ public interface MariaDbMapper {
                     statement.setNull(idx, Types.TIMESTAMP);
                     break;
                 }
-                statement.setTimestamp(idx, Timestamp.valueOf(String.valueOf(value)));
-                break;
-            case TIME:
-                if (value == null) {
-                    statement.setNull(idx, Types.TIME);
-                    break;
-                }
-                statement.setTime(idx, Time.valueOf(String.valueOf(value)));
+                statement.setString(idx, String.valueOf(value));
                 break;
-            case YEAR:
+            case TIME, YEAR:
                 if (value == null) {
                     statement.setNull(idx, Types.TIME);
                     break;
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/service/QueueService.java b/dbrepo-data-service/services/src/main/java/at/tuwien/service/QueueService.java
index 3a94045c9d209d57dc8bc9f5417a41980852fe6a..79a23932b5aee74da800c0b41023a7257fa4d32b 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/service/QueueService.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/service/QueueService.java
@@ -12,6 +12,7 @@ public interface QueueService {
      *
      * @param table    The table.
      * @param data     The data.
+     * @throws SQLException The connection to the database could not be established.
      */
     void insert(PrivilegedTableDto table, Map<String, Object> data) throws SQLException;
 }
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/service/TableService.java b/dbrepo-data-service/services/src/main/java/at/tuwien/service/TableService.java
index 33ef0026a8b3df7e9feddbe22d57da33be0af39d..c93186f451c5b41d2c55924ce685d578b40bfbde 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/service/TableService.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/service/TableService.java
@@ -104,18 +104,66 @@ public interface TableService {
     Long getCount(PrivilegedTableDto table, Instant timestamp) throws SQLException,
             QueryMalformedException;
 
+    /**
+     * Imports a dataset by metadata into the sidecar of the target database by given table.
+     * @param table The table.
+     * @param data The dataset metadata.
+     * @throws SidecarImportException The sidecar of the target database failed to import the dataset.
+     * @throws StorageNotFoundException The storage service was not able to find the dataset for import.
+     * @throws SQLException Failed to parse SQL query, contains invalid syntax.
+     * @throws QueryMalformedException The import query is malformed, likely due to a bug in the application.
+     * @throws RemoteUnavailableException Failed to establish connection to the sidecar.
+     */
     void importDataset(PrivilegedTableDto table, ImportDto data) throws SidecarImportException,
             StorageNotFoundException, SQLException, QueryMalformedException, RemoteUnavailableException;
 
+    /**
+     * Deletes a tuple from a table.
+     * @param table The table.
+     * @param data The tuple delete metadata.
+     * @throws SQLException Failed to parse SQL query, contains invalid syntax.
+     * @throws TableMalformedException The tuple is malformed and does not fit the table schema.
+     * @throws QueryMalformedException The delete query is malformed, likely due to a bug in the application.
+     */
     void deleteTuple(PrivilegedTableDto table, TupleDeleteDto data) throws SQLException,
             TableMalformedException, QueryMalformedException;
 
+    /**
+     * Creates a tuple in a table.
+     * @param table The table.
+     * @param data The tuple.
+     * @throws SQLException Failed to parse SQL query, contains invalid syntax.
+     * @throws QueryMalformedException The create query is malformed, likely due to a bug in the application.
+     * @throws TableMalformedException The tuple is malformed and does not fit the table schema.
+     * @throws StorageUnavailableException Failed to establish a connection with the Storage Service.
+     * @throws StorageNotFoundException The storage service was not able to find the dataset for import.
+     */
     void createTuple(PrivilegedTableDto table, TupleDto data) throws SQLException,
             QueryMalformedException, TableMalformedException, StorageUnavailableException, StorageNotFoundException;
 
+    /**
+     * Updates a tuple in a table.
+     * @param table The table.
+     * @param data The tuple.
+     * @throws SQLException Failed to parse SQL query, contains invalid syntax.
+     * @throws QueryMalformedException The update query is malformed, likely due to a bug in the application.
+     * @throws TableMalformedException  The tuple is malformed and does not fit the table schema.
+     */
     void updateTuple(PrivilegedTableDto table, TupleUpdateDto data) throws SQLException,
             QueryMalformedException, TableMalformedException;
 
+    /**
+     * Exports a table at given system-versioning time.
+     * @param table The table.
+     * @param timestamp The system-versioning time.
+     * @return The exported resource.
+     * @throws SQLException Failed to parse SQL query, contains invalid syntax.
+     * @throws SidecarExportException The sidecar of the target database failed to export the dataset.
+     * @throws StorageNotFoundException The storage service was not able to find the dataset for export.
+     * @throws StorageUnavailableException Failed to establish a connection with the Storage Service.
+     * @throws QueryMalformedException The export query is malformed, likely due to a bug in the application.
+     * @throws RemoteUnavailableException Failed to establish connection to the sidecar.
+     */
     ExportResourceDto exportDataset(PrivilegedTableDto table, Instant timestamp)
             throws SQLException, SidecarExportException, StorageNotFoundException, StorageUnavailableException,
             QueryMalformedException, RemoteUnavailableException;
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/service/ViewService.java b/dbrepo-data-service/services/src/main/java/at/tuwien/service/ViewService.java
index f4bef4f067706688dc30234a3a375678214cdb8f..9151f868de9f271f9c73cc8f7da4fccf168b668d 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/service/ViewService.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/service/ViewService.java
@@ -15,12 +15,12 @@ import java.util.List;
 public interface ViewService {
 
     /**
-     *
-     * @param database
+     * Gets the view schemas for a given database.
+     * @param database The database.
      * @return The list of view metadata.
-     * @throws SQLException
-     * @throws DatabaseMalformedException
-     * @throws ViewNotFoundException
+     * @throws SQLException The connection to the data database was unsuccessful.
+     * @throws DatabaseMalformedException The columns that are referenced in the views are unknown to the Metadata Database. Call {@link TableService#getSchemas(PrivilegedDatabaseDto)} beforehand.
+     * @throws ViewNotFoundException The view with given name was not found.
      */
     List<ViewDto> getSchemas(PrivilegedDatabaseDto database) throws SQLException, DatabaseMalformedException,
             ViewNotFoundException;
@@ -50,11 +50,35 @@ public interface ViewService {
     QueryResultDto data(PrivilegedViewDto view, Instant timestamp, Long page, Long size) throws SQLException,
             ViewMalformedException;
 
+    /**
+     * Deletes a view.
+     * @param view The view.
+     * @throws SQLException The connection to the data database was unsuccessful.
+     * @throws ViewMalformedException The query is malformed and was rejected by the data database.
+     */
     void delete(PrivilegedViewDto view) throws SQLException, ViewMalformedException;
 
+    /**
+     * Counts tuples in a view at system-versioned timestamp.
+     * @param view The view.
+     * @param timestamp The system-versioned timestamp.
+     * @return The number of tuples.
+     * @throws SQLException The connection to the data database was unsuccessful.
+     * @throws QueryMalformedException The query is malformed and was rejected by the data database.
+     */
     Long count(PrivilegedViewDto view, Instant timestamp) throws SQLException, QueryMalformedException;
 
-    ExportResourceDto exportDataset(PrivilegedDatabaseDto database, ViewDto view, Instant timestamp)
-            throws SQLException, QueryMalformedException, SidecarExportException, StorageNotFoundException,
-            StorageUnavailableException, RemoteUnavailableException;
+    /**
+     * Exports view data into a dataset.
+     * @param view The view.
+     * @return The dataset.
+     * @throws SQLException The connection to the data database was unsuccessful.
+     * @throws QueryMalformedException The query is malformed and was rejected by the data database.
+     * @throws SidecarExportException The sidecar of the target database failed to export the dataset.
+     * @throws RemoteUnavailableException Failed to establish connection to the sidecar.
+     * @throws StorageNotFoundException The storage service was not able to find the dataset for export.
+     * @throws StorageUnavailableException Failed to establish a connection with the Storage Service.
+     */
+    ExportResourceDto exportDataset(PrivilegedViewDto view) throws SQLException, QueryMalformedException,
+            SidecarExportException, RemoteUnavailableException, StorageNotFoundException, StorageUnavailableException;
 }
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/ViewServiceMariaDbImpl.java b/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/ViewServiceMariaDbImpl.java
index bc7a36acd5cad95d84caa428c94e73f0c3bb753a..366cfa5faf5fabe34210b880d3fdca3659267a3a 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/ViewServiceMariaDbImpl.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/ViewServiceMariaDbImpl.java
@@ -228,12 +228,11 @@ public class ViewServiceMariaDbImpl extends HibernateConnector implements ViewSe
     }
 
     @Override
-    public ExportResourceDto exportDataset(PrivilegedDatabaseDto database, ViewDto view, Instant timestamp)
-            throws SQLException, QueryMalformedException, StorageNotFoundException, StorageUnavailableException,
-            RemoteUnavailableException, SidecarExportException {
+    public ExportResourceDto exportDataset(PrivilegedViewDto view) throws SQLException, QueryMalformedException,
+            SidecarExportException, RemoteUnavailableException, StorageNotFoundException, StorageUnavailableException {
         final String fileName = RandomStringUtils.randomAlphabetic(40) + ".csv";
         final String filePath = s3Config.getS3FilePath() + File.separator + fileName;
-        final ComboPooledDataSource dataSource = getPrivilegedDataSource(database);
+        final ComboPooledDataSource dataSource = getPrivilegedDataSource(view.getDatabase());
         final Connection connection = dataSource.getConnection();
         try {
             /* export to data database sidecar */
@@ -242,8 +241,8 @@ public class ViewServiceMariaDbImpl extends HibernateConnector implements ViewSe
                     .map(metadataMapper::viewColumnDtoToColumnDto)
                     .toList();
             final long start = System.currentTimeMillis();
-            connection.prepareStatement(mariaDbMapper.tableOrViewToRawExportQuery(database.getInternalName(),
-                            view.getInternalName(), columns, timestamp, filePath))
+            connection.prepareStatement(mariaDbMapper.tableOrViewToRawExportQuery(view.getDatabase().getInternalName(),
+                            view.getInternalName(), columns, null, filePath))
                     .executeUpdate();
             log.debug("executed statement in {} ms", System.currentTimeMillis() - start);
             connection.commit();
@@ -254,8 +253,8 @@ public class ViewServiceMariaDbImpl extends HibernateConnector implements ViewSe
         } finally {
             dataSource.close();
         }
-        dataDatabaseSidecarGateway.exportFile(database.getContainer().getSidecarHost(),
-                database.getContainer().getSidecarPort(), fileName);
+        dataDatabaseSidecarGateway.exportFile(view.getDatabase().getContainer().getSidecarHost(),
+                view.getDatabase().getContainer().getSidecarPort(), fileName);
         httpDataAccessCounter.increment();
         return storageService.getResource(fileName);
     }
diff --git a/dbrepo-gateway-service/dbrepo.conf b/dbrepo-gateway-service/dbrepo.conf
index 6cb634f9f44aa8db656bcfc3c289bbe84a698466..6d3dce2b9e38b52d607cf3af1246c27e6b2e09bf 100644
--- a/dbrepo-gateway-service/dbrepo.conf
+++ b/dbrepo-gateway-service/dbrepo.conf
@@ -40,6 +40,10 @@ server {
     listen 80 default_server;
     server_name _;
 
+    location = /basic_status {
+        allow 127.0.0.1; deny all; stub_status;
+    }
+
     location /api/search {
         proxy_set_header        Host $host;
         proxy_set_header        X-Real-IP $remote_addr;
@@ -93,7 +97,7 @@ server {
         proxy_read_timeout      90;
     }
 
-    location ~ /api/database/([0-9]+)/view/([0-9]+)/data {
+    location ~ /api/database/([0-9]+)/view/([0-9]+)/(data|export) {
         proxy_set_header        Host $host;
         proxy_set_header        X-Real-IP $remote_addr;
         proxy_set_header        X-Forwarded-For $proxy_add_x_forwarded_for;
diff --git a/dbrepo-metadata-db/1_setup-schema.sql b/dbrepo-metadata-db/1_setup-schema.sql
index db90e94c1fa7989b390f09ba8d2bc78fd92c2fdd..d52cb4cae88dffa827432d0e0befee8e273589e8 100644
--- a/dbrepo-metadata-db/1_setup-schema.sql
+++ b/dbrepo-metadata-db/1_setup-schema.sql
@@ -144,14 +144,13 @@ CREATE TABLE IF NOT EXISTS `mdb_columns`
     tID              BIGINT UNSIGNED NOT NULL,
     cName            VARCHAR(64),
     internal_name    VARCHAR(64)     NOT NULL,
-    Datatype         ENUM ('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR'),
+    Datatype         ENUM ('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','SERIAL','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR'),
     length           BIGINT UNSIGNED NULL,
     ordinal_position INTEGER         NOT NULL,
     index_length     BIGINT UNSIGNED NULL,
     description      VARCHAR(2048),
     size             BIGINT UNSIGNED,
     d                BIGINT UNSIGNED,
-    auto_generated   BOOLEAN                  DEFAULT false,
     is_null_allowed  BOOLEAN         NOT NULL DEFAULT true,
     val_min          NUMERIC         NULL,
     val_max          NUMERIC         NULL,
@@ -208,7 +207,7 @@ CREATE TABLE IF NOT EXISTS `mdb_columns_cat`
 
 CREATE TABLE IF NOT EXISTS `mdb_constraints_foreign_key`
 (
-    fkid      BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+    fkid      SERIAL,
     tid       BIGINT UNSIGNED NOT NULL,
     rtid      BIGINT UNSIGNED NOT NULL,
     name      VARCHAR(255)    NOT NULL,
@@ -245,7 +244,7 @@ CREATE TABLE IF NOT EXISTS `mdb_constraints_foreign_key_reference`
 
 CREATE TABLE IF NOT EXISTS `mdb_constraints_unique`
 (
-    uid      BIGINT UNSIGNED NOT NULL AUTO_INCREMENT,
+    uid      SERIAL,
     name     VARCHAR(255)    NOT NULL,
     tid      BIGINT UNSIGNED NOT NULL,
     position INT             NULL,
@@ -362,17 +361,16 @@ CREATE TABLE IF NOT EXISTS `mdb_view_columns`
 (
     id               SERIAL,
     view_id          BIGINT UNSIGNED NOT NULL,
-    dfID             BIGINT UNSIGNED,
     name             VARCHAR(64),
     internal_name    VARCHAR(64)     NOT NULL,
     column_type      ENUM ('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR'),
     ordinal_position INTEGER         NOT NULL,
     size             BIGINT UNSIGNED,
     d                BIGINT UNSIGNED,
-    auto_generated   BOOLEAN                  DEFAULT false,
     is_null_allowed  BOOLEAN         NOT NULL DEFAULT true,
     PRIMARY KEY (id),
-    FOREIGN KEY (view_id) REFERENCES mdb_view (id)
+    FOREIGN KEY (view_id) REFERENCES mdb_view (id),
+    UNIQUE (view_id, internal_name)
 ) WITH SYSTEM VERSIONING;
 
 CREATE TABLE IF NOT EXISTS `mdb_identifiers`
@@ -526,8 +524,10 @@ CREATE TABLE IF NOT EXISTS `mdb_image_types`
     d_default     INT UNSIGNED,
     d_required    BOOLEAN COMMENT 'When setting NULL, the service assumes the data type has no d',
     d_step        INT UNSIGNED,
-    hint          TEXT,
+    type_hint     TEXT,
+    data_hint     TEXT,
     documentation TEXT            NOT NULL,
+    is_generated  BOOLEAN         NOT NULL,
     is_quoted     BOOLEAN         NOT NULL,
     is_buildable  BOOLEAN         NOT NULL,
     PRIMARY KEY (id),
@@ -550,67 +550,74 @@ VALUES ('mariadb', 'docker.io', '11.1.3', 3306, 'org.hibernate.dialect.MariaDBDi
         'mariadb');
 
 INSERT INTO `mdb_image_types` (image_id, display_name, value, size_min, size_max, size_default, size_required,
-                               size_step, d_min, d_max, d_default, d_required, d_step, hint, documentation, is_quoted,
-                               is_buildable)
-VALUES (1, 'BIGINT(size)', 'bigint', 0, null, null, false, 1, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/bigint/', false, true),
-       (1, 'BINARY(size)', 'binary', 0, 255, 255, true, 1, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/binary/', false, true),
-       (1, 'BIT(size)', 'bit', 0, 64, null, false, 1, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/bit/', false, true),
-       (1, 'BLOB(size)', 'blob', 0, 65535, null, false, 1, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/blob/', false, false),
-       (1, 'BOOL', 'bool', null, null, null, null, null, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/bool/', false, true),
-       (1, 'CHAR(size)', 'char', 0, 255, 255, false, 1, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/char/', false, true),
+                               size_step, d_min, d_max, d_default, d_required, d_step, type_hint, data_hint,
+                               documentation, is_quoted, is_buildable, is_generated)
+VALUES (1, 'BIGINT(size)', 'bigint', 0, null, null, false, 1, null, null, null, null, null, null, null,
+        'https://mariadb.com/kb/en/bigint/', false, true, false),
+       (1, 'BINARY(size)', 'binary', 0, 255, 255, true, 1, null, null, null, null, null, 'size in Bytes', null,
+        'https://mariadb.com/kb/en/binary/', false, true, false),
+       (1, 'BIT(size)', 'bit', 0, 64, null, false, 1, null, null, null, null, null, null, null,
+        'https://mariadb.com/kb/en/bit/', false, true, false),
+       (1, 'BLOB(size)', 'blob', 0, 65535, null, false, 1, null, null, null, null, null, 'size in Bytes', null,
+        'https://mariadb.com/kb/en/blob/', false, false, false),
+       (1, 'BOOL', 'bool', null, null, null, null, null, null, null, null, null, null, null, null,
+        'https://mariadb.com/kb/en/bool/', false, true, false),
+       (1, 'CHAR(size)', 'char', 0, 255, 255, false, 1, null, null, null, null, null, null, null,
+        'https://mariadb.com/kb/en/char/', false, true, false),
        (1, 'DATE', 'date', null, null, null, null, null, null, null, null, null, null,
-        'min. 1000-01-01, max. 9999-12-31', 'https://mariadb.com/kb/en/date/', true, true),
+        'min. 1000-01-01, max. 9999-12-31', 'e.g. YYYY-MM-DD, YY-MM-DD, YYMMDD, YYYY/MM/DD',
+        'https://mariadb.com/kb/en/date/', true, true, false),
        (1, 'DATETIME(fsp)', 'datetime', 0, 6, null, null, 1, null, null, null, null, null,
         'fsp=microsecond precision, min. 1000-01-01 00:00:00.0, max. 9999-12-31 23:59:59.9',
-        'https://mariadb.com/kb/en/datetime/', true, true),
-       (1, 'DECIMAL(size, d)', 'decimal', 0, 65, null, false, 1, 0, 38, null, false, null, null,
-        'https://mariadb.com/kb/en/decimal/', false, true),
-       (1, 'DOUBLE(size, d)', 'double', null, null, null, false, null, null, null, null, false, null, null,
-        'https://mariadb.com/kb/en/double/', false, true),
+        'e.g. YYYY-MM-DD HH:MM:SS, YY-MM-DD HH:MM:SS, YYYYMMDDHHMMSS, YYMMDDHHMMSS, YYYYMMDD, YYMMDD',
+        'https://mariadb.com/kb/en/datetime/', true, true, false),
+       (1, 'DECIMAL(size, d)', 'decimal', 0, 65, null, false, 1, 0, 38, null, false, null, null, null,
+        'https://mariadb.com/kb/en/decimal/', false, true, false),
+       (1, 'DOUBLE(size, d)', 'double', null, null, null, false, null, null, null, null, false, null, null, null,
+        'https://mariadb.com/kb/en/double/', false, true, false),
        (1, 'ENUM(v1,v2,...)', 'enum', null, null, null, null, null, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/enum/', true, true),
-       (1, 'FLOAT(size)', 'float', null, null, null, false, null, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/float/', false, true),
-       (1, 'INT(size)', 'int', null, null, null, false, null, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/int/', false, true),
-       (1, 'LONGBLOB', 'longblob', null, null, null, null, null, null, null, null, null, null, 'max. 3.999 GiB',
-        'https://mariadb.com/kb/en/longblob/', false, true),
-       (1, 'LONGTEXT', 'longtext', null, null, null, null, null, null, null, null, null, null, 'max. 3.999 GiB',
-        'https://mariadb.com/kb/en/longtext/', true, true),
+        'e.g. value1, value2, ...', 'https://mariadb.com/kb/en/enum/', true, true, false),
+       (1, 'FLOAT(size)', 'float', null, null, null, false, null, null, null, null, null, null, null, null,
+        'https://mariadb.com/kb/en/float/', false, true, false),
+       (1, 'INT(size)', 'int', null, null, null, false, null, null, null, null, null, null, 'size in Bytes', null,
+        'https://mariadb.com/kb/en/int/', false, true, false),
+       (1, 'LONGBLOB', 'longblob', null, null, null, null, null, null, null, null, null, null, 'max. 3.999 GiB', null,
+        'https://mariadb.com/kb/en/longblob/', false, true, false),
+       (1, 'LONGTEXT', 'longtext', null, null, null, null, null, null, null, null, null, null, 'max. 3.999 GiB', null,
+        'https://mariadb.com/kb/en/longtext/', true, true, false),
        (1, 'MEDIUMBLOB', 'mediumblob', null, null, null, null, null, null, null, null, null, null, 'max. 15.999 MiB',
-        'https://mariadb.com/kb/en/mediumblob/', false, true),
-       (1, 'MEDIUMINT', 'mediumint', null, null, null, null, null, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/mediumint/', false, true),
+        null, 'https://mariadb.com/kb/en/mediumblob/', false, true, false),
+       (1, 'MEDIUMINT', 'mediumint', null, null, null, null, null, null, null, null, null, null, 'size in Bytes', null,
+        'https://mariadb.com/kb/en/mediumint/', false, true, false),
        (1, 'MEDIUMTEXT', 'mediumtext', null, null, null, null, null, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/mediumtext/', true, true),
+        null, 'https://mariadb.com/kb/en/mediumtext/', true, true, false),
+       (1, 'SERIAL', 'serial', null, null, null, null, null, null, null, null, null, null, null,
+        null, 'https://mariadb.com/kb/en/bigint/', false, true, true),
        (1, 'SET(v1,v2,...)', 'set', null, null, null, null, null, null, null, null, null, null, null,
-        'https://mariadb.com/kb/en/set/', true, true),
+        'e.g. value1, value2, ...', 'https://mariadb.com/kb/en/set/', true, true, false),
        (1, 'SMALLINT(size)', 'smallint', 0, null, null, false, null, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/smallint/', false, true),
-       (1, 'TEXT(size)', 'text', 0, null, null, false, null, null, null, null, null, null, 'size in Bytes',
-        'https://mariadb.com/kb/en/text/', true, true),
+        null, 'https://mariadb.com/kb/en/smallint/', false, true, false),
+       (1, 'TEXT(size)', 'text', 0, null, null, false, null, null, null, null, null, null, 'size in Bytes', null,
+        'https://mariadb.com/kb/en/text/', true, true, false),
        (1, 'TIME(fsp)', 'time', 0, 6, 0, false, null, null, null, null, null, null,
-        'fsp=microsecond precision, min. 0, max. 6', 'https://mariadb.com/kb/en/time/', true, true),
+        'fsp=microsecond precision, min. 0, max. 6', 'e.g. HH:MM:SS, HH:MM, HHMMSS, H:M:S',
+        'https://mariadb.com/kb/en/time/', true, true, false),
        (1, 'TIMESTAMP(fsp)', 'timestamp', 0, 6, 0, false, null, null, null, null, null, null,
-        'fsp=microsecond precision, min. 0, max. 6', 'https://mariadb.com/kb/en/timestamp/', true, true),
-       (1, 'TINYBLOB', 'tinyblob', null, null, null, null, null, null, null, null, null, null,
-        'fsp=microsecond precision, min. 0, max. 6', 'https://mariadb.com/kb/en/timestamp/', false, true),
-       (1, 'TINYINT(size)', 'tinyint', 0, null, null, false, null, null, null, null, null, null,
-        'size in Bytes', 'https://mariadb.com/kb/en/tinyint/', false, true),
-       (1, 'TINYTEXT', 'tinytext', null, null, null, null, null, null, null, null, null, null,
-        'max. 255 characters', 'https://mariadb.com/kb/en/tinytext/', true, true),
-       (1, 'YEAR', 'year', 2, 4, null, false, 2, null, null, null, null, null, 'min. 1901, max. 2155',
-        'https://mariadb.com/kb/en/year/', false, true),
-       (1, 'VARBINARY(size)', 'varbinary', 0, null, null, true, null, null, null, null, null, null,
-        null, 'https://mariadb.com/kb/en/varbinary/', false, true),
-       (1, 'VARCHAR(size)', 'varchar', 0, 65532, 255, true, null, null, null, null, null, null,
-        null, 'https://mariadb.com/kb/en/varchar/', false, true);
+        'fsp=microsecond precision, min. 0, max. 6',
+        'e.g. YYYY-MM-DD HH:MM:SS, YY-MM-DD HH:MM:SS, YYYYMMDDHHMMSS, YYMMDDHHMMSS, YYYYMMDD, YYMMDD',
+        'https://mariadb.com/kb/en/timestamp/', true, true, false),
+       (1, 'TINYBLOB', 'tinyblob', null, null, null, null, null, null, null, null, null, null, null,
+        'max. 255 Bytes', 'https://mariadb.com/kb/en/tinyblob/', false, true, false),
+       (1, 'TINYINT(size)', 'tinyint', 0, null, null, false, null, null, null, null, null, null, null,
+        'size in Bytes', 'https://mariadb.com/kb/en/tinyint/', false, true, false),
+       (1, 'TINYTEXT', 'tinytext', null, null, null, null, null, null, null, null, null, null, null,
+        'max. 255 characters', 'https://mariadb.com/kb/en/tinytext/', true, true, false),
+       (1, 'YEAR', 'year', 2, 4, null, false, 2, null, null, null, null, null, 'min. 1901, max. 2155', 'e.g. YYYY, YY',
+        'https://mariadb.com/kb/en/year/', false, true, false),
+       (1, 'VARBINARY(size)', 'varbinary', 0, null, null, true, null, null, null, null, null, null, null,
+        null, 'https://mariadb.com/kb/en/varbinary/', false, true, false),
+       (1, 'VARCHAR(size)', 'varchar', 0, 65532, 255, true, null, null, null, null, null, null, null,
+        null, 'https://mariadb.com/kb/en/varchar/', false, true, false);
 
 INSERT
 INTO `mdb_ontologies` (prefix, uri, uri_pattern, sparql_endpoint, rdf_path)
@@ -629,4 +636,4 @@ VALUES ('om', 'http://www.ontology-of-units-of-measure.org/resource/om-2/',
        ('owl', 'http://www.w3.org/2002/07/owl#', null, null, null),
        ('prov', 'http://www.w3.org/ns/prov#', null, null, null),
        ('db', 'http://dbpedia.org', 'http://dbpedia.org/ontology/.*', 'http://dbpedia.org/sparql', null);
-COMMIT;
+COMMIT;
diff --git a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/container/image/DataTypeDto.java b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/container/image/DataTypeDto.java
index cd31aa225531b4331e77144b51445a8daeec0046..312dcf998417f13c6d552dcb25d843516f799f1e 100644
--- a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/container/image/DataTypeDto.java
+++ b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/container/image/DataTypeDto.java
@@ -18,23 +18,27 @@ public class DataTypeDto {
 
     @NotBlank
     @JsonProperty("display_name")
-    @Schema(example = "BIGINT")
+    @Schema(example = "TIME(fsp)")
     private String displayName;
 
     @NotBlank
-    @Schema(example = "bigint")
+    @Schema(example = "time")
     private String value;
 
     @JsonProperty("size_min")
+    @Schema(example = "0")
     private Integer sizeMin;
 
     @JsonProperty("size_max")
+    @Schema(example = "6")
     private Integer sizeMax;
 
     @JsonProperty("size_default")
+    @Schema(example = "0")
     private Integer sizeDefault;
 
     @JsonProperty("size_required")
+    @Schema(example = "false")
     private Boolean sizeRequired;
 
     @JsonProperty("d_min")
@@ -50,17 +54,25 @@ public class DataTypeDto {
     private Boolean dRequired;
 
     @NotNull
-    @Schema(example = "https://mariadb.com/kb/en/bigint/")
+    @Schema(example = "https://mariadb.com/kb/en/time/")
     private String documentation;
 
+    @JsonProperty("data_hint")
+    @Schema(example = "e.g. HH:MM:SS, HH:MM, HHMMSS, H:M:S")
+    private String dataHint;
+
+    @JsonProperty("type_hint")
+    @Schema(example = "fsp=microsecond precision, min. 0, max. 6")
+    private String typeHint;
+
     @NotNull
-    @Schema(description = "frontend needs to quote this data type")
     @JsonProperty("is_quoted")
+    @Schema(example = "false", description = "frontend needs to quote this data type")
     private Boolean quoted;
 
     @NotNull
     @JsonProperty("is_buildable")
-    @Schema(description = "frontend can build this data type")
+    @Schema(example = "true", description = "frontend can build this data type")
     private Boolean buildable;
 
 }
diff --git a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/ViewColumnDto.java b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/ViewColumnDto.java
index 75b0a3d68452e5ac223a85f351fb6078f04fc09e..613f9c5e71e54bdc09a8c78752d7a40f0eb0eb88 100644
--- a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/ViewColumnDto.java
+++ b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/ViewColumnDto.java
@@ -43,9 +43,6 @@ public class ViewColumnDto {
     @Schema(example = "mdb_date")
     private String internalName;
 
-    @Schema
-    private String alias;
-
     @NotNull
     @JsonProperty("auto_generated")
     @Schema(example = "false")
diff --git a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnDto.java b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnDto.java
index 4e95fefc3e993d53c13107c48112939f55c0e03e..92092fca33d726616ccba63f08a4b4fefbdba1a2 100644
--- a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnDto.java
+++ b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnDto.java
@@ -58,11 +58,6 @@ public class ColumnDto {
     @Schema
     private String alias;
 
-    @NotNull
-    @JsonProperty("auto_generated")
-    @Schema(example = "false")
-    private Boolean autoGenerated;
-
     @JsonProperty("index_length")
     private Long indexLength;
 
diff --git a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnTypeDto.java b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnTypeDto.java
index 676600c6ff77fb79fd2c19b89d21f826a9cd7d38..d44b25b84ed4c51075ab4c075433750c4aafca42 100644
--- a/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnTypeDto.java
+++ b/dbrepo-metadata-service/api/src/main/java/at/tuwien/api/database/table/columns/ColumnTypeDto.java
@@ -49,6 +49,9 @@ public enum ColumnTypeDto {
     @JsonProperty("set")
     SET("set"),
 
+    @JsonProperty("serial")
+    SERIAL("serial"),
+
     @JsonProperty("bit")
     BIT("bit"),
 
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/container/image/DataType.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/container/image/DataType.java
index 8333470ad3ba7fdf6014caf32f786ce4b496b0e0..a98da8d530e550cd12546d3bdfed8ad383461964 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/container/image/DataType.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/container/image/DataType.java
@@ -5,8 +5,6 @@ import lombok.*;
 import org.hibernate.annotations.GenericGenerator;
 import org.springframework.data.jpa.domain.support.AuditingEntityListener;
 
-import java.util.List;
-
 @Data
 @Entity
 @Builder
@@ -58,6 +56,12 @@ public class DataType {
     @Column(nullable = false)
     private String documentation;
 
+    @Column(name = "type_hint")
+    private String typeHint;
+
+    @Column(name = "data_hint")
+    private String dataHint;
+
     @Column(name = "is_quoted", nullable = false)
     private Boolean quoted;
 
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/Database.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/Database.java
index 2e154b86973601af8560c6055fe388fd614858a6..17b8308bbaac7e202557208875e566240d928d32 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/Database.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/Database.java
@@ -122,11 +122,12 @@ public class Database implements Serializable {
     private List<Table> tables;
 
     @ToString.Exclude
-    @OneToMany(fetch = FetchType.LAZY, cascade = {CascadeType.MERGE, CascadeType.PERSIST}, mappedBy = "database", orphanRemoval = true)
+    @OrderBy("id DESC")
+    @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "database", orphanRemoval = true)
     private List<View> views;
 
     @ToString.Exclude
-    @OneToMany(fetch = FetchType.LAZY, cascade = {CascadeType.MERGE, CascadeType.PERSIST}, mappedBy = "database", orphanRemoval = true)
+    @OneToMany(fetch = FetchType.LAZY, cascade = CascadeType.ALL, mappedBy = "database", orphanRemoval = true)
     private List<DatabaseAccess> accesses;
 
     @Column(nullable = false)
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/View.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/View.java
index 44ffab29592243bc605083aa993e0215ac95475a..28f2ec69c28e84dffcb7c08f9f28941c4dcced54 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/View.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/View.java
@@ -97,7 +97,7 @@ public class View {
     @ToString.Exclude
     @OnDelete(action = OnDeleteAction.CASCADE)
     @OneToMany(fetch = FetchType.LAZY, cascade = {CascadeType.MERGE, CascadeType.PERSIST}, mappedBy = "view")
-    @OrderColumn(name = "ordinalPosition")
+    @OrderBy("ordinalPosition")
     private List<ViewColumn> columns;
 
     @CreatedDate
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/ViewColumn.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/ViewColumn.java
index b9eff2c6947de4e8f69693467b57f67db720a8b4..5016630500cc2b792cacc5df8ec5e58c8ca610b4 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/ViewColumn.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/ViewColumn.java
@@ -1,12 +1,11 @@
 package at.tuwien.entities.database;
 
 import at.tuwien.entities.database.table.columns.TableColumnType;
+import jakarta.persistence.*;
 import lombok.*;
 import org.hibernate.annotations.GenericGenerator;
 import org.springframework.data.jpa.domain.support.AuditingEntityListener;
 
-import jakarta.persistence.*;
-
 @Data
 @Entity
 @Builder(toBuilder = true)
@@ -14,7 +13,9 @@ import jakarta.persistence.*;
 @AllArgsConstructor
 @NoArgsConstructor
 @EntityListeners(AuditingEntityListener.class)
-@jakarta.persistence.Table(name = "mdb_view_columns")
+@jakarta.persistence.Table(name = "mdb_view_columns", uniqueConstraints = {
+        @UniqueConstraint(columnNames = {"view_id", "internal_name"})
+})
 public class ViewColumn implements Comparable<ViewColumn> {
 
     @Id
@@ -34,10 +35,7 @@ public class ViewColumn implements Comparable<ViewColumn> {
     @Column(nullable = false, columnDefinition = "VARCHAR(64)")
     private String name;
 
-    @Column(name = "auto_generated", columnDefinition = "BOOLEAN default false")
-    private Boolean autoGenerated;
-
-    @Column(nullable = false, columnDefinition = "VARCHAR(64)")
+    @Column(name = "internal_name", nullable = false, columnDefinition = "VARCHAR(64)")
     private String internalName;
 
     @Column(nullable = false, columnDefinition = "ENUM('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR')")
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/Table.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/Table.java
index 9a402201eaf38191213abb609544709d28b516c4..08f6bd9426a8906d4edc4863ecc6f3540cb39448 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/Table.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/Table.java
@@ -88,12 +88,6 @@ public class Table {
     })
     private Database database;
 
-    @ToString.Exclude
-    @OnDelete(action = OnDeleteAction.CASCADE)
-    @OneToMany(fetch = FetchType.LAZY, cascade = {CascadeType.MERGE, CascadeType.PERSIST}, mappedBy = "table")
-    @OrderBy("ordinalPosition")
-    private List<TableColumn> columns;
-
     @ToString.Exclude
     @org.springframework.data.annotation.Transient
     @OneToMany(fetch = FetchType.LAZY)
@@ -124,6 +118,12 @@ public class Table {
     @Column(name = "avg_row_length")
     private Long avgRowLength;
 
+    @ToString.Exclude
+    @OnDelete(action = OnDeleteAction.CASCADE)
+    @OneToMany(fetch = FetchType.LAZY, cascade = {CascadeType.MERGE, CascadeType.PERSIST}, mappedBy = "table")
+    @OrderBy("ordinalPosition")
+    private List<TableColumn> columns;
+
     @CreatedDate
     @Column(nullable = false, updatable = false, columnDefinition = "TIMESTAMP")
     @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd'T'HH:mm:ss.SSSXXX", timezone = "UTC")
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumn.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumn.java
index 9ddf89c453617c6f345c73b4c98cb9eb2678e3a0..841dbde754a09a5fdd51cec7872ae4c8a4e5e946 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumn.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumn.java
@@ -2,14 +2,13 @@ package at.tuwien.entities.database.table.columns;
 
 import at.tuwien.entities.database.table.Table;
 import com.fasterxml.jackson.annotation.JsonFormat;
+import jakarta.persistence.*;
 import lombok.*;
 import org.hibernate.annotations.GenericGenerator;
 import org.springframework.data.annotation.CreatedDate;
 import org.springframework.data.annotation.LastModifiedDate;
 import org.springframework.data.jpa.domain.support.AuditingEntityListener;
 
-import jakarta.persistence.*;
-
 import java.math.BigDecimal;
 import java.time.Instant;
 import java.util.List;
@@ -47,9 +46,6 @@ public class TableColumn implements Comparable<TableColumn> {
     @Column(name = "cname", nullable = false, columnDefinition = "VARCHAR(64)")
     private String name;
 
-    @Column(name = "auto_generated", columnDefinition = "BOOLEAN default false")
-    private Boolean autoGenerated;
-
     @Column(name = "internal_name", nullable = false, columnDefinition = "VARCHAR(64)")
     private String internalName;
 
@@ -62,7 +58,7 @@ public class TableColumn implements Comparable<TableColumn> {
     @Transient
     private String alias;
 
-    @Column(name = "datatype", nullable = false, columnDefinition = "ENUM('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR')")
+    @Column(name = "Datatype", nullable = false, columnDefinition = "ENUM('CHAR','VARCHAR','BINARY','VARBINARY','TINYBLOB','TINYTEXT','TEXT','BLOB','MEDIUMTEXT','MEDIUMBLOB','LONGTEXT','LONGBLOB','ENUM','SET','SERIAL','BIT','TINYINT','BOOL','SMALLINT','MEDIUMINT','INT','BIGINT','FLOAT','DOUBLE','DECIMAL','DATE','DATETIME','TIMESTAMP','TIME','YEAR')")
     @Enumerated(EnumType.STRING)
     private TableColumnType columnType;
 
diff --git a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumnType.java b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumnType.java
index 074620a349fff43977ad585a7e4abe4ad770f39e..7f95c476ddf2d80aa78f773ff5db67aee751b86c 100644
--- a/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumnType.java
+++ b/dbrepo-metadata-service/entities/src/main/java/at/tuwien/entities/database/table/columns/TableColumnType.java
@@ -20,6 +20,7 @@ public enum TableColumnType {
     LONGBLOB,
     ENUM,
     SET,
+    SERIAL,
     BIT,
     TINYINT,
     BOOL,
diff --git a/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/MetadataMapper.java b/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/MetadataMapper.java
index 0555e1c62c5d2ee91ba7e0b7f59f68b5372ceb02..bc20219105e23371712fe61ff822e5628383f67b 100644
--- a/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/MetadataMapper.java
+++ b/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/MetadataMapper.java
@@ -725,7 +725,6 @@ public interface MetadataMapper {
             @Mapping(target = "columnType", source = "data.type"),
             @Mapping(target = "isNullAllowed", source = "data.nullAllowed"),
             @Mapping(target = "name", source = "data.name"),
-            @Mapping(target = "autoGenerated", expression = "java(false)"),
             @Mapping(target = "internalName", expression = "java(nameToInternalName(data.getName()))"),
     })
     TableColumn columnCreateDtoToTableColumn(ColumnCreateDto data, ContainerImage image);
diff --git a/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java b/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
index e5a1467296c90262df5b7388dd5b9b7bd4f11bd8..75f76d440a4d059dfe9c918952519976aa5cbdb3 100644
--- a/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
+++ b/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/validation/EndpointValidator.java
@@ -29,6 +29,7 @@ import java.util.Optional;
 @Component
 public class EndpointValidator {
 
+    public static final List<ColumnTypeDto> NEED_NOTHING = List.of(ColumnTypeDto.BOOL, ColumnTypeDto.SERIAL);
     public static final List<ColumnTypeDto> NEED_SIZE = List.of(ColumnTypeDto.VARCHAR, ColumnTypeDto.BINARY, ColumnTypeDto.VARBINARY);
     public static final List<ColumnTypeDto> CAN_HAVE_SIZE = List.of(ColumnTypeDto.CHAR, ColumnTypeDto.VARCHAR, ColumnTypeDto.BINARY, ColumnTypeDto.VARBINARY, ColumnTypeDto.BIT, ColumnTypeDto.TINYINT, ColumnTypeDto.SMALLINT, ColumnTypeDto.MEDIUMINT, ColumnTypeDto.INT);
     public static final List<ColumnTypeDto> CAN_HAVE_SIZE_AND_D = List.of(ColumnTypeDto.DOUBLE, ColumnTypeDto.DECIMAL);
@@ -134,6 +135,35 @@ public class EndpointValidator {
             log.error("Validation failed: column {} needs at least 1 allowed set value", optional3.get().getName());
             throw new MalformedException("Validation failed: column " + optional3.get().getName() + " needs at least 1 allowed set value");
         }
+        /* check serial */
+        final List<ColumnCreateDto> list4a = data.getColumns()
+                .stream()
+                .filter(c -> c.getType().equals(ColumnTypeDto.SERIAL))
+                .toList();
+        if (list4a.size() > 1) {
+            log.error("Validation failed: only one column of type serial allowed");
+            throw new MalformedException("Validation failed: only one column of type serial allowed");
+        }
+        final Optional<ColumnCreateDto> optional4a = data.getColumns()
+                .stream()
+                .filter(c -> c.getType().equals(ColumnTypeDto.SERIAL))
+                .filter(ColumnCreateDto::getNullAllowed)
+                .findFirst();
+        if (optional4a.isPresent()) {
+            log.error("Validation failed: column {} type serial demands non-null", optional4a.get().getName());
+            throw new MalformedException("Validation failed: column " + optional4a.get().getName() + " type serial demands non-null");
+        }
+        final Optional<ColumnCreateDto> optional4b = data.getColumns()
+                .stream()
+                .filter(c -> c.getType().equals(ColumnTypeDto.SERIAL) && data.getConstraints()
+                        .getUniques()
+                        .stream()
+                        .noneMatch(uk -> uk.size() == 1 && uk.contains(c.getName())))
+                .findFirst();
+        if (optional4b.isPresent()) {
+            log.error("Validation failed: column {} type serial demands a unique constraint", optional4b.get().getName());
+            throw new MalformedException("Validation failed: column " + optional4b.get().getName() + " type serial demands a unique constraint");
+        }
     }
 
     public boolean validateOnlyMineOrWriteAccessOrHasRole(User owner, Principal principal, DatabaseAccess access, String role) {
diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/endpoints/TableEndpointUnitTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/endpoints/TableEndpointUnitTest.java
index 4b6c1c29233cc83370ffb944f730c052c07b8486..1ceb6fd75ff22547f630c85bb7a1cbbb27bd86ab 100644
--- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/endpoints/TableEndpointUnitTest.java
+++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/endpoints/TableEndpointUnitTest.java
@@ -7,6 +7,7 @@ import at.tuwien.api.database.table.columns.ColumnCreateDto;
 import at.tuwien.api.database.table.columns.ColumnDto;
 import at.tuwien.api.database.table.columns.ColumnTypeDto;
 import at.tuwien.api.database.table.columns.concepts.ColumnSemanticsUpdateDto;
+import at.tuwien.api.database.table.constraints.ConstraintsCreateDto;
 import at.tuwien.api.semantics.EntityDto;
 import at.tuwien.api.semantics.TableColumnEntityDto;
 import at.tuwien.entities.database.Database;
@@ -69,6 +70,11 @@ public class TableEndpointUnitTest extends AbstractUnitTest {
     @Autowired
     private TableEndpoint tableEndpoint;
 
+    public static Stream<Arguments> needNothing_parameters() {
+        return EndpointValidator.NEED_NOTHING.stream()
+                .map(Arguments::arguments);
+    }
+
     public static Stream<Arguments> needSize_parameters() {
         return EndpointValidator.NEED_SIZE.stream()
                 .map(Arguments::arguments);
@@ -282,6 +288,32 @@ public class TableEndpointUnitTest extends AbstractUnitTest {
         generic_create(DATABASE_3_ID, DATABASE_3, request, USER_1_PRINCIPAL, USER_1, DATABASE_3_USER_1_WRITE_OWN_ACCESS);
     }
 
+    @ParameterizedTest
+    @MethodSource("needNothing_parameters")
+    @WithMockUser(username = USER_3_USERNAME, authorities = {"create-table"})
+    public void create_publicNeedNothing_succeeds(ColumnTypeDto columnType) throws UserNotFoundException, SearchServiceException,
+            NotAllowedException, SemanticEntityNotFoundException, DataServiceConnectionException, TableNotFoundException, MalformedException, DataServiceException, DatabaseNotFoundException, AccessNotFoundException, OntologyNotFoundException, TableExistsException, SearchServiceConnectionException {
+        final TableCreateDto request = TableCreateDto.builder()
+                .name("Some Table")
+                .description("Some Description")
+                .columns(List.of(ColumnCreateDto.builder()
+                        .name("ID")
+                        .type(columnType)
+                        .nullAllowed(false)
+                        .build()))
+                .constraints(ConstraintsCreateDto.builder()
+                        .uniques(List.of(List.of("ID")))
+                        .build())
+                .build();
+
+        /* mock */
+        when(tableService.createTable(DATABASE_3, request, USER_1_PRINCIPAL))
+                .thenReturn(TABLE_1) /* some table */;
+
+        /* test */
+        generic_create(DATABASE_3_ID, DATABASE_3, request, USER_1_PRINCIPAL, USER_1, DATABASE_3_USER_1_WRITE_OWN_ACCESS);
+    }
+
     @ParameterizedTest
     @MethodSource("needSize_parameters")
     @WithMockUser(username = USER_3_USERNAME, authorities = {"create-table"})
@@ -401,6 +433,56 @@ public class TableEndpointUnitTest extends AbstractUnitTest {
         generic_create(DATABASE_3_ID, DATABASE_3, request, USER_1_PRINCIPAL, USER_1, DATABASE_3_USER_1_WRITE_OWN_ACCESS);
     }
 
+    @Test
+    @WithMockUser(username = USER_3_USERNAME, authorities = {"create-table"})
+    public void create_publicHasMultipleSerial_fails() {
+        final TableCreateDto request = TableCreateDto.builder()
+                .name("Some Table")
+                .description("Some Description")
+                .columns(List.of(ColumnCreateDto.builder()
+                                .name("ID")
+                                .type(ColumnTypeDto.SERIAL)
+                                .nullAllowed(false)
+                                .build(),
+                        ColumnCreateDto.builder()
+                                .name("Counter")
+                                .type(ColumnTypeDto.SERIAL)
+                                .nullAllowed(false)
+                                .build()))
+                .constraints(ConstraintsCreateDto.builder()
+                        .uniques(List.of(List.of("ID"),
+                                List.of("Counter")))
+                        .build())
+                .build();
+
+        /* test */
+        assertThrows(MalformedException.class, () -> {
+            generic_create(DATABASE_3_ID, DATABASE_3, request, USER_1_PRINCIPAL, USER_1, DATABASE_3_USER_1_WRITE_OWN_ACCESS);
+        });
+    }
+
+    @Test
+    @WithMockUser(username = USER_3_USERNAME, authorities = {"create-table"})
+    public void create_publicSerialNullAllowed_fails() {
+        final TableCreateDto request = TableCreateDto.builder()
+                .name("Some Table")
+                .description("Some Description")
+                .columns(List.of(ColumnCreateDto.builder()
+                                .name("ID")
+                                .type(ColumnTypeDto.SERIAL)
+                                .nullAllowed(true) // <<<
+                                .build()))
+                .constraints(ConstraintsCreateDto.builder()
+                        .uniques(List.of(List.of("ID")))
+                        .build())
+                .build();
+
+        /* test */
+        assertThrows(MalformedException.class, () -> {
+            generic_create(DATABASE_3_ID, DATABASE_3, request, USER_1_PRINCIPAL, USER_1, DATABASE_3_USER_1_WRITE_OWN_ACCESS);
+        });
+    }
+
     @ParameterizedTest
     @MethodSource("canHaveSizeAndD_parameters")
     @WithMockUser(username = USER_3_USERNAME, authorities = {"create-table"})
diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mapper/MetadataMapperUnitTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mapper/MetadataMapperUnitTest.java
index 8a72f2cabbe0f4eda33ff77ef125eed2c842e12d..c84990098596b159be2aa65459638ba0b498c7ee 100644
--- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mapper/MetadataMapperUnitTest.java
+++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mapper/MetadataMapperUnitTest.java
@@ -182,8 +182,6 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
     @Test
     public void customDatabaseToDatabaseDto_succeeds() {
 
-        final Database debug = DATABASE_1;
-
         /* test */
         final DatabaseDto response = metadataMapper.customDatabaseToDatabaseDto(DATABASE_1);
         assertEquals(DATABASE_1_ID, response.getId());
@@ -241,11 +239,10 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
             assertEquals(TABLE_1_COLUMNS.get(i).getTable().getId(), table0.getColumns().get(i).getTableId());
             assertEquals(TABLE_1_COLUMNS.get(i).getName(), table0.getColumns().get(i).getName());
             assertEquals(TABLE_1_COLUMNS.get(i).getInternalName(), table0.getColumns().get(i).getInternalName());
-            assertEquals(List.of(ColumnTypeDto.BIGINT, ColumnTypeDto.DATE, ColumnTypeDto.VARCHAR, ColumnTypeDto.DECIMAL, ColumnTypeDto.DECIMAL).get(i), table0.getColumns().get(i).getColumnType());
+            assertEquals(List.of(ColumnTypeDto.SERIAL, ColumnTypeDto.DATE, ColumnTypeDto.VARCHAR, ColumnTypeDto.DECIMAL, ColumnTypeDto.DECIMAL).get(i), table0.getColumns().get(i).getColumnType());
             assertEquals(TABLE_1_COLUMNS.get(i).getSize(), table0.getColumns().get(i).getSize());
             assertEquals(TABLE_1_COLUMNS.get(i).getD(), table0.getColumns().get(i).getD());
             assertEquals(TABLE_1_COLUMNS.get(i).getIsNullAllowed(), table0.getColumns().get(i).getIsNullAllowed());
-            assertEquals(TABLE_1_COLUMNS.get(i).getAutoGenerated(), table0.getColumns().get(i).getAutoGenerated());
             assertEquals(TABLE_1_COLUMNS.get(i).getEnums(), table0.getColumns().get(i).getEnums());
             assertEquals(TABLE_1_COLUMNS.get(i).getSets(), table0.getColumns().get(i).getSets());
         }
@@ -264,7 +261,7 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
         assertEquals(TABLE_1_COLUMNS_BRIEF_0_DTO.getInternalName(), table0pk.getColumn().getInternalName());
         assertEquals(TABLE_1_ID, table0pk.getTable().getId());
         assertEquals(DATABASE_1_ID, table0pk.getTable().getDatabaseId());
-        assertEquals(ColumnTypeDto.BIGINT, table0pk.getColumn().getColumnType());
+        assertEquals(ColumnTypeDto.SERIAL, table0pk.getColumn().getColumnType());
         assertNull(table0pk.getColumn().getAlias());
         assertEquals(TABLE_1_ID, table0pk.getColumn().getTableId());
         assertEquals(DATABASE_1_ID, table0pk.getColumn().getDatabaseId());
@@ -298,7 +295,6 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
             assertEquals(TABLE_2_COLUMNS.get(i).getSize(), table1.getColumns().get(i).getSize());
             assertEquals(TABLE_2_COLUMNS.get(i).getD(), table1.getColumns().get(i).getD());
             assertEquals(TABLE_2_COLUMNS.get(i).getIsNullAllowed(), table1.getColumns().get(i).getIsNullAllowed());
-            assertEquals(TABLE_2_COLUMNS.get(i).getAutoGenerated(), table1.getColumns().get(i).getAutoGenerated());
             assertEquals(TABLE_2_COLUMNS.get(i).getEnums(), table1.getColumns().get(i).getEnums());
             assertEquals(TABLE_2_COLUMNS.get(i).getSets(), table1.getColumns().get(i).getSets());
         }
@@ -372,7 +368,6 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
             assertEquals(TABLE_3_COLUMNS.get(i).getSize(), table2.getColumns().get(i).getSize());
             assertEquals(TABLE_3_COLUMNS.get(i).getD(), table2.getColumns().get(i).getD());
             assertEquals(TABLE_3_COLUMNS.get(i).getIsNullAllowed(), table2.getColumns().get(i).getIsNullAllowed());
-            assertEquals(TABLE_3_COLUMNS.get(i).getAutoGenerated(), table2.getColumns().get(i).getAutoGenerated());
             assertEquals(TABLE_3_COLUMNS.get(i).getEnums(), table2.getColumns().get(i).getEnums());
             assertEquals(TABLE_3_COLUMNS.get(i).getSets(), table2.getColumns().get(i).getSets());
         }
@@ -417,7 +412,6 @@ public class MetadataMapperUnitTest extends AbstractUnitTest {
             assertEquals(TABLE_4_COLUMNS.get(i).getSize(), table3.getColumns().get(i).getSize());
             assertEquals(TABLE_4_COLUMNS.get(i).getD(), table3.getColumns().get(i).getD());
             assertEquals(TABLE_4_COLUMNS.get(i).getIsNullAllowed(), table3.getColumns().get(i).getIsNullAllowed());
-            assertEquals(TABLE_4_COLUMNS.get(i).getAutoGenerated(), table3.getColumns().get(i).getAutoGenerated());
             assertEquals(TABLE_4_COLUMNS.get(i).getEnums(), table3.getColumns().get(i).getEnums());
             assertEquals(TABLE_4_COLUMNS.get(i).getSets(), table3.getColumns().get(i).getSets());
         }
diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/TableServiceUnitTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/TableServiceUnitTest.java
index 94636bb3c5122539d6bba533a80940203b80df5d..551a6c350a95e1c9a22fb55d72e04530386cb522 100644
--- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/TableServiceUnitTest.java
+++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/TableServiceUnitTest.java
@@ -176,7 +176,6 @@ public class TableServiceUnitTest extends AbstractUnitTest {
         assertEquals("i_am_spa_shu_l", column0.getInternalName());
         assertEquals(TableColumnType.TEXT, column0.getColumnType());
         assertTrue(column0.getIsNullAllowed());
-        assertFalse(column0.getAutoGenerated());
         /* constraints */
         final Constraints constraints = response.getConstraints();
         assertEquals(0, constraints.getPrimaryKey().size());
diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/ViewServicePersistenceTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/ViewServicePersistenceTest.java
index 3ed06bfd7c8ba6222bcbfbf45cfeb9a602e8fa91..68eca349672106870801561689f26d02b18916e6 100644
--- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/ViewServicePersistenceTest.java
+++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/ViewServicePersistenceTest.java
@@ -2,6 +2,7 @@ package at.tuwien.service;
 
 import at.tuwien.entities.database.Database;
 import at.tuwien.entities.database.View;
+import at.tuwien.entities.database.ViewColumn;
 import at.tuwien.exception.*;
 import at.tuwien.gateway.DataServiceGateway;
 import at.tuwien.gateway.SearchServiceGateway;
@@ -63,7 +64,7 @@ public class ViewServicePersistenceTest extends AbstractUnitTest {
         licenseRepository.save(LICENSE_1);
         userRepository.saveAll(List.of(USER_1, USER_2, USER_3));
         containerRepository.save(CONTAINER_1);
-        databaseRepository.save(DATABASE_1);
+        databaseRepository.saveAll(List.of(DATABASE_1, DATABASE_2, DATABASE_3));
     }
 
     @Test
diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
index 0013f0156301e03872939957db011de70e98ef82..4866fea2c10b8afa3bb4f93f2eedb3d2aa8f1ad7 100644
--- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
+++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
@@ -191,7 +191,9 @@ public class TableServiceImpl implements TableService {
         /* delete at data service */
         dataServiceGateway.deleteTable(table.getDatabase().getId(), table.getId());
         /* update in metadata database */
-        table.getDatabase().getTables().remove(table);
+        table.getDatabase()
+                .getTables()
+                .remove(table);
         final Database database = databaseRepository.save(table.getDatabase());
         /* update in search service */
         searchServiceGateway.update(database);
diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/ViewServiceImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/ViewServiceImpl.java
index 0826d9dcc88e344227f56550b2aebd6f42e3494e..17a2c26d8929e6e89d220427100e1124c75b92b8 100644
--- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/ViewServiceImpl.java
+++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/ViewServiceImpl.java
@@ -75,7 +75,9 @@ public class ViewServiceImpl implements ViewService {
         /* delete in data service */
         dataServiceGateway.deleteView(view.getDatabase().getId(), view.getId());
         /* delete in metadata database */
-        view.getDatabase().getViews().remove(view);
+        view.getDatabase()
+                .getViews()
+                .remove(view);
         final Database database = databaseRepository.save(view.getDatabase());
         /* update in search service */
         searchServiceGateway.update(database);
diff --git a/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/AbstractUnitTest.java b/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/AbstractUnitTest.java
index 924db22930ac5bbffc94d5cad5c768cb45406be3..3ab89a39051326901f9b4029689bf4be82bd177f 100644
--- a/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/AbstractUnitTest.java
+++ b/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/AbstractUnitTest.java
@@ -38,7 +38,7 @@ public abstract class AbstractUnitTest extends BaseTest {
         DATABASE_1_PRIVILEGED_DTO.setIdentifiers(new LinkedList<>(List.of(IDENTIFIER_1_DTO, IDENTIFIER_2_DTO, IDENTIFIER_3_DTO, IDENTIFIER_4_DTO)));
         DATABASE_1_PRIVILEGED_DTO.setTables(new LinkedList<>(List.of(TABLE_1_DTO, TABLE_2_DTO, TABLE_3_DTO, TABLE_4_DTO)));
         DATABASE_1_PRIVILEGED_DTO.setViews(new LinkedList<>(List.of(VIEW_1_DTO, VIEW_2_DTO, VIEW_3_DTO)));
-        TABLE_1_DTO.setColumns(TABLE_1_COLUMNS_DTO);
+        TABLE_1_DTO.setColumns(new LinkedList<>(TABLE_1_COLUMNS_DTO));
         TABLE_1_DTO.setConstraints(TABLE_1_CONSTRAINTS_DTO);
         TABLE_2.setDatabase(DATABASE_1);
         TABLE_2.setColumns(new LinkedList<>(TABLE_2_COLUMNS));
@@ -46,12 +46,12 @@ public abstract class AbstractUnitTest extends BaseTest {
         TABLE_2.setConstraints(TABLE_2_CONSTRAINTS);
         TABLE_2_PRIVILEGED_DTO.setColumns(new LinkedList<>(TABLE_2_COLUMNS_DTO));
         TABLE_2_PRIVILEGED_DTO.setDatabase(DATABASE_1_PRIVILEGED_DTO);
-        TABLE_2_DTO.setColumns(TABLE_2_COLUMNS_DTO);
+        TABLE_2_DTO.setColumns(new LinkedList<>(TABLE_2_COLUMNS_DTO));
         TABLE_2_DTO.setConstraints(TABLE_2_CONSTRAINTS_DTO);
         TABLE_3.setDatabase(DATABASE_1);
         TABLE_3.setColumns(new LinkedList<>(TABLE_3_COLUMNS));
         TABLE_3.setConstraints(TABLE_3_CONSTRAINTS);
-        TABLE_3_DTO.setColumns(TABLE_3_COLUMNS_DTO);
+        TABLE_3_DTO.setColumns(new LinkedList<>(TABLE_3_COLUMNS_DTO));
         TABLE_3_DTO.setConstraints(TABLE_3_CONSTRAINTS_DTO);
         TABLE_4.setDatabase(DATABASE_1);
         TABLE_4.setColumns(new LinkedList<>(TABLE_4_COLUMNS));
@@ -59,14 +59,14 @@ public abstract class AbstractUnitTest extends BaseTest {
         TABLE_4_DTO.setColumns(TABLE_4_COLUMNS_DTO);
         TABLE_4_DTO.setConstraints(TABLE_4_CONSTRAINTS_DTO);
         VIEW_1.setDatabase(DATABASE_1);
-        VIEW_1.setColumns(VIEW_1_COLUMNS);
+        VIEW_1.setColumns(new LinkedList<>(VIEW_1_COLUMNS));
         VIEW_1.setIdentifiers(new LinkedList<>(List.of(IDENTIFIER_3)));
         VIEW_1_PRIVILEGED_DTO.setDatabase(DATABASE_1_PRIVILEGED_DTO);
         VIEW_2.setDatabase(DATABASE_1);
-        VIEW_2.setColumns(VIEW_2_COLUMNS);
+        VIEW_2.setColumns(new LinkedList<>(VIEW_2_COLUMNS));
         VIEW_2_PRIVILEGED_DTO.setDatabase(DATABASE_1_PRIVILEGED_DTO);
         VIEW_3.setDatabase(DATABASE_1);
-        VIEW_3.setColumns(VIEW_3_COLUMNS);
+        VIEW_3.setColumns(new LinkedList<>(VIEW_3_COLUMNS));
         VIEW_3_PRIVILEGED_DTO.setDatabase(DATABASE_1_PRIVILEGED_DTO);
         IDENTIFIER_1.setDatabase(DATABASE_1);
         IDENTIFIER_2.setDatabase(DATABASE_1);
@@ -77,6 +77,7 @@ public abstract class AbstractUnitTest extends BaseTest {
         DATABASE_2.setAccesses(new LinkedList<>(List.of(DATABASE_2_USER_2_WRITE_ALL_ACCESS, DATABASE_2_USER_3_READ_ACCESS)));
         DATABASE_2_PRIVILEGED_DTO.setAccesses(new LinkedList<>(List.of(DATABASE_2_USER_2_WRITE_ALL_ACCESS_DTO, DATABASE_2_USER_3_READ_ACCESS_DTO)));
         DATABASE_2.setTables(new LinkedList<>(List.of(TABLE_5, TABLE_6, TABLE_7)));
+        VIEW_4.setColumns(new LinkedList<>(VIEW_4_COLUMNS));
         DATABASE_2.setViews(new LinkedList<>(List.of(VIEW_4)));
         DATABASE_2.setIdentifiers(new LinkedList<>(List.of(IDENTIFIER_5)));
         DATABASE_2_PRIVILEGED_DTO.setTables(new LinkedList<>(List.of(TABLE_5_DTO, TABLE_6_DTO, TABLE_7_DTO)));
diff --git a/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/BaseTest.java b/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/BaseTest.java
index f372879434524410c299763e72041dcbb3dd1442..c6c0e4ec10150224ee4c21c6a1bf582649ba941e 100644
--- a/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/BaseTest.java
+++ b/dbrepo-metadata-service/test/src/main/java/at/tuwien/test/BaseTest.java
@@ -1509,9 +1509,8 @@ public abstract class BaseTest {
                     .name("id")
                     .internalName("id")
                     .ordinalPosition(0)
-                    .columnType(ColumnTypeDto.BIGINT)
+                    .columnType(ColumnTypeDto.SERIAL)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -1525,7 +1524,6 @@ public abstract class BaseTest {
                     .ordinalPosition(1)
                     .columnType(ColumnTypeDto.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -1540,7 +1538,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -1556,7 +1553,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -1574,7 +1570,6 @@ public abstract class BaseTest {
                     .concept(CONCEPT_1_DTO)
                     .unit(UNIT_1_DTO)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build());
@@ -2089,7 +2084,6 @@ public abstract class BaseTest {
                     .internalName("timestamp")
                     .columnType(TableColumnType.TIMESTAMP)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_4_2_ID)
@@ -2099,7 +2093,6 @@ public abstract class BaseTest {
                     .internalName("value")
                     .columnType(TableColumnType.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<ColumnCreateDto> TABLE_4_COLUMNS_CREATE_DTO = List.of(ColumnCreateDto.builder()
@@ -2144,7 +2137,6 @@ public abstract class BaseTest {
                     .internalName("timestamp")
                     .columnType(ColumnTypeDto.TIMESTAMP)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_4_2_ID)
@@ -2154,7 +2146,6 @@ public abstract class BaseTest {
                     .internalName("value")
                     .columnType(ColumnTypeDto.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static Long TABLE_8_ID = 8L;
@@ -2392,7 +2383,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_1_INTERNAL_NAME)
                     .columnType(COLUMN_8_1_TYPE)
                     .isNullAllowed(COLUMN_8_1_NULL)
-                    .autoGenerated(COLUMN_8_1_AUTO_GENERATED)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_8_2_ID)
@@ -2402,7 +2392,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_2_INTERNAL_NAME)
                     .columnType(COLUMN_8_2_TYPE)
                     .isNullAllowed(COLUMN_8_2_NULL)
-                    .autoGenerated(COLUMN_8_2_AUTO_GENERATED)
                     .size(COLUMN_8_2_SIZE)
                     .d(COLUMN_8_2_D)
                     .build(),
@@ -2414,7 +2403,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_3_INTERNAL_NAME)
                     .columnType(COLUMN_8_3_TYPE)
                     .isNullAllowed(COLUMN_8_3_NULL)
-                    .autoGenerated(COLUMN_8_3_AUTO_GENERATED)
                     .build());
 
     public final static List<ColumnDto> TABLE_8_COLUMNS_DTO = List.of(ColumnDto.builder()
@@ -2425,7 +2413,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_1_INTERNAL_NAME)
                     .columnType(COLUMN_8_1_TYPE_DTO)
                     .isNullAllowed(COLUMN_8_1_NULL)
-                    .autoGenerated(COLUMN_8_1_AUTO_GENERATED)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_8_2_ID)
@@ -2435,7 +2422,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_2_INTERNAL_NAME)
                     .columnType(COLUMN_8_2_TYPE_DTO)
                     .isNullAllowed(COLUMN_8_2_NULL)
-                    .autoGenerated(COLUMN_8_2_AUTO_GENERATED)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_8_3_ID)
@@ -2445,7 +2431,6 @@ public abstract class BaseTest {
                     .internalName(COLUMN_8_3_INTERNAL_NAME)
                     .columnType(COLUMN_8_3_TYPE_DTO)
                     .isNullAllowed(COLUMN_8_3_NULL)
-                    .autoGenerated(COLUMN_8_3_AUTO_GENERATED)
                     .build());
 
     public final static Long TABLE_8_DATA_COUNT = 6L;
@@ -2770,9 +2755,8 @@ public abstract class BaseTest {
                     .table(TABLE_1)
                     .name("id")
                     .internalName("id")
-                    .columnType(TableColumnType.BIGINT)
+                    .columnType(TableColumnType.SERIAL)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_1_2_ID)
@@ -2782,7 +2766,6 @@ public abstract class BaseTest {
                     .internalName("date")
                     .columnType(TableColumnType.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_1_3_ID)
@@ -2793,7 +2776,6 @@ public abstract class BaseTest {
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_1_4_ID)
@@ -2805,7 +2787,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_1_5_ID)
@@ -2819,7 +2800,6 @@ public abstract class BaseTest {
                     .concept(CONCEPT_1)
                     .unit(UNIT_1)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<ColumnCreateDto> TABLE_1_COLUMNS_CREATE_DTO = List.of(ColumnCreateDto.builder()
@@ -2908,7 +2888,6 @@ public abstract class BaseTest {
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -2923,7 +2902,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -2938,7 +2916,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build());
@@ -2968,7 +2945,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.VARCHAR)
                     .size(255L)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -2983,7 +2959,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.DOUBLE)
                     .size(22L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build(),
@@ -2998,7 +2973,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.DOUBLE)
                     .size(22L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .enums(null)
                     .sets(null)
                     .build());
@@ -3084,7 +3058,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_1_ID)
                     .table(TABLE_3)
                     .ordinalPosition(0)
-                    .autoGenerated(true)
                     .columnType(TableColumnType.BIGINT)
                     .name("id")
                     .internalName("id")
@@ -3096,7 +3069,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_2_ID)
                     .table(TABLE_3)
                     .ordinalPosition(1)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("linie")
                     .internalName("linie")
@@ -3108,7 +3080,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_3_ID)
                     .table(TABLE_3)
                     .ordinalPosition(2)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("richtung")
                     .internalName("richtung")
@@ -3120,7 +3091,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_4_ID)
                     .table(TABLE_3)
                     .ordinalPosition(3)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.DATE)
                     .name("betriebsdatum")
                     .internalName("betriebsdatum")
@@ -3132,7 +3102,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_5_ID)
                     .table(TABLE_3)
                     .ordinalPosition(4)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fahrzeug")
                     .internalName("fahrzeug")
@@ -3144,7 +3113,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_6_ID)
                     .table(TABLE_3)
                     .ordinalPosition(5)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("kurs")
                     .internalName("kurs")
@@ -3156,7 +3124,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_7_ID)
                     .table(TABLE_3)
                     .ordinalPosition(6)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("seq_von")
                     .internalName("seq_von")
@@ -3168,7 +3135,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_8_ID)
                     .table(TABLE_3)
                     .ordinalPosition(7)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_diva_von")
                     .internalName("halt_diva_von")
@@ -3180,7 +3146,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_9_ID)
                     .table(TABLE_3)
                     .ordinalPosition(8)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_punkt_diva_von")
                     .internalName("halt_punkt_diva_von")
@@ -3192,7 +3157,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_10_ID)
                     .table(TABLE_3)
                     .ordinalPosition(9)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_kurz_von1")
                     .internalName("halt_kurz_von1")
@@ -3204,7 +3168,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_11_ID)
                     .table(TABLE_3)
                     .ordinalPosition(10)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.DATE)
                     .name("datum_von")
                     .internalName("datum_von")
@@ -3216,7 +3179,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_12_ID)
                     .table(TABLE_3)
                     .ordinalPosition(11)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("soll_an_von")
                     .internalName("soll_an_von")
@@ -3228,7 +3190,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_13_ID)
                     .table(TABLE_3)
                     .ordinalPosition(12)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("ist_an_von")
                     .internalName("ist_an_von")
@@ -3240,7 +3201,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_14_ID)
                     .table(TABLE_3)
                     .ordinalPosition(13)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("soll_ab_von")
                     .internalName("soll_ab_von")
@@ -3252,7 +3212,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_15_ID)
                     .table(TABLE_3)
                     .ordinalPosition(14)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("ist_ab_von")
                     .internalName("ist_ab_von")
@@ -3264,7 +3223,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_16_ID)
                     .table(TABLE_3)
                     .ordinalPosition(15)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("seq_nach")
                     .internalName("seq_nach")
@@ -3276,7 +3234,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_17_ID)
                     .table(TABLE_3)
                     .ordinalPosition(16)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_diva_nach")
                     .internalName("halt_diva_nach")
@@ -3288,7 +3245,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_18_ID)
                     .table(TABLE_3)
                     .ordinalPosition(17)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_punkt_diva_nach")
                     .internalName("halt_punkt_diva_nach")
@@ -3300,7 +3256,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_19_ID)
                     .table(TABLE_3)
                     .ordinalPosition(18)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_kurz_nach1")
                     .internalName("halt_kurz_nach1")
@@ -3312,7 +3267,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_20_ID)
                     .table(TABLE_3)
                     .ordinalPosition(19)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.DATE)
                     .name("datum_nach")
                     .internalName("datum_nach")
@@ -3324,7 +3278,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_21_ID)
                     .table(TABLE_3)
                     .ordinalPosition(20)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("soll_an_nach")
                     .internalName("soll_an_nach")
@@ -3336,7 +3289,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_22_ID)
                     .table(TABLE_3)
                     .ordinalPosition(21)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("ist_an_nach1")
                     .internalName("ist_an_nach1")
@@ -3348,7 +3300,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_23_ID)
                     .table(TABLE_3)
                     .ordinalPosition(22)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("soll_ab_nach")
                     .internalName("soll_ab_nach")
@@ -3360,7 +3311,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_24_ID)
                     .table(TABLE_3)
                     .ordinalPosition(23)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("ist_ab_nach")
                     .internalName("ist_ab_nach")
@@ -3372,7 +3322,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_25_ID)
                     .table(TABLE_3)
                     .ordinalPosition(24)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fahrt_id")
                     .internalName("fahrt_id")
@@ -3384,7 +3333,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_26_ID)
                     .table(TABLE_3)
                     .ordinalPosition(25)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fahrweg_id")
                     .internalName("fahrweg_id")
@@ -3396,7 +3344,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_27_ID)
                     .table(TABLE_3)
                     .ordinalPosition(26)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fw_no")
                     .internalName("fw_no")
@@ -3408,7 +3355,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_28_ID)
                     .table(TABLE_3)
                     .ordinalPosition(27)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fw_typ")
                     .internalName("fw_typ")
@@ -3420,7 +3366,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_29_ID)
                     .table(TABLE_3)
                     .ordinalPosition(28)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fw_kurz")
                     .internalName("fw_kurz")
@@ -3432,7 +3377,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_30_ID)
                     .table(TABLE_3)
                     .ordinalPosition(29)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("fw_lang")
                     .internalName("fw_lang")
@@ -3444,7 +3388,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_31_ID)
                     .table(TABLE_3)
                     .ordinalPosition(30)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("umlauf_von")
                     .internalName("umlauf_von")
@@ -3456,7 +3399,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_32_ID)
                     .table(TABLE_3)
                     .ordinalPosition(31)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_id_von")
                     .internalName("halt_id_von")
@@ -3468,7 +3410,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_33_ID)
                     .table(TABLE_3)
                     .ordinalPosition(32)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_id_nach")
                     .internalName("halt_id_nach")
@@ -3480,7 +3421,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_34_ID)
                     .table(TABLE_3)
                     .ordinalPosition(33)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_punkt_id_von")
                     .internalName("halt_punkt_id_von")
@@ -3492,7 +3432,6 @@ public abstract class BaseTest {
                     .id(COLUMN_3_35_ID)
                     .table(TABLE_3)
                     .ordinalPosition(34)
-                    .autoGenerated(false)
                     .columnType(TableColumnType.INT)
                     .name("halt_punkt_id_nach")
                     .internalName("halt_punkt_id_nach")
@@ -3506,7 +3445,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(true)
                     .columnType(ColumnTypeDto.BIGINT)
                     .name("id")
                     .internalName("id")
@@ -3519,7 +3457,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("linie")
                     .internalName("linie")
@@ -3532,7 +3469,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("richtung")
                     .internalName("richtung")
@@ -3545,7 +3481,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.DATE)
                     .name("betriebsdatum")
                     .internalName("betriebsdatum")
@@ -3558,7 +3493,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fahrzeug")
                     .internalName("fahrzeug")
@@ -3571,7 +3505,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("kurs")
                     .internalName("kurs")
@@ -3584,7 +3517,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("seq_von")
                     .internalName("seq_von")
@@ -3597,7 +3529,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_diva_von")
                     .internalName("halt_diva_von")
@@ -3610,7 +3541,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_punkt_diva_von")
                     .internalName("halt_punkt_diva_von")
@@ -3623,7 +3553,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_kurz_von1")
                     .internalName("halt_kurz_von1")
@@ -3636,7 +3565,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.DATE)
                     .name("datum_von")
                     .internalName("datum_von")
@@ -3649,7 +3577,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("soll_an_von")
                     .internalName("soll_an_von")
@@ -3662,7 +3589,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("ist_an_von")
                     .internalName("ist_an_von")
@@ -3675,7 +3601,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("soll_ab_von")
                     .internalName("soll_ab_von")
@@ -3688,7 +3613,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("ist_ab_von")
                     .internalName("ist_ab_von")
@@ -3701,7 +3625,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("seq_nach")
                     .internalName("seq_nach")
@@ -3714,7 +3637,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_diva_nach")
                     .internalName("halt_diva_nach")
@@ -3727,7 +3649,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_punkt_diva_nach")
                     .internalName("halt_punkt_diva_nach")
@@ -3740,7 +3661,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_kurz_nach1")
                     .internalName("halt_kurz_nach1")
@@ -3753,7 +3673,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.DATE)
                     .name("datum_nach")
                     .internalName("datum_nach")
@@ -3766,7 +3685,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("soll_an_nach")
                     .internalName("soll_an_nach")
@@ -3779,7 +3697,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("ist_an_nach1")
                     .internalName("ist_an_nach1")
@@ -3792,7 +3709,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("soll_ab_nach")
                     .internalName("soll_ab_nach")
@@ -3805,7 +3721,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("ist_ab_nach")
                     .internalName("ist_ab_nach")
@@ -3818,7 +3733,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fahrt_id")
                     .internalName("fahrt_id")
@@ -3831,7 +3745,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fahrweg_id")
                     .internalName("fahrweg_id")
@@ -3844,7 +3757,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fw_no")
                     .internalName("fw_no")
@@ -3857,7 +3769,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fw_typ")
                     .internalName("fw_typ")
@@ -3870,7 +3781,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fw_kurz")
                     .internalName("fw_kurz")
@@ -3883,7 +3793,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("fw_lang")
                     .internalName("fw_lang")
@@ -3896,7 +3805,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("umlauf_von")
                     .internalName("umlauf_von")
@@ -3909,7 +3817,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_id_von")
                     .internalName("halt_id_von")
@@ -3922,7 +3829,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_id_nach")
                     .internalName("halt_id_nach")
@@ -3935,7 +3841,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_punkt_id_von")
                     .internalName("halt_punkt_id_von")
@@ -3948,7 +3853,6 @@ public abstract class BaseTest {
                     .tableId(TABLE_3_ID)
                     .table(TABLE_3_DTO)
                     .databaseId(DATABASE_1_ID)
-                    .autoGenerated(false)
                     .columnType(ColumnTypeDto.INT)
                     .name("halt_punkt_id_nach")
                     .internalName("halt_punkt_id_nach")
@@ -4014,7 +3918,6 @@ public abstract class BaseTest {
                     .internalName("id")
                     .columnType(TableColumnType.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(true)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_2_ID)
@@ -4024,7 +3927,6 @@ public abstract class BaseTest {
                     .internalName("animal_name")
                     .columnType(TableColumnType.VARCHAR)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_3_ID)
@@ -4034,7 +3936,6 @@ public abstract class BaseTest {
                     .internalName("hair")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_4_ID)
@@ -4044,7 +3945,6 @@ public abstract class BaseTest {
                     .internalName("feathers")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_5_ID)
@@ -4054,7 +3954,6 @@ public abstract class BaseTest {
                     .internalName("bread")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_6_ID)
@@ -4064,7 +3963,6 @@ public abstract class BaseTest {
                     .internalName("eggs")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_7_ID)
@@ -4074,7 +3972,6 @@ public abstract class BaseTest {
                     .internalName("milk")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_8_ID)
@@ -4084,7 +3981,6 @@ public abstract class BaseTest {
                     .internalName("water")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_9_ID)
@@ -4094,7 +3990,6 @@ public abstract class BaseTest {
                     .internalName("airborne")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_10_ID)
@@ -4104,7 +3999,6 @@ public abstract class BaseTest {
                     .internalName("waterborne")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_11_ID)
@@ -4114,7 +4008,6 @@ public abstract class BaseTest {
                     .internalName("aquantic")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_12_ID)
@@ -4124,7 +4017,6 @@ public abstract class BaseTest {
                     .internalName("predator")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_13_ID)
@@ -4134,7 +4026,6 @@ public abstract class BaseTest {
                     .internalName("backbone")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_14_ID)
@@ -4144,7 +4035,6 @@ public abstract class BaseTest {
                     .internalName("breathes")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_15_ID)
@@ -4154,7 +4044,6 @@ public abstract class BaseTest {
                     .internalName("venomous")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_16_ID)
@@ -4164,7 +4053,6 @@ public abstract class BaseTest {
                     .internalName("fin")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_17_ID)
@@ -4174,7 +4062,6 @@ public abstract class BaseTest {
                     .internalName("legs")
                     .columnType(TableColumnType.INT)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_18_ID)
@@ -4184,7 +4071,6 @@ public abstract class BaseTest {
                     .internalName("tail")
                     .columnType(TableColumnType.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_19_ID)
@@ -4194,7 +4080,6 @@ public abstract class BaseTest {
                     .internalName("domestic")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_20_ID)
@@ -4204,7 +4089,6 @@ public abstract class BaseTest {
                     .internalName("catsize")
                     .columnType(TableColumnType.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_5_21_ID)
@@ -4214,7 +4098,6 @@ public abstract class BaseTest {
                     .internalName("class_type")
                     .columnType(TableColumnType.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<ColumnDto> TABLE_5_COLUMNS_DTO = List.of(ColumnDto.builder()
@@ -4226,7 +4109,6 @@ public abstract class BaseTest {
                     .internalName("id")
                     .columnType(ColumnTypeDto.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(true)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_2_ID)
@@ -4237,7 +4119,6 @@ public abstract class BaseTest {
                     .internalName("animal_name")
                     .columnType(ColumnTypeDto.VARCHAR)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_3_ID)
@@ -4248,7 +4129,6 @@ public abstract class BaseTest {
                     .internalName("hair")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_4_ID)
@@ -4259,7 +4139,6 @@ public abstract class BaseTest {
                     .internalName("feathers")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_5_ID)
@@ -4270,7 +4149,6 @@ public abstract class BaseTest {
                     .internalName("bread")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_6_ID)
@@ -4281,7 +4159,6 @@ public abstract class BaseTest {
                     .internalName("eggs")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_7_ID)
@@ -4292,7 +4169,6 @@ public abstract class BaseTest {
                     .internalName("milk")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_8_ID)
@@ -4303,7 +4179,6 @@ public abstract class BaseTest {
                     .internalName("water")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_9_ID)
@@ -4314,7 +4189,6 @@ public abstract class BaseTest {
                     .internalName("airborne")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_10_ID)
@@ -4325,7 +4199,6 @@ public abstract class BaseTest {
                     .internalName("waterborne")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_11_ID)
@@ -4336,7 +4209,6 @@ public abstract class BaseTest {
                     .internalName("aquantic")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_12_ID)
@@ -4347,7 +4219,6 @@ public abstract class BaseTest {
                     .internalName("predator")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_13_ID)
@@ -4358,7 +4229,6 @@ public abstract class BaseTest {
                     .internalName("backbone")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_14_ID)
@@ -4369,7 +4239,6 @@ public abstract class BaseTest {
                     .internalName("breathes")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_15_ID)
@@ -4380,7 +4249,6 @@ public abstract class BaseTest {
                     .internalName("venomous")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_16_ID)
@@ -4391,7 +4259,6 @@ public abstract class BaseTest {
                     .internalName("fin")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_17_ID)
@@ -4402,7 +4269,6 @@ public abstract class BaseTest {
                     .internalName("legs")
                     .columnType(ColumnTypeDto.INT)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_18_ID)
@@ -4413,7 +4279,6 @@ public abstract class BaseTest {
                     .internalName("tail")
                     .columnType(ColumnTypeDto.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_19_ID)
@@ -4424,7 +4289,6 @@ public abstract class BaseTest {
                     .internalName("domestic")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_20_ID)
@@ -4435,7 +4299,6 @@ public abstract class BaseTest {
                     .internalName("catsize")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_5_21_ID)
@@ -4446,7 +4309,6 @@ public abstract class BaseTest {
                     .internalName("class_type")
                     .columnType(ColumnTypeDto.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<ForeignKeyCreateDto> TABLE_5_FOREIGN_KEYS_INVALID_CREATE = List.of(ForeignKeyCreateDto.builder()
@@ -4594,7 +4456,6 @@ public abstract class BaseTest {
                     .internalName("id")
                     .columnType(TableColumnType.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(true)
                     .build(),
             TableColumn.builder()
                     .id(68L)
@@ -4604,7 +4465,6 @@ public abstract class BaseTest {
                     .internalName("firstname")
                     .columnType(TableColumnType.VARCHAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(69L)
@@ -4614,7 +4474,6 @@ public abstract class BaseTest {
                     .internalName("lastname")
                     .columnType(TableColumnType.VARCHAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(70L)
@@ -4624,7 +4483,6 @@ public abstract class BaseTest {
                     .internalName("birth")
                     .columnType(TableColumnType.YEAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(71L)
@@ -4634,7 +4492,6 @@ public abstract class BaseTest {
                     .internalName("reminder")
                     .columnType(TableColumnType.TIME)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(72L)
@@ -4644,7 +4501,6 @@ public abstract class BaseTest {
                     .internalName("ref_id")
                     .columnType(TableColumnType.BIGINT)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static ColumnBriefDto TABLE_6_COLUMNS_BRIEF_0_DTO = ColumnBriefDto.builder()
@@ -4663,7 +4519,6 @@ public abstract class BaseTest {
                     .internalName("id")
                     .columnType(ColumnTypeDto.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(true)
                     .build(),
             ColumnDto.builder()
                     .id(68L)
@@ -4674,7 +4529,6 @@ public abstract class BaseTest {
                     .internalName("firstname")
                     .columnType(ColumnTypeDto.VARCHAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(69L)
@@ -4685,7 +4539,6 @@ public abstract class BaseTest {
                     .internalName("lastname")
                     .columnType(ColumnTypeDto.VARCHAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(70L)
@@ -4696,7 +4549,6 @@ public abstract class BaseTest {
                     .internalName("birth")
                     .columnType(ColumnTypeDto.YEAR)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(71L)
@@ -4707,7 +4559,6 @@ public abstract class BaseTest {
                     .internalName("reminder")
                     .columnType(ColumnTypeDto.TIME)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(72L)
@@ -4718,7 +4569,6 @@ public abstract class BaseTest {
                     .internalName("ref_id")
                     .columnType(ColumnTypeDto.BIGINT)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<List<String>> TABLE_6_UNIQUES_CREATE = List.of(
@@ -4785,7 +4635,6 @@ public abstract class BaseTest {
                     .internalName("name_id")
                     .columnType(TableColumnType.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             TableColumn.builder()
                     .id(COLUMN_7_2_ID)
@@ -4795,7 +4644,6 @@ public abstract class BaseTest {
                     .internalName("zoo_id")
                     .columnType(TableColumnType.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build());
 
     public final static List<ColumnDto> TABLE_7_COLUMNS_DTO = List.of(ColumnDto.builder()
@@ -4807,7 +4655,6 @@ public abstract class BaseTest {
                     .internalName("name_id")
                     .columnType(ColumnTypeDto.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ColumnDto.builder()
                     .id(COLUMN_7_2_ID)
@@ -4818,7 +4665,6 @@ public abstract class BaseTest {
                     .internalName("zoo_id")
                     .columnType(ColumnTypeDto.BIGINT)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build());
 
     public final static Long VIEW_1_ID = 1L;
@@ -4841,7 +4687,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.VARCHAR)
                     .size(255L)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(2L)
@@ -4852,7 +4697,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.DOUBLE)
                     .size(22L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(3L)
@@ -4863,7 +4707,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.DOUBLE)
                     .size(22L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static View VIEW_1 = View.builder()
@@ -4915,7 +4758,6 @@ public abstract class BaseTest {
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .view(VIEW_1)
                     .build(),
             ViewColumn.builder()
@@ -4928,7 +4770,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_1)
                     .build(),
             ViewColumn.builder()
@@ -4941,7 +4782,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_1)
                     .build()
     );
@@ -5010,18 +4850,15 @@ public abstract class BaseTest {
                     .ordinalPosition(1)
                     .columnType(ColumnTypeDto.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(5L)
                     .name("loc")
                     .internalName("loc")
-                    .alias("loc")
                     .ordinalPosition(2)
                     .columnType(ColumnTypeDto.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(6L)
@@ -5032,7 +4869,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(7L)
@@ -5043,7 +4879,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build()
     );
 
@@ -5068,18 +4903,16 @@ public abstract class BaseTest {
                     .internalName("date")
                     .columnType(TableColumnType.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_2)
                     .build(),
             ViewColumn.builder()
                     .id(5L)
                     .ordinalPosition(1)
-                    .name("Location")
-                    .internalName("location")
+                    .name("loc")
+                    .internalName("loc")
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_2)
                     .build(),
             ViewColumn.builder()
@@ -5091,7 +4924,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_2)
                     .build(),
             ViewColumn.builder()
@@ -5103,7 +4935,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_2)
                     .build()
     );
@@ -5169,7 +5000,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(9L)
@@ -5182,7 +5012,6 @@ public abstract class BaseTest {
                     .concept(CONCEPT_1_DTO)
                     .unit(UNIT_1_DTO)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(10L)
@@ -5192,7 +5021,6 @@ public abstract class BaseTest {
                     .columnType(ColumnTypeDto.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(11L)
@@ -5201,7 +5029,6 @@ public abstract class BaseTest {
                     .ordinalPosition(3)
                     .columnType(ColumnTypeDto.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build()
     );
 
@@ -5242,7 +5069,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_3)
                     .build(),
             ViewColumn.builder()
@@ -5254,7 +5080,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_3)
                     .build(),
             ViewColumn.builder()
@@ -5265,7 +5090,6 @@ public abstract class BaseTest {
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_3)
                     .build(),
             ViewColumn.builder()
@@ -5275,7 +5099,6 @@ public abstract class BaseTest {
                     .internalName("date")
                     .columnType(TableColumnType.DATE)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_3)
                     .build()
     );
@@ -5325,7 +5148,6 @@ public abstract class BaseTest {
                     .internalName("animal_name")
                     .columnType(ColumnTypeDto.VARCHAR)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(13L)
@@ -5334,7 +5156,6 @@ public abstract class BaseTest {
                     .internalName("hair")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(14L)
@@ -5343,7 +5164,6 @@ public abstract class BaseTest {
                     .internalName("feathers")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(15L)
@@ -5352,7 +5172,6 @@ public abstract class BaseTest {
                     .internalName("eggs")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(16L)
@@ -5361,7 +5180,6 @@ public abstract class BaseTest {
                     .internalName("milk")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(17L)
@@ -5370,7 +5188,6 @@ public abstract class BaseTest {
                     .internalName("airborne")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(18L)
@@ -5379,7 +5196,6 @@ public abstract class BaseTest {
                     .internalName("aquantic")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(19L)
@@ -5388,7 +5204,6 @@ public abstract class BaseTest {
                     .internalName("predator")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(20L)
@@ -5397,7 +5212,6 @@ public abstract class BaseTest {
                     .internalName("backbone")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(21L)
@@ -5406,7 +5220,6 @@ public abstract class BaseTest {
                     .internalName("breathes")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(22L)
@@ -5415,7 +5228,6 @@ public abstract class BaseTest {
                     .internalName("venomous")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(23L)
@@ -5424,7 +5236,6 @@ public abstract class BaseTest {
                     .internalName("fin")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(24L)
@@ -5433,7 +5244,6 @@ public abstract class BaseTest {
                     .internalName("legs")
                     .columnType(ColumnTypeDto.INT)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(25L)
@@ -5442,7 +5252,6 @@ public abstract class BaseTest {
                     .internalName("tail")
                     .columnType(ColumnTypeDto.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(26L)
@@ -5451,7 +5260,6 @@ public abstract class BaseTest {
                     .internalName("domestic")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(27L)
@@ -5460,7 +5268,6 @@ public abstract class BaseTest {
                     .internalName("catsize")
                     .columnType(ColumnTypeDto.BOOL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build(),
             ViewColumnDto.builder()
                     .id(28L)
@@ -5469,7 +5276,6 @@ public abstract class BaseTest {
                     .internalName("class_type")
                     .columnType(ColumnTypeDto.DECIMAL)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .build());
 
     public final static View VIEW_4 = View.builder()
@@ -5498,6 +5304,161 @@ public abstract class BaseTest {
             .columns(VIEW_4_COLUMNS_DTO)
             .build();
 
+    public final static List<ViewColumn> VIEW_4_COLUMNS = List.of(
+            ViewColumn.builder()
+                    .id(12L)
+                    .ordinalPosition(0)
+                    .name("Animal Name")
+                    .internalName("animal_name")
+                    .columnType(TableColumnType.VARCHAR)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(13L)
+                    .ordinalPosition(1)
+                    .name("Hair")
+                    .internalName("hair")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(14L)
+                    .ordinalPosition(2)
+                    .name("Feathers")
+                    .internalName("feathers")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(15L)
+                    .ordinalPosition(3)
+                    .name("Eggs")
+                    .internalName("eggs")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(16L)
+                    .ordinalPosition(4)
+                    .name("Milk")
+                    .internalName("milk")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(17L)
+                    .ordinalPosition(5)
+                    .name("Airborne")
+                    .internalName("airborne")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(18L)
+                    .ordinalPosition(6)
+                    .name("Aquantic")
+                    .internalName("aquantic")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(19L)
+                    .ordinalPosition(7)
+                    .name("Predator")
+                    .internalName("predator")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(20L)
+                    .ordinalPosition(8)
+                    .name("Backbone")
+                    .internalName("backbone")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(21L)
+                    .ordinalPosition(9)
+                    .name("Breathes")
+                    .internalName("breathes")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(22L)
+                    .ordinalPosition(10)
+                    .name("Venomous")
+                    .internalName("venomous")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(23L)
+                    .ordinalPosition(11)
+                    .name("Fin")
+                    .internalName("fin")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(24L)
+                    .ordinalPosition(12)
+                    .name("Legs")
+                    .internalName("legs")
+                    .columnType(TableColumnType.INT)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(25L)
+                    .ordinalPosition(13)
+                    .name("Tail")
+                    .internalName("tail")
+                    .columnType(TableColumnType.DECIMAL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(26L)
+                    .ordinalPosition(14)
+                    .name("Domestic")
+                    .internalName("domestic")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(27L)
+                    .ordinalPosition(15)
+                    .name("Catsize")
+                    .internalName("catsize")
+                    .columnType(TableColumnType.BOOL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build(),
+            ViewColumn.builder()
+                    .id(28L)
+                    .ordinalPosition(16)
+                    .name("Class Type")
+                    .internalName("class_type")
+                    .columnType(TableColumnType.DECIMAL)
+                    .isNullAllowed(true)
+                    .view(VIEW_4)
+                    .build());
+
     public final static Long VIEW_5_ID = 5L;
     public final static Boolean VIEW_5_INITIAL_VIEW = false;
     public final static String VIEW_5_NAME = "Mock View";
@@ -5544,7 +5505,6 @@ public abstract class BaseTest {
                     .columnType(TableColumnType.VARCHAR)
                     .size(255L)
                     .isNullAllowed(false)
-                    .autoGenerated(false)
                     .view(VIEW_5)
                     .build(),
             ViewColumn.builder()
@@ -5557,7 +5517,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_5)
                     .build(),
             ViewColumn.builder()
@@ -5570,7 +5529,6 @@ public abstract class BaseTest {
                     .size(10L)
                     .d(0L)
                     .isNullAllowed(true)
-                    .autoGenerated(false)
                     .view(VIEW_5)
                     .build());
 
diff --git a/dbrepo-metric-db/prometheus.yml b/dbrepo-metric-db/prometheus.yml
index a3c1945b64e9b458ecab4c8a41d308f4e36e114b..10df4f18959a0630be82ec22e44adbf13818f7bf 100644
--- a/dbrepo-metric-db/prometheus.yml
+++ b/dbrepo-metric-db/prometheus.yml
@@ -17,3 +17,7 @@ scrape_configs:
     metrics_path: '/metrics'
     static_configs:
       - targets: ['ui:3000', 'auth-service:9000', 'analyse-service:8080', 'search-service:8080', 'data-db-sidecar:8080', 'broker-service:15692', 'storage-service:9090', 'upload-service:8080', 'dashboard-service:3000']
+#  - job_name: 'gateway scrape'
+#    metrics_path: '/metrics'
+#    static_configs:
+#      - targets: ['dbrepo-gateway-service-sidecar:9113']
diff --git a/dbrepo-search-service/.gitignore b/dbrepo-search-service/.gitignore
index 4acceedc9aee48621f76f34ea6dc297dae6065f4..12a8c1aa3630c30664b9b59f3cdff7b765275f04 100644
--- a/dbrepo-search-service/.gitignore
+++ b/dbrepo-search-service/.gitignore
@@ -10,6 +10,12 @@ __pycache__/
 coverage.txt
 report.xml
 
+# Libraries
+./lib/dbrepo-1.4.4*
+./lib/dbrepo-1.4.5*
+./lib/dbrepo-1.4.6*
+./lib/dbrepo-1.4.7rc*
+
 # Distribution / packaging
 .Python
 build/
diff --git a/dbrepo-search-service/Pipfile.lock b/dbrepo-search-service/Pipfile.lock
index 44765766be1713fe518d7600e63ad9b61776f853..0a4189531723dec8e470e010bfd71551ad0ca5ba 100644
--- a/dbrepo-search-service/Pipfile.lock
+++ b/dbrepo-search-service/Pipfile.lock
@@ -388,7 +388,7 @@
         },
         "dbrepo": {
             "hashes": [
-                "sha256:5aa92850231c25a57ffa58395e0f6bbda2818b1f0d4edd83f51fd8143d909451"
+                "sha256:654d487f1c0fd99b4978f5756aec4046f3e6019aeb225ecdd449768795f6e7e0"
             ],
             "markers": "python_version >= '3.11'",
             "path": "./lib/dbrepo-1.4.7.tar.gz"
@@ -1690,101 +1690,107 @@
         },
         "yarl": {
             "hashes": [
-                "sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd",
-                "sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a",
-                "sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d",
-                "sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d",
-                "sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae",
-                "sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664",
-                "sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87",
-                "sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114",
-                "sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f",
-                "sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55",
-                "sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439",
-                "sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547",
-                "sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de",
-                "sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269",
-                "sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8",
-                "sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e",
-                "sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b",
-                "sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59",
-                "sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97",
-                "sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21",
-                "sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132",
-                "sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92",
-                "sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9",
-                "sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b",
-                "sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d",
-                "sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607",
-                "sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0",
-                "sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2",
-                "sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d",
-                "sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f",
-                "sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6",
-                "sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72",
-                "sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3",
-                "sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f",
-                "sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4",
-                "sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4",
-                "sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561",
-                "sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd",
-                "sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892",
-                "sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a",
-                "sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482",
-                "sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049",
-                "sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1",
-                "sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17",
-                "sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348",
-                "sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96",
-                "sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0",
-                "sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c",
-                "sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1",
-                "sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2",
-                "sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3",
-                "sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d",
-                "sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8",
-                "sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22",
-                "sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393",
-                "sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab",
-                "sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835",
-                "sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1",
-                "sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9",
-                "sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13",
-                "sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9",
-                "sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2",
-                "sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373",
-                "sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a",
-                "sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e",
-                "sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457",
-                "sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20",
-                "sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8",
-                "sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511",
-                "sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f",
-                "sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce",
-                "sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519",
-                "sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76",
-                "sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634",
-                "sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069",
-                "sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50",
-                "sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075",
-                "sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f",
-                "sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c",
-                "sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1",
-                "sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf",
-                "sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9",
-                "sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a",
-                "sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07",
-                "sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e",
-                "sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f",
-                "sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9",
-                "sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69",
-                "sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d",
-                "sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8",
-                "sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2",
-                "sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a"
+                "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656",
+                "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7",
+                "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9",
+                "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685",
+                "sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf",
+                "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb",
+                "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5",
+                "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77",
+                "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496",
+                "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a",
+                "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60",
+                "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128",
+                "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d",
+                "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61",
+                "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758",
+                "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5",
+                "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f",
+                "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0",
+                "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e",
+                "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814",
+                "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc",
+                "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf",
+                "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117",
+                "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d",
+                "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6",
+                "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218",
+                "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9",
+                "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf",
+                "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4",
+                "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef",
+                "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a",
+                "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4",
+                "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751",
+                "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253",
+                "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c",
+                "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42",
+                "sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb",
+                "sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7",
+                "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e",
+                "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b",
+                "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631",
+                "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29",
+                "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234",
+                "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530",
+                "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0",
+                "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a",
+                "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2",
+                "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e",
+                "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1",
+                "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec",
+                "sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f",
+                "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752",
+                "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9",
+                "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452",
+                "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef",
+                "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6",
+                "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c",
+                "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1",
+                "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246",
+                "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3",
+                "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb",
+                "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb",
+                "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07",
+                "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509",
+                "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326",
+                "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f",
+                "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6",
+                "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661",
+                "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40",
+                "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2",
+                "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15",
+                "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f",
+                "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844",
+                "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a",
+                "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b",
+                "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6",
+                "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e",
+                "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced",
+                "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19",
+                "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba",
+                "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95",
+                "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c",
+                "sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2",
+                "sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297",
+                "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891",
+                "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b",
+                "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d",
+                "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9",
+                "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22",
+                "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4",
+                "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba",
+                "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738",
+                "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0",
+                "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53",
+                "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f",
+                "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942",
+                "sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7",
+                "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd"
             ],
             "markers": "python_version >= '3.8'",
-            "version": "==1.14.0"
+            "version": "==1.15.0"
         }
     },
     "develop": {
diff --git a/dbrepo-search-service/init/.gitignore b/dbrepo-search-service/init/.gitignore
new file mode 100644
index 0000000000000000000000000000000000000000..588a8e94456726edc68db7182457ab0118ab69bf
--- /dev/null
+++ b/dbrepo-search-service/init/.gitignore
@@ -0,0 +1,5 @@
+# Libraries
+./lib/dbrepo-1.4.4*
+./lib/dbrepo-1.4.5*
+./lib/dbrepo-1.4.6*
+./lib/dbrepo-1.4.7rc*
\ No newline at end of file
diff --git a/dbrepo-search-service/init/Pipfile.lock b/dbrepo-search-service/init/Pipfile.lock
index 8c6ddaeaa82fc243881a19ad3998166823f26d2a..01e860ce1156a1ad10e66eb1341ebce320578d7a 100644
--- a/dbrepo-search-service/init/Pipfile.lock
+++ b/dbrepo-search-service/init/Pipfile.lock
@@ -1128,101 +1128,107 @@
         },
         "yarl": {
             "hashes": [
-                "sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd",
-                "sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a",
-                "sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d",
-                "sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d",
-                "sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae",
-                "sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664",
-                "sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87",
-                "sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114",
-                "sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f",
-                "sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55",
-                "sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439",
-                "sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547",
-                "sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de",
-                "sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269",
-                "sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8",
-                "sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e",
-                "sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b",
-                "sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59",
-                "sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97",
-                "sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21",
-                "sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132",
-                "sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92",
-                "sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9",
-                "sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b",
-                "sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d",
-                "sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607",
-                "sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0",
-                "sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2",
-                "sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d",
-                "sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f",
-                "sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6",
-                "sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72",
-                "sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3",
-                "sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f",
-                "sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4",
-                "sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4",
-                "sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561",
-                "sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd",
-                "sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892",
-                "sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a",
-                "sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482",
-                "sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049",
-                "sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1",
-                "sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17",
-                "sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348",
-                "sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96",
-                "sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0",
-                "sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c",
-                "sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1",
-                "sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2",
-                "sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3",
-                "sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d",
-                "sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8",
-                "sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22",
-                "sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393",
-                "sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab",
-                "sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835",
-                "sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1",
-                "sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9",
-                "sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13",
-                "sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9",
-                "sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2",
-                "sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373",
-                "sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a",
-                "sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e",
-                "sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457",
-                "sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20",
-                "sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8",
-                "sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511",
-                "sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f",
-                "sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce",
-                "sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519",
-                "sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76",
-                "sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634",
-                "sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069",
-                "sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50",
-                "sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075",
-                "sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f",
-                "sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c",
-                "sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1",
-                "sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf",
-                "sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9",
-                "sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a",
-                "sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07",
-                "sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e",
-                "sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f",
-                "sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9",
-                "sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69",
-                "sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d",
-                "sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8",
-                "sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2",
-                "sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a"
+                "sha256:0127bc2ea72c1eaae6808ace661f0edf222f32ffa987d37f2dbb4798288f2656",
+                "sha256:0358b697abdf1f2d68038bd02ef8ddcc4813835744f79c755f8743aa485585e7",
+                "sha256:06306c74f0775621a70fa5acd292119bbb6961d1f9a5f3d657a4c8c15b86f7b9",
+                "sha256:06b5b462cadf59c1df28ffbb0a3971fa16b60cf0c9d59a38bf5679a986d18685",
+                "sha256:097094a979af7b31520517c59179f6817b8426724343cecbec0eb3af1f8fb6cf",
+                "sha256:0c791a2d42da20ac568e5c0cc9b8af313188becd203a936ad959b578dafbcebb",
+                "sha256:1656a8b531a96427f26f498b7d0f19931166ff30e4344eca99bdb27faca14fc5",
+                "sha256:18614630533ac37ec373bd8035aec8fa4dd9aedac641209c06de7e082622ff77",
+                "sha256:1e5fa4c4e55cdacef1844f609bc9a02c8cd29c324a71ca1d3ee454701d4bb496",
+                "sha256:1edaf4171fc1582352ac5d9b2783966fa0f4ff86187279ef2a491613d23b894a",
+                "sha256:2124c642b8cc9b68e5981e429842dadc32bb850b010cccec9d24236253a19f60",
+                "sha256:229f222bb47cd7ab225648efd1ae47fe6943f18e4c91bce66471faf09fe33128",
+                "sha256:2429a651a2191c3fb8c9de21546c6046da539034d51dcb8df52302748004593d",
+                "sha256:25a4e29ee758596b2a0daffa4814714e9b464077ca862baf78ed0e8698e46b61",
+                "sha256:27c323b28723faed046f906c70466144c4dd12046a0128a301b29a65cfeff758",
+                "sha256:2add8ed2acf42398dfaa7dffd32e4d18ffbae341d62c8d4765bd9929336379b5",
+                "sha256:2bece7fdc13e23db005879b67190db0d397f6ba89c81dc7e3c77e9f5819aff7f",
+                "sha256:2eafb4e92f72a3b6c27f1d5e921d046e2728850af8887f86857c3fe868a5b5c0",
+                "sha256:32840ff92c713053591ff0e66845d4e9f4bea8fd5fba3da00f8d92e77722f24e",
+                "sha256:33896afca6fb4e1988c099534c52823870dfc8730bc6f96a3831f24c1e0ab814",
+                "sha256:350b468a217d433cbb4482e9414a14dfd360a3d5ab92013175925abb234364cc",
+                "sha256:38cab8f91b1085f1fd0765d40c46c8f43282f109018d5fcd017c46ac3eaba0cf",
+                "sha256:3e24a778470f3a9e9c11250d09daf5dea93369bc51aefca6605dbc963737a117",
+                "sha256:4224bbbc8a2e9b9a3828d36c1bab7458441d7fb9fb3af321eb735732ba8ee89d",
+                "sha256:4424082edff76fe46ff08851e91865097c0ad780fa79b87063dc5d5b80efc9d6",
+                "sha256:454707fb16f180984da6338d1f51897f0b8d8c4c2e0592d9d1e9fa02a5bb8218",
+                "sha256:4b1ab96a1ac91bd1233706d638ade35f663684deaa4e5e5f190858cba044afb9",
+                "sha256:4c5ff3e7609c214667c7d7e00d5f4f3576fefde47ebcb7e492c015117dafebbf",
+                "sha256:5107d89c9edec6ee077970a95fb9eeb4776ea8c2337b6a39c0ade9a58f50f3e4",
+                "sha256:5156c12a97405339ec93facbc7860566db381af2de1bec338195563fb64f37ef",
+                "sha256:553a1e3537aeeb29c0eb29ef28b80e0e801697fa71d96ac60675b284ff8e582a",
+                "sha256:5e1cc7823f43781390965c4762b54262cfcf76b6f152e489d00a5a1ac63063e4",
+                "sha256:5eef9804e65eb292e9c5587e88fe6a27a11f121d358312ac47211e8f42876751",
+                "sha256:6237637b496bc04819190e724a4e61ff2f251abf432f70cf491b3bc4a3f2f253",
+                "sha256:627bb5bc4ed3d3ebceb9fb55717cec6cd58bb47fdb5669169ebbc248e9bf156c",
+                "sha256:676d7356bb30825b7dbdad4fdd7a9feac379d074e5d4a36299767d72857ded42",
+                "sha256:6960b0d2e713e726cb2914e3051e080b12412f70dcb8731cf7a8fb52c37931bb",
+                "sha256:6b93a666cd8cfd43f605d1b81a32b9e290bf45c74c2bfd51ba705449c78448c7",
+                "sha256:6ca160b4c649f0d56daef04751eef4571de46ed4b80f9051a87d090fef32f08e",
+                "sha256:70ac7893e67a81ed1346ee3e71203ca4b0c3550c005b1d1cf87bc1e61eecd04b",
+                "sha256:73c4af08e9bb9a9aa7df6c789b05b924b9a0c6a368bb0e418d0b85181b64b631",
+                "sha256:748dcacc19c69957f7063ea4fb359fa2180735b1a638c81a4a96b86a382a6f29",
+                "sha256:75d9762f65205a86381298eb9079f27c60b84de0c262e402dcf45c6cbc385234",
+                "sha256:7711d83dafe52cda16ff2dd205cd83c05e4c06d5aaac596ae2cf7d50d094a530",
+                "sha256:7aa9f9af452c3e8486a0b88fddd58352e6cea17b691b18861d26e46cf65ffff0",
+                "sha256:7f713d8f3c4e2eac0d91b741e8ef2e1082022de244685601ec83e899b445d86a",
+                "sha256:81edbd9bf9f25cd995e6d51c307e1d279587d40b7473e258fef6d5e548560cd2",
+                "sha256:83363a5789f128618041b9a737c7b146f1965abddf4294b0444591406b437c1e",
+                "sha256:85e273e59b8b1a5f60a89df82cddeaf918181abd7ae7a2f2f899b68b0c774ff1",
+                "sha256:8ad2e487824ba4cda87851a371139e255410e45d3bf2e334194789278d709cec",
+                "sha256:8b7f902f13a230686f01bcff17cd9ba045653069811c8fd5027f0f414b417e2f",
+                "sha256:8f074a24aa9a6a3d406474ec889ebb5d661f329349068e05e8dfcb3c4be67752",
+                "sha256:9084d99933824ed8d665f10f4ce62d08fed714e7678d5ff11a8c2c98b2dc18f9",
+                "sha256:928f7a61c4311f3dd003af19bb779f99683f97a0559b765c80fdb8846dab0452",
+                "sha256:97fcaf530318369da3cfd6ff52f5ab38daf8cb10ecee9a76efebf8031de09eef",
+                "sha256:994d27b24b61b1870f3571395c840433faabec5dcd239bd11ff6af7e34234bb6",
+                "sha256:9ae454916aa3abe28d0ef1c21ca1e8e36a14ccf52183d465dfaccffaa7ed462c",
+                "sha256:9fac5416c44e8e1d8ea9440096f88e1a7273257f3157184c5c715060e0c448a1",
+                "sha256:a2fe45c1143eefb680a4589c55e671fabd482a7f8c7791f311ea3bcc20139246",
+                "sha256:a3f8be3e785009ffa148e66474fea5c787ccb203b3d0bd1f22e1e22f7da0f3b3",
+                "sha256:a616c2e4b60cb8cdd9eb3b0c6fda4ab5f3e26244b427aaade560dcf63c5754fb",
+                "sha256:a94c9058c5703c172904103d7b479f7e23dd4e5f8e67b49f6cd256d35ff169cb",
+                "sha256:b1208f2e081d34832f509cbe311237a0543effe23d60b2fa14c0d3f86e6d1d07",
+                "sha256:b4b25de7e85ba90b2ff230153123b6b000a7f69c41d84a3a0dc3f878334c8509",
+                "sha256:bbe72c41cdd55c88b238a8925849fde4069c0cdcdef83f8d967f8f3982659326",
+                "sha256:c0a86dd3e85c6aa3fc73236eb5cf7ce69dd8ad7abcd23f8ae1126831c8e40c2f",
+                "sha256:c3b08d9e98d1a15338fcfbd52c02003704322c2d460c9b9be7df08f2952bdce6",
+                "sha256:c4d9c221cc8e32b14196498679bf2b324bec1d1127c4ba934d98e19298faa661",
+                "sha256:c4f882e42c6cea89488b9a16919edde8c0b1a98f307c05abdd3dd3bc4368af40",
+                "sha256:c5cc25cbd9ae01d49ac7b504ef5f3cbdcc8d139f9750dcfa0b80d405b4645cc2",
+                "sha256:c7f2deac59dc3e0528bdded248e637e789e5111ba1723a8d7a262eb93e133e15",
+                "sha256:c8b034b60e74fb29064f765851e77e5910055e1c4a3cb75c32eccf2b470fc00f",
+                "sha256:c9b9159eeeb7cd1c7131dc7f5878454f97a4dc20cd157e6474174ccac448b844",
+                "sha256:c9c405ca78c70c3599d8956e53be0c9def9c51ad949964a49ad96c79729a5b1a",
+                "sha256:ceb200918c9bd163bd390cc169b254b23b4be121026b003be93a4f2f5b554b4b",
+                "sha256:d06040266b5e6512a37b4703684d1798124764b43328254799e9678c588882a6",
+                "sha256:d3f5e201bd170fb97c643e84df58e221372cd053fbb291ebbd878b165ea5057e",
+                "sha256:d4aa7cca009817789fd5b8e52e8122f9e85dc580c88b816a93321c00a8acbced",
+                "sha256:d772ae3c12d3b8629db656050c86ee66924eaa98f7125a889175a59cfaafdb19",
+                "sha256:d816969b55a970b3accc7f9e4ea8f60043e3f7de96f21c06063d747ffc2f18ba",
+                "sha256:d885dcdca7bae42bc9a2f6cbf766abcb2a6cc043b1905fc3782c6ea1f74a2b95",
+                "sha256:db903458a457a53ee0f764ed11c5b5368398e216b442c42dca9d90fbd2bbf31c",
+                "sha256:dc63bb79e896d6ce6aaf672ac304b54969280e949c45727867fc154a17ec7ab2",
+                "sha256:dd042e6c3bf36448e3e3ed302b12ce79762480f4aff8e7a167cdf8c35dc93297",
+                "sha256:ddea4abc4606c10dddb70651b210b7ab5b663148d6d7bc85d76963c923629891",
+                "sha256:df57f3c3ef760489f2e82192e6c93286c2bc80d6d854ef940e5345ae7153cd4b",
+                "sha256:e1ddf05eeb422810b1aa919095db0691493442eebbf9cfb0f1e478a7b2fbdf3d",
+                "sha256:e2e3cb74684ff357e6b3c82dd71031d3c1fd7ee9f9b0a5205e5568c963e074f9",
+                "sha256:e4f64c8c52dde564bf3251b41d7a6746564b0fc0516cebe9c9e6695224440d22",
+                "sha256:e4f7efb38331e8327c1cc7fb2a2905a7db03d1a7fdb04706bf6465d0e44d41d4",
+                "sha256:e61b2019ebb5345510b833c4dd1f4afb1f0c07753f86f184c63836ffc3fb08ba",
+                "sha256:e7e38bf6e52797084c5c396db5bb519615727e491e9003e2449631457bf77738",
+                "sha256:eae041f535fe2e57681954f7cccb81854d777ce4c2a87749428ebe6c71c02ec0",
+                "sha256:eb964d18c01b7a1263a6f07b88d63711fcd564fc429d934279cf12f4b467bf53",
+                "sha256:ef780f9d480ffb423380abeb4cfcad66ecb8f93526dfa367d322fdad9ec7c25f",
+                "sha256:efc0430b80ed834c80c99c32946cfc6ee29dfcd7c62ad3c8f15657322ade7942",
+                "sha256:f2508ee2bad8381b5254eadc35d32fe800d12eb2c63b744183341f3a66e435a7",
+                "sha256:fee9acd5e39c8611957074dfba06552e430020eea831caf5eb2cea30f10e06bd"
             ],
             "markers": "python_version >= '3.8'",
-            "version": "==1.14.0"
+            "version": "==1.15.0"
         }
     },
     "develop": {
diff --git a/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz b/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz
index 5774a413a678f3179e5e215737c5e39f5a773ef8..936ec4dac16fe8f065ac7d37a09aedca421086a6 100644
Binary files a/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-search-service/init/lib/dbrepo-1.4.7.tar.gz differ
diff --git a/dbrepo-search-service/init/test/test_app.py b/dbrepo-search-service/init/test/test_app.py
index a8e6d9755b63d35ac6af4c2b5f1d2d23e311e085..0df10e51c74b59b995148a350998566a30a4a52e 100644
--- a/dbrepo-search-service/init/test/test_app.py
+++ b/dbrepo-search-service/init/test/test_app.py
@@ -41,7 +41,6 @@ class OpenSearchClientTest(unittest.TestCase):
                                                 internal_name="id",
                                                 database_id=req.id,
                                                 table_id=1,
-                                                auto_generated=True,
                                                 column_type=ColumnType.BIGINT,
                                                 is_public=True,
                                                 is_null_allowed=False)])]
@@ -90,6 +89,5 @@ class OpenSearchClientTest(unittest.TestCase):
             self.assertEqual(ColumnType.BIGINT, database.tables[0].columns[0].column_type)
             self.assertEqual(1, database.tables[0].columns[0].database_id)
             self.assertEqual(1, database.tables[0].columns[0].table_id)
-            self.assertEqual(True, database.tables[0].columns[0].auto_generated)
             self.assertEqual(True, database.tables[0].columns[0].is_public)
             self.assertEqual(False, database.tables[0].columns[0].is_null_allowed)
diff --git a/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz b/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz
index 5774a413a678f3179e5e215737c5e39f5a773ef8..936ec4dac16fe8f065ac7d37a09aedca421086a6 100644
Binary files a/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz and b/dbrepo-search-service/lib/dbrepo-1.4.7.tar.gz differ
diff --git a/dbrepo-search-service/test/test_opensearch_client.py b/dbrepo-search-service/test/test_opensearch_client.py
index 2bab038128fa7dd201dfcca81240f906358e37eb..adf421af74dee1690ffe631b5fc33c975655300b 100644
--- a/dbrepo-search-service/test/test_opensearch_client.py
+++ b/dbrepo-search-service/test/test_opensearch_client.py
@@ -55,8 +55,8 @@ req = Database(id=1,
                              routing_key="dbrepo.1.1",
                              is_public=True,
                              columns=[Column(id=1, database_id=1, table_id=1, name="ID", internal_name="id",
-                                             auto_generated=True, column_type=ColumnType.BIGINT, is_public=True,
-                                             is_null_allowed=False, size=20, d=0,
+                                             column_type=ColumnType.BIGINT, is_public=True, is_null_allowed=False,
+                                             size=20, d=0,
                                              concept=Concept(id=1, uri="http://www.wikidata.org/entity/Q2221906",
                                                              created=datetime.datetime(2024, 3, 1, 10,
                                                                                        tzinfo=datetime.timezone.utc)),
@@ -104,7 +104,6 @@ class OpenSearchClientTest(unittest.TestCase):
                                                 internal_name="id",
                                                 database_id=req.id,
                                                 table_id=1,
-                                                auto_generated=True,
                                                 column_type=ColumnType.BIGINT,
                                                 is_public=True,
                                                 is_null_allowed=False)])]
@@ -153,7 +152,6 @@ class OpenSearchClientTest(unittest.TestCase):
             self.assertEqual(ColumnType.BIGINT, database.tables[0].columns[0].column_type)
             self.assertEqual(1, database.tables[0].columns[0].database_id)
             self.assertEqual(1, database.tables[0].columns[0].table_id)
-            self.assertEqual(True, database.tables[0].columns[0].auto_generated)
             self.assertEqual(True, database.tables[0].columns[0].is_public)
             self.assertEqual(False, database.tables[0].columns[0].is_null_allowed)
 
@@ -308,4 +306,3 @@ class OpenSearchClientTest(unittest.TestCase):
     #         response = client.general_search(type="database", field_value_pairs={"name": "Test",
     #                                                                              "id": None})
     #         self.assertTrue(len(response) > 0)
-
diff --git a/dbrepo-ui/bun.lockb b/dbrepo-ui/bun.lockb
index 32e73e364f9bd9e573f92d3f4a72d9596cb21363..3f85eb8de9c7a334e2a6a89d3cbcf75254751610 100755
Binary files a/dbrepo-ui/bun.lockb and b/dbrepo-ui/bun.lockb differ
diff --git a/dbrepo-ui/components/dialogs/EditTuple.vue b/dbrepo-ui/components/dialogs/EditTuple.vue
index da8000dae28372ec49fd8bfec8772a4bbe354fe0..ea0bfb3c5b9161a907c2ca61544daafb737dd128 100644
--- a/dbrepo-ui/components/dialogs/EditTuple.vue
+++ b/dbrepo-ui/components/dialogs/EditTuple.vue
@@ -17,14 +17,35 @@
               <v-text-field
                 v-if="isNumber(column)"
                 v-model.number="tuple[column.internal_name]"
-                :disabled="(!edit && column.auto_generated)"
+                :disabled="!edit"
                 persistent-hint
                 :variant="inputVariant"
                 :label="column.internal_name"
                 :hint="hint(column)"
                 :rules="rules(column)"
                 :required="required(column)"
-                type="number" /><v-text-field
+                type="number">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-text-field>
+              <v-text-field
                 v-if="isTextField(column)"
                 v-model="tuple[column.internal_name]"
                 :disabled="disabled(column)"
@@ -37,7 +58,27 @@
                 :variant="inputVariant"
                 :label="column.internal_name"
                 :hint="hint(column)"
-                type="text" />
+                type="text">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-text-field>
               <v-text-field
                 v-if="isFloatingPoint(column)"
                 v-model="tuple[column.internal_name]"
@@ -50,7 +91,27 @@
                 :variant="inputVariant"
                 :label="column.internal_name"
                 :hint="hint(column)"
-                type="number" />
+                type="number">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-text-field>
               <v-textarea
                 v-if="isTextArea(column)"
                 v-model="tuple[column.internal_name]"
@@ -62,7 +123,27 @@
                 persistent-hint
                 :variant="inputVariant"
                 :label="column.internal_name"
-                :hint="hint(column)" />
+                :hint="hint(column)">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-textarea>
               <BlobUpload
                 v-if="isFileField(column)"
                 :column="column"
@@ -77,7 +158,27 @@
                 :rules="rules(column)"
                 :required="required(column)"
                 :clearable="!required(column)"
-                :items="isSet(column) ? column.sets : column.enums" />
+                :items="isSet(column) ? column.sets : column.enums">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-select>
               <v-select
                 v-if="isBoolean(column)"
                 v-model="tuple[column.internal_name]"
@@ -88,7 +189,27 @@
                 :rules="rules(column)"
                 :required="required(column)"
                 :items="bools"
-                :clearable="!required(column)" />
+                :clearable="!required(column)">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-select>
               <v-text-field
                 v-if="isTimeField(column)"
                 v-model="tuple[column.internal_name]"
@@ -97,7 +218,27 @@
                 persistent-hint
                 :variant="inputVariant"
                 :label="column.internal_name"
-                :hint="hint(column)" />
+                :hint="hint(column)">
+                <template
+                  v-slot:append>
+                  {{ column.column_type.toUpperCase() }}
+                  <NuxtLink
+                    target="_blank"
+                    class="ml-2"
+                    :href="documentationLink(column)">
+                    <v-tooltip
+                      location="bottom">
+                      <template
+                        v-slot:activator="{ props }">
+                        <v-icon
+                          v-bind="props"
+                          icon="mdi-help-circle-outline" />
+                      </template>
+                      {{ $t('navigation.help') }}
+                    </v-tooltip>
+                  </NuxtLink>
+                </template>
+              </v-text-field>
             </v-col>
           </v-row>
         </v-card-text>
@@ -175,7 +316,8 @@ export default {
       bools: [
         { title: 'true', value: true },
         { title: 'false', value: false }
-      ]
+      ],
+      cacheStore: useCacheStore()
     }
   },
   mounted() {
@@ -183,6 +325,15 @@ export default {
     this.oldTuple = Object.assign({}, this.tuple)
   },
   computed: {
+    database () {
+      return this.cacheStore.getDatabase
+    },
+    columnTypes () {
+      if (!this.database) {
+        return []
+      }
+      return this.database.container.image.data_types
+    },
     title () {
       return (this.edit ? this.$t('toolbars.table.data.edit') : this.$t('toolbars.table.data.add')) + ' ' + this.$t('toolbars.table.data.tuple')
     },
@@ -204,24 +355,35 @@ export default {
       this.$emit('close', { success: false })
     },
     hint (column) {
-      const { is_null_allowed, auto_generated, is_primary_key, column_type, date_format, size, d } = column
-      let hint = is_null_allowed ? '' : this.$t('pages.table.subpages.data.required.hint')
-      if (auto_generated) {
+      const { is_null_allowed, is_primary_key } = column
+      let hint = ''
+      if (!is_null_allowed) {
+        hint += this.$t('pages.table.subpages.data.required.hint')
+      }
+      if (column.column_type === 'sequence') {
         hint += ' ' + this.$t('pages.table.subpages.data.auto.hint')
       }
       if (is_primary_key) {
         hint += ' ' + this.$t('pages.table.subpages.data.primary-key.hint')
       }
-      if (['double', 'decimal'].includes(column_type)) {
-        hint += ' ' + this.$t('pages.table.subpages.data.format.hint') + ' ddd.f'
+      if (this.formatHint(column)) {
+        hint += this.$t('pages.table.subpages.data.format.hint') + ' ' + this.formatHint(column)
       }
-      if (['date', 'datetime', 'timestamp', 'time'].includes(column_type) && date_format) {
-        hint += ' ' + this.$t('pages.table.subpages.data.format.hint') + ' ' + date_format.unix_format
+      return hint
+    },
+    documentationLink ({column_type}) {
+      const filter = this.columnTypes.filter(t => t.value === column_type)
+      if (filter.length !== 1) {
+        return null
       }
-      if (['year'].includes(column_type)) {
-        hint += ' ' + this.$t('pages.table.subpages.data.format.hint') + ' YYYY'
+      return filter[0].documentation
+    },
+    formatHint ({column_type}) {
+      const filter = this.columnTypes.filter(t => t.value === column_type)
+      if (filter.length !== 1) {
+        return null
       }
-      return hint
+      return filter[0].data_hint
     },
     isTextField (column) {
       const { column_type } = column
@@ -252,7 +414,7 @@ export default {
       return ['date', 'datetime', 'timestamp', 'time', 'year'].includes(column.column_type)
     },
     rules (column) {
-      if (column.auto_generated || column.is_null_allowed) {
+      if (column.is_null_allowed) {
         return []
       }
       const rules = []
@@ -273,7 +435,7 @@ export default {
       return column.is_null_allowed === false
     },
     disabled (column) {
-      return (this.edit && column.is_primary_key) || (!this.edit && column.auto_generated)
+      return (this.edit && column.is_primary_key) || !this.edit
     },
     updateTuple () {
       const constraints = {}
diff --git a/dbrepo-ui/components/subset/Builder.vue b/dbrepo-ui/components/subset/Builder.vue
index d19ec1598fb39cf0e9a02d4b6aafcf6d722b99c7..a6f53c6e4f0dba0ef1f8935c9112dc1e2d1d0df2 100644
--- a/dbrepo-ui/components/subset/Builder.vue
+++ b/dbrepo-ui/components/subset/Builder.vue
@@ -123,7 +123,23 @@
                     :rules="[v => !!v || $t('validation.required')]"
                     return-object
                     multiple
-                    @update:model-value="buildQuery" />
+                    @update:model-value="buildQuery">
+                    <template
+                      v-slot:prepend-item>
+                      <v-list-item
+                        title="Select All"
+                        :active="select.length === columns.length"
+                        @click="toggleColumns">
+                        <template
+                          v-slot:prepend>
+                          <v-checkbox-btn
+                            :model-value="select.length === columns.length" />
+                        </template>
+                      </v-list-item>
+                      <v-divider
+                        class="mt-2" />
+                    </template>
+                  </v-select>
                 </v-col>
               </v-row>
               <v-row v-if="select.length > 0">
@@ -594,6 +610,14 @@ export default {
           keywordCase: 'upper'
         })
       }
+    },
+    toggleColumns () {
+      if (this.select.length !== this.columns.length) {
+        this.select = this.columns
+        this.buildQuery()
+      } else {
+        this.select = []
+      }
     }
   }
 }
diff --git a/dbrepo-ui/components/table/TableHistory.vue b/dbrepo-ui/components/table/TableHistory.vue
index dd3dad66e2961b59f5591a586150e88fa2c00ff6..34d45248e7341b9a5b8b3af1c2d799e4408e6ecb 100644
--- a/dbrepo-ui/components/table/TableHistory.vue
+++ b/dbrepo-ui/components/table/TableHistory.vue
@@ -28,6 +28,10 @@
           :options="chartOptions"
           :height="200"
           :width="400" />
+        <pre>{{ history }}</pre>
+        <p>
+          {{ $t('pages.table.subpages.versioning.chart.legend') }}
+        </p>
       </v-card-text>
       <v-card-actions>
         <v-spacer />
@@ -52,9 +56,9 @@
 </template>
 
 <script>
+import { UTCDate } from '@date-fns/utc'
 import { Bar } from 'vue-chartjs'
 import { format } from 'date-fns'
-import { useCacheStore } from '~/stores/cache.js'
 import { Chart as ChartJS, Title, Tooltip, BarElement, CategoryScale, LinearScale, LogarithmicScale } from 'chart.js'
 
 ChartJS.register(Title, Tooltip, BarElement, CategoryScale, LinearScale, LogarithmicScale)
@@ -69,6 +73,7 @@ export default {
       loading: true,
       datetime: null,
       history: null,
+      chartData: null,
       chartOptions: {
         responsive: true,
         onClick: this.handle,
@@ -92,7 +97,7 @@ export default {
             },
           },
           x: {
-            display: true,
+            display: false,
             ticks: {
               min: 0,
               stepSize: 1
@@ -118,15 +123,6 @@ export default {
     buttonVariant () {
       const runtimeConfig = useRuntimeConfig()
       return this.$vuetify.theme.global.name.toLowerCase().endsWith('contrast') ? runtimeConfig.public.variant.button.contrast : runtimeConfig.public.variant.button.normal
-    },
-    chartData () {
-      return {
-        labels: this.history ? this.history.map(d => format(new Date(d.timestamp), 'yyyy-MM-dd HH:mm:ss')) : [],
-        datasets: [
-          this.history ? { backgroundColor: this.$vuetify.theme.current.colors.success, data: this.history.filter(d => d.event === 'INSERT').map(d => d.total) } : { data: [] },
-          this.history ? { backgroundColor: this.$vuetify.theme.current.colors.error, data: this.history.filter(d => d.event === 'DELETE').map(d => d.total) } : { data: [] },
-        ]
-      }
     }
   },
   mounted() {
@@ -153,13 +149,28 @@ export default {
       this.datetime = this.chartData.labels[idx]
       console.debug('date time', this.datetime, 'idx', idx)
     },
+    filterHistoryEventType (history, type) {
+      return history.map(d => {
+        if (d.event === type) {
+          return d.total
+        }
+        return null
+      })
+    },
     loadHistory () {
       this.loading = true
       const tableService = useTableService()
       tableService.history(this.table.database_id, this.table.id)
         .then((history) => {
           this.loading = false
-          this.history = history
+          this.chartData = {
+            // labels: history ? history.map(d => format(new UTCDate(d.timestamp), 'yyyy-MM-dd HH:mm:ss.SSS')) : [],
+            labels: history ? history.map(d => format(new UTCDate(d.timestamp), 'yyyy-MM-dd HH:mm:ss')) : [],
+            datasets: [
+              { backgroundColor: this.$vuetify.theme.current.colors.success, data: this.filterHistoryEventType(history, 'INSERT') },
+              { backgroundColor: this.$vuetify.theme.current.colors.error, data: this.filterHistoryEventType(history, 'DELETE') }
+            ]
+          }
         })
         .catch(({message}) => {
           const toast = useToastInstance()
diff --git a/dbrepo-ui/components/table/TableList.vue b/dbrepo-ui/components/table/TableList.vue
index 362f4a93665161b36a432d147fb862890c389f1a..2fc2c7d791b5ce5f84614bc9e2266d99c4c49e9b 100644
--- a/dbrepo-ui/components/table/TableList.vue
+++ b/dbrepo-ui/components/table/TableList.vue
@@ -60,7 +60,6 @@ export default {
         { value: 'is_primary_key', title: 'Primary Key' },
         { value: 'unique', title: 'Unique' },
         { value: 'is_null_allowed', title: 'Nullable' },
-        { value: 'auto_generated', title: 'Sequence' }
       ],
       columnTypes: [
         // { value: 'ENUM', text: 'Enumeration' }, // Disabled for now, not implemented, #145
diff --git a/dbrepo-ui/components/table/TableSchema.vue b/dbrepo-ui/components/table/TableSchema.vue
index 5e105577d7c5ab4f4c5d156a0a9a521240b39073..57169a95fa1af760389723064b098d079547d9f4 100644
--- a/dbrepo-ui/components/table/TableSchema.vue
+++ b/dbrepo-ui/components/table/TableSchema.vue
@@ -76,9 +76,10 @@
           <v-text-field
             v-model.number="c.size"
             type="number"
-            :min="columnType(c).size_min !== null ? columnType(c).size_min : null"
-            :max="columnType(c).size_max !== null ? columnType(c).size_max : null"
+            :min="columnType(c).size_min"
+            :max="columnType(c).size_max"
             :step="columnType(c).size_step"
+            :value="columnType(c).size_required === true ? columnType(c).size_default : null"
             :hint="sizeHint(c)"
             :clearable="!columnType(c).size_required"
             persistent-hint
@@ -125,7 +126,7 @@
           class="pl-10">
           <v-checkbox
             v-model="c.null_allowed"
-            :disabled="c.primary_key || disabled"
+            :disabled="c.primary_key || c.type === 'serial' || disabled"
             :label="$t('pages.table.subpages.schema.null.label')" />
         </v-col>
         <v-col
@@ -133,7 +134,7 @@
           class="pl-10">
           <v-checkbox
             v-model="c.unique"
-            :disabled="disabled"
+            :disabled="disabled || c.type === 'serial'"
             :hidden="c.primary_key"
             :label="$t('pages.table.subpages.schema.unique.label')" />
         </v-col>
@@ -233,7 +234,11 @@ export default {
       if (!this.database) {
         return []
       }
-      return this.database.container.image.data_types
+      const types = this.database.container.image.data_types
+      if (this.columns.filter(c => c.type === 'serial').length > 0) {
+        return types.filter(t => t.value !== 'serial')
+      }
+      return types
     },
     dateFormats () {
       if (!this.database || !('container' in this.database) || !('image' in this.database.container) || !('date_formats' in this.database.container.image)) {
@@ -373,21 +378,13 @@ export default {
         column.d = null
       }
       console.debug('for column type', column.type, 'set default size', column.size, '& d', column.d)
-    },
-    hasDate (column) {
-      return column.type === 'date' || column.type === 'datetime' || column.type === 'timestamp' || column.type === 'time'
+      if (column.type === 'serial') {
+        this.setOthers(column)
+      }
     },
     hasEnumOrSet (column) {
       return column.type === 'enum' || column.type === 'set'
     },
-    filterDateFormats (column) {
-      return this.dateFormats.filter((df) => {
-        if (column.type === 'date') {
-          return !df.has_time
-        }
-        return df.has_time
-      })
-    },
     sizeErrorMessages (column) {
       if (column.size < column.d) {
         return ['Size needs to be bigger or equal to d']
diff --git a/dbrepo-ui/components/table/TableToolbar.vue b/dbrepo-ui/components/table/TableToolbar.vue
index 4c1c86fba656fb0d87f87fac7690c98289a26c9e..1c222cc7c61bd4229c3a8b8e3dd7560a1228eea5 100644
--- a/dbrepo-ui/components/table/TableToolbar.vue
+++ b/dbrepo-ui/components/table/TableToolbar.vue
@@ -13,7 +13,7 @@
           width="200" />
         <span
           v-if="table && $vuetify.display.lgAndUp">
-          {{ table.name}}
+          {{ table.name }}
         </span>
       </v-toolbar-title>
       <v-spacer />
diff --git a/dbrepo-ui/composables/view-service.ts b/dbrepo-ui/composables/view-service.ts
index 642a7c6e51ca7344dae72dd3ee12550c84c893a3..5b3a25a149813ddf30f622fcb3d51fccb31f6730 100644
--- a/dbrepo-ui/composables/view-service.ts
+++ b/dbrepo-ui/composables/view-service.ts
@@ -1,4 +1,5 @@
 import {axiosErrorToApiError} from '@/utils'
+import type {AxiosRequestConfig} from "axios";
 
 export const useViewService = (): any => {
   async function remove(databaseId: number, viewId: number): Promise<void> {
@@ -66,5 +67,27 @@ export const useViewService = (): any => {
     })
   }
 
-  return {remove, create, reExecuteData, reExecuteCount}
+  async function exportData(databaseId: number, viewId: number): Promise<QueryResultDto> {
+    const axios = useAxiosInstance()
+    const config: AxiosRequestConfig = {
+      responseType: 'blob',
+      headers: {
+        Accept: 'text/csv'
+      }
+    }
+    console.debug('export data for view with id', viewId, 'in database with id', databaseId);
+    return new Promise<QueryResultDto>((resolve, reject) => {
+      axios.get<QueryResultDto>(`/api/database/${databaseId}/view/${viewId}/export`, config)
+        .then((response) => {
+          console.info('Exported data for view with id', viewId, 'in database with id', databaseId)
+          resolve(response.data)
+        })
+        .catch((error) => {
+          console.error('Failed to export data', error)
+          reject(axiosErrorToApiError(error))
+        })
+    })
+  }
+
+  return {remove, create, reExecuteData, reExecuteCount, exportData}
 }
diff --git a/dbrepo-ui/dto/index.ts b/dbrepo-ui/dto/index.ts
index 543bd0a3910a7f8ffbe2d0d10486234e2c446424..74b3911764fbd3da5d41d5d72cd269d79ee3963d 100644
--- a/dbrepo-ui/dto/index.ts
+++ b/dbrepo-ui/dto/index.ts
@@ -336,7 +336,6 @@ interface ColumnDto {
   table_id: number;
   internal_name: string;
   date_format: ImageDateDto;
-  auto_generated: boolean;
   is_primary_key: boolean;
   index_length: number;
   length: number;
diff --git a/dbrepo-ui/locales/en-US.json b/dbrepo-ui/locales/en-US.json
index db2c41bfd272feb51c5ef36fa6bf4a490e0d5e08..3a3846e2efdb70cb02793834c3e018ea9c189f08 100644
--- a/dbrepo-ui/locales/en-US.json
+++ b/dbrepo-ui/locales/en-US.json
@@ -33,7 +33,8 @@
     "mine": "(mine)",
     "loading": "Loading",
     "view": "View",
-    "modify": "Modify"
+    "modify": "Modify",
+    "help": "Help"
   },
   "pages": {
     "identifier": {
@@ -544,8 +545,9 @@
           "subtitle": "Select a timestamp to view the data for this specific time of day",
           "chart": {
             "title": "Data Events",
+            "legend": "Chart legend: green color marks data insertions, red color marks data deletions (=data updates in some cases)",
             "ylabel": "# Events",
-            "xlabel": "Timestamp"
+            "xlabel": "Data Timestamp (UTC)"
           },
           "timestamp": {
             "label": "Timestamp",
@@ -562,7 +564,7 @@
             "hint": "Value is a primary key"
           },
           "format": {
-            "hint": "Value must be in format"
+            "hint": "Format hint:"
           },
           "required": {
             "hint": "Required. "
diff --git a/dbrepo-ui/package.json b/dbrepo-ui/package.json
index d521b4e61f91766ca0355ec39dd8ea6496be7cf4..5af18adf4d6bd4c58e0e1e89fdf02b8937bb05b3 100644
--- a/dbrepo-ui/package.json
+++ b/dbrepo-ui/package.json
@@ -12,6 +12,7 @@
   },
   "dependencies": {
     "@artmizu/nuxt-prometheus": "^2.4.0",
+    "@date-fns/utc": "^2.1.0",
     "@fontsource/open-sans": "^5.0.24",
     "@mdi/font": "^7.4.47",
     "@nuxtjs/robots": "^3.0.0",
diff --git a/dbrepo-ui/pages/database/[database_id]/table/[table_id]/data.vue b/dbrepo-ui/pages/database/[database_id]/table/[table_id]/data.vue
index 85570c5c363ff0cafda08d389c6f8f055a932ce0..e9173726e96af147ab849aa2fcd4515484c03825 100644
--- a/dbrepo-ui/pages/database/[database_id]/table/[table_id]/data.vue
+++ b/dbrepo-ui/pages/database/[database_id]/table/[table_id]/data.vue
@@ -50,7 +50,7 @@
         :prepend-icon="$vuetify.display.lgAndUp ? 'mdi-update' : null"
         variant="flat"
         :text="$t('toolbars.table.data.version')"
-        class="ml-2"
+        class="ml-2 mr-2"
         @click.stop="pick" />
     </v-toolbar>
     <TimeDrift />
@@ -348,6 +348,7 @@ export default {
         const tableService = useTableService()
         tableService.exportData(this.$route.params.database_id, this.$route.params.table_id)
           .then((data) => {
+            this.downloadLoading = false
             const url = URL.createObjectURL(data)
             const link = document.createElement('a')
             link.href = url
diff --git a/dbrepo-ui/pages/database/[database_id]/table/[table_id]/schema.vue b/dbrepo-ui/pages/database/[database_id]/table/[table_id]/schema.vue
index c9f9882c6cff6bc4b2d91d2857eb3d3b15e37c84..65b6034348399844ddc8f195da9f9cc828056efa 100644
--- a/dbrepo-ui/pages/database/[database_id]/table/[table_id]/schema.vue
+++ b/dbrepo-ui/pages/database/[database_id]/table/[table_id]/schema.vue
@@ -28,9 +28,6 @@
         <template v-slot:item.extra="{ item }">
           <pre>{{ extra(item) }}</pre>
         </template>
-        <template v-slot:item.auto_generated="{ item }">
-          <span v-if="item.auto_generated">●</span> {{ item.auto_generated }}
-        </template>
         <template v-slot:item.column_concept="{ item }">
           <v-btn
             v-if="canAssignSemanticInformation && !hasConcept(item)"
@@ -167,7 +164,6 @@ export default {
         { value: 'column_concept', title: this.$t('pages.table.subpages.schema.concept.title') },
         { value: 'column_unit', title: this.$t('pages.table.subpages.schema.unit.title') },
         { value: 'is_null_allowed', title: this.$t('pages.table.subpages.schema.nullable.title') },
-        { value: 'auto_generated', title: this.$t('pages.table.subpages.schema.sequence.title') },
         { value: 'description', title: this.$t('pages.table.subpages.schema.description.title') },
       ],
       dateColumns: [],
diff --git a/dbrepo-ui/pages/database/[database_id]/view/[view_id]/data.vue b/dbrepo-ui/pages/database/[database_id]/view/[view_id]/data.vue
index 838ef2f0f1adf0a90f722ffd73d43457a5b92186..60bfe33a13f367ba88349a94a7808567dbde92d3 100644
--- a/dbrepo-ui/pages/database/[database_id]/view/[view_id]/data.vue
+++ b/dbrepo-ui/pages/database/[database_id]/view/[view_id]/data.vue
@@ -6,11 +6,19 @@
       color="secondary"
       :title="$t('toolbars.database.current')"
       flat>
+      <v-btn
+        v-if="canDownload"
+        :prepend-icon="$vuetify.display.lgAndUp ? 'mdi-download' : null"
+        variant="flat"
+        :loading="downloadLoading"
+        :text="$t('toolbars.table.data.download')"
+        class="mr-2"
+        @click.stop="download" />
       <v-btn
         :prepend-icon="$vuetify.display.lgAndUp ? 'mdi-refresh' : null"
         variant="flat"
         :text="$t('toolbars.table.data.refresh')"
-        class="mb-1 mr-2"
+        class="mr-2"
         :loading="loadingData"
         @click="reload" />
     </v-toolbar>
@@ -29,7 +37,6 @@
 <script>
 import TimeDrift from '@/components/TimeDrift.vue'
 import QueryResults from '@/components/subset/Results.vue'
-import { useCacheStore } from '@/stores/cache'
 
 export default {
   components: {
@@ -39,6 +46,7 @@ export default {
   data () {
     return {
       loadingData: false,
+      downloadLoading: false,
       items: [
         {
           title: this.$t('navigation.databases'),
@@ -73,6 +81,21 @@ export default {
         return null
       }
       return this.database.views.filter(v => v.id === Number(this.$route.params.view_id))[0]
+    },
+    access () {
+      return this.userStore.getAccess
+    },
+    canDownload () {
+      if (!this.view) {
+        return false
+      }
+      if (this.view.is_public) {
+        return true
+      }
+      if (!this.access) {
+        return false
+      }
+      return this.access.type === 'read' || this.access.type === 'write_own' || this.access.type === 'write_all'
     }
   },
   mounted () {
@@ -82,6 +105,31 @@ export default {
     reload () {
       this.$refs.queryResults.reExecute(Number(this.$route.params.view_id))
       this.$refs.queryResults.reExecuteCount(Number(this.$route.params.view_id))
+    },
+    download () {
+      this.downloadLoading = true
+      const viewService = useViewService()
+      viewService.exportData(this.$route.params.database_id, this.$route.params.view_id)
+        .then((data) => {
+          this.downloadLoading = false
+          const url = URL.createObjectURL(data)
+          const link = document.createElement('a')
+          link.href = url
+          link.download = 'view.csv'
+          document.body.appendChild(link)
+          link.click()
+        })
+        .catch(({code}) => {
+          this.downloadLoading = false
+          const toast = useToastInstance()
+          if (typeof code !== 'string') {
+            return
+          }
+          toast.error(this.$t(code))
+        })
+        .finally(() => {
+          this.downloadLoading = false
+        })
     }
   }
 }
diff --git a/dbrepo-ui/plugins/vuetify.ts b/dbrepo-ui/plugins/vuetify.ts
index 8f48e315dda516f320283f4b52e262d4e6d6f4ac..e942e529f50cbb6f7eddd9d63080a751006aaa94 100644
--- a/dbrepo-ui/plugins/vuetify.ts
+++ b/dbrepo-ui/plugins/vuetify.ts
@@ -4,6 +4,7 @@ import colors from 'vuetify/util/colors'
 import * as components from 'vuetify/components'
 import * as directives from 'vuetify/directives'
 import '@mdi/font/css/materialdesignicons.css'
+import {rgbParse} from "@kurkle/color";
 
 const tuwThemeLight: ThemeDefinition = {
   dark: false,
diff --git a/docker-compose.yml b/docker-compose.yml
index e8725ed37fa3f99fbaf036962c1c12483473488f..e8cec6aa893092b08fe98a1dc352fa4d5c378aa0 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -89,7 +89,6 @@ services:
     ports:
       - "8080:8080"
     environment:
-      BITNAMI_DEBUG: "true"
       KEYCLOAK_ENABLE_HTTPS: "false"
       KEYCLOAK_ENABLE_STATISTICS: "true"
       KEYCLOAK_ENABLE_HEALTH_ENDPOINTS: "true"
diff --git a/lib/python/dbrepo/api/dto.py b/lib/python/dbrepo/api/dto.py
index 72a12bc3d4b4959c5eecbb8a8fc471fa4c308794..9bfb775f0ce7f04e2d2ede8024867e022aab25eb 100644
--- a/lib/python/dbrepo/api/dto.py
+++ b/lib/python/dbrepo/api/dto.py
@@ -193,6 +193,7 @@ class ColumnType(str, Enum):
     LONGTEXT = "longtext"
     LONGBLOB = "longblob"
     ENUM = "enum"
+    SERIAL = "serial"
     SET = "set"
     BIT = "bit"
     TINYINT = "tinyint"
@@ -888,7 +889,6 @@ class Column(BaseModel):
     database_id: int
     table_id: int
     internal_name: str
-    auto_generated: bool
     column_type: ColumnType
     is_public: bool
     is_null_allowed: bool
@@ -918,7 +918,6 @@ class ViewColumn(BaseModel):
     name: str
     database_id: int
     internal_name: str
-    auto_generated: bool
     column_type: ColumnType
     is_public: bool
     is_null_allowed: bool
diff --git a/mkdocs.yml b/mkdocs.yml
index b2b9bc681d4187e4cbe74008ab0ab13cc1b116bf..28f04c8365f0d0a729a0eaf093cf87ebd72ff04a 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -12,6 +12,7 @@ nav:
     - Help with DBRepo: help.md
     - Installation: installation.md
     - Kubernetes: kubernetes.md
+    - changelog.md
     - contributing.md
   - Concepts:
     - Overview: concepts/index.md
diff --git a/yq.1 b/yq.1
new file mode 100644
index 0000000000000000000000000000000000000000..5e4a37a5d0d9bbc8f4060fa8c4c6a4f3af9c313d
--- /dev/null
+++ b/yq.1
@@ -0,0 +1,17196 @@
+'\" t
+.\" Automatically generated by Pandoc 2.14.2
+.\"
+.TH "YQ" "1" "" "" "yq (https://github.com/mikefarah/yq/) version v4.44.3"
+.hy
+.SH NAME
+.PP
+\f[I]yq\f[R] is a portable command-line data file processor
+.SH SYNOPSIS
+.PP
+yq [eval/eval-all] [expression] files..
+.PP
+eval/e - (default) Apply the expression to each document in each yaml
+file in sequence
+.PP
+eval-all/ea - Loads all yaml documents of all yaml files and runs
+expression once
+.SH DESCRIPTION
+.PP
+a lightweight and portable command-line data file processor.
+\f[C]yq\f[R] uses jq (https://github.com/stedolan/jq) like syntax but
+works with yaml, json, xml, csv, properties and TOML files.
+It doesn\[cq]t yet support everything \f[C]jq\f[R] does - but it does
+support the most common operations and functions, and more is being
+added continuously.
+.PP
+This documentation is also available at https://mikefarah.gitbook.io/yq/
+# QUICK GUIDE
+.SS Read a value:
+.IP
+.nf
+\f[C]
+yq \[aq].a.b[0].c\[aq] file.yaml
+\f[R]
+.fi
+.SS Pipe from STDIN:
+.IP
+.nf
+\f[C]
+cat file.yaml | yq \[aq].a.b[0].c\[aq]
+\f[R]
+.fi
+.SS Update a yaml file, in place
+.IP
+.nf
+\f[C]
+yq -i \[aq].a.b[0].c = \[dq]cool\[dq]\[aq] file.yaml
+\f[R]
+.fi
+.SS Update using environment variables
+.IP
+.nf
+\f[C]
+NAME=mike yq -i \[aq].a.b[0].c = strenv(NAME)\[aq] file.yaml
+\f[R]
+.fi
+.SS Merge multiple files
+.IP
+.nf
+\f[C]
+yq ea \[aq]. as $item ireduce ({}; . * $item )\[aq] path/to/*.yml
+\f[R]
+.fi
+.PP
+Note the use of \f[C]ea\f[R] to evaluate all files at once (instead of
+in sequence.)
+.SS Multiple updates to a yaml file
+.IP
+.nf
+\f[C]
+yq -i \[aq]
+  .a.b[0].c = \[dq]cool\[dq] |
+  .x.y.z = \[dq]foobar\[dq] |
+  .person.name = strenv(NAME)
+\[aq] file.yaml
+\f[R]
+.fi
+.PP
+See the documentation (https://mikefarah.gitbook.io/yq/) for more.
+.SH KNOWN ISSUES / MISSING FEATURES
+.IP \[bu] 2
+\f[C]yq\f[R] attempts to preserve comment positions and whitespace as
+much as possible, but it does not handle all scenarios (see
+https://github.com/go-yaml/yaml/tree/v3 for details)
+.IP \[bu] 2
+Powershell has its own\&...opinions:
+https://mikefarah.gitbook.io/yq/usage/tips-and-tricks#quotes-in-windows-powershell
+.SH BUGS / ISSUES / FEATURE REQUESTS
+.PP
+Please visit the GitHub page https://github.com/mikefarah/yq/.
+.SH HOW IT WORKS
+.PP
+In \f[C]yq\f[R] expressions are made up of operators and pipes.
+A context of nodes is passed through the expression and each operation
+takes the context as input and returns a new context as output.
+That output is piped in as input for the next operation in the
+expression.
+To begin with, the context is set to the first yaml document of the
+first yaml file (if processing in sequence using eval).
+.PP
+Lets look at a couple of examples.
+.SS Simple assignment example
+.PP
+Given a document like:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+with an expression:
+.IP
+.nf
+\f[C]
+\&.a = .b
+\f[R]
+.fi
+.PP
+Like math expressions - operator precedence is important.
+.PP
+The \f[C]=\f[R] operator takes two arguments, a \f[C]lhs\f[R]
+expression, which in this case is \f[C].a\f[R] and \f[C]rhs\f[R]
+expression which is \f[C].b\f[R].
+.PP
+It pipes the current, lets call it `root' context through the
+\f[C]lhs\f[R] expression of \f[C].a\f[R] to return the node
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.PP
+Side note: this node holds not only its value `cat', but comments and
+metadata too, including path and parent information.
+.PP
+The \f[C]=\f[R] operator then pipes the `root' context through the
+\f[C]rhs\f[R] expression of \f[C].b\f[R] to return the node
+.IP
+.nf
+\f[C]
+dog
+\f[R]
+.fi
+.PP
+Both sides have now been evaluated, so now the operator copies across
+the value from the RHS (\f[C].b\f[R]) to the LHS (\f[C].a\f[R]), and it
+returns the now updated context:
+.IP
+.nf
+\f[C]
+a: dog
+b: dog
+\f[R]
+.fi
+.SS Complex assignment, operator precedence rules
+.PP
+Just like math expressions - \f[C]yq\f[R] expressions have an order of
+precedence.
+The pipe \f[C]|\f[R] operator has a low order of precedence, so
+operators with higher precedence will get evaluated first.
+.PP
+Most of the time, this is intuitively what you\[cq]d want, for instance
+\f[C].a = \[dq]cat\[dq] | .b = \[dq]dog\[dq]\f[R] is effectively:
+\f[C](.a = \[dq]cat\[dq]) | (.b = \[dq]dog\[dq])\f[R].
+.PP
+However, this is not always the case, particularly if you have a complex
+LHS or RHS expression, for instance if you want to select particular
+nodes to update.
+.PP
+Lets say you had:
+.IP
+.nf
+\f[C]
+- name: bob
+  fruit: apple
+- name: sally
+  fruit: orange
+\f[R]
+.fi
+.PP
+Lets say you wanted to update the \f[C]sally\f[R] entry to have fruit:
+`mango'.
+The \f[I]incorrect\f[R] way to do that is:
+\f[C].[] | select(.name == \[dq]sally\[dq]) | .fruit = \[dq]mango\[dq]\f[R].
+.PP
+Because \f[C]|\f[R] has a low operator precedence, this will be
+evaluated (\f[I]incorrectly\f[R]) as :
+\f[C](.[]) | (select(.name == \[dq]sally\[dq])) | (.fruit = \[dq]mango\[dq])\f[R].
+What you\[cq]ll see is only the updated segment returned:
+.IP
+.nf
+\f[C]
+name: sally
+fruit: mango
+\f[R]
+.fi
+.PP
+To properly update this yaml, you will need to use brackets (think
+BODMAS from maths) and wrap the entire LHS:
+\f[C](.[] | select(.name == \[dq]sally\[dq]) | .fruit) = \[dq]mango\[dq]\f[R]
+.PP
+Now that entire LHS expression is passed to the `assign' (\f[C]=\f[R])
+operator, and the yaml is correctly updated and returned:
+.IP
+.nf
+\f[C]
+- name: bob
+  fruit: apple
+- name: sally
+  fruit: mango
+\f[R]
+.fi
+.SS Relative update (e.g.\ \f[C]|=\f[R])
+.PP
+There is another form of the \f[C]=\f[R] operator which we call the
+relative form.
+It\[cq]s very similar to \f[C]=\f[R] but with one key difference when
+evaluating the RHS expression.
+.PP
+In the plain form, we pass in the `root' level context to the RHS
+expression.
+In relative form, we pass in \f[I]each result of the LHS\f[R] to the RHS
+expression.
+Let\[cq]s go through an example.
+.PP
+Given a document like:
+.IP
+.nf
+\f[C]
+a: 1
+b: thing
+\f[R]
+.fi
+.PP
+with an expression:
+.IP
+.nf
+\f[C]
+\&.a |= . + 1
+\f[R]
+.fi
+.PP
+Similar to the \f[C]=\f[R] operator, \f[C]|=\f[R] takes two operands,
+the LHS and RHS.
+.PP
+It pipes the current context (the whole document) through the LHS
+expression of \f[C].a\f[R] to get the node value:
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.PP
+Now it pipes \f[I]that LHS context\f[R] into the RHS expression
+\f[C]. + 1\f[R] (whereas in the \f[C]=\f[R] plain form it piped the
+original document context into the RHS) to yield:
+.IP
+.nf
+\f[C]
+2
+\f[R]
+.fi
+.PP
+The assignment operator then copies across the value from the RHS to the
+value on the LHS, and it returns the now updated `root' context:
+.IP
+.nf
+\f[C]
+a: 2
+b: thing
+\f[R]
+.fi
+.SH Add
+.PP
+Add behaves differently according to the type of the LHS: * arrays:
+concatenate * number scalars: arithmetic addition * string scalars:
+concatenate * maps: shallow merge (use the multiply operator
+(\f[C]*\f[R]) to deeply merge)
+.PP
+Use \f[C]+=\f[R] as a relative append assign for things like increment.
+Note that \f[C].a += .x\f[R] is equivalent to running
+\f[C].a = .a + .x\f[R].
+.SS Concatenate arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - 1
+  - 2
+b:
+  - 3
+  - 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a + .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+- 4
+\f[R]
+.fi
+.SS Concatenate to existing array
+.PP
+Note that the styling of \f[C]a\f[R] is kept.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: [1,2]
+b:
+  - 3
+  - 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: [1, 2, 3, 4]
+b:
+  - 3
+  - 4
+\f[R]
+.fi
+.SS Concatenate null to array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - 1
+  - 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a + null\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+\f[R]
+.fi
+.SS Append to existing array
+.PP
+Note that the styling is copied from existing array elements
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: [\[aq]dog\[aq]]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += \[dq]cat\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: [\[aq]dog\[aq], \[aq]cat\[aq]]
+\f[R]
+.fi
+.SS Prepend to existing array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = [\[dq]cat\[dq]] + .a\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - dog
+\f[R]
+.fi
+.SS Add new object to array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - dog: woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a + {\[dq]cat\[dq]: \[dq]meow\[dq]}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- dog: woof
+- cat: meow
+\f[R]
+.fi
+.SS Relative append
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  a1:
+    b:
+      - cat
+  a2:
+    b:
+      - dog
+  a3: {}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a[].b += [\[dq]mouse\[dq]]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  a1:
+    b:
+      - cat
+      - mouse
+  a2:
+    b:
+      - dog
+      - mouse
+  a3:
+    b:
+      - mouse
+\f[R]
+.fi
+.SS String concatenation
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: catmeow
+b: meow
+\f[R]
+.fi
+.SS Number addition - float
+.PP
+If the lhs or rhs are floats then the expression will be calculated with
+floats.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 4.9
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a + .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 7.9
+b: 4.9
+\f[R]
+.fi
+.SS Number addition - int
+.PP
+If both the lhs and rhs are ints then the expression will be calculated
+with ints.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a + .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 7
+b: 4
+\f[R]
+.fi
+.SS Increment numbers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] += 1\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 4
+b: 6
+\f[R]
+.fi
+.SS Date addition
+.PP
+You can add durations to dates.
+Assumes RFC3339 date time format, see date-time
+operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2021-01-01T00:00:00Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += \[dq]3h10m\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2021-01-01T03:10:00Z
+\f[R]
+.fi
+.SS Date addition - custom format
+.PP
+You can add durations to dates.
+See date-time
+operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq], .a += \[dq]3h1m\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 6:00AM GMT
+\f[R]
+.fi
+.SS Add to null
+.PP
+Adding to null simply returns the rhs
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]null + \[dq]cat\[dq]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Add maps to shallow merge
+.PP
+Adding objects together shallow merges them.
+Use \f[C]*\f[R] to deeply merge.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  thing:
+    name: Astuff
+    value: x
+  a1: cool
+b:
+  thing:
+    name: Bstuff
+    legs: 3
+  b1: neat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  thing:
+    name: Bstuff
+    legs: 3
+  a1: cool
+  b1: neat
+b:
+  thing:
+    name: Bstuff
+    legs: 3
+  b1: neat
+\f[R]
+.fi
+.SS Custom types: that are really strings
+.PP
+When custom tags are encountered, yq will try to decode the underlying
+type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse cat
+b: !goat _meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse cat_meow
+b: !goat _meow
+\f[R]
+.fi
+.SS Custom types: that are really numbers
+.PP
+When custom tags are encountered, yq will try to decode the underlying
+type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse 1.2
+b: !goat 2.3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse 3.5
+b: !goat 2.3
+\f[R]
+.fi
+.SH Alternative (Default value)
+.PP
+This operator is used to provide alternative (or default) values when a
+particular expression is either null or false.
+.SS LHS is defined
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: bridge
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bridge
+\f[R]
+.fi
+.SS LHS is not defined
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+hello
+\f[R]
+.fi
+.SS LHS is null
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: \[ti]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+hello
+\f[R]
+.fi
+.SS LHS is false
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: false
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a // \[dq]hello\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+hello
+\f[R]
+.fi
+.SS RHS is an expression
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: false
+b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a // .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Update or create - entity exists
+.PP
+This initialises \f[C]a\f[R] if it\[cq]s not present
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a // (.a = 0)) += 1\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2
+\f[R]
+.fi
+.SS Update or create - entity does not exist
+.PP
+This initialises \f[C]a\f[R] if it\[cq]s not present
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: camel
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a // (.a = 0)) += 1\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: camel
+a: 1
+\f[R]
+.fi
+.SH Anchor and Alias Operators
+.PP
+Use the \f[C]alias\f[R] and \f[C]anchor\f[R] operators to read and write
+yaml aliases and anchors.
+The \f[C]explode\f[R] operator normalises a yaml file (dereference (or
+expands) aliases and remove anchor names).
+.PP
+\f[C]yq\f[R] supports merge aliases (like \f[C]<<: *blah\f[R]) however
+this is no longer in the standard yaml spec (1.2) and so \f[C]yq\f[R]
+will automatically add the \f[C]!!merge\f[R] tag to these nodes as it is
+effectively a custom tag.
+.SS Merge one map
+.PP
+see https://yaml.org/type/merge.html
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<: *CENTER
+  r: 10
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[4] | explode(.)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+x: 1
+y: 2
+r: 10
+\f[R]
+.fi
+.SS Merge multiple maps
+.PP
+see https://yaml.org/type/merge.html
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<:
+    - *CENTER
+    - *BIG
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[4] | explode(.)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+r: 10
+x: 1
+y: 2
+\f[R]
+.fi
+.SS Override
+.PP
+see https://yaml.org/type/merge.html
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- &CENTER
+  x: 1
+  y: 2
+- &LEFT
+  x: 0
+  y: 2
+- &BIG
+  r: 10
+- &SMALL
+  r: 1
+- !!merge <<:
+    - *BIG
+    - *LEFT
+    - *SMALL
+  x: 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[4] | explode(.)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+r: 10
+x: 1
+y: 2
+\f[R]
+.fi
+.SS Get anchor
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &billyBob cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | anchor\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+billyBob
+\f[R]
+.fi
+.SS Set anchor
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a anchor = \[dq]foobar\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: &foobar cat
+\f[R]
+.fi
+.SS Set anchor relatively using assign-update
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a anchor |= .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: &cat
+  b: cat
+\f[R]
+.fi
+.SS Get alias
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: &billyBob meow
+a: *billyBob
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | alias\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+billyBob
+\f[R]
+.fi
+.SS Set alias
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: &meow purr
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a alias = \[dq]meow\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: &meow purr
+a: *meow
+\f[R]
+.fi
+.SS Set alias to blank does nothing
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: &meow purr
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a alias = \[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: &meow purr
+a: cat
+\f[R]
+.fi
+.SS Set alias relatively using assign-update
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: &meow purr
+a:
+  f: meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a alias |= .f\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: &meow purr
+a: *meow
+\f[R]
+.fi
+.SS Explode alias and anchor
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+f:
+  a: &a cat
+  b: *a
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]explode(.f)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+f:
+  a: cat
+  b: cat
+\f[R]
+.fi
+.SS Explode with no aliases or anchors
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: mike
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]explode(.a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: mike
+\f[R]
+.fi
+.SS Explode with alias keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+f:
+  a: &a cat
+  *a: b
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]explode(.f)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+f:
+  a: cat
+  cat: b
+\f[R]
+.fi
+.SS Explode with merge anchors
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]explode(.)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo:
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar:
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: bar_b
+  thing: foo_thing
+  c: foobarList_c
+  a: foo_a
+foobar:
+  c: foo_c
+  a: foo_a
+  thing: foobar_thing
+\f[R]
+.fi
+.SS Dereference and update a field
+.PP
+Use explode with multiply to dereference an object
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+item_value: &item_value
+  value: true
+thingOne:
+  name: item_1
+  !!merge <<: *item_value
+thingTwo:
+  name: item_2
+  !!merge <<: *item_value
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].thingOne |= explode(.) * {\[dq]value\[dq]: false}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+item_value: &item_value
+  value: true
+thingOne:
+  name: item_1
+  value: false
+thingTwo:
+  name: item_2
+  !!merge <<: *item_value
+\f[R]
+.fi
+.SH Array to Map
+.PP
+Use this operator to convert an array to a map.
+The indices are used as map keys, null values in the array are skipped
+over.
+.PP
+Behind the scenes, this is implemented using reduce:
+.IP
+.nf
+\f[C]
+(.[] | select(. != null) ) as $i ireduce({}; .[$i | key] = $i)
+\f[R]
+.fi
+.SS Simple example
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool:
+  - null
+  - null
+  - hello
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].cool |= array_to_map\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool:
+  2: hello
+\f[R]
+.fi
+.SH Assign (Update)
+.PP
+This operator is used to update node values.
+It can be used in either the:
+.SS plain form: \f[C]=\f[R]
+.PP
+Which will set the LHS node values equal to the RHS node values.
+The RHS expression is run against the matching nodes in the pipeline.
+.SS relative form: \f[C]|=\f[R]
+.PP
+This will do a similar thing to the plain form, but the RHS expression
+is run with \f[I]each LHS node as context\f[R].
+This is useful for updating values based on old values, e.g.\ increment.
+.SS Flags
+.IP \[bu] 2
+\f[C]c\f[R] clobber custom tags
+.SS Create yaml file
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq].a.b = \[dq]cat\[dq] | .x = \[dq]frog\[dq]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+x: frog
+\f[R]
+.fi
+.SS Update node to be the child value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    g: foof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  g: foof
+\f[R]
+.fi
+.SS Double elements in an array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= . * 2\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 2
+- 4
+- 6
+\f[R]
+.fi
+.SS Update node from another file
+.PP
+Note this will also work when the second file is a scalar
+(string/number)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: apples
+\f[R]
+.fi
+.PP
+And another sample another.yml file of:
+.IP
+.nf
+\f[C]
+b: bob
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq eval-all \[aq]select(fileIndex==0).a = select(fileIndex==1) | select(fileIndex==0)\[aq] sample.yml another.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: bob
+\f[R]
+.fi
+.SS Update node to be the sibling value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: child
+b: sibling
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: sibling
+b: sibling
+\f[R]
+.fi
+.SS Updated multiple paths
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: fieldA
+b: fieldB
+c: fieldC
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a, .c) = \[dq]potato\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: potato
+b: fieldB
+c: potato
+\f[R]
+.fi
+.SS Update string value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b = \[dq]frog\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: frog
+\f[R]
+.fi
+.SS Update string value via |=
+.PP
+Note there is no difference between \f[C]=\f[R] and \f[C]|=\f[R] when
+the RHS is a scalar
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b |= \[dq]frog\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: frog
+\f[R]
+.fi
+.SS Update deeply selected results
+.PP
+Note that the LHS is wrapped in brackets! This is to ensure we don\[cq]t
+first filter out the yaml and then update the snippet.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: apple
+  c: cactus
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a[] | select(. == \[dq]apple\[dq])) = \[dq]frog\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: frog
+  c: cactus
+\f[R]
+.fi
+.SS Update array values
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- candy
+- apple
+- sandy
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.[] | select(. == \[dq]*andy\[dq])) = \[dq]bogs\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- bogs
+- apple
+- bogs
+\f[R]
+.fi
+.SS Update empty object
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b |= \[dq]bogs\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: bogs
+\f[R]
+.fi
+.SS Update node value that has an anchor
+.PP
+Anchor will remain
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cool cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = \[dq]dog\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: &cool dog
+\f[R]
+.fi
+.SS Update empty object and array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b.[0] |= \[dq]bogs\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - bogs
+\f[R]
+.fi
+.SS Custom types are maintained by default
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !cat meow
+b: !dog woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !cat woof
+b: !dog woof
+\f[R]
+.fi
+.SS Custom types: clobber
+.PP
+Use the \f[C]c\f[R] option to clobber custom tags
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !cat meow
+b: !dog woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a =c .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !dog woof
+b: !dog woof
+\f[R]
+.fi
+.SH Boolean Operators
+.PP
+The \f[C]or\f[R] and \f[C]and\f[R] operators take two parameters and
+return a boolean result.
+.PP
+\f[C]not\f[R] flips a boolean from true to false, or vice versa.
+.PP
+\f[C]any\f[R] will return \f[C]true\f[R] if there are any \f[C]true\f[R]
+values in an array sequence, and \f[C]all\f[R] will return true if
+\f[I]all\f[R] elements in an array are true.
+.PP
+\f[C]any_c(condition)\f[R] and \f[C]all_c(condition)\f[R] are like
+\f[C]any\f[R] and \f[C]all\f[R] but they take a condition expression
+that is used against each element to determine if it\[cq]s
+\f[C]true\f[R].
+Note: in \f[C]jq\f[R] you can simply pass a condition to \f[C]any\f[R]
+or \f[C]all\f[R] and it simply works - \f[C]yq\f[R] isn\[cq]t that
+clever..yet
+.PP
+These are most commonly used with the \f[C]select\f[R] operator to
+filter particular nodes.
+.SS Related Operators
+.IP \[bu] 2
+equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators
+here (https://mikefarah.gitbook.io/yq/operators/equals)
+.IP \[bu] 2
+comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators
+here (https://mikefarah.gitbook.io/yq/operators/compare)
+.IP \[bu] 2
+select operator here (https://mikefarah.gitbook.io/yq/operators/select)
+.SS \f[C]or\f[R] example
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]true or false\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS \[lq]yes\[rq] and \[lq]no\[rq] are strings
+.PP
+In the yaml 1.2 standard, support for yes/no as booleans was dropped -
+they are now considered strings.
+See `10.2.1.2. Boolean' in https://yaml.org/spec/1.2.2/
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- yes
+- no
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | tag\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+!!str
+!!str
+\f[R]
+.fi
+.SS \f[C]and\f[R] example
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]true and false\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Matching nodes with select, equals and or
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: bird
+  b: dog
+- a: frog
+  b: bird
+- a: cat
+  b: fly
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][.[] | select(.a == \[dq]cat\[dq] or .b == \[dq]dog\[dq])]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: bird
+  b: dog
+- a: cat
+  b: fly
+\f[R]
+.fi
+.SS \f[C]any\f[R] returns true if any boolean in a given array is true
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- false
+- true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]any\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS \f[C]any\f[R] returns false for an empty array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]any\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS \f[C]any_c\f[R] returns true if any element in the array is true for the given condition.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - rad
+  - awesome
+b:
+  - meh
+  - whatever
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= any_c(. == \[dq]awesome\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: true
+b: false
+\f[R]
+.fi
+.SS \f[C]all\f[R] returns true if all booleans in a given array are true
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- true
+- true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]all\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS \f[C]all\f[R] returns true for an empty array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]all\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS \f[C]all_c\f[R] returns true if all elements in the array are true for the given condition.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - rad
+  - awesome
+b:
+  - meh
+  - 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= all_c(tag == \[dq]!!str\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: true
+b: false
+\f[R]
+.fi
+.SS Not true is false
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]true | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Not false is true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]false | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS String values considered to be true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]cat\[dq] | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Empty string value considered to be true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]\[dq] | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Numbers are considered to be true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]1 | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Zero is considered to be true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]0 | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Null is considered to be false
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[ti] | not\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SH Collect into Array
+.PP
+This creates an array using the expression between the square brackets.
+.SS Collect empty
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq][]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.SS Collect single
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq][\[dq]cat\[dq]]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- cat
+\f[R]
+.fi
+.SS Collect many
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][.a, .b]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- cat
+- dog
+\f[R]
+.fi
+.SH Column
+.PP
+Returns the column of the matching node.
+Starts from 1, 0 indicates there was no column data.
+.PP
+Column is the number of characters that precede that node on the line it
+starts.
+.SS Returns column of \f[I]value\f[R] node
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: bob
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b | column\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+4
+\f[R]
+.fi
+.SS Returns column of \f[I]key\f[R] node
+.PP
+Pipe through the key operator to get the column of the key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: bob
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b | key | column\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS First column is 1
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | key | column\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS No column data is 0
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]{\[dq]a\[dq]: \[dq]new entry\[dq]} | column\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+\f[R]
+.fi
+.SH Comment Operators
+.PP
+Use these comment operators to set or retrieve comments.
+Note that line comments on maps/arrays are actually set on the
+\f[I]key\f[R] node as opposed to the \f[I]value\f[R] (map/array).
+See below for examples.
+.PP
+Like the \f[C]=\f[R] and \f[C]|=\f[R] assign operators, the same syntax
+applies when updating comments:
+.SS plain form: \f[C]=\f[R]
+.PP
+This will set the LHS nodes\[cq] comments equal to the expression on the
+RHS.
+The RHS is run against the matching nodes in the pipeline
+.SS relative form: \f[C]|=\f[R]
+.PP
+This is similar to the plain form, but it evaluates the RHS with
+\f[I]each matching LHS node as context\f[R].
+This is useful if you want to set the comments as a relative expression
+of the node, for instance its value or path.
+.SS Set line comment
+.PP
+Set the comment on the key node for more reliability (see below).
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a line_comment=\[dq]single\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat # single
+\f[R]
+.fi
+.SS Set line comment of a maps/arrays
+.PP
+For maps and arrays, you need to set the line comment on the
+\f[I]key\f[R] node.
+This will also work for scalars.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a | key) line_comment=\[dq]single\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: # single
+  b: things
+\f[R]
+.fi
+.SS Use update assign to perform relative updates
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. line_comment |= .\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat # cat
+b: dog # dog
+\f[R]
+.fi
+.SS Where is the comment - map key example
+.PP
+The underlying yaml parser can assign comments in a document to
+surprising nodes.
+Use an expression like this to find where your comment is.
+`p' indicates the path, `isKey' is if the node is a map key (as opposed
+to a map value).
+From this, you can see the `hello-world-comment' is actually on the
+`hello' key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+hello: # hello-world-comment
+  message: world
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][... | {\[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]hc\[dq]: headComment, \[dq]lc\[dq]: lineComment, \[dq]fc\[dq]: footComment}]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- p: \[dq]\[dq]
+  isKey: false
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: hello
+  isKey: true
+  hc: \[dq]\[dq]
+  lc: hello-world-comment
+  fc: \[dq]\[dq]
+- p: hello
+  isKey: false
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: hello.message
+  isKey: true
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: hello.message
+  isKey: false
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+\f[R]
+.fi
+.SS Retrieve comment - map key example
+.PP
+From the previous example, we know that the comment is on the `hello'
+\f[I]key\f[R] as a lineComment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+hello: # hello-world-comment
+  message: world
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].hello | key | line_comment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+hello-world-comment
+\f[R]
+.fi
+.SS Where is the comment - array example
+.PP
+The underlying yaml parser can assign comments in a document to
+surprising nodes.
+Use an expression like this to find where your comment is.
+`p' indicates the path, `isKey' is if the node is a map key (as opposed
+to a map value).
+From this, you can see the `under-name-comment' is actually on the first
+child
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name:
+  # under-name-comment
+  - first-array-child
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][... | {\[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]hc\[dq]: headComment, \[dq]lc\[dq]: lineComment, \[dq]fc\[dq]: footComment}]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- p: \[dq]\[dq]
+  isKey: false
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: name
+  isKey: true
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: name
+  isKey: false
+  hc: \[dq]\[dq]
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+- p: name.0
+  isKey: false
+  hc: under-name-comment
+  lc: \[dq]\[dq]
+  fc: \[dq]\[dq]
+\f[R]
+.fi
+.SS Retrieve comment - array example
+.PP
+From the previous example, we know that the comment is on the first
+child as a headComment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name:
+  # under-name-comment
+  - first-array-child
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].name[0] | headComment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+under-name-comment
+\f[R]
+.fi
+.SS Set head comment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. head_comment=\[dq]single\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# single
+a: cat
+\f[R]
+.fi
+.SS Set head comment of a map entry
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+f: foo
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a | key) head_comment=\[dq]single\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+f: foo
+# single
+a:
+  b: cat
+\f[R]
+.fi
+.SS Set foot comment, using an expression
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. foot_comment=.a\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+# cat
+\f[R]
+.fi
+.SS Remove comment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat # comment
+b: dog # leave this
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a line_comment=\[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: dog # leave this
+\f[R]
+.fi
+.SS Remove (strip) all comments
+.PP
+Note the use of \f[C]...\f[R] to ensure key nodes are included.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# hi
+
+a: cat # comment
+# great
+b: # key comment
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]... comments=\[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b:
+\f[R]
+.fi
+.SS Get line comment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# welcome!
+
+a: cat # meow
+# have a great day
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | line_comment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+meow
+\f[R]
+.fi
+.SS Get head comment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# welcome!
+
+a: cat # meow
+
+# have a great day
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. | head_comment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+welcome!
+\f[R]
+.fi
+.SS Head comment with document split
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# welcome!
+---
+# bob
+a: cat # meow
+
+# have a great day
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]head_comment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+welcome!
+bob
+\f[R]
+.fi
+.SS Get foot comment
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# welcome!
+
+a: cat # meow
+
+# have a great day
+# no really
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. | foot_comment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+have a great day
+no really
+\f[R]
+.fi
+.SH Compare Operators
+.PP
+Comparison operators (\f[C]>\f[R], \f[C]>=\f[R], \f[C]<\f[R],
+\f[C]<=\f[R]) can be used for comparing scalar values of the same type.
+.PP
+The following types are currently supported:
+.IP \[bu] 2
+numbers
+.IP \[bu] 2
+strings
+.IP \[bu] 2
+datetimes
+.SS Related Operators
+.IP \[bu] 2
+equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators
+here (https://mikefarah.gitbook.io/yq/operators/equals)
+.IP \[bu] 2
+boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc)
+here (https://mikefarah.gitbook.io/yq/operators/boolean-operators)
+.IP \[bu] 2
+select operator here (https://mikefarah.gitbook.io/yq/operators/select)
+.SS Compare numbers (>)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 5
+b: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a > .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Compare equal numbers (>=)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 5
+b: 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a >= .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Compare strings
+.PP
+Compares strings by their bytecode.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: zoo
+b: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a > .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Compare date times
+.PP
+You can compare date times.
+Assumes RFC3339 date time format, see date-time
+operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2021-01-01T03:10:00Z
+b: 2020-01-01T03:10:00Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a > .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Both sides are null: > is false
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq].a > .b\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Both sides are null: >= is true
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq].a >= .b\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SH Contains
+.PP
+This returns \f[C]true\f[R] if the context contains the passed in
+parameter, and false otherwise.
+For arrays, this will return true if the passed in array is contained
+within the array.
+For strings, it will return true if the string is a substring.
+.PP
+{% hint style=\[lq]warning\[rq] %}
+.PP
+\f[I]Note\f[R] that, just like jq, when checking if an array of strings
+\f[C]contains\f[R] another, this will use \f[C]contains\f[R] and
+\f[I]not\f[R] equals to check each string.
+This means an expression like \f[C]contains([\[dq]cat\[dq]])\f[R] will
+return true for an array \f[C][\[dq]cats\[dq]]\f[R].
+.PP
+See the \[lq]Array has a subset array\[rq] example below on how to check
+for a subset.
+.PP
+{% endhint %}
+.SS Array contains array
+.PP
+Array is equal or subset of
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foobar
+- foobaz
+- blarp
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]contains([\[dq]baz\[dq], \[dq]bar\[dq]])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Array has a subset array
+.PP
+Subtract the superset array from the subset, if there\[cq]s anything
+left, it\[cq]s not a subset
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foobar
+- foobaz
+- blarp
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][\[dq]baz\[dq], \[dq]bar\[dq]] - . | length == 0\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS Object included in array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\[dq]foo\[dq]: 12
+\[dq]bar\[dq]:
+  - 1
+  - 2
+  - \[dq]barp\[dq]: 12
+    \[dq]blip\[dq]: 13
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]contains({\[dq]bar\[dq]: [{\[dq]barp\[dq]: 12}]})\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Object not included in array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\[dq]foo\[dq]: 12
+\[dq]bar\[dq]:
+  - 1
+  - 2
+  - \[dq]barp\[dq]: 12
+    \[dq]blip\[dq]: 13
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]contains({\[dq]foo\[dq]: 12, \[dq]bar\[dq]: [{\[dq]barp\[dq]: 15}]})\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+\f[R]
+.fi
+.SS String contains substring
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foobar
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]contains(\[dq]bar\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS String equals string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]contains(\[dq]meow\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SH Create, Collect into Object
+.PP
+This is used to construct objects (or maps).
+This can be used against existing yaml, or to create fresh yaml
+documents.
+.SS Collect empty object
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]{}\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.SS Wrap (prefix) existing object
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: Mike
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]{\[dq]wrap\[dq]: .}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+wrap:
+  name: Mike
+\f[R]
+.fi
+.SS Using splat to create multiple objects
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: Mike
+pets:
+  - cat
+  - dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]{.name: .pets.[]}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Mike: cat
+Mike: dog
+\f[R]
+.fi
+.SS Working with multiple documents
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: Mike
+pets:
+  - cat
+  - dog
+---
+name: Rosey
+pets:
+  - monkey
+  - sheep
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]{.name: .pets.[]}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Mike: cat
+Mike: dog
+---
+Rosey: monkey
+Rosey: sheep
+\f[R]
+.fi
+.SS Creating yaml from scratch
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]{\[dq]wrap\[dq]: \[dq]frog\[dq]}\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+wrap: frog
+\f[R]
+.fi
+.SS Creating yaml from scratch with multiple objects
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq](.a.b = \[dq]foo\[dq]) | (.d.e = \[dq]bar\[dq])\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: foo
+d:
+  e: bar
+\f[R]
+.fi
+.SH Date Time
+.PP
+Various operators for parsing and manipulating dates.
+.SS Date time formattings
+.PP
+This uses Golang\[cq]s built in time library for parsing and formatting
+date times.
+.PP
+When not specified, the RFC3339 standard is assumed
+\f[C]2006-01-02T15:04:05Z07:00\f[R] for parsing.
+.PP
+To specify a custom parsing format, use the \f[C]with_dtf\f[R] operator.
+The first parameter sets the datetime parsing format for the expression
+in the second parameter.
+The expression can be any valid \f[C]yq\f[R] expression tree.
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]myformat\[dq]; .a + \[dq]3h\[dq] | tz(\[dq]Australia/Melbourne\[dq]))\[aq]
+\f[R]
+.fi
+.PP
+See the library docs (https://pkg.go.dev/time#pkg-constants) for
+examples of formatting options.
+.SS Timezones
+.PP
+This uses Golang\[cq]s built in LoadLocation function to parse timezones
+strings.
+See the library docs (https://pkg.go.dev/time#LoadLocation) for more
+details.
+.SS Durations
+.PP
+Durations are parsed using Golang\[cq]s built in
+ParseDuration (https://pkg.go.dev/time#ParseDuration) function.
+.PP
+You can add durations to time using the \f[C]+\f[R] operator.
+.SS Format: from standard RFC3339 format
+.PP
+Providing a single parameter assumes a standard RFC3339 datetime format.
+If the target format is not a valid yaml datetime format, the result
+will be a string tagged node.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2001-12-15T02:59:43.1Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= format_datetime(\[dq]Monday, 02-Jan-06 at 3:04PM\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM
+\f[R]
+.fi
+.SS Format: from custom date time
+.PP
+Use with_dtf to set a custom datetime format for parsing.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM\[dq]; format_datetime(\[dq]2006-01-02\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2001-12-15
+\f[R]
+.fi
+.SS Format: get the day of the week
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2001-12-15
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | format_datetime(\[dq]Monday\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Saturday
+\f[R]
+.fi
+.SS Now
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].updated = now\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cool
+updated: 2021-05-19T01:02:03Z
+\f[R]
+.fi
+.SS From Unix
+.PP
+Converts from unix time.
+Note, you don\[cq]t have to pipe through the tz operator :)
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]1675301929 | from_unix | tz(\[dq]UTC\[dq])\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+2023-02-02T01:38:49Z
+\f[R]
+.fi
+.SS To Unix
+.PP
+Converts to unix time
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]now | to_unix\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1621386123
+\f[R]
+.fi
+.SS Timezone: from standard RFC3339 format
+.PP
+Returns a new datetime in the specified timezone.
+Specify standard IANA Time Zone format or `utc', `local'.
+When given a single parameter, this assumes the datetime is in RFC3339
+format.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].updated = (now | tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cool
+updated: 2021-05-19T11:02:03+10:00
+\f[R]
+.fi
+.SS Timezone: with custom format
+.PP
+Specify standard IANA Time Zone format or `utc', `local'
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 1:59PM AEDT
+\f[R]
+.fi
+.SS Add and tz custom format
+.PP
+Specify standard IANA Time Zone format or `utc', `local'
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; tz(\[dq]Australia/Sydney\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 1:59PM AEDT
+\f[R]
+.fi
+.SS Date addition
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2021-01-01T00:00:00Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a += \[dq]3h10m\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2021-01-01T03:10:00Z
+\f[R]
+.fi
+.SS Date subtraction
+.PP
+You can subtract durations from dates.
+Assumes RFC3339 date time format, see date-time
+operators (https://mikefarah.gitbook.io/yq/operators/datetime#date-time-formattings)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2021-01-01T03:10:00Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a -= \[dq]3h10m\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2021-01-01T00:00:00Z
+\f[R]
+.fi
+.SS Date addition - custom format
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; .a += \[dq]3h1m\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 6:00AM GMT
+\f[R]
+.fi
+.SS Date script with custom format
+.PP
+You can embed full expressions in with_dtf if needed.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq]; .a = (.a + \[dq]3h1m\[dq] | tz(\[dq]Australia/Perth\[dq])))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:00PM AWST
+\f[R]
+.fi
+.SH Delete
+.PP
+Deletes matching entries in maps or arrays.
+.SS Delete entry in map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.b)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.SS Delete nested entry in map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  a1: fred
+  a2: frood
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.a.a1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  a2: frood
+\f[R]
+.fi
+.SS Delete entry in array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.[1])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 3
+\f[R]
+.fi
+.SS Delete nested entry in array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: cat
+  b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.[0].a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- b: dog
+\f[R]
+.fi
+.SS Delete no matches
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.c)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.SS Delete matching entries
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+c: bat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del( .[] | select(. == \[dq]*at\[dq]) )\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: dog
+\f[R]
+.fi
+.SS Recursively delete matching keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  name: frog
+  b:
+    name: blog
+    age: 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]del(.. | select(has(\[dq]name\[dq])).name)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    age: 12
+\f[R]
+.fi
+.SH Divide
+.PP
+Divide behaves differently according to the type of the LHS:
+.IP \[bu] 2
+strings: split by the divider
+.IP \[bu] 2
+number: arithmetic division
+.SS String split
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat_meow
+b: _
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].c = .a / .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat_meow
+b: _
+c:
+  - cat
+  - meow
+\f[R]
+.fi
+.SS Number division
+.PP
+The result during division is calculated as a float
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 12
+b: 2.5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a / .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 4.8
+b: 2.5
+\f[R]
+.fi
+.SS Number division by zero
+.PP
+Dividing by zero results in +Inf or -Inf
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: -1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a / 0 | .b = .b / 0\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !!float +Inf
+b: !!float -Inf
+\f[R]
+.fi
+.SH Document Index
+.PP
+Use the \f[C]documentIndex\f[R] operator (or the \f[C]di\f[R] shorthand)
+to select nodes of a particular document.
+.SS Retrieve a document index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+---
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | document_index\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+---
+1
+\f[R]
+.fi
+.SS Retrieve a document index, shorthand
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+---
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | di\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+---
+1
+\f[R]
+.fi
+.SS Filter by document index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+---
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]select(document_index == 1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.SS Filter by document index shorthand
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+---
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]select(di == 1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.SS Print Document Index with matches
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+---
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | ({\[dq]match\[dq]: ., \[dq]doc\[dq]: document_index})\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+match: cat
+doc: 0
+---
+match: frog
+doc: 1
+\f[R]
+.fi
+.SH Encoder / Decoder
+.PP
+Encode operators will take the piped in object structure and encode it
+as a string in the desired format.
+The decode operators do the opposite, they take a formatted string and
+decode it into the relevant object structure.
+.PP
+Note that you can optionally pass an indent value to the encode
+functions (see below).
+.PP
+These operators are useful to process yaml documents that have
+stringified embedded yaml/json/props in them.
+.PP
+.TS
+tab(@);
+l l l.
+T{
+Format
+T}@T{
+Decode (from string)
+T}@T{
+Encode (to string)
+T}
+_
+T{
+Yaml
+T}@T{
+from_yaml/\[at]yamld
+T}@T{
+to_yaml(i)/\[at]yaml
+T}
+T{
+JSON
+T}@T{
+from_json/\[at]jsond
+T}@T{
+to_json(i)/\[at]json
+T}
+T{
+Properties
+T}@T{
+from_props/\[at]propsd
+T}@T{
+to_props/\[at]props
+T}
+T{
+CSV
+T}@T{
+from_csv/\[at]csvd
+T}@T{
+to_csv/\[at]csv
+T}
+T{
+TSV
+T}@T{
+from_tsv/\[at]tsvd
+T}@T{
+to_tsv/\[at]tsv
+T}
+T{
+XML
+T}@T{
+from_xml/\[at]xmld
+T}@T{
+to_xml(i)/\[at]xml
+T}
+T{
+Base64
+T}@T{
+\[at]base64d
+T}@T{
+\[at]base64
+T}
+T{
+URI
+T}@T{
+\[at]urid
+T}@T{
+\[at]uri
+T}
+T{
+Shell
+T}@T{
+T}@T{
+\[at]sh
+T}
+.TE
+.PP
+See CSV and TSV
+documentation (https://mikefarah.gitbook.io/yq/usage/csv-tsv) for
+accepted formats.
+.PP
+XML uses the \f[C]--xml-attribute-prefix\f[R] and
+\f[C]xml-content-name\f[R] flags to identify attributes and content
+fields.
+.PP
+Base64 assumes rfc4648 (https://rfc-editor.org/rfc/rfc4648.html)
+encoding.
+Encoding and decoding both assume that the content is a utf-8 string and
+not binary content.
+.SS Encode value as json string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | to_json)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+b: |
+  {
+    \[dq]cool\[dq]: \[dq]thing\[dq]
+  }
+\f[R]
+.fi
+.SS Encode value as json string, on one line
+.PP
+Pass in a 0 indent to print json on a single line.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | to_json(0))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+b: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq]
+\f[R]
+.fi
+.SS Encode value as json string, on one line shorthand
+.PP
+Pass in a 0 indent to print json on a single line.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | \[at]json)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+b: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq]
+\f[R]
+.fi
+.SS Decode a json encoded string
+.PP
+Keep in mind JSON is a subset of YAML.
+If you want idiomatic yaml, pipe through the style operator to clear out
+the JSON styling.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: \[aq]{\[dq]cool\[dq]:\[dq]thing\[dq]}\[aq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | from_json | ... style=\[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool: thing
+\f[R]
+.fi
+.SS Encode value as props string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | \[at]props)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool: thing
+b: |
+  cool = thing
+\f[R]
+.fi
+.SS Decode props encoded string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: |-
+  cats=great
+  dogs=cool as well
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= \[at]propsd\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cats: great
+  dogs: cool as well
+\f[R]
+.fi
+.SS Decode csv encoded string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: |-
+  cats,dogs
+  great,cool as well
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= \[at]csvd\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - cats: great
+    dogs: cool as well
+\f[R]
+.fi
+.SS Decode tsv encoded string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: |-
+  cats  dogs
+  great cool as well
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= \[at]tsvd\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - cats: great
+    dogs: cool as well
+\f[R]
+.fi
+.SS Encode value as yaml string
+.PP
+Indent defaults to 2
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    bob: dylan
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | to_yaml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    bob: dylan
+b: |
+  cool:
+    bob: dylan
+\f[R]
+.fi
+.SS Encode value as yaml string, with custom indentation
+.PP
+You can specify the indentation level as the first parameter.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    bob: dylan
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | to_yaml(8))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    bob: dylan
+b: |
+  cool:
+          bob: dylan
+\f[R]
+.fi
+.SS Decode a yaml encoded string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: \[aq]foo: bar\[aq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | from_yaml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[aq]foo: bar\[aq]
+b:
+  foo: bar
+\f[R]
+.fi
+.SS Update a multiline encoded yaml string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: |
+  foo: bar
+  baz: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= (from_yaml | .foo = \[dq]cat\[dq] | to_yaml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: |
+  foo: cat
+  baz: dog
+\f[R]
+.fi
+.SS Update a single line encoded yaml string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: \[aq]foo: bar\[aq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= (from_yaml | .foo = \[dq]cat\[dq] | to_yaml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[aq]foo: cat\[aq]
+\f[R]
+.fi
+.SS Encode array of scalars as csv string
+.PP
+Scalars are strings, numbers and booleans.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- thing1,thing2
+- true
+- 3.40
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[at]csv\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat,\[dq]thing1,thing2\[dq],true,3.40
+\f[R]
+.fi
+.SS Encode array of arrays as csv string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- - cat
+  - thing1,thing2
+  - true
+  - 3.40
+- - dog
+  - thing3
+  - false
+  - 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[at]csv\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat,\[dq]thing1,thing2\[dq],true,3.40
+dog,thing3,false,12
+\f[R]
+.fi
+.SS Encode array of arrays as tsv string
+.PP
+Scalars are strings, numbers and booleans.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- - cat
+  - thing1,thing2
+  - true
+  - 3.40
+- - dog
+  - thing3
+  - false
+  - 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[at]tsv\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat thing1,thing2   true    3.40
+dog thing3  false   12
+\f[R]
+.fi
+.SS Encode value as xml string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    foo: bar
+    +\[at]id: hi
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | to_xml\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<cool id=\[dq]hi\[dq]>
+  <foo>bar</foo>
+</cool>
+\f[R]
+.fi
+.SS Encode value as xml string on a single line
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    foo: bar
+    +\[at]id: hi
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | \[at]xml\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<cool id=\[dq]hi\[dq]><foo>bar</foo></cool>
+\f[R]
+.fi
+.SS Encode value as xml string with custom indentation
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cool:
+    foo: bar
+    +\[at]id: hi
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]{\[dq]cat\[dq]: .a | to_xml(1)}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat: |
+  <cool id=\[dq]hi\[dq]>
+   <foo>bar</foo>
+  </cool>
+\f[R]
+.fi
+.SS Decode a xml encoded string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: <foo>bar</foo>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b = (.a | from_xml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: <foo>bar</foo>
+b:
+  foo: bar
+\f[R]
+.fi
+.SS Encode a string to base64
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+coolData: a special string
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].coolData | \[at]base64\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+YSBzcGVjaWFsIHN0cmluZw==
+\f[R]
+.fi
+.SS Encode a yaml document to base64
+.PP
+Pipe through \[at]yaml first to convert to a string, then use
+\[at]base64 to encode it.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[at]yaml | \[at]base64\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+YTogYXBwbGUK
+\f[R]
+.fi
+.SS Encode a string to uri
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+coolData: this has & special () characters *
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].coolData | \[at]uri\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+this+has+%26+special+%28%29+characters+%2A
+\f[R]
+.fi
+.SS Decode a URI to a string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+this+has+%26+special+%28%29+characters+%2A
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[at]urid\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+this has & special () characters *
+\f[R]
+.fi
+.SS Encode a string to sh
+.PP
+Sh/Bash friendly string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+coolData: strings with spaces and a \[aq]quote\[aq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].coolData | \[at]sh\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+strings\[aq] with spaces and a \[aq]\[rs]\[aq]quote\[rs]\[aq]
+\f[R]
+.fi
+.SS Decode a base64 encoded string
+.PP
+Decoded data is assumed to be a string.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+coolData: V29ya3Mgd2l0aCBVVEYtMTYg8J+Yig==
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].coolData | \[at]base64d\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Works with UTF-16 \[u1F60A]
+\f[R]
+.fi
+.SS Decode a base64 encoded yaml document
+.PP
+Pipe through \f[C]from_yaml\f[R] to parse the decoded base64 string as a
+yaml document.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+coolData: YTogYXBwbGUK
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].coolData |= (\[at]base64d | from_yaml)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+coolData:
+  a: apple
+\f[R]
+.fi
+.SH Entries
+.PP
+Similar to the same-named functions in \f[C]jq\f[R], these functions
+convert to/from an object and an array of key-value pairs.
+This is most useful for performing operations on keys of maps.
+.SS to_entries Map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- key: a
+  value: 1
+- key: b
+  value: 2
+\f[R]
+.fi
+.SS to_entries Array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a
+- b
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- key: 0
+  value: a
+- key: 1
+  value: b
+\f[R]
+.fi
+.SS to_entries null
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SS from_entries map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries | from_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+\f[R]
+.fi
+.SS from_entries with numeric key indices
+.PP
+from_entries always creates a map, even for numeric keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a
+- b
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries | from_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0: a
+1: b
+\f[R]
+.fi
+.SS Use with_entries to update keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_entries(.key |= \[dq]KEY_\[dq] + .)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+KEY_a: 1
+KEY_b: 2
+\f[R]
+.fi
+.SS Custom sort map keys
+.PP
+Use to_entries to convert to an array of key/value pairs, sort the array
+using sort/sort_by/etc, and convert it back.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+c: 3
+b: 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]to_entries | sort_by(.key) | reverse | from_entries\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+c: 3
+b: 2
+a: 1
+\f[R]
+.fi
+.SS Use with_entries to filter the map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: bird
+c:
+  d: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_entries(select(.value | has(\[dq]b\[dq])))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: bird
+\f[R]
+.fi
+.SH Env Variable Operators
+.PP
+These operators are used to handle environment variables usage in
+expressions and documents.
+While environment variables can, of course, be passed in via your CLI
+with string interpolation, this often comes with complex quote escaping
+and can be tricky to write and read.
+.PP
+There are three operators:
+.IP \[bu] 2
+\f[C]env\f[R] which takes a single environment variable name and parses
+the variable as a yaml node (be it a map, array, string, number or
+boolean)
+.IP \[bu] 2
+\f[C]strenv\f[R] which also takes a single environment variable name,
+and always parses the variable as a string.
+.IP \[bu] 2
+\f[C]envsubst\f[R] which you pipe strings into and it interpolates
+environment variables in strings using
+envsubst (https://github.com/a8m/envsubst).
+.SS EnvSubst Options
+.PP
+You can optionally pass envsubst any of the following options:
+.IP \[bu] 2
+nu: NoUnset, this will fail if there are any referenced variables that
+are not set
+.IP \[bu] 2
+ne: NoEmpty, this will fail if there are any referenced variables that
+are empty
+.IP \[bu] 2
+ff: FailFast, this will abort on the first failure (rather than collect
+all the errors)
+.PP
+E.g: \f[C]envsubst(ne, ff)\f[R] will fail on the first empty variable.
+.PP
+See Imposing
+Restrictions (https://github.com/a8m/envsubst#imposing-restrictions) in
+the \f[C]envsubst\f[R] documentation for more information, and below for
+examples.
+.SS Tip
+.PP
+To replace environment variables across all values in a document,
+\f[C]envsubst\f[R] can be used with the recursive descent operator as
+follows:
+.IP
+.nf
+\f[C]
+yq \[aq](.. | select(tag == \[dq]!!str\[dq])) |= envsubst\[aq] file.yaml
+\f[R]
+.fi
+.SS Read string environment variable
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]cat meow\[dq] yq --null-input \[aq].a = env(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat meow
+\f[R]
+.fi
+.SS Read boolean environment variable
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]true\[dq] yq --null-input \[aq].a = env(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: true
+\f[R]
+.fi
+.SS Read numeric environment variable
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]12\[dq] yq --null-input \[aq].a = env(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 12
+\f[R]
+.fi
+.SS Read yaml environment variable
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]{b: fish}\[dq] yq --null-input \[aq].a = env(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: {b: fish}
+\f[R]
+.fi
+.SS Read boolean environment variable as a string
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]true\[dq] yq --null-input \[aq].a = strenv(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[dq]true\[dq]
+\f[R]
+.fi
+.SS Read numeric environment variable as a string
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]12\[dq] yq --null-input \[aq].a = strenv(myenv)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[dq]12\[dq]
+\f[R]
+.fi
+.SS Dynamically update a path from an environment variable
+.PP
+The env variable can be any valid yq expression.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - name: dog
+    - name: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+pathEnv=\[dq].a.b[0].name\[dq]  valueEnv=\[dq]moo\[dq] yq \[aq]eval(strenv(pathEnv)) = strenv(valueEnv)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - name: moo
+    - name: cat
+\f[R]
+.fi
+.SS Dynamic key lookup with environment variable
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: meow
+dog: woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+myenv=\[dq]cat\[dq] yq \[aq].[env(myenv)]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+meow
+\f[R]
+.fi
+.SS Replace strings with envsubst
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]cat\[dq] yq --null-input \[aq]\[dq]the ${myenv} meows\[dq] | envsubst\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+the cat meows
+\f[R]
+.fi
+.SS Replace strings with envsubst, missing variables
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+the  meows
+\f[R]
+.fi
+.SS Replace strings with envsubst(nu), missing variables
+.PP
+(nu) not unset, will fail if there are unset (missing) variables
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst(nu)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: variable ${myenvnonexisting} not set
+\f[R]
+.fi
+.SS Replace strings with envsubst(ne), missing variables
+.PP
+(ne) not empty, only validates set variables
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${myenvnonexisting} meows\[dq] | envsubst(ne)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+the  meows
+\f[R]
+.fi
+.SS Replace strings with envsubst(ne), empty variable
+.PP
+(ne) not empty, will fail if a referenced variable is empty
+.PP
+Running
+.IP
+.nf
+\f[C]
+myenv=\[dq]\[dq] yq --null-input \[aq]\[dq]the ${myenv} meows\[dq] | envsubst(ne)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: variable ${myenv} set but empty
+\f[R]
+.fi
+.SS Replace strings with envsubst, missing variables with defaults
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${myenvnonexisting-dog} meows\[dq] | envsubst\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+the dog meows
+\f[R]
+.fi
+.SS Replace strings with envsubst(nu), missing variables with defaults
+.PP
+Having a default specified skips over the missing variable.
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${myenvnonexisting-dog} meows\[dq] | envsubst(nu)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+the dog meows
+\f[R]
+.fi
+.SS Replace strings with envsubst(ne), missing variables with defaults
+.PP
+Fails, because the variable is explicitly set to blank.
+.PP
+Running
+.IP
+.nf
+\f[C]
+myEmptyEnv=\[dq]\[dq] yq --null-input \[aq]\[dq]the ${myEmptyEnv-dog} meows\[dq] | envsubst(ne)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: variable ${myEmptyEnv} set but empty
+\f[R]
+.fi
+.SS Replace string environment variable in document
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+v: ${myenv}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+myenv=\[dq]cat meow\[dq] yq \[aq].v |= envsubst\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+v: cat meow
+\f[R]
+.fi
+.SS (Default) Return all envsubst errors
+.PP
+By default, all errors are returned at once.
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${notThere} ${alsoNotThere}\[dq] | envsubst(nu)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: variable ${notThere} not set
+variable ${alsoNotThere} not set
+\f[R]
+.fi
+.SS Fail fast, return the first envsubst error (and abort)
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]\[dq]the ${notThere} ${alsoNotThere}\[dq] | envsubst(nu,ff)\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: variable ${notThere} not set
+\f[R]
+.fi
+.SH Equals / Not Equals
+.PP
+This is a boolean operator that will return \f[C]true\f[R] if the LHS is
+equal to the RHS and \f[C]false\f[R] otherwise.
+.IP
+.nf
+\f[C]
+\&.a == .b
+\f[R]
+.fi
+.PP
+It is most often used with the select operator to find particular nodes:
+.IP
+.nf
+\f[C]
+select(.a == .b)
+\f[R]
+.fi
+.PP
+The not equals \f[C]!=\f[R] operator returns \f[C]false\f[R] if the LHS
+is equal to the RHS.
+.SS Related Operators
+.IP \[bu] 2
+comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators
+here (https://mikefarah.gitbook.io/yq/operators/compare)
+.IP \[bu] 2
+boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc)
+here (https://mikefarah.gitbook.io/yq/operators/boolean-operators)
+.IP \[bu] 2
+select operator here (https://mikefarah.gitbook.io/yq/operators/select)
+.SS Match string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- goat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | (. == \[dq]*at\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+true
+false
+\f[R]
+.fi
+.SS Don\[cq]t match string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- goat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | (. != \[dq]*at\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+false
+true
+\f[R]
+.fi
+.SS Match number
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 3
+- 4
+- 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | (. == 4)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+true
+false
+\f[R]
+.fi
+.SS Don\[cq]t match number
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 3
+- 4
+- 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | (. != 4)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+false
+true
+\f[R]
+.fi
+.SS Match nulls
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]null == \[ti]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+\f[R]
+.fi
+.SS Non existent key doesn\[cq]t equal a value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]select(.b != \[dq]thing\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.SS Two non existent keys are equal
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]select(.b == .c)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.SH Error
+.PP
+Use this operation to short-circuit expressions.
+Useful for validation.
+.SS Validate a particular value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: hello
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]select(.a == \[dq]howdy\[dq]) or error(\[dq].a [\[dq] + .a + \[dq]] is not howdy!\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: .a [hello] is not howdy!
+\f[R]
+.fi
+.SS Validate the environment variable is a number - invalid
+.PP
+Running
+.IP
+.nf
+\f[C]
+numberOfCats=\[dq]please\[dq] yq --null-input \[aq]env(numberOfCats) | select(tag == \[dq]!!int\[dq]) or error(\[dq]numberOfCats is not a number :(\[dq])\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: numberOfCats is not a number :(
+\f[R]
+.fi
+.SS Validate the environment variable is a number - valid
+.PP
+\f[C]with\f[R] can be a convenient way of encapsulating validation.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: Bob
+favouriteAnimal: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+numberOfCats=\[dq]3\[dq] yq \[aq]
+    with(env(numberOfCats); select(tag == \[dq]!!int\[dq]) or error(\[dq]numberOfCats is not a number :(\[dq])) | 
+    .numPets = env(numberOfCats)
+\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name: Bob
+favouriteAnimal: cat
+numPets: 3
+\f[R]
+.fi
+.SH Eval
+.PP
+Use \f[C]eval\f[R] to dynamically process an expression - for instance
+from an environment variable.
+.PP
+\f[C]eval\f[R] takes a single argument, and evaluates that as a
+\f[C]yq\f[R] expression.
+Any valid expression can be used, be it a path
+\f[C].a.b.c | select(. == \[dq]cat\[dq])\f[R], or an update
+\f[C].a.b.c = \[dq]gogo\[dq]\f[R].
+.PP
+Tip: This can be a useful way to parameterise complex scripts.
+.SS Dynamically evaluate a path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+pathExp: .a.b[] | select(.name == \[dq]cat\[dq])
+a:
+  b:
+    - name: dog
+    - name: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]eval(.pathExp)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name: cat
+\f[R]
+.fi
+.SS Dynamically update a path from an environment variable
+.PP
+The env variable can be any valid yq expression.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - name: dog
+    - name: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+pathEnv=\[dq].a.b[0].name\[dq]  valueEnv=\[dq]moo\[dq] yq \[aq]eval(strenv(pathEnv)) = strenv(valueEnv)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - name: moo
+    - name: cat
+\f[R]
+.fi
+.SH File Operators
+.PP
+File operators are most often used with merge when needing to merge
+specific files together.
+Note that when doing this, you will need to use \f[C]eval-all\f[R] to
+ensure all yaml documents are loaded into memory before performing the
+merge (as opposed to \f[C]eval\f[R] which runs the expression once per
+document).
+.PP
+Note that the \f[C]fileIndex\f[R] operator has a short alias of
+\f[C]fi\f[R].
+.SS Merging files
+.PP
+Note the use of eval-all to ensure all documents are loaded into memory.
+.IP
+.nf
+\f[C]
+yq eval-all \[aq]select(fi == 0) * select(filename == \[dq]file2.yaml\[dq])\[aq] file1.yaml file2.yaml
+\f[R]
+.fi
+.SS Get filename
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]filename\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+sample.yml
+\f[R]
+.fi
+.SS Get file index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]file_index\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+\f[R]
+.fi
+.SS Get file indices of multiple documents
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+And another sample another.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq eval-all \[aq]file_index\[aq] sample.yml another.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+1
+\f[R]
+.fi
+.SS Get file index alias
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]fi\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+\f[R]
+.fi
+.SH Filter
+.PP
+Filters an array (or map values) by the expression given.
+Equivalent to doing \f[C]map(select(exp))\f[R].
+.SS Filter array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]filter(. < 3)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+\f[R]
+.fi
+.SS Filter map values
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+c:
+  things: cool
+  frog: yes
+d:
+  things: hot
+  frog: false
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]filter(.things == \[dq]cool\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- things: cool
+  frog: yes
+\f[R]
+.fi
+.SH Flatten
+.PP
+This recursively flattens arrays.
+.SS Flatten
+.PP
+Recursively flattens all arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- - 2
+- - - 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]flatten\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.SS Flatten with depth of one
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- - 2
+- - - 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]flatten(1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- - 3
+\f[R]
+.fi
+.SS Flatten empty array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]flatten\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.SS Flatten array of objects
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo: bar
+- - foo: baz
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]flatten\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- foo: bar
+- foo: baz
+\f[R]
+.fi
+.SH Group By
+.PP
+This is used to group items in an array by an expression.
+.SS Group by field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo: 1
+  bar: 10
+- foo: 3
+  bar: 100
+- foo: 1
+  bar: 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]group_by(.foo)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- - foo: 1
+    bar: 10
+  - foo: 1
+    bar: 1
+- - foo: 3
+    bar: 100
+\f[R]
+.fi
+.SS Group by field, with nulls
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat: dog
+- foo: 1
+  bar: 10
+- foo: 3
+  bar: 100
+- no: foo for you
+- foo: 1
+  bar: 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]group_by(.foo)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- - cat: dog
+  - no: foo for you
+- - foo: 1
+    bar: 10
+  - foo: 1
+    bar: 1
+- - foo: 3
+    bar: 100
+\f[R]
+.fi
+.SH Has
+.PP
+This operation returns true if the key exists in a map (or index in an
+array), false otherwise.
+.SS Has map key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: yes
+- a: \[ti]
+- a:
+- b: nope
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | has(\[dq]a\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+true
+true
+false
+\f[R]
+.fi
+.SS Select, checking for existence of deep paths
+.PP
+Simply pipe the parent expression into \f[C]has\f[R]
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a:
+    b:
+      c: cat
+- a:
+    b:
+      d: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(.a.b | has(\[dq]c\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    c: cat
+\f[R]
+.fi
+.SS Has array index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- []
+- [1]
+- [1, 2]
+- [1, null]
+- [1, 2, 3]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | has(1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+false
+false
+true
+true
+true
+\f[R]
+.fi
+.SH Keys
+.PP
+Use the \f[C]keys\f[R] operator to return map keys or array indices.
+.SS Map keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+dog: woof
+cat: meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]keys\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- dog
+- cat
+\f[R]
+.fi
+.SS Array keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- apple
+- banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]keys\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 0
+- 1
+\f[R]
+.fi
+.SS Retrieve array key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[1] | key\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS Retrieve map key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | key\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a
+\f[R]
+.fi
+.SS No key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]key\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SS Update map key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  x: 3
+  y: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a.x | key) = \[dq]meow\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  meow: 3
+  y: 4
+\f[R]
+.fi
+.SS Get comment from map key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  # comment on key
+  x: 3
+  y: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.x | key | headComment\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+comment on key
+\f[R]
+.fi
+.SS Check node is a key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    - cat
+  c: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][... | { \[dq]p\[dq]: path | join(\[dq].\[dq]), \[dq]isKey\[dq]: is_key, \[dq]tag\[dq]: tag }]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- p: \[dq]\[dq]
+  isKey: false
+  tag: \[aq]!!map\[aq]
+- p: a
+  isKey: true
+  tag: \[aq]!!str\[aq]
+- p: a
+  isKey: false
+  tag: \[aq]!!map\[aq]
+- p: a.b
+  isKey: true
+  tag: \[aq]!!str\[aq]
+- p: a.b
+  isKey: false
+  tag: \[aq]!!seq\[aq]
+- p: a.b.0
+  isKey: false
+  tag: \[aq]!!str\[aq]
+- p: a.c
+  isKey: true
+  tag: \[aq]!!str\[aq]
+- p: a.c
+  isKey: false
+  tag: \[aq]!!str\[aq]
+\f[R]
+.fi
+.SS Get kind
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f: []
+g: {}
+h: null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | kind\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+map
+scalar
+scalar
+scalar
+scalar
+seq
+map
+scalar
+\f[R]
+.fi
+.SS Get kind, ignores custom tags
+.PP
+Unlike tag, kind is not affected by custom tags.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !!thing cat
+b: !!foo {}
+c: !!bar []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | kind\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+map
+scalar
+map
+seq
+\f[R]
+.fi
+.SS Add comments only to scalars
+.PP
+An example of how you can use kind
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: 5
+  c: 3.2
+e: true
+f: []
+g: {}
+h: null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.. | select(kind == \[dq]scalar\[dq])) line_comment = \[dq]this is a scalar\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: 5 # this is a scalar
+  c: 3.2 # this is a scalar
+e: true # this is a scalar
+f: []
+g: {}
+h: null # this is a scalar
+\f[R]
+.fi
+.SH Length
+.PP
+Returns the lengths of the nodes.
+Length is defined according to the type of the node.
+.SS String length
+.PP
+returns length of string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | length\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+3
+\f[R]
+.fi
+.SS null length
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | length\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+\f[R]
+.fi
+.SS Map length
+.PP
+returns number of entries
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+c: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]length\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+2
+\f[R]
+.fi
+.SS Array length
+.PP
+returns number of elements
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 2
+- 4
+- 6
+- 8
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]length\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+4
+\f[R]
+.fi
+.SH Line
+.PP
+Returns the line of the matching node.
+Starts from 1; 0 indicates there was no line data.
+.SS Returns line of \f[I]value\f[R] node
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b:
+  c: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b | line\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+3
+\f[R]
+.fi
+.SS Returns line of \f[I]key\f[R] node
+.PP
+Pipe through the key operator to get the line of the key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b:
+  c: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b | key | line\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+2
+\f[R]
+.fi
+.SS First line is 1
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | line\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS No line data is 0
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]{\[dq]a\[dq]: \[dq]new entry\[dq]} | line\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+0
+\f[R]
+.fi
+.SH Load
+.PP
+The load operators allows you to load in content from another file.
+.PP
+Note that you can use string operators like \f[C]+\f[R] and
+\f[C]sub\f[R] to modify the value in the yaml file to a path that exists
+in your system.
+.PP
+You can load files of the following supported types:
+.PP
+.TS
+tab(@);
+l l.
+T{
+Format
+T}@T{
+Load Operator
+T}
+_
+T{
+Yaml
+T}@T{
+load
+T}
+T{
+XML
+T}@T{
+load_xml
+T}
+T{
+Properties
+T}@T{
+load_props
+T}
+T{
+Plain String
+T}@T{
+load_str
+T}
+T{
+Base64
+T}@T{
+load_base64
+T}
+.TE
+.PP
+Note that load_base64 only works for base64 encoded utf-8 strings.
+.SS Samples files for tests:
+.SS yaml
+.PP
+\f[C]../../examples/thing.yml\f[R]:
+.IP
+.nf
+\f[C]
+a: apple is included
+b: cool.
+\f[R]
+.fi
+.SS xml
+.PP
+\f[C]small.xml\f[R]:
+.IP
+.nf
+\f[C]
+<this>is some xml</this>
+\f[R]
+.fi
+.SS properties
+.PP
+\f[C]small.properties\f[R]:
+.IP
+.nf
+\f[C]
+this.is = a properties file
+\f[R]
+.fi
+.SS base64
+.PP
+\f[C]base64.txt\f[R]:
+.IP
+.nf
+\f[C]
+bXkgc2VjcmV0IGNoaWxsaSByZWNpcGUgaXMuLi4u
+\f[R]
+.fi
+.SS Simple example
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myFile: ../../examples/thing.yml
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]load(.myFile)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: apple is included
+b: cool.
+\f[R]
+.fi
+.SS Replace node with referenced file
+.PP
+Note that you can modify the filename in the load operator if needed.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+something:
+  file: thing.yml
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].something |= load(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+something:
+  a: apple is included
+  b: cool.
+\f[R]
+.fi
+.SS Replace \f[I]all\f[R] nodes with referenced file
+.PP
+Recursively match all the nodes (\f[C]..\f[R]) and then filter the ones
+that have a `file' attribute.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+something:
+  file: thing.yml
+over:
+  here:
+    - file: thing.yml
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.. | select(has(\[dq]file\[dq]))) |= load(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+something:
+  a: apple is included
+  b: cool.
+over:
+  here:
+    - a: apple is included
+      b: cool.
+\f[R]
+.fi
+.SS Replace node with referenced file as string
+.PP
+This will work for any text based file
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+something:
+  file: thing.yml
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].something |= load_str(\[dq]../../examples/\[dq] + .file)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+something: |-
+  a: apple is included
+  b: cool.
+\f[R]
+.fi
+.SS Load from XML
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].more_stuff = load_xml(\[dq]../../examples/small.xml\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool: things
+more_stuff:
+  this: is some xml
+\f[R]
+.fi
+.SS Load from Properties
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].more_stuff = load_props(\[dq]../../examples/small.properties\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool: things
+more_stuff:
+  this:
+    is: a properties file
+\f[R]
+.fi
+.SS Merge from properties
+.PP
+This can be used as a convenient way to update a yaml document
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+this:
+  is: from yaml
+  cool: ay
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. *= load_props(\[dq]../../examples/small.properties\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+this:
+  is: a properties file
+  cool: ay
+\f[R]
+.fi
+.SS Load from base64 encoded file
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].more_stuff = load_base64(\[dq]../../examples/base64.txt\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool: things
+more_stuff: my secret chilli recipe is....
+\f[R]
+.fi
+.SH Map
+.PP
+Maps values of an array.
+Use \f[C]map_values\f[R] to map values of an object.
+.SS Map array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]map(. + 1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 2
+- 3
+- 4
+\f[R]
+.fi
+.SS Map object values
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+c: 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]map_values(. + 1)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2
+b: 3
+c: 4
+\f[R]
+.fi
+.SH Max
+.PP
+Computes the maximum among an incoming sequence of scalar values.
+.SS Maximum int
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 99
+- 16
+- 12
+- 6
+- 66
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]max\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+99
+\f[R]
+.fi
+.SS Maximum string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo
+- bar
+- baz
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]max\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo
+\f[R]
+.fi
+.SS Maximum of empty
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]max\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SH Min
+.PP
+Computes the minimum among an incoming sequence of scalar values.
+.SS Minimum int
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 99
+- 16
+- 12
+- 6
+- 66
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]min\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+6
+\f[R]
+.fi
+.SS Minimum string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo
+- bar
+- baz
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]min\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bar
+\f[R]
+.fi
+.SS Minimum of empty
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+[]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]min\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SH Modulo
+.PP
+Arithmetic modulo operator, returns the remainder from dividing two
+numbers.
+.SS Number modulo - int
+.PP
+If the lhs and rhs are ints then the expression will be calculated with
+ints.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 13
+b: 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a % .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 1
+b: 2
+\f[R]
+.fi
+.SS Number modulo - float
+.PP
+If the lhs or rhs are floats then the expression will be calculated with
+floats.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 12
+b: 2.5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a % .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !!float 2
+b: 2.5
+\f[R]
+.fi
+.SS Number modulo - int by zero
+.PP
+If the lhs is an int and the rhs is 0, the result is an error.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1
+b: 0
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a % .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: cannot modulo by 0
+\f[R]
+.fi
+.SS Number modulo - float by zero
+.PP
+If the lhs is a float and the rhs is 0, the result is NaN.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 1.1
+b: 0
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a % .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !!float NaN
+b: 0
+\f[R]
+.fi
+.SH Multiply (Merge)
+.PP
+Like the multiply operator in jq, depending on the operands, this
+multiply operator will do different things.
+Currently numbers, arrays and objects are supported.
+.SS Objects and arrays - merging
+.PP
+Objects are merged \f[I]deeply\f[R] matching on matching keys.
+By default, array values override and are not deeply merged.
+.PP
+You can use the add operator \f[C]+\f[R], to shallow merge objects, see
+more info here (https://mikefarah.gitbook.io/yq/operators/add).
+.PP
+Note that when merging objects, this operator returns the merged object
+(not the parent).
+This will be clearer in the examples below.
+.SS Merge Flags
+.PP
+You can control how objects are merged by using one or more of the
+following flags.
+Multiple flags can be used together, e.g.\ \f[C].a *+? .b\f[R].
+See examples below
+.IP \[bu] 2
+\f[C]+\f[R] append arrays
+.IP \[bu] 2
+\f[C]d\f[R] deeply merge arrays
+.IP \[bu] 2
+\f[C]?\f[R] only merge \f[I]existing\f[R] fields
+.IP \[bu] 2
+\f[C]n\f[R] only merge \f[I]new\f[R] fields
+.IP \[bu] 2
+\f[C]c\f[R] clobber custom tags
+.PP
+To perform a shallow merge only, use the add operator \f[C]+\f[R], see
+more info here (https://mikefarah.gitbook.io/yq/operators/add).
+.SS Merge two files together
+.PP
+This uses the load operator to merge file2 into file1.
+.IP
+.nf
+\f[C]
+yq \[aq]. *= load(\[dq]file2.yml\[dq])\[aq] file1.yml
+\f[R]
+.fi
+.SS Merging all files
+.PP
+Note the use of \f[C]eval-all\f[R] to ensure all documents are loaded
+into memory.
+.IP
+.nf
+\f[C]
+yq eval-all \[aq]. as $item ireduce ({}; . * $item )\[aq] *.yml
+\f[R]
+.fi
+.SS Merging complex arrays together by a key field
+.PP
+By default - \f[C]yq\f[R] merge is naive.
+It merges maps when they match the key name, and arrays are merged
+either by appending them together, or merging the entries by their
+position in the array.
+.PP
+For more complex array merging (e.g.\ merging items that match on a
+certain key) please see the example
+here (https://mikefarah.gitbook.io/yq/operators/multiply-merge#merge-arrays-of-objects-together-matching-on-a-key)
+.SS Multiply integers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *= .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 12
+b: 4
+\f[R]
+.fi
+.SS Multiply string node X int
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b * 4\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bananabananabananabanana
+\f[R]
+.fi
+.SS Multiply int X string node
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]4 * .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bananabananabananabanana
+\f[R]
+.fi
+.SS Multiply string X int node
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+n: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]\[dq]banana\[dq] * .n\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bananabananabananabanana
+\f[R]
+.fi
+.SS Multiply int node X string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+n: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].n * \[dq]banana\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bananabananabananabanana
+\f[R]
+.fi
+.SS Merge objects together, returning merged result only
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  field: me
+  fieldA: cat
+b:
+  field:
+    g: wizz
+  fieldB: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a * .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+field:
+  g: wizz
+fieldA: cat
+fieldB: dog
+\f[R]
+.fi
+.SS Merge objects together, returning parent object
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  field: me
+  fieldA: cat
+b:
+  field:
+    g: wizz
+  fieldB: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  field:
+    g: wizz
+  fieldA: cat
+  fieldB: dog
+b:
+  field:
+    g: wizz
+  fieldB: dog
+\f[R]
+.fi
+.SS Merge keeps style of LHS
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: {things: great}
+b:
+  also: \[dq]me\[dq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: {things: great, also: \[dq]me\[dq]}
+b:
+  also: \[dq]me\[dq]
+\f[R]
+.fi
+.SS Merge arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - 1
+  - 2
+  - 3
+b:
+  - 3
+  - 4
+  - 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. * {\[dq]a\[dq]:.b}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - 3
+  - 4
+  - 5
+b:
+  - 3
+  - 4
+  - 5
+\f[R]
+.fi
+.SS Merge, only existing fields
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  thing: one
+  cat: frog
+b:
+  missing: two
+  thing: two
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *? .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+thing: two
+cat: frog
+\f[R]
+.fi
+.SS Merge, only new fields
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  thing: one
+  cat: frog
+b:
+  missing: two
+  thing: two
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *n .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+thing: one
+cat: frog
+missing: two
+\f[R]
+.fi
+.SS Merge, appending arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  array:
+    - 1
+    - 2
+    - animal: dog
+  value: coconut
+b:
+  array:
+    - 3
+    - 4
+    - animal: cat
+  value: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *+ .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+array:
+  - 1
+  - 2
+  - animal: dog
+  - 3
+  - 4
+  - animal: cat
+value: banana
+\f[R]
+.fi
+.SS Merge, only existing fields, appending arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  thing:
+    - 1
+    - 2
+b:
+  thing:
+    - 3
+    - 4
+  another:
+    - 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *?+ .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+thing:
+  - 1
+  - 2
+  - 3
+  - 4
+\f[R]
+.fi
+.SS Merge, deeply merging arrays
+.PP
+Merging arrays deeply means arrays are merged like objects, with indices
+as their key.
+In this case, we merge the first item in the array and do nothing with
+the second.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - name: fred
+    age: 12
+  - name: bob
+    age: 32
+b:
+  - name: fred
+    age: 34
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *d .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: fred
+  age: 34
+- name: bob
+  age: 32
+\f[R]
+.fi
+.SS Merge arrays of objects together, matching on a key
+.PP
+This is a fairly complex expression - you can use it as is by providing
+the environment variables as seen in the example below.
+.PP
+It merges in the array provided in the second file into the first -
+matching on equal keys.
+.PP
+Explanation:
+.PP
+The approach, at a high level, is to reduce into a merged map (keyed by
+the unique key) and then convert that back into an array.
+.PP
+First the expression will create a map from the arrays keyed by the
+idPath, the unique field we want to merge by.
+The reduce operator is merging `({}; . * $item )', so array elements
+with the matching key will be merged together.
+.PP
+Next, we convert the map back to an array, using reduce again,
+concatenating all the map values together.
+.PP
+Finally, we set the result of the merged array back into the first doc.
+.PP
+Thanks Kev from
+stackoverflow (https://stackoverflow.com/a/70109529/1168223)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myArray:
+  - a: apple
+    b: appleB
+  - a: kiwi
+    b: kiwiB
+  - a: banana
+    b: bananaB
+something: else
+\f[R]
+.fi
+.PP
+And another sample another.yml file of:
+.IP
+.nf
+\f[C]
+newArray:
+  - a: banana
+    c: bananaC
+  - a: apple
+    b: appleB2
+  - a: dingo
+    c: dingoC
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+idPath=\[dq].a\[dq]  originalPath=\[dq].myArray\[dq]  otherPath=\[dq].newArray\[dq] yq eval-all \[aq]
+(
+  (( (eval(strenv(originalPath)) + eval(strenv(otherPath)))  | .[] | {(eval(strenv(idPath))):  .}) as $item ireduce ({}; . * $item )) as $uniqueMap
+  | ( $uniqueMap  | to_entries | .[]) as $item ireduce([]; . + $item.value)
+) as $mergedArray
+| select(fi == 0) | (eval(strenv(originalPath))) = $mergedArray
+\[aq] sample.yml another.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myArray:
+  - a: apple
+    b: appleB2
+  - a: kiwi
+    b: kiwiB
+  - a: banana
+    b: bananaB
+    c: bananaC
+  - a: dingo
+    c: dingoC
+something: else
+\f[R]
+.fi
+.SS Merge to prefix an element
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. * {\[dq]a\[dq]: {\[dq]c\[dq]: .a}}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  c: cat
+b: dog
+\f[R]
+.fi
+.SS Merge with simple aliases
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cat
+  c: frog
+b:
+  f: *cat
+c:
+  g: thongs
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].c * .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+g: thongs
+f: *cat
+\f[R]
+.fi
+.SS Merge copies anchor names
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  c: &cat frog
+b:
+  f: *cat
+c:
+  g: thongs
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].c * .a\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+g: thongs
+c: &cat frog
+\f[R]
+.fi
+.SS Merge with merge anchors
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar * .foobarList\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+c: foobarList_c
+!!merge <<:
+  - *foo
+  - *bar
+thing: foobar_thing
+b: foobarList_b
+\f[R]
+.fi
+.SS Custom types: that are really numbers
+.PP
+When custom tags are encountered, yq will try to decode the underlying
+type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse 2
+b: !goat 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a * .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse 6
+b: !goat 3
+\f[R]
+.fi
+.SS Custom types: that are really maps
+.PP
+Custom tags will be maintained.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse
+  cat: meow
+b: !goat
+  dog: woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a * .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse
+  cat: meow
+  dog: woof
+b: !goat
+  dog: woof
+\f[R]
+.fi
+.SS Custom types: clobber tags
+.PP
+Use the \f[C]c\f[R] option to clobber custom tags.
+Note that the second tag is now used.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse
+  cat: meow
+b: !goat
+  dog: woof
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a *=c .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !goat
+  cat: meow
+  dog: woof
+b: !goat
+  dog: woof
+\f[R]
+.fi
+.SS Merging a null with a map
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]null * {\[dq]some\[dq]: \[dq]thing\[dq]}\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+some: thing
+\f[R]
+.fi
+.SS Merging a map with null
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]{\[dq]some\[dq]: \[dq]thing\[dq]} * null\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+some: thing
+\f[R]
+.fi
+.SS Merging a null with an array
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]null * [\[dq]some\[dq]]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- some
+\f[R]
+.fi
+.SS Merging an array with null
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq][\[dq]some\[dq]] * null\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- some
+\f[R]
+.fi
+.SH Omit
+.PP
+Works like \f[C]pick\f[R], but instead you specify the keys/indices that
+you \f[I]don\[cq]t\f[R] want included.
+.SS Omit keys from map
+.PP
+Note that non existent keys are skipped.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myMap:
+  cat: meow
+  dog: bark
+  thing: hamster
+  hamster: squeak
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].myMap |= omit([\[dq]hamster\[dq], \[dq]cat\[dq], \[dq]goat\[dq]])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myMap:
+  dog: bark
+  thing: hamster
+\f[R]
+.fi
+.SS Omit indices from array
+.PP
+Note that non existent indices are skipped.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- leopard
+- lion
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]omit([2, 0, 734, -5])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- leopard
+\f[R]
+.fi
+.SH Parent
+.PP
+Parent simply returns the parent nodes of the matching nodes.
+.SS Simple example
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  nested: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.nested | parent\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+nested: cat
+\f[R]
+.fi
+.SS Parent of nested matches
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  fruit: apple
+  name: bob
+b:
+  fruit: banana
+  name: sam
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | select(. == \[dq]banana\[dq]) | parent\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+fruit: banana
+name: sam
+\f[R]
+.fi
+.SS N-th parent
+.PP
+You can optionally supply the number of levels to go up for the parent,
+the default being 1.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    c: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b.c | parent(2)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b:
+  c: cat
+\f[R]
+.fi
+.SS N-th parent - another level
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b:
+    c: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b.c | parent(3)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b:
+    c: cat
+\f[R]
+.fi
+.SS No parent
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]parent\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SH Path
+.PP
+The \f[C]path\f[R] operator can be used to get the traversal paths of
+matching nodes in an expression.
+The path is returned as an array, which if traversed in order will lead
+to the matching node.
+.PP
+You can get the key/index of matching nodes by using the \f[C]path\f[R]
+operator to return the path array then piping that through
+\f[C].[-1]\f[R] to get the last element of that array, the key.
+.PP
+Use \f[C]setpath\f[R] to set a value to the path array returned by
+\f[C]path\f[R], and similarly \f[C]delpaths\f[R] for an array of path
+arrays.
+.SS Map path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b | path\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a
+- b
+\f[R]
+.fi
+.SS Get map key
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b | path | .[-1]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b
+\f[R]
+.fi
+.SS Array path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.[] | select(. == \[dq]dog\[dq]) | path\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a
+- 1
+\f[R]
+.fi
+.SS Get array index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.[] | select(. == \[dq]dog\[dq]) | path | .[-1]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS Print path and value
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - dog
+  - frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a[] | select(. == \[dq]*og\[dq]) | [{\[dq]path\[dq]:path, \[dq]value\[dq]:.}]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- path:
+    - a
+    - 1
+  value: dog
+- path:
+    - a
+    - 2
+  value: frog
+\f[R]
+.fi
+.SS Set path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]setpath([\[dq]a\[dq], \[dq]b\[dq]]; \[dq]things\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: things
+\f[R]
+.fi
+.SS Set on empty document
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]setpath([\[dq]a\[dq], \[dq]b\[dq]]; \[dq]things\[dq])\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: things
+\f[R]
+.fi
+.SS Set path to prune deep paths
+.PP
+Like pick but recursive.
+This uses \f[C]ireduce\f[R] to deeply set the selected paths into an
+empty object.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+parentA: bob
+parentB:
+  child1: i am child1
+  child2: i am child2
+parentC:
+  child1: me child1
+  child2: me child2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.parentB.child2, .parentC.child1) as $i
+  ireduce({}; setpath($i | path; $i))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+parentB:
+  child2: i am child2
+parentC:
+  child1: me child1
+\f[R]
+.fi
+.SS Set array path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]setpath([\[dq]a\[dq], 0]; \[dq]things\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - things
+  - frog
+\f[R]
+.fi
+.SS Set array path empty
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]setpath([\[dq]a\[dq], 0]; \[dq]things\[dq])\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - things
+\f[R]
+.fi
+.SS Delete path
+.PP
+Notice delpaths takes an \f[I]array\f[R] of paths.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+  c: dog
+  d: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]delpaths([[\[dq]a\[dq], \[dq]c\[dq]], [\[dq]a\[dq], \[dq]d\[dq]]])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.SS Delete array path
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]delpaths([[\[dq]a\[dq], 0]])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  - frog
+\f[R]
+.fi
+.SS Delete - wrong parameter
+.PP
+delpaths does not work with a single path array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - cat
+  - frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]delpaths([\[dq]a\[dq], 0])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: DELPATHS: expected entry [0] to be a sequence, but its a !!str. Note that delpaths takes an array of path arrays, e.g. [[\[dq]a\[dq], \[dq]b\[dq]]]
+\f[R]
+.fi
+.SH Pick
+.PP
+Filter a map by the specified list of keys.
+Map is returned with the keys in the order of the pick list.
+.PP
+Similarly, filter an array by the specified list of indices.
+.SS Pick keys from map
+.PP
+Note that the order of the keys matches the pick order and non existent
+keys are skipped.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myMap:
+  cat: meow
+  dog: bark
+  thing: hamster
+  hamster: squeak
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].myMap |= pick([\[dq]hamster\[dq], \[dq]cat\[dq], \[dq]goat\[dq]])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myMap:
+  hamster: squeak
+  cat: meow
+\f[R]
+.fi
+.SS Pick indices from array
+.PP
+Note that the order of the indices matches the pick order and non
+existent indices are skipped.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- leopard
+- lion
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]pick([2, 0, 734, -5])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- lion
+- cat
+\f[R]
+.fi
+.SH Pipe
+.PP
+Pipe the results of an expression into another.
+Like the bash operator.
+.SS Simple Pipe
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a | .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Multiple updates
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cow
+b: sheep
+c: same
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = \[dq]cat\[dq] | .b = \[dq]dog\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+c: same
+\f[R]
+.fi
+.SH Pivot
+.PP
+Emulates the \f[C]PIVOT\f[R] function supported by several popular RDBMS
+systems.
+.SS Pivot a sequence of sequences
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- - foo
+  - bar
+  - baz
+- - sis
+  - boom
+  - bah
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]pivot\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- - foo
+  - sis
+- - bar
+  - boom
+- - baz
+  - bah
+\f[R]
+.fi
+.SS Pivot sequence of heterogeneous sequences
+.PP
+Missing values are \[lq]padded\[rq] to null.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- - foo
+  - bar
+  - baz
+- - sis
+  - boom
+  - bah
+  - blah
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]pivot\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- - foo
+  - sis
+- - bar
+  - boom
+- - baz
+  - bah
+- -
+  - blah
+\f[R]
+.fi
+.SS Pivot sequence of maps
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo: a
+  bar: b
+  baz: c
+- foo: x
+  bar: y
+  baz: z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]pivot\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo:
+  - a
+  - x
+bar:
+  - b
+  - y
+baz:
+  - c
+  - z
+\f[R]
+.fi
+.SS Pivot sequence of heterogeneous maps
+.PP
+Missing values are \[lq]padded\[rq] to null.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- foo: a
+  bar: b
+  baz: c
+- foo: x
+  bar: y
+  baz: z
+  what: ever
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]pivot\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo:
+  - a
+  - x
+bar:
+  - b
+  - y
+baz:
+  - c
+  - z
+what:
+  -
+  - ever
+\f[R]
+.fi
+.SH Recursive Descent (Glob)
+.PP
+This operator recursively matches (or globs) all child nodes of a
+given element, including that node itself.
+This is most often used to apply a filter recursively against all
+matches.
+.SS match values with \f[C]..\f[R]
+.PP
+This will, like the \f[C]jq\f[R] equivalent, recursively match all
+\f[I]value\f[R] nodes.
+Use it to find/manipulate particular values.
+.PP
+For instance to set the \f[C]style\f[R] of all \f[I]value\f[R] nodes in
+a yaml doc, excluding map keys:
+.IP
+.nf
+\f[C]
+yq \[aq].. style= \[dq]flow\[dq]\[aq] file.yaml
+\f[R]
+.fi
+.SS match values and map keys with \f[C]...\f[R]
+.PP
+This also includes map keys in the results set.
+This is particularly useful in YAML as unlike JSON, map keys can have
+their own styling and tags and also use anchors and aliases.
+.PP
+For instance to set the \f[C]style\f[R] of all nodes in a yaml doc,
+including the map keys:
+.IP
+.nf
+\f[C]
+yq \[aq]... style= \[dq]flow\[dq]\[aq] file.yaml
+\f[R]
+.fi
+.SS Recurse map (values only)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]..\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+frog
+\f[R]
+.fi
+.SS Recursively find nodes with keys
+.PP
+Note that this example has wrapped the expression in \f[C][]\f[R] to
+show that there are two matches returned.
+You do not have to wrap in \f[C][]\f[R] in your path expression.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  name: frog
+  b:
+    name: blog
+    age: 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][.. | select(has(\[dq]name\[dq]))]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: frog
+  b:
+    name: blog
+    age: 12
+- name: blog
+  age: 12
+\f[R]
+.fi
+.SS Recursively find nodes with values
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  nameA: frog
+  b:
+    nameB: frog
+    age: 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | select(. == \[dq]frog\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+frog
+frog
+\f[R]
+.fi
+.SS Recurse map (values and keys)
+.PP
+Note that the map key appears in the results
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]...\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: frog
+a
+frog
+\f[R]
+.fi
+.SS Aliases are not traversed
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cat
+  c: frog
+b: *cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][..]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: &cat
+    c: frog
+  b: *cat
+- &cat
+  c: frog
+- frog
+- *cat
+\f[R]
+.fi
+.SS Merge docs are not traversed
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar | [..]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+- foobar_c
+- *foo
+- foobar_thing
+\f[R]
+.fi
+.SH Reduce
+.PP
+Reduce is a powerful way to process a collection of data into a new
+form.
+.IP
+.nf
+\f[C]
+<exp> as $<name> ireduce (<init>; <block>)
+\f[R]
+.fi
+.PP
+e.g.
+.IP
+.nf
+\f[C]
+\&.[] as $item ireduce (0; . + $item)
+\f[R]
+.fi
+.PP
+On the LHS we are configuring the collection of items that will be
+reduced \f[C]<exp>\f[R] as well as what each element will be called
+\f[C]$<name>\f[R].
+Note that the array has been splatted into its individual elements.
+.PP
+On the RHS there is \f[C]<init>\f[R], the starting value of the
+accumulator and \f[C]<block>\f[R], the expression that will update the
+accumulator for each element in the collection.
+Note that within the block expression, \f[C].\f[R] will evaluate to the
+current value of the accumulator.
+.SS yq vs jq syntax
+.PP
+Reduce syntax in \f[C]yq\f[R] is a little different from \f[C]jq\f[R] -
+as \f[C]yq\f[R] (currently) isn\[cq]t as sophisticated as \f[C]jq\f[R]
+and it only supports infix notation (e.g.\ a + b, where the operator is
+in the middle of the two parameters) - whereas \f[C]jq\f[R] uses a mix
+of infix notation with \f[I]prefix\f[R] notation
+(e.g.\ \f[C]reduce a b\f[R] is like writing \f[C]+ a b\f[R]).
+.PP
+To that end, the reduce operator is called \f[C]ireduce\f[R] for
+backwards compatibility if a \f[C]jq\f[R] like prefix version of
+\f[C]reduce\f[R] is ever added.
+.SS Sum numbers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 10
+- 2
+- 5
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] as $item ireduce (0; . + $item)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+20
+\f[R]
+.fi
+.SS Merge all yaml files together
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+And another sample another.yml file of:
+.IP
+.nf
+\f[C]
+b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq eval-all \[aq]. as $item ireduce ({}; . * $item )\[aq] sample.yml another.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: dog
+\f[R]
+.fi
+.SS Convert an array to an object
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Cathy
+  has: apples
+- name: Bob
+  has: bananas
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] as $item ireduce ({}; .[$item | .name] = ($item | .has) )\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Cathy: apples
+Bob: bananas
+\f[R]
+.fi
+.SH Reverse
+.PP
+Reverses the order of the items in an array
+.SS Reverse
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]reverse\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 3
+- 2
+- 1
+\f[R]
+.fi
+.SS Sort descending by string field
+.PP
+Use sort with reverse to sort in descending order.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: banana
+- a: cat
+- a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a) | reverse\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: cat
+- a: banana
+- a: apple
+\f[R]
+.fi
+.SH Select
+.PP
+Select is used to filter arrays and maps by a boolean expression.
+.SS Related Operators
+.IP \[bu] 2
+equals / not equals (\f[C]==\f[R], \f[C]!=\f[R]) operators
+here (https://mikefarah.gitbook.io/yq/operators/equals)
+.IP \[bu] 2
+comparison (\f[C]>=\f[R], \f[C]<\f[R] etc) operators
+here (https://mikefarah.gitbook.io/yq/operators/compare)
+.IP \[bu] 2
+boolean operators (\f[C]and\f[R], \f[C]or\f[R], \f[C]any\f[R] etc)
+here (https://mikefarah.gitbook.io/yq/operators/boolean-operators)
+.SS Select elements from array using wildcard prefix
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- goat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(. == \[dq]*at\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+goat
+\f[R]
+.fi
+.SS Select elements from array using wildcard suffix
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- go-kart
+- goat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(. == \[dq]go*\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+go-kart
+goat
+\f[R]
+.fi
+.SS Select elements from array using wildcard prefix and suffix
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- ago
+- go
+- meow
+- going
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(. == \[dq]*go*\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+ago
+go
+going
+\f[R]
+.fi
+.SS Select elements from array with regular expression
+.PP
+See more regular expression examples under the \f[C]string\f[R] operator
+docs (https://mikefarah.gitbook.io/yq/operators/string-operators).
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- this_0
+- not_this
+- nor_0_this
+- thisTo_4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(test(\[dq][a-zA-Z]+_[0-9]$\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+this_0
+thisTo_4
+\f[R]
+.fi
+.SS Select items from a map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+things: cat
+bob: goat
+horse: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(. == \[dq]cat\[dq] or test(\[dq]og$\[dq]))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+dog
+\f[R]
+.fi
+.SS Use select and with_entries to filter map keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: bob
+legs: 2
+game: poker
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_entries(select(.key | test(\[dq]ame$\[dq])))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name: bob
+game: poker
+\f[R]
+.fi
+.SS Select multiple items in a map and update
+.PP
+Note the brackets around the entire LHS.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  things: cat
+  bob: goat
+  horse: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.a.[] | select(. == \[dq]cat\[dq] or . == \[dq]goat\[dq])) |= \[dq]rabbit\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  things: rabbit
+  bob: rabbit
+  horse: dog
+\f[R]
+.fi
+.SH Shuffle
+.PP
+Shuffles an array.
+Note that this command does \f[I]not\f[R] use a cryptographically secure
+random number generator to randomise the array order.
+.SS Shuffle array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+- 4
+- 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]shuffle\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 5
+- 2
+- 4
+- 1
+- 3
+\f[R]
+.fi
+.SS Shuffle array in place
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool:
+  - 1
+  - 2
+  - 3
+  - 4
+  - 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].cool |= shuffle\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool:
+  - 5
+  - 2
+  - 4
+  - 1
+  - 3
+\f[R]
+.fi
+.SH Slice/Splice Array
+.PP
+The slice array operator takes an array as input and returns a subarray.
+Like the \f[C]jq\f[R] equivalent, \f[C].[10:15]\f[R] will return an
+array of length 5, starting from index 10 inclusive, up to index 15
+exclusive.
+Negative numbers count backwards from the end of the array.
+.PP
+You may leave out the first or second number, which will refer to the
+start or end of the array respectively.
+.SS Slicing arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- frog
+- cow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[1:3]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- dog
+- frog
+\f[R]
+.fi
+.SS Slicing arrays - without the first number
+.PP
+Starts from the start of the array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- frog
+- cow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[:2]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- cat
+- dog
+\f[R]
+.fi
+.SS Slicing arrays - without the second number
+.PP
+Finishes at the end of the array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- frog
+- cow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[2:]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- frog
+- cow
+\f[R]
+.fi
+.SS Slicing arrays - use negative numbers to count backwards from the end
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- frog
+- cow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[1:-1]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- dog
+- frog
+\f[R]
+.fi
+.SS Inserting into the middle of an array
+.PP
+using an expression to find the index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- frog
+- cow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.[] | select(. == \[dq]dog\[dq]) | key + 1) as $pos | .[0:($pos)] + [\[dq]rabbit\[dq]] + .[$pos:]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- cat
+- dog
+- rabbit
+- frog
+- cow
+\f[R]
+.fi
+.SH Sort Keys
+.PP
+The Sort Keys operator sorts maps by their keys (based on their string
+value).
+This operator does not do anything to arrays or scalars (so you can
+easily recursively apply it to all maps).
+.PP
+Sort is particularly useful for diffing two different yaml documents:
+.IP
+.nf
+\f[C]
+yq -i -P \[aq]sort_keys(..)\[aq] file1.yml
+yq -i -P \[aq]sort_keys(..)\[aq] file2.yml
+diff file1.yml file2.yml
+\f[R]
+.fi
+.PP
+Note that \f[C]yq\f[R] does not yet consider anchors when sorting by
+keys - this may result in invalid yaml documents if you are using merge
+anchors.
+.PP
+For more advanced sorting, use \f[C]to_entries\f[R] to convert the map
+to an array, then sort/process the array as you like (e.g.\ using
+\f[C]sort_by\f[R]) and convert back to a map using
+\f[C]from_entries\f[R].
+See
+here (https://mikefarah.gitbook.io/yq/operators/entries#custom-sort-map-keys)
+for an example.
+.SS Sort keys of map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+c: frog
+a: blah
+b: bing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_keys(.)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: blah
+b: bing
+c: frog
+\f[R]
+.fi
+.SS Sort keys recursively
+.PP
+Note the array elements are left unsorted, but maps inside arrays are
+sorted
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+bParent:
+  c: dog
+  array:
+    - 3
+    - 1
+    - 2
+aParent:
+  z: donkey
+  x:
+    - c: yum
+      b: delish
+    - b: ew
+      a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_keys(..)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+aParent:
+  x:
+    - b: delish
+      c: yum
+    - a: apple
+      b: ew
+  z: donkey
+bParent:
+  array:
+    - 3
+    - 1
+    - 2
+  c: dog
+\f[R]
+.fi
+.SH Sort
+.PP
+Sorts an array.
+Use \f[C]sort\f[R] to sort an array as is, or \f[C]sort_by(exp)\f[R] to
+sort by a particular expression (e.g.\ subfield).
+.PP
+To sort by descending order, pipe the results through the
+\f[C]reverse\f[R] operator after sorting.
+.PP
+Note that at this stage, \f[C]yq\f[R] only sorts scalar fields.
+.SS Sort by string field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: banana
+- a: cat
+- a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: apple
+- a: banana
+- a: cat
+\f[R]
+.fi
+.SS Sort by multiple fields
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: dog
+- a: cat
+  b: banana
+- a: cat
+  b: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a, .b)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: cat
+  b: apple
+- a: cat
+  b: banana
+- a: dog
+\f[R]
+.fi
+.SS Sort descending by string field
+.PP
+Use sort with reverse to sort in descending order.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: banana
+- a: cat
+- a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a) | reverse\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: cat
+- a: banana
+- a: apple
+\f[R]
+.fi
+.SS Sort array in place
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool:
+  - a: banana
+  - a: cat
+  - a: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].cool |= sort_by(.a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool:
+  - a: apple
+  - a: banana
+  - a: cat
+\f[R]
+.fi
+.SS Sort array of objects by key
+.PP
+Note that you can give sort_by complex expressions, not just paths
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cool:
+  - b: banana
+  - a: banana
+  - c: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].cool |= sort_by(keys | .[0])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cool:
+  - a: banana
+  - b: banana
+  - c: banana
+\f[R]
+.fi
+.SS Sort is stable
+.PP
+Note the order of the elements is unchanged when equal in sorting.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: banana
+  b: 1
+- a: banana
+  b: 2
+- a: banana
+  b: 3
+- a: banana
+  b: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: banana
+  b: 1
+- a: banana
+  b: 2
+- a: banana
+  b: 3
+- a: banana
+  b: 4
+\f[R]
+.fi
+.SS Sort by numeric field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: 10
+- a: 100
+- a: 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort_by(.a)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: 1
+- a: 10
+- a: 100
+\f[R]
+.fi
+.SS Sort by custom date field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: 12-Jun-2011
+- a: 23-Dec-2010
+- a: 10-Aug-2011
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]02-Jan-2006\[dq]; sort_by(.a))\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: 23-Dec-2010
+- a: 12-Jun-2011
+- a: 10-Aug-2011
+\f[R]
+.fi
+.SS Sort, nulls come first
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 8
+- 3
+- null
+- 6
+- true
+- false
+- cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]sort\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- null
+- false
+- true
+- 3
+- 6
+- 8
+- cat
+\f[R]
+.fi
+.SH Split into Documents
+.PP
+This operator splits all matches into separate documents
+.SS Split empty
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]split_doc\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SS Split array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: cat
+- b: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | split_doc\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+---
+b: dog
+\f[R]
+.fi
+.SH String Operators
+.SS RegEx
+.PP
+This uses Golang\[cq]s native regex functions under the hood - See their
+docs (https://github.com/google/re2/wiki/Syntax) for the supported
+syntax.
+.PP
+Case insensitive tip: prefix the regex with \f[C](?i)\f[R] -
+e.g.\ \f[C]test(\[dq](?i)cats)\[dq]\f[R].
+.SS match(regEx)
+.PP
+This operator returns the substring match details of the given regEx.
+.SS capture(regEx)
+.PP
+Capture returns named RegEx capture groups in a map.
+Can be more convenient than \f[C]match\f[R] depending on what you are
+doing.
+.SS test(regEx)
+.PP
+Returns true if the string matches the RegEx, false otherwise.
+.SS sub(regEx, replacement)
+.PP
+Substitutes matched substrings.
+The first parameter is the regEx to match substrings within the original
+string.
+The second parameter specifies what to replace those matches with.
+This can refer to capture groups from the first RegEx.
+.SS String blocks, bash and newlines
+.PP
+Bash is notorious for chomping on precious trailing newline characters,
+making it tricky to set strings with newlines properly.
+In particular, the \f[C]$( exp )\f[R] \f[I]will trim trailing
+newlines\f[R].
+.PP
+For instance to get this yaml:
+.IP
+.nf
+\f[C]
+a: |
+  cat
+\f[R]
+.fi
+.PP
+Using \f[C]$( exp )\f[R] won\[cq]t work, as it will trim the trailing
+newline.
+.IP
+.nf
+\f[C]
+m=$(echo \[dq]cat\[rs]n\[dq]) yq -n \[aq].a = strenv(m)\[aq]
+a: cat
+\f[R]
+.fi
+.PP
+However, using printf works:
+.IP
+.nf
+\f[C]
+printf -v m \[dq]cat\[rs]n\[dq] ; m=\[dq]$m\[dq] yq -n \[aq].a = strenv(m)\[aq]
+a: |
+  cat
+\f[R]
+.fi
+.PP
+As well as having multiline expressions:
+.IP
+.nf
+\f[C]
+m=\[dq]cat
+\[dq]  yq -n \[aq].a = strenv(m)\[aq]
+a: |
+  cat
+\f[R]
+.fi
+.PP
+Similarly, if you\[cq]re trying to set the content from a file, and want
+a trailing newline:
+.IP
+.nf
+\f[C]
+IFS= read -rd \[aq]\[aq] output < <(cat my_file)
+output=$output ./yq \[aq].data.values = strenv(output)\[aq] first.yml
+\f[R]
+.fi
+.SS Interpolation
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+value: things
+another: stuff
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].message = \[dq]I like \[rs](.value) and \[rs](.another)\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+value: things
+another: stuff
+message: I like things and stuff
+\f[R]
+.fi
+.SS Interpolation - not a string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+value:
+  an: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].message = \[dq]I like \[rs](.value)\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+value:
+  an: apple
+message: \[aq]I like an: apple\[aq]
+\f[R]
+.fi
+.SS To up (upper) case
+.PP
+Works with unicode characters
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\['a]gua
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]upcase\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\['A]GUA
+\f[R]
+.fi
+.SS To down (lower) case
+.PP
+Works with unicode characters
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\['A]gUA
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]downcase\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\['a]gua
+\f[R]
+.fi
+.SS Join strings
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- meow
+- 1
+- null
+- true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]join(\[dq]; \[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat; meow; 1; ; true
+\f[R]
+.fi
+.SS Trim strings
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- \[aq] cat\[aq]
+- \[aq]dog \[aq]
+- \[aq] cow cow \[aq]
+- horse
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | trim\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+dog
+cow cow
+horse
+\f[R]
+.fi
+.SS Match string
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo bar foo
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]match(\[dq]foo\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+string: foo
+offset: 0
+length: 3
+captures: []
+\f[R]
+.fi
+.SS Match string, case insensitive
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo bar FOO
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][match(\[dq](?i)foo\[dq]; \[dq]g\[dq])]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- string: foo
+  offset: 0
+  length: 3
+  captures: []
+- string: FOO
+  offset: 8
+  length: 3
+  captures: []
+\f[R]
+.fi
+.SS Match with global capture group
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+abc abc
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][match(\[dq](ab)(c)\[dq]; \[dq]g\[dq])]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- string: abc
+  offset: 0
+  length: 3
+  captures:
+    - string: ab
+      offset: 0
+      length: 2
+    - string: c
+      offset: 2
+      length: 1
+- string: abc
+  offset: 4
+  length: 3
+  captures:
+    - string: ab
+      offset: 4
+      length: 2
+    - string: c
+      offset: 6
+      length: 1
+\f[R]
+.fi
+.SS Match with named capture groups
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo bar foo foo  foo
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][match(\[dq]foo (?P<bar123>bar)? foo\[dq]; \[dq]g\[dq])]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- string: foo bar foo
+  offset: 0
+  length: 11
+  captures:
+    - string: bar
+      offset: 4
+      length: 3
+      name: bar123
+- string: foo  foo
+  offset: 12
+  length: 8
+  captures:
+    - string: null
+      offset: -1
+      length: 0
+      name: bar123
+\f[R]
+.fi
+.SS Capture named groups into a map
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+xyzzy-14
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]capture(\[dq](?P<a>[a-z]+)-(?P<n>[0-9]+)\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: xyzzy
+n: \[dq]14\[dq]
+\f[R]
+.fi
+.SS Match without global flag
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]match(\[dq]cat\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+string: cat
+offset: 0
+length: 3
+captures: []
+\f[R]
+.fi
+.SS Match with global flag
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][match(\[dq]cat\[dq]; \[dq]g\[dq])]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- string: cat
+  offset: 0
+  length: 3
+  captures: []
+- string: cat
+  offset: 4
+  length: 3
+  captures: []
+\f[R]
+.fi
+.SS Test using regex
+.PP
+Like jq\[cq]s equivalent, this works like match but only returns
+true/false instead of full match details
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | test(\[dq]at\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+true
+false
+\f[R]
+.fi
+.SS Substitute / Replace string
+.PP
+This uses Golang\[cq]s regex, described
+here (https://github.com/google/re2/wiki/Syntax).
+Note the use of \f[C]|=\f[R] to run in context of the current string
+value.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: dogs are great
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a |= sub(\[dq]dogs\[dq], \[dq]cats\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cats are great
+\f[R]
+.fi
+.SS Substitute / Replace string with regex
+.PP
+This uses Golang\[cq]s regex, described
+here (https://github.com/google/re2/wiki/Syntax).
+Note the use of \f[C]|=\f[R] to run in context of the current string
+value.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: heat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= sub(\[dq](a)\[dq], \[dq]${1}r\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cart
+b: heart
+\f[R]
+.fi
+.SS Custom types: that are really strings
+.PP
+When custom tags are encountered, yq will try to decode the underlying
+type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse cat
+b: !goat heat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= sub(\[dq](a)\[dq], \[dq]${1}r\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse cart
+b: !goat heart
+\f[R]
+.fi
+.SS Split strings
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat; meow; 1; ; true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]split(\[dq]; \[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- cat
+- meow
+- \[dq]1\[dq]
+- \[dq]\[dq]
+- \[dq]true\[dq]
+\f[R]
+.fi
+.SS Split strings one match
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+word
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]split(\[dq]; \[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- word
+\f[R]
+.fi
+.SS To string
+.PP
+Note that you may want to force \f[C]yq\f[R] to leave scalar values
+wrapped by passing in \f[C]--unwrapScalar=false\f[R] or \f[C]-r=f\f[R]
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- true
+- null
+- \[ti]
+- cat
+- an: object
+- - array
+  - 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |= to_string\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- \[dq]1\[dq]
+- \[dq]true\[dq]
+- \[dq]null\[dq]
+- \[dq]\[ti]\[dq]
+- cat
+- \[dq]an: object\[dq]
+- \[dq]- array\[rs]n- 2\[dq]
+\f[R]
+.fi
+.SH Style
+.PP
+The style operator can be used to get or set the style of nodes
+(e.g.\ string style, yaml style).
+Use this to control the formatting of the document in yaml.
+.SS Update and set style of a particular node (simple)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: thing
+  c: something
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b = \[dq]new\[dq] | .a.b style=\[dq]double\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: \[dq]new\[dq]
+  c: something
+\f[R]
+.fi
+.SS Update and set style of a particular node using path variables
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: thing
+  c: something
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with(.a.b ; . = \[dq]new\[dq] | . style=\[dq]double\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: \[dq]new\[dq]
+  c: something
+\f[R]
+.fi
+.SS Set tagged style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]tagged\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+!!map
+a: !!str cat
+b: !!int 5
+c: !!float 3.2
+e: !!bool true
+f: !!seq
+  - !!int 1
+  - !!int 2
+  - !!int 3
+g: !!map
+  something: !!str cool
+\f[R]
+.fi
+.SS Set double quote style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]double\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[dq]cat\[dq]
+b: \[dq]5\[dq]
+c: \[dq]3.2\[dq]
+e: \[dq]true\[dq]
+f:
+  - \[dq]1\[dq]
+  - \[dq]2\[dq]
+  - \[dq]3\[dq]
+g:
+  something: \[dq]cool\[dq]
+\f[R]
+.fi
+.SS Set double quote style on map keys too
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]... style=\[dq]double\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\[dq]a\[dq]: \[dq]cat\[dq]
+\[dq]b\[dq]: \[dq]5\[dq]
+\[dq]c\[dq]: \[dq]3.2\[dq]
+\[dq]e\[dq]: \[dq]true\[dq]
+\[dq]f\[dq]:
+  - \[dq]1\[dq]
+  - \[dq]2\[dq]
+  - \[dq]3\[dq]
+\[dq]g\[dq]:
+  \[dq]something\[dq]: \[dq]cool\[dq]
+\f[R]
+.fi
+.SS Set single quote style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]single\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[aq]cat\[aq]
+b: \[aq]5\[aq]
+c: \[aq]3.2\[aq]
+e: \[aq]true\[aq]
+f:
+  - \[aq]1\[aq]
+  - \[aq]2\[aq]
+  - \[aq]3\[aq]
+g:
+  something: \[aq]cool\[aq]
+\f[R]
+.fi
+.SS Set literal quote style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]literal\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: |-
+  cat
+b: |-
+  5
+c: |-
+  3.2
+e: |-
+  true
+f:
+  - |-
+    1
+  - |-
+    2
+  - |-
+    3
+g:
+  something: |-
+    cool
+\f[R]
+.fi
+.SS Set folded quote style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]folded\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: >-
+  cat
+b: >-
+  5
+c: >-
+  3.2
+e: >-
+  true
+f:
+  - >-
+    1
+  - >-
+    2
+  - >-
+    3
+g:
+  something: >-
+    cool
+\f[R]
+.fi
+.SS Set flow quote style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. style=\[dq]flow\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{a: cat, b: 5, c: 3.2, e: true, f: [1, 2, 3], g: {something: cool}}
+\f[R]
+.fi
+.SS Reset style - or pretty print
+.PP
+Set empty (default) quote style, note the usage of \f[C]...\f[R] to
+match keys too.
+Note that there is a \f[C]--prettyPrint/-P\f[R] short flag for this.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{a: cat, \[dq]b\[dq]: 5, \[aq]c\[aq]: 3.2, \[dq]e\[dq]: true,  f: [1,2,3], \[dq]g\[dq]: { something: \[dq]cool\[dq]} }
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]... style=\[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f:
+  - 1
+  - 2
+  - 3
+g:
+  something: cool
+\f[R]
+.fi
+.SS Set style relatively with assign-update
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: single
+b: double
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] style |= .\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: \[aq]single\[aq]
+b: \[dq]double\[dq]
+\f[R]
+.fi
+.SS Read style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+{a: \[dq]cat\[dq], b: \[aq]thing\[aq]}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | style\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+flow
+double
+single
+\f[R]
+.fi
+.SH Subtract
+.PP
+You can use subtract to subtract numbers as well as remove elements from
+an array.
+.SS Array subtraction
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq][1,2] - [2,3]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+\f[R]
+.fi
+.SS Array subtraction with nested array
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq][[1], 1, 2] - [[1], 3]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 1
+- 2
+\f[R]
+.fi
+.SS Array subtraction with nested object
+.PP
+Note that order of the keys does not matter
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- a: b
+  c: d
+- a: b
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]. - [{\[dq]c\[dq]: \[dq]d\[dq], \[dq]a\[dq]: \[dq]b\[dq]}]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- a: b
+\f[R]
+.fi
+.SS Number subtraction - float
+.PP
+If the lhs or rhs are floats then the expression will be calculated with
+floats.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 4.5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a - .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: -1.5
+b: 4.5
+\f[R]
+.fi
+.SS Number subtraction - int
+.PP
+If both the lhs and rhs are ints then the expression will be calculated
+with ints.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 4
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a = .a - .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: -1
+b: 4
+\f[R]
+.fi
+.SS Decrement numbers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 3
+b: 5
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] -= 1\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2
+b: 4
+\f[R]
+.fi
+.SS Date subtraction
+.PP
+You can subtract durations from dates.
+Assumes RFC3339 date time format, see date-time
+operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: 2021-01-01T03:10:00Z
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a -= \[dq]3h10m\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: 2021-01-01T00:00:00Z
+\f[R]
+.fi
+.SS Date subtraction - custom format
+.PP
+Use with_dtf to specify your datetime format.
+See date-time
+operators (https://mikefarah.gitbook.io/yq/operators/date-time-operators)
+for more information.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 6:00AM GMT
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with_dtf(\[dq]Monday, 02-Jan-06 at 3:04PM MST\[dq], .a -= \[dq]3h1m\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Saturday, 15-Dec-01 at 2:59AM GMT
+\f[R]
+.fi
+.SS Custom types: that are really numbers
+.PP
+When custom tags are encountered, yq will try to decode the underlying
+type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: !horse 2
+b: !goat 1
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a -= .b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !horse 1
+b: !goat 1
+\f[R]
+.fi
+.SH Tag
+.PP
+The tag operator can be used to get or set the tag of nodes
+(e.g.\ \f[C]!!str\f[R], \f[C]!!int\f[R], \f[C]!!bool\f[R]).
+.SS Get tag
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | tag\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+!!map
+!!str
+!!int
+!!float
+!!bool
+!!seq
+\f[R]
+.fi
+.SS type is an alias for tag
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+f: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. | type\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+!!map
+!!str
+!!int
+!!float
+!!bool
+!!seq
+\f[R]
+.fi
+.SS Set custom tag
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: str
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a tag = \[dq]!!mikefarah\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: !!mikefarah str
+\f[R]
+.fi
+.SS Find numbers and convert them to strings
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+b: 5
+c: 3.2
+e: true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.. | select(tag == \[dq]!!int\[dq])) tag= \[dq]!!str\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: cat
+b: \[dq]5\[dq]
+c: 3.2
+e: true
+\f[R]
+.fi
+.SH To Number
+.PP
+Parses the input as a number.
+yq will try to parse values as an int first, failing that it will try
+float.
+Values that are already ints or floats will be left alone.
+.SS Converts strings to numbers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- \[dq]3\[dq]
+- \[dq]3.1\[dq]
+- \[dq]-1e3\[dq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | to_number\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+3
+3.1
+-1e3
+\f[R]
+.fi
+.SS Doesn\[cq]t change numbers
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 3
+- 3.1
+- -1e3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | to_number\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+3
+3.1
+-1e3
+\f[R]
+.fi
+.SS Cannot convert null
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq].a.b | to_number\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Error: cannot convert node value [null] at path a.b of tag !!null to number
+\f[R]
+.fi
+.SH Traverse (Read)
+.PP
+This is the simplest (and perhaps most used) operator.
+It is used to navigate deeply into yaml structures.
+.SS Simple map navigation
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: apple
+\f[R]
+.fi
+.SS Splat
+.PP
+Often used to pipe children into other operators
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- b: apple
+- c: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+b: apple
+c: banana
+\f[R]
+.fi
+.SS Optional Splat
+.PP
+Just like splat, but won\[cq]t error if you run it against scalars
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SS Special characters
+.PP
+Use quotes with square brackets around path elements with special
+characters
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\[dq]{}\[dq]: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[\[dq]{}\[dq]]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+frog
+\f[R]
+.fi
+.SS Nested special characters
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  \[dq]key.withdots\[dq]:
+    \[dq]another.key\[dq]: apple
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a[\[dq]key.withdots\[dq]][\[dq]another.key\[dq]]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+apple
+\f[R]
+.fi
+.SS Keys with spaces
+.PP
+Use quotes with square brackets around path elements with special
+characters
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\[dq]red rabbit\[dq]: frog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[\[dq]red rabbit\[dq]]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+frog
+\f[R]
+.fi
+.SS Dynamic keys
+.PP
+Expressions within [] can be used to dynamically lookup / calculate keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+b: apple
+apple: crispy yum
+banana: soft yum
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[.b]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+crispy yum
+\f[R]
+.fi
+.SS Children don\[cq]t exist
+.PP
+Nodes are added dynamically while traversing
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+c: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+null
+\f[R]
+.fi
+.SS Optional identifier
+.PP
+Like jq, does not output an error when the yaml is not an array or
+object as expected
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a?\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+\f[R]
+.fi
+.SS Wildcard matching
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  cat: apple
+  mad: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.\[dq]*a*\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+apple
+things
+\f[R]
+.fi
+.SS Aliases
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cat
+  c: frog
+b: *cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+*cat
+\f[R]
+.fi
+.SS Traversing aliases with splat
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cat
+  c: frog
+b: *cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+frog
+\f[R]
+.fi
+.SS Traversing aliases explicitly
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: &cat
+  c: frog
+b: *cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].b.c\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+frog
+\f[R]
+.fi
+.SS Traversing arrays by index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 1
+- 2
+- 3
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[0]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+\f[R]
+.fi
+.SS Traversing nested arrays by index
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+[[], [cat]]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[1][0]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Maps with numeric keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+2: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[2]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Maps with non existing numeric keys
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: b
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[0]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+null
+\f[R]
+.fi
+.SS Traversing merge anchors
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar.a\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo_a
+\f[R]
+.fi
+.SS Traversing merge anchors with override
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar.c\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo_c
+\f[R]
+.fi
+.SS Traversing merge anchors with local override
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar.thing\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foobar_thing
+\f[R]
+.fi
+.SS Splatting merge anchors
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobar[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+foo_c
+foo_a
+foobar_thing
+\f[R]
+.fi
+.SS Traversing merge anchor lists
+.PP
+Note that the later merge anchors override previous ones
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobarList.thing\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bar_thing
+\f[R]
+.fi
+.SS Splatting merge anchor lists
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+foo: &foo
+  a: foo_a
+  thing: foo_thing
+  c: foo_c
+bar: &bar
+  b: bar_b
+  thing: bar_thing
+  c: bar_c
+foobarList:
+  b: foobarList_b
+  !!merge <<:
+    - *foo
+    - *bar
+  c: foobarList_c
+foobar:
+  c: foobar_c
+  !!merge <<: *foo
+  thing: foobar_thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].foobarList[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+bar_b
+foo_a
+bar_thing
+foobarList_c
+\f[R]
+.fi
+.SS Select multiple indices
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  - a
+  - b
+  - c
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a[0, 2]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a
+c
+\f[R]
+.fi
+.SH Union
+.PP
+This operator is used to combine different results together.
+.SS Combine scalars
+.PP
+Running
+.IP
+.nf
+\f[C]
+yq --null-input \[aq]1, true, \[dq]cat\[dq]\[aq]
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+1
+true
+cat
+\f[R]
+.fi
+.SS Combine selected paths
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: fieldA
+b: fieldB
+c: fieldC
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a, .c\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+fieldA
+fieldC
+\f[R]
+.fi
+.SH Unique
+.PP
+This is used to filter out duplicated items in an array.
+Note that the original order of the array is maintained.
+.SS Unique array of scalars (string/numbers)
+.PP
+Note that unique maintains the original order of the array.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- 2
+- 1
+- 3
+- 2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- 2
+- 1
+- 3
+\f[R]
+.fi
+.SS Unique nulls
+.PP
+Unique works on the node value, so it considers different
+representations of nulls to be different
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- \[ti]
+- null
+- \[ti]
+- null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- \[ti]
+- null
+\f[R]
+.fi
+.SS Unique all nulls
+.PP
+Run against the node tag to unique all the nulls
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- \[ti]
+- null
+- \[ti]
+- null
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique_by(tag)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- \[ti]
+\f[R]
+.fi
+.SS Unique array objects
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: harry
+  pet: cat
+- name: billy
+  pet: dog
+- name: harry
+  pet: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: harry
+  pet: cat
+- name: billy
+  pet: dog
+\f[R]
+.fi
+.SS Unique array of objects by a field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: harry
+  pet: cat
+- name: billy
+  pet: dog
+- name: harry
+  pet: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique_by(.name)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: harry
+  pet: cat
+- name: billy
+  pet: dog
+\f[R]
+.fi
+.SS Unique array of arrays
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- - cat
+  - dog
+- - cat
+  - sheep
+- - cat
+  - dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]unique\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- - cat
+  - dog
+- - cat
+  - sheep
+\f[R]
+.fi
+.SH Variable Operators
+.PP
+Like the \f[C]jq\f[R] equivalents, variables are sometimes required for
+the more complex expressions (or swapping values between fields).
+.PP
+Note that there is also an additional \f[C]ref\f[R] operator that holds
+a reference (instead of a copy) of the path, allowing you to make
+multiple changes to the same path.
+.SS Single value variable
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a as $foo | $foo\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+\f[R]
+.fi
+.SS Multi value variable
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- cat
+- dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] as $foo | $foo\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat
+dog
+\f[R]
+.fi
+.SS Using variables as a lookup
+.PP
+Example taken from
+jq (https://stedolan.github.io/jq/manual/#Variable/SymbolicBindingOperator:...as$identifier%7C...)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+\[dq]posts\[dq]:
+  - \[dq]title\[dq]: First post
+    \[dq]author\[dq]: anon
+  - \[dq]title\[dq]: A well-written article
+    \[dq]author\[dq]: person1
+\[dq]realnames\[dq]:
+  \[dq]anon\[dq]: Anonymous Coward
+  \[dq]person1\[dq]: Person McPherson
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].realnames as $names | .posts[] | {\[dq]title\[dq]:.title, \[dq]author\[dq]: $names[.author]}\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+title: First post
+author: Anonymous Coward
+title: A well-written article
+author: Person McPherson
+\f[R]
+.fi
+.SS Using variables to swap values
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a: a_value
+b: b_value
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a as $x  | .b as $y | .b = $x | .a = $y\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: b_value
+b: a_value
+\f[R]
+.fi
+.SS Use ref to reference a path repeatedly
+.PP
+Note: You may find the \f[C]with\f[R] operator more useful.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: thing
+  c: something
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].a.b ref $x | $x = \[dq]new\[dq] | $x style=\[dq]double\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: \[dq]new\[dq]
+  c: something
+\f[R]
+.fi
+.SH With
+.PP
+Use the \f[C]with\f[R] operator to conveniently make multiple updates to
+a deeply nested path, or to update array elements relatively to each
+other.
+The first argument expression sets the root context, and the second
+expression runs against that root context.
+.SS Update and style
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  deeply:
+    nested: value
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with(.a.deeply.nested; . = \[dq]newValue\[dq] | . style=\[dq]single\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  deeply:
+    nested: \[aq]newValue\[aq]
+\f[R]
+.fi
+.SS Update multiple deeply nested properties
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+a:
+  deeply:
+    nested: value
+    other: thing
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with(.a.deeply; .nested = \[dq]newValue\[dq] | .other= \[dq]newThing\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  deeply:
+    nested: newValue
+    other: newThing
+\f[R]
+.fi
+.SS Update array elements relatively
+.PP
+The second expression runs with each element of the array as its
+contextual root.
+This allows you to make updates relative to the element.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myArray:
+  - a: apple
+  - a: banana
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with(.myArray[]; .b = .a + \[dq] yum\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myArray:
+  - a: apple
+    b: apple yum
+  - a: banana
+    b: banana yum
+\f[R]
+.fi
+.SH JSON
+.PP
+Encode and decode to and from JSON.
+Supports multiple JSON documents in a single file (e.g.\ NDJSON).
+.PP
+Note that YAML is a superset of (single document) JSON - so you
+don\[cq]t have to use the JSON parser to read JSON when there is only
+one JSON document in the input.
+You will probably want to pretty print the result in this case, to get
+idiomatic YAML styling.
+.SS Parse json: simple
+.PP
+JSON is a subset of yaml, so all you need to do is prettify the output
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]cat\[dq]: \[dq]meow\[dq]}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+cat: meow
+\f[R]
+.fi
+.SS Parse json: complex
+.PP
+JSON is a subset of yaml, so all you need to do is prettify the output
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]a\[dq]:\[dq]Easy! as one two three\[dq],\[dq]b\[dq]:{\[dq]c\[dq]:2,\[dq]d\[dq]:[3,4]}}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a: Easy! as one two three
+b:
+  c: 2
+  d:
+    - 3
+    - 4
+\f[R]
+.fi
+.SS Encode json: simple
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: meow
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=json \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{
+  \[dq]cat\[dq]: \[dq]meow\[dq]
+}
+\f[R]
+.fi
+.SS Encode json: simple - in one line
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: meow # this is a comment, and it will be dropped.
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=json -I=0 \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{\[dq]cat\[dq]:\[dq]meow\[dq]}
+\f[R]
+.fi
+.SS Encode json: comments
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: meow # this is a comment, and it will be dropped.
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=json \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{
+  \[dq]cat\[dq]: \[dq]meow\[dq]
+}
+\f[R]
+.fi
+.SS Encode json: anchors
+.PP
+Anchors are dereferenced
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: &ref meow
+anotherCat: *ref
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=json \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{
+  \[dq]cat\[dq]: \[dq]meow\[dq],
+  \[dq]anotherCat\[dq]: \[dq]meow\[dq]
+}
+\f[R]
+.fi
+.SS Encode json: multiple results
+.PP
+Each matching node is converted into a json doc.
+This is best used with 0 indent (json document per line)
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+things: [{stuff: cool}, {whatever: cat}]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=json -I=0 \[aq].things[]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{\[dq]stuff\[dq]:\[dq]cool\[dq]}
+{\[dq]whatever\[dq]:\[dq]cat\[dq]}
+\f[R]
+.fi
+.SS Roundtrip JSON Lines / NDJSON
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]}
+{\[dq]a number\[dq]: 4}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json -o=json -I=0 sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq]]}
+{\[dq]a number\[dq]:4}
+\f[R]
+.fi
+.SS Roundtrip multi-document JSON
+.PP
+The parser can also handle multiple multi-line json documents in a
+single file (despite this not being in the JSON Lines / NDJSON spec).
+Typically you would have one entire JSON document per line, but the
+parser also supports multiple multi-line json documents
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{
+    \[dq]this\[dq]: \[dq]is a multidoc json file\[dq]
+}
+{
+    \[dq]it\[dq]: [
+        \[dq]has\[dq],
+        \[dq]consecutive\[dq],
+        \[dq]json documents\[dq]
+    ]
+}
+{
+    \[dq]a number\[dq]: 4
+}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json -o=json -I=2 sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{
+  \[dq]this\[dq]: \[dq]is a multidoc json file\[dq]
+}
+{
+  \[dq]it\[dq]: [
+    \[dq]has\[dq],
+    \[dq]consecutive\[dq],
+    \[dq]json documents\[dq]
+  ]
+}
+{
+  \[dq]a number\[dq]: 4
+}
+\f[R]
+.fi
+.SS Update a specific document in a multi-document json
+.PP
+Documents are indexed by the \f[C]documentIndex\f[R] or \f[C]di\f[R]
+operator.
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]}
+{\[dq]a number\[dq]: 4}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json -o=json -I=0 \[aq](select(di == 1) | .each ) += \[dq]cool\[dq]\[aq] sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq],\[dq]cool\[dq]]}
+{\[dq]a number\[dq]:4}
+\f[R]
+.fi
+.SS Find and update a specific document in a multi-document json
+.PP
+Use expressions as you normally would.
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]}
+{\[dq]a number\[dq]: 4}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json -o=json -I=0 \[aq](select(has(\[dq]each\[dq])) | .each ) += \[dq]cool\[dq]\[aq] sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]:\[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]:[\[dq]line is a valid json document\[dq],\[dq]cool\[dq]]}
+{\[dq]a number\[dq]:4}
+\f[R]
+.fi
+.SS Decode JSON Lines / NDJSON
+.PP
+Given a sample.json file of:
+.IP
+.nf
+\f[C]
+{\[dq]this\[dq]: \[dq]is a multidoc json file\[dq]}
+{\[dq]each\[dq]: [\[dq]line is a valid json document\[dq]]}
+{\[dq]a number\[dq]: 4}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=json sample.json
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+this: is a multidoc json file
+---
+each:
+  - line is a valid json document
+---
+a number: 4
+\f[R]
+.fi
+.SH CSV
+.PP
+Encode/Decode/Roundtrip CSV and TSV files.
+.SS Encode
+.PP
+Currently supports arrays of homogeneous flat objects, that is: no
+nesting and it assumes the \f[I]first\f[R] object has all the keys
+required:
+.IP
+.nf
+\f[C]
+- name: Bobo
+  type: dog
+- name: Fifi
+  type: cat
+\f[R]
+.fi
+.PP
+As well as arrays of arrays of scalars (strings/numbers/booleans):
+.IP
+.nf
+\f[C]
+- [Bobo, dog]
+- [Fifi, cat]
+\f[R]
+.fi
+.SS Decode
+.PP
+Decode assumes the first CSV/TSV row is the header row, and all rows
+beneath are the entries.
+The data will be decoded into an array of objects, using the header row
+values as keys.
+.IP
+.nf
+\f[C]
+name,type
+Bobo,dog
+Fifi,cat
+\f[R]
+.fi
+.SS Encode CSV simple
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- [i, like, csv]
+- [because, excel, is, cool]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=csv sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+i,like,csv
+because,excel,is,cool
+\f[R]
+.fi
+.SS Encode TSV simple
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- [i, like, csv]
+- [because, excel, is, cool]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=tsv sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+i   like    csv
+because excel   is  cool
+\f[R]
+.fi
+.SS Encode array of objects to csv
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  likesApples: true
+  height: 168.8
+- name: Samantha\[aq]s Rabbit
+  numberOfCats: 2
+  likesApples: false
+  height: -188.8
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=csv sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name,numberOfCats,likesApples,height
+Gary,1,true,168.8
+Samantha\[aq]s Rabbit,2,false,-188.8
+\f[R]
+.fi
+.SS Encode array of objects to custom csv format
+.PP
+Add the header row manually, then we convert each object into an
+array of values - resulting in an array of arrays.
+Pick the columns and call the header whatever you like.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  likesApples: true
+  height: 168.8
+- name: Samantha\[aq]s Rabbit
+  numberOfCats: 2
+  likesApples: false
+  height: -188.8
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=csv \[aq][[\[dq]Name\[dq], \[dq]Number of Cats\[dq]]] +  [.[] | [.name, .numberOfCats ]]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+Name,Number of Cats
+Gary,1
+Samantha\[aq]s Rabbit,2
+\f[R]
+.fi
+.SS Encode array of objects to csv - missing fields behaviour
+.PP
+First entry is used to determine the headers, and it is missing
+`likesApples', so it is not included in the csv.
+Second entry does not have `numberOfCats' so that is blank
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  height: 168.8
+- name: Samantha\[aq]s Rabbit
+  height: -188.8
+  likesApples: false
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=csv sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name,numberOfCats,height
+Gary,1,168.8
+Samantha\[aq]s Rabbit,,-188.8
+\f[R]
+.fi
+.SS Parse CSV into an array of objects
+.PP
+First row is assumed to be the header row.
+By default, entries with YAML/JSON formatting will be parsed!
+.PP
+Given a sample.csv file of:
+.IP
+.nf
+\f[C]
+name,numberOfCats,likesApples,height,facts
+Gary,1,true,168.8,cool: true
+Samantha\[aq]s Rabbit,2,false,-188.8,tall: indeed
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=csv sample.csv
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  likesApples: true
+  height: 168.8
+  facts:
+    cool: true
+- name: Samantha\[aq]s Rabbit
+  numberOfCats: 2
+  likesApples: false
+  height: -188.8
+  facts:
+    tall: indeed
+\f[R]
+.fi
+.SS Parse CSV into an array of objects, no auto-parsing
+.PP
+First row is assumed to be the header row.
+Entries with YAML/JSON will be left as strings.
+.PP
+Given a sample.csv file of:
+.IP
+.nf
+\f[C]
+name,numberOfCats,likesApples,height,facts
+Gary,1,true,168.8,cool: true
+Samantha\[aq]s Rabbit,2,false,-188.8,tall: indeed
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=csv --csv-auto-parse=f sample.csv
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  likesApples: true
+  height: 168.8
+  facts: \[aq]cool: true\[aq]
+- name: Samantha\[aq]s Rabbit
+  numberOfCats: 2
+  likesApples: false
+  height: -188.8
+  facts: \[aq]tall: indeed\[aq]
+\f[R]
+.fi
+.SS Parse TSV into an array of objects
+.PP
+First row is assumed to be the header row.
+.PP
+Given a sample.tsv file of:
+.IP
+.nf
+\f[C]
+name    numberOfCats    likesApples height
+Gary    1   true    168.8
+Samantha\[aq]s Rabbit   2   false   -188.8
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=tsv sample.tsv
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: Gary
+  numberOfCats: 1
+  likesApples: true
+  height: 168.8
+- name: Samantha\[aq]s Rabbit
+  numberOfCats: 2
+  likesApples: false
+  height: -188.8
+\f[R]
+.fi
+.SS Round trip
+.PP
+Given a sample.csv file of:
+.IP
+.nf
+\f[C]
+name,numberOfCats,likesApples,height
+Gary,1,true,168.8
+Samantha\[aq]s Rabbit,2,false,-188.8
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=csv -o=csv \[aq](.[] | select(.name == \[dq]Gary\[dq]) | .numberOfCats) = 3\[aq] sample.csv
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name,numberOfCats,likesApples,height
+Gary,3,true,168.8
+Samantha\[aq]s Rabbit,2,false,-188.8
+\f[R]
+.fi
+.SH Formatting Expressions
+.PP
+\f[C]From version v4.41+\f[R]
+.PP
+You can put expressions into \f[C].yq\f[R] files, use whitespace and
+comments to break up complex expressions and explain what\[cq]s going
+on.
+.SS Using expression files and comments
+.PP
+Note that you can execute the file directly - but make sure you make the
+expression file executable.
+.PP
+Given a sample.yaml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: old
+\f[R]
+.fi
+.PP
+And an `update.yq' expression file of:
+.IP
+.nf
+\f[C]
+#! yq
+
+# This is a yq expression that updates the map
+# for several great reasons outlined here.
+
+\&.a.b = \[dq]new\[dq] # line comment here
+| .a.c = \[dq]frog\[dq]
+
+# Now good things will happen.
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+\&./update.yq sample.yaml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: new
+  c: frog
+\f[R]
+.fi
+.SS Flags in expression files
+.PP
+You can specify flags on the shebang line, this only works when
+executing the file directly.
+.PP
+Given a sample.yaml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: old
+\f[R]
+.fi
+.PP
+And an `update.yq' expression file of:
+.IP
+.nf
+\f[C]
+#! yq -oj
+
+# This is a yq expression that updates the map
+# for several great reasons outlined here.
+
+\&.a.b = \[dq]new\[dq] # line comment here
+| .a.c = \[dq]frog\[dq]
+
+# Now good things will happen.
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+\&./update.yq sample.yaml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+{
+  \[dq]a\[dq]: {
+    \[dq]b\[dq]: \[dq]new\[dq],
+    \[dq]c\[dq]: \[dq]frog\[dq]
+  }
+}
+\f[R]
+.fi
+.SS Commenting out yq expressions
+.PP
+Note that \f[C]c\f[R] is no longer set to `frog'.
+In this example we\[cq]re calling yq directly and passing the expression
+file into \f[C]--from-file\f[R], this is no different from executing the
+expression file directly.
+.PP
+Given a sample.yaml file of:
+.IP
+.nf
+\f[C]
+a:
+  b: old
+\f[R]
+.fi
+.PP
+And an `update.yq' expression file of:
+.IP
+.nf
+\f[C]
+#! yq
+# This is a yq expression that updates the map
+# for several great reasons outlined here.
+
+\&.a.b = \[dq]new\[dq] # line comment here
+# | .a.c = \[dq]frog\[dq]
+
+# Now good things will happen.
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq --from-file update.yq sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: new
+\f[R]
+.fi
+.SS Basic input example
+.PP
+Given a sample.lua file of:
+.IP
+.nf
+\f[C]
+return {
+    [\[dq]country\[dq]] = \[dq]Australia\[dq]; -- this place
+    [\[dq]cities\[dq]] = {
+        \[dq]Sydney\[dq],
+        \[dq]Melbourne\[dq],
+        \[dq]Brisbane\[dq],
+        \[dq]Perth\[dq],
+    };
+};
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.lua
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+country: Australia
+cities:
+  - Sydney
+  - Melbourne
+  - Brisbane
+  - Perth
+\f[R]
+.fi
+.SS Basic output example
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+---
+country: Australia # this place
+cities:
+- Sydney
+- Melbourne
+- Brisbane
+- Perth
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=lua \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+return {
+    [\[dq]country\[dq]] = \[dq]Australia\[dq]; -- this place
+    [\[dq]cities\[dq]] = {
+        \[dq]Sydney\[dq],
+        \[dq]Melbourne\[dq],
+        \[dq]Brisbane\[dq],
+        \[dq]Perth\[dq],
+    };
+};
+\f[R]
+.fi
+.SS Unquoted keys
+.PP
+Uses the \f[C]--lua-unquoted\f[R] option to produce a nicer-looking
+output.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+---
+country: Australia # this place
+cities:
+- Sydney
+- Melbourne
+- Brisbane
+- Perth
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=lua --lua-unquoted \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+return {
+    country = \[dq]Australia\[dq]; -- this place
+    cities = {
+        \[dq]Sydney\[dq],
+        \[dq]Melbourne\[dq],
+        \[dq]Brisbane\[dq],
+        \[dq]Perth\[dq],
+    };
+};
+\f[R]
+.fi
+.SS Globals
+.PP
+Uses the \f[C]--lua-globals\f[R] option to export the values into the
+global scope.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+---
+country: Australia # this place
+cities:
+- Sydney
+- Melbourne
+- Brisbane
+- Perth
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=lua --lua-globals \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+country = \[dq]Australia\[dq]; -- this place
+cities = {
+    \[dq]Sydney\[dq],
+    \[dq]Melbourne\[dq],
+    \[dq]Brisbane\[dq],
+    \[dq]Perth\[dq],
+};
+\f[R]
+.fi
+.SS Elaborate example
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+---
+hello: world
+tables:
+  like: this
+  keys: values
+  ? look: non-string keys
+  : True
+numbers:
+  - decimal: 12345
+  - hex: 0x7fabc123
+  - octal: 0o30
+  - float: 123.45
+  - infinity: .inf
+    plus_infinity: +.inf
+    minus_infinity: -.inf
+  - not: .nan
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=lua \[aq].\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+return {
+    [\[dq]hello\[dq]] = \[dq]world\[dq];
+    [\[dq]tables\[dq]] = {
+        [\[dq]like\[dq]] = \[dq]this\[dq];
+        [\[dq]keys\[dq]] = \[dq]values\[dq];
+        [{
+            [\[dq]look\[dq]] = \[dq]non-string keys\[dq];
+        }] = true;
+    };
+    [\[dq]numbers\[dq]] = {
+        {
+            [\[dq]decimal\[dq]] = 12345;
+        },
+        {
+            [\[dq]hex\[dq]] = 0x7fabc123;
+        },
+        {
+            [\[dq]octal\[dq]] = 24;
+        },
+        {
+            [\[dq]float\[dq]] = 123.45;
+        },
+        {
+            [\[dq]infinity\[dq]] = (1/0);
+            [\[dq]plus_infinity\[dq]] = (1/0);
+            [\[dq]minus_infinity\[dq]] = (-1/0);
+        },
+        {
+            [\[dq]not\[dq]] = (0/0);
+        },
+    };
+};
+\f[R]
+.fi
+.SH Properties
+.PP
+Encode/Decode/Roundtrip to/from a property file.
+Line comments on value nodes will be copied across.
+.PP
+By default, empty maps and arrays are not encoded - see below for an
+example on how to encode a value for these.
+.SS Encode properties
+.PP
+Note that empty arrays and maps are not encoded by default.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets.0 = cat
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+\f[R]
+.fi
+.SS Encode properties with array brackets
+.PP
+Declare the \[en]properties-array-brackets flag to give array paths in
+brackets (e.g.\ SpringBoot).
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props --properties-array-brackets sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets[0] = cat
+person.pets[1].nested[0] = list entry
+person.food[0] = pizza
+\f[R]
+.fi
+.SS Encode properties - custom separator
+.PP
+Use the \[en]properties-customer-separator flag to specify your own
+key/value separator.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props --properties-customer-separator=\[dq] :\[at] \[dq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name :\[at] Mike Wazowski
+
+# comments on array values appear
+person.pets.0 :\[at] cat
+person.pets.1.nested.0 :\[at] list entry
+person.food.0 :\[at] pizza
+\f[R]
+.fi
+.SS Encode properties: scalar encapsulation
+.PP
+Note that string values with blank characters in them are encapsulated
+with double quotes
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props --unwrapScalar=false sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = \[dq]Mike Wazowski\[dq]
+
+# comments on array values appear
+person.pets.0 = cat
+person.pets.1.nested.0 = \[dq]list entry\[dq]
+person.food.0 = pizza
+\f[R]
+.fi
+.SS Encode properties: no comments
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props \[aq]... comments = \[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+person.name = Mike Wazowski
+person.pets.0 = cat
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+\f[R]
+.fi
+.SS Encode properties: include empty maps and arrays
+.PP
+Use a yq expression to set the empty maps and sequences to your desired
+value.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+person: # neither do comments on maps
+    name: Mike Wazowski # comments on values appear
+    pets: 
+    - cat # comments on array values appear
+    - nested:
+        - list entry
+    food: [pizza] # comments on arrays do not
+emptyArray: []
+emptyMap: []
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=props \[aq](.. | select( (tag == \[dq]!!map\[dq] or tag ==\[dq]!!seq\[dq]) and length == 0)) = \[dq]\[dq]\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets.0 = cat
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+emptyArray = 
+emptyMap = 
+\f[R]
+.fi
+.SS Decode properties
+.PP
+Given a sample.properties file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets.0 = cat
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=props sample.properties
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+person:
+  # block comments come through
+  # comments on values appear
+  name: Mike Wazowski
+  pets:
+    # comments on array values appear
+    - cat
+    - nested:
+        - list entry
+  food:
+    - pizza
+\f[R]
+.fi
+.SS Decode properties: numbers
+.PP
+All values are assumed to be strings when parsing properties, but you
+can use the \f[C]from_yaml\f[R] operator on all the string values to
+autoparse into the correct type.
+.PP
+Given a sample.properties file of:
+.IP
+.nf
+\f[C]
+a.b = 10
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=props \[aq] (.. | select(tag == \[dq]!!str\[dq])) |= from_yaml\[aq] sample.properties
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+a:
+  b: 10
+\f[R]
+.fi
+.SS Decode properties - array should be a map
+.PP
+If you have a numeric map key in your property files, use array_to_map
+to convert them to maps.
+.PP
+Given a sample.properties file of:
+.IP
+.nf
+\f[C]
+things.10 = mike
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=props \[aq].things |= array_to_map\[aq] sample.properties
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+things:
+  10: mike
+\f[R]
+.fi
+.SS Roundtrip
+.PP
+Given a sample.properties file of:
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets.0 = cat
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -p=props -o=props \[aq].person.pets.0 = \[dq]dog\[dq]\[aq] sample.properties
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# block comments come through
+# comments on values appear
+person.name = Mike Wazowski
+
+# comments on array values appear
+person.pets.0 = dog
+person.pets.1.nested.0 = list entry
+person.food.0 = pizza
+\f[R]
+.fi
+.SH Recipes
+.PP
+These examples are intended to show how you can use multiple operators
+together so you get an idea of how you can perform complex data
+manipulation.
+.PP
+Please see the details operator
+docs (https://mikefarah.gitbook.io/yq/operators) for details on each
+individual operator.
+.SS Find items in an array
+.PP
+We have an array and we want to find the elements with a particular
+name.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Foo
+  numBuckets: 0
+- name: Bar
+  numBuckets: 0
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] | select(.name == \[dq]Foo\[dq])\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name: Foo
+numBuckets: 0
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+\f[C].[]\f[R] splats the array, and puts all the items in the context.
+.IP \[bu] 2
+These items are then piped (\f[C]|\f[R]) into
+\f[C]select(.name == \[dq]Foo\[dq])\f[R] which will select all the nodes
+that have a name property set to `Foo'.
+.IP \[bu] 2
+See the select (https://mikefarah.gitbook.io/yq/operators/select)
+operator for more information.
+.SS Find and update items in an array
+.PP
+We have an array and we want to \f[I]update\f[R] the elements with a
+particular name.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- name: Foo
+  numBuckets: 0
+- name: Bar
+  numBuckets: 0
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](.[] | select(.name == \[dq]Foo\[dq]) | .numBuckets) |= . + 1\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- name: Foo
+  numBuckets: 1
+- name: Bar
+  numBuckets: 0
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+Following from the example above\f[C].[]\f[R] splats the array, selects
+filters the items.
+.IP \[bu] 2
+We then pipe (\f[C]|\f[R]) that into \f[C].numBuckets\f[R], which will
+select that field from all the matching items
+.IP \[bu] 2
+Splat, select and the field are all in brackets, that whole expression
+is passed to the \f[C]|=\f[R] operator as the left hand side expression,
+with \f[C]. + 1\f[R] as the right hand side expression.
+.IP \[bu] 2
+\f[C]|=\f[R] is the operator that updates fields relative to their own
+value, which is referenced as dot (\f[C].\f[R]).
+.IP \[bu] 2
+The expression \f[C]. + 1\f[R] increments the numBuckets counter.
+.IP \[bu] 2
+See the assign (https://mikefarah.gitbook.io/yq/operators/assign-update)
+and add (https://mikefarah.gitbook.io/yq/operators/add) operators for
+more information.
+.SS Deeply prune a tree
+.PP
+Say we are only interested in child1 and child2, and want to filter
+everything else out.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+parentA:
+  - bob
+parentB:
+  child1: i am child1
+  child3: hiya
+parentC:
+  childX: cool
+  child2: me child2
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq](
+  .. | # recurse through all the nodes
+  select(has(\[dq]child1\[dq]) or has(\[dq]child2\[dq])) | # match parents that have either child1 or child2
+  (.child1, .child2) | # select those children
+  select(.) # filter out nulls
+) as $i ireduce({};  # using that set of nodes, create a new result map
+  setpath($i | path; $i) # and put in each node, using its original path
+)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+parentB:
+  child1: i am child1
+parentC:
+  child2: me child2
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+Find all the matching child1 and child2 nodes
+.IP \[bu] 2
+Using ireduce, create a new map using just those nodes
+.IP \[bu] 2
+Set each node into the new map using its original path
+.SS Multiple or complex updates to items in an array
+.PP
+We have an array and we want to \f[I]update\f[R] the elements with a
+particular name in reference to its type.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myArray:
+  - name: Foo
+    type: cat
+  - name: Bar
+    type: dog
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq]with(.myArray[]; .name = .name + \[dq] - \[dq] + .type)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myArray:
+  - name: Foo - cat
+    type: cat
+  - name: Bar - dog
+    type: dog
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+The with operator will effectively loop through each given item in the
+first given expression, and run the second expression against it.
+.IP \[bu] 2
+\f[C].myArray[]\f[R] splats the array in \f[C]myArray\f[R].
+So \f[C]with\f[R] will run against each item in that array
+.IP \[bu] 2
+\f[C].name = .name + \[dq] - \[dq] + .type\f[R] this expression is run
+against every item, updating the name to be a concatenation of the
+original name as well as the type.
+.IP \[bu] 2
+See the with (https://mikefarah.gitbook.io/yq/operators/with) operator
+for more information and examples.
+.SS Sort an array by a field
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+myArray:
+  - name: Foo
+    numBuckets: 1
+  - name: Bar
+    numBuckets: 0
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].myArray |= sort_by(.numBuckets)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+myArray:
+  - name: Bar
+    numBuckets: 0
+  - name: Foo
+    numBuckets: 1
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+We want to resort \f[C].myArray\f[R].
+.IP \[bu] 2
+\f[C]sort_by\f[R] works by piping an array into it, and it pipes out a
+sorted array.
+.IP \[bu] 2
+So, we use \f[C]|=\f[R] to update \f[C].myArray\f[R].
+This is the same as doing
+\f[C].myArray = (.myArray | sort_by(.numBuckets))\f[R]
+.SS Filter, flatten, sort and unique
+.PP
+Lets find the unique set of names from the document.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+- type: foo
+  names:
+    - Fred
+    - Catherine
+- type: bar
+  names:
+    - Zelda
+- type: foo
+  names: Fred
+- type: foo
+  names: Ava
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq][.[] | select(.type == \[dq]foo\[dq]) | .names] | flatten | sort | unique\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- Ava
+- Catherine
+- Fred
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+\f[C].[] | select(.type == \[dq]foo\[dq]) | .names\f[R] will select the
+array elements of type \[lq]foo\[rq]
+.IP \[bu] 2
+Splat \f[C].[]\f[R] will unwrap the array and match all the items.
+We need to do this so we can work on the child items, for instance,
+filter items out using the \f[C]select\f[R] operator.
+.IP \[bu] 2
+But we still want the final results back into an array.
+So after we\[cq]re done working on the children, we wrap everything
+back into an array using square brackets around the expression.
+\f[C][.[] | select(.type == \[dq]foo\[dq]) | .names]\f[R]
+.IP \[bu] 2
+Now we have an array of all the `names' values.
+Which includes arrays of strings as well as strings on their own.
+.IP \[bu] 2
+Pipe \f[C]|\f[R] this array through \f[C]flatten\f[R].
+This will flatten nested arrays.
+So now we have a flat list of all the name value strings
+.IP \[bu] 2
+Next we pipe \f[C]|\f[R] that through \f[C]sort\f[R] and then
+\f[C]unique\f[R] to get a sorted, unique list of the names!
+.IP \[bu] 2
+See the flatten (https://mikefarah.gitbook.io/yq/operators/flatten),
+sort (https://mikefarah.gitbook.io/yq/operators/sort) and
+unique (https://mikefarah.gitbook.io/yq/operators/unique) for more
+information and examples.
+.SS Export as environment variables (script), or any custom format
+.PP
+Given a yaml document, lets output a script that will configure
+environment variables with that data.
+This same approach can be used for exporting into custom formats.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+var0: string0
+var1: string1
+fruit:
+  - apple
+  - banana
+  - peach
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].[] |(
+    ( select(kind == \[dq]scalar\[dq]) | key + \[dq]=\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]),
+    ( select(kind == \[dq]seq\[dq]) | key + \[dq]=(\[dq] + (map(\[dq]\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]) | join(\[dq],\[dq])) + \[dq])\[dq])
+)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+var0=\[aq]string0\[aq]
+var1=\[aq]string1\[aq]
+fruit=(\[aq]apple\[aq],\[aq]banana\[aq],\[aq]peach\[aq])
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+\f[C].[]\f[R] matches all top level elements
+.IP \[bu] 2
+We need a string expression for each of the different types that will
+produce the bash syntax, we\[cq]ll use the union operator, to join them
+together
+.IP \[bu] 2
+Scalars, we just need the key and quoted value:
+\f[C]( select(kind == \[dq]scalar\[dq]) | key + \[dq]=\[aq]\[dq] + . + \[dq]\[aq]\[dq])\f[R]
+.IP \[bu] 2
+Sequences (or arrays) are trickier, we need to quote each value and
+\f[C]join\f[R] them with \f[C],\f[R]:
+\f[C]map(\[dq]\[aq]\[dq] + . + \[dq]\[aq]\[dq]) | join(\[dq],\[dq])\f[R]
+.SS Custom format with nested data
+.PP
+Like the previous example, but lets handle nested data structures.
+In this custom example, we\[cq]re going to join the property paths with
+_.
+The important thing to keep in mind is that our expression is not
+recursive (despite the data structure being so).
+Instead we match \f[I]all\f[R] elements on the tree and operate on them.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+simple: string0
+simpleArray:
+  - apple
+  - banana
+  - peach
+deep:
+  property: value
+  array:
+    - cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].. |(
+    ( select(kind == \[dq]scalar\[dq] and parent | kind != \[dq]seq\[dq]) | (path | join(\[dq]_\[dq])) + \[dq]=\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]),
+    ( select(kind == \[dq]seq\[dq]) | (path | join(\[dq]_\[dq])) + \[dq]=(\[dq] + (map(\[dq]\[aq]\[rs]\[aq]\[aq]\[dq] + . + \[dq]\[aq]\[rs]\[aq]\[aq]\[dq]) | join(\[dq],\[dq])) + \[dq])\[dq])
+)\[aq] sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+simple=\[aq]string0\[aq]
+deep_property=\[aq]value\[aq]
+simpleArray=(\[aq]apple\[aq],\[aq]banana\[aq],\[aq]peach\[aq])
+deep_array=(\[aq]cat\[aq])
+\f[R]
+.fi
+.SS Explanation:
+.IP \[bu] 2
+You\[cq]ll need to understand how the previous example works to
+understand this extension.
+.IP \[bu] 2
+\f[C]..\f[R] matches \f[I]all\f[R] elements, instead of \f[C].[]\f[R]
+from the previous example that just matches top level elements.
+.IP \[bu] 2
+Like before, we need a string expression for each of the different types
+that will produce the bash syntax, we\[cq]ll use the union operator, to
+join them together
+.IP \[bu] 2
+This time, however, our expression matches every node in the data
+structure.
+.IP \[bu] 2
+We only want to print scalars that are not in arrays (because we handle
+them separately), so we\[cq]ll add
+\f[C]and parent | kind != \[dq]seq\[dq]\f[R] to the select operator
+expression for scalars
+.IP \[bu] 2
+We don\[cq]t just want the key any more, we want the full path.
+So instead of \f[C]key\f[R] we have \f[C]path | join(\[dq]_\[dq])\f[R]
+.IP \[bu] 2
+The expression for sequences follows the same logic
+.SS Encode shell variables
+.PP
+Note that comments are dropped and values will be enclosed in single
+quotes as needed.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+# comment
+name: Mike Wazowski
+eyes:
+  color: turquoise
+  number: 1
+friends:
+  - James P. Sullivan
+  - Celia Mae
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=shell sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name=\[aq]Mike Wazowski\[aq]
+eyes_color=turquoise
+eyes_number=1
+friends_0=\[aq]James P. Sullivan\[aq]
+friends_1=\[aq]Celia Mae\[aq]
+\f[R]
+.fi
+.SS Encode shell variables: illegal variable names as keys.
+.PP
+Keys that would be illegal as variable keys are adapted.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+ascii_=_symbols: replaced with _
+\[dq]ascii_ _controls\[dq]: dropped (this example uses \[rs]t)
+nonascii_\[u05D0]_characters: dropped
+effort_expe\[~n]ded_t\[`o]_preserve_accented_latin_letters: moderate (via unicode NFKD)
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=shell sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+ascii___symbols=\[aq]replaced with _\[aq]
+ascii__controls=\[aq]dropped (this example uses \[rs]t)\[aq]
+nonascii__characters=dropped
+effort_expended_to_preserve_accented_latin_letters=\[aq]moderate (via unicode NFKD)\[aq]
+\f[R]
+.fi
+.SS Encode shell variables: empty values, arrays and maps
+.PP
+Empty values are encoded to empty variables, but empty arrays and maps
+are skipped.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+empty:
+  value:
+  array: []
+  map:   {}
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=shell sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+empty_value=
+\f[R]
+.fi
+.SS Encode shell variables: single quotes in values
+.PP
+Single quotes in values are encoded as `\[lq]'\[lq]\[cq] (close single
+quote, double-quoted single quote, open single quote).
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+name: Miles O\[aq]Brien
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=shell sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name=\[aq]Miles O\[aq]\[dq]\[aq]\[dq]\[aq]Brien\[aq]
+\f[R]
+.fi
+.SH TOML
+.PP
+Decode from TOML.
+Note that \f[C]yq\f[R] does not yet support outputting in TOML format
+(and therefore it cannot roundtrip)
+.SS Parse: Simple
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+A = \[dq]hello\[dq]
+B = 12
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+A: hello
+B: 12
+\f[R]
+.fi
+.SS Parse: Deep paths
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+person.name = \[dq]hello\[dq]
+person.address = \[dq]12 cat st\[dq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+person:
+  name: hello
+  address: 12 cat st
+\f[R]
+.fi
+.SS Encode: Scalar
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+person.name = \[dq]hello\[dq]
+person.address = \[dq]12 cat st\[dq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].person.name\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+hello
+\f[R]
+.fi
+.SS Parse: inline table
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+name = { first = \[dq]Tom\[dq], last = \[dq]Preston-Werner\[dq] }
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+name:
+  first: Tom
+  last: Preston-Werner
+\f[R]
+.fi
+.SS Parse: Array Table
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+[owner.contact]
+name = \[dq]Tom Preston-Werner\[dq]
+age = 36
+
+[[owner.addresses]]
+street = \[dq]first street\[dq]
+suburb = \[dq]ok\[dq]
+
+[[owner.addresses]]
+street = \[dq]second street\[dq]
+suburb = \[dq]nice\[dq]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+owner:
+  contact:
+    name: Tom Preston-Werner
+    age: 36
+  addresses:
+    - street: first street
+      suburb: ok
+    - street: second street
+      suburb: nice
+\f[R]
+.fi
+.SS Parse: Empty Table
+.PP
+Given a sample.toml file of:
+.IP
+.nf
+\f[C]
+[dependencies]
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.toml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+dependencies: {}
+\f[R]
+.fi
+.SH XML
+.PP
+Encode and decode to and from XML.
+Whitespace is not conserved for round trips - but the order of the
+fields is preserved.
+.PP
+Consecutive xml nodes with the same name are assumed to be arrays.
+.PP
+XML content data, attributes, processing instructions and directives are
+all created as plain fields.
+.PP
+This can be controlled by:
+.PP
+.TS
+tab(@);
+l l l.
+T{
+Flag
+T}@T{
+Default
+T}@T{
+Sample XML
+T}
+_
+T{
+\f[C]--xml-attribute-prefix\f[R]
+T}@T{
+\f[C]+\f[R] (changing to \f[C]+\[at]\f[R] soon)
+T}@T{
+Legs in \f[C]<cat legs=\[dq]4\[dq]/>\f[R]
+T}
+T{
+\f[C]--xml-content-name\f[R]
+T}@T{
+\f[C]+content\f[R]
+T}@T{
+Meow in \f[C]<cat>Meow <fur>true</fur></cat>\f[R]
+T}
+T{
+\f[C]--xml-directive-name\f[R]
+T}@T{
+\f[C]+directive\f[R]
+T}@T{
+\f[C]<!DOCTYPE config system \[dq]blah\[dq]>\f[R]
+T}
+T{
+\f[C]--xml-proc-inst-prefix\f[R]
+T}@T{
+\f[C]+p_\f[R]
+T}@T{
+\f[C]<?xml version=\[dq]1\[dq]?>\f[R]
+T}
+.TE
+.PP
+{% hint style=\[lq]warning\[rq] %} Default Attribute Prefix will be
+changing in v4.30! In order to avoid name conflicts (e.g.\ having an
+attribute named \[lq]content\[rq] will create a field that clashes with
+the default content name of \[lq]+content\[rq]) the attribute prefix
+will be changing to \[lq]+\[at]\[rq].
+.PP
+This will affect users that have not set their own prefix and are not
+roundtripping XML changes.
+.PP
+{% endhint %}
+.SS Encoder / Decoder flag options
+.PP
+In addition to the above flags, there are the following xml
+encoder/decoder options controlled by flags:
+.PP
+.TS
+tab(@);
+lw(23.3n) lw(23.3n) lw(23.3n).
+T{
+Flag
+T}@T{
+Default
+T}@T{
+Description
+T}
+_
+T{
+\f[C]--xml-strict-mode\f[R]
+T}@T{
+false
+T}@T{
+Strict mode enforces the requirements of the XML specification.
+When switched off the parser allows input containing common mistakes.
+See the Golang xml decoder (https://pkg.go.dev/encoding/xml#Decoder) for
+more details.
+T}
+T{
+\f[C]--xml-keep-namespace\f[R]
+T}@T{
+true
+T}@T{
+Keeps the namespace of attributes
+T}
+T{
+\f[C]--xml-raw-token\f[R]
+T}@T{
+true
+T}@T{
+Does not verify that start and end elements match and does not translate
+name space prefixes to their corresponding URLs.
+T}
+T{
+\f[C]--xml-skip-proc-inst\f[R]
+T}@T{
+false
+T}@T{
+Skips over processing instructions,
+e.g.\ \f[C]<?xml version=\[dq]1\[dq]?>\f[R]
+T}
+T{
+\f[C]--xml-skip-directives\f[R]
+T}@T{
+false
+T}@T{
+Skips over directives,
+e.g.\ \f[C]<!DOCTYPE config system \[dq]blah\[dq]>\f[R]
+T}
+.TE
+.PP
+See below for examples
+.SS Parse xml: simple
+.PP
+Notice how all the values are strings, see the next example on how you
+can fix that.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?>
+<cat>
+  <says>meow</says>
+  <legs>4</legs>
+  <cute>true</cute>
+</cat>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]
+cat:
+  says: meow
+  legs: \[dq]4\[dq]
+  cute: \[dq]true\[dq]
+\f[R]
+.fi
+.SS Parse xml: number
+.PP
+All values are assumed to be strings when parsing XML, but you can use
+the \f[C]from_yaml\f[R] operator on all the strings values to autoparse
+into the correct type.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?>
+<cat>
+  <says>meow</says>
+  <legs>4</legs>
+  <cute>true</cute>
+</cat>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq] (.. | select(tag == \[dq]!!str\[dq])) |= from_yaml\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]
+cat:
+  says: meow
+  legs: 4
+  cute: true
+\f[R]
+.fi
+.SS Parse xml: array
+.PP
+Consecutive nodes with identical xml names are assumed to be arrays.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?>
+<animal>cat</animal>
+<animal>goat</animal>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]
+animal:
+  - cat
+  - goat
+\f[R]
+.fi
+.SS Parse xml: force as an array
+.PP
+In XML, if your array has a single item, then yq doesn\[cq]t know
+it\[cq]s an array.
+This is how you can consistently force it to be an array.
+This handles the 3 scenarios of having nothing in the array, having a
+single item and having multiple.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<zoo><animal>cat</animal></zoo>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].zoo.animal |= ([] + .)\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+zoo:
+  animal:
+    - cat
+\f[R]
+.fi
+.SS Parse xml: force all as an array
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<zoo><thing><frog>boing</frog></thing></zoo>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].. |= [] + .\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+- zoo:
+    - thing:
+        - frog:
+            - boing
+\f[R]
+.fi
+.SS Parse xml: attributes
+.PP
+Attributes are converted to fields, with the default attribute prefix
+`+'.
+Use \f[C]--xml-attribute-prefix\f[R] to set your own.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?>
+<cat legs=\[dq]4\[dq]>
+  <legs>7</legs>
+</cat>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]
+cat:
+  +\[at]legs: \[dq]4\[dq]
+  legs: \[dq]7\[dq]
+\f[R]
+.fi
+.SS Parse xml: attributes with content
+.PP
+Content is added as a field, using the default content name of
+\f[C]+content\f[R].
+Use \f[C]--xml-content-name\f[R] to set your own.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]?>
+<cat legs=\[dq]4\[dq]>meow</cat>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq] encoding=\[dq]UTF-8\[dq]
+cat:
+  +content: meow
+  +\[at]legs: \[dq]4\[dq]
+\f[R]
+.fi
+.SS Parse xml: content split between comments/children
+.PP
+Multiple content texts are collected into a sequence.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<root>  value  <!-- comment-->anotherValue <a>frog</a> cool!</root>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+root:
+  +content: # comment
+    - value
+    - anotherValue
+    - cool!
+  a: frog
+\f[R]
+.fi
+.SS Parse xml: custom dtd
+.PP
+DTD entities are processed as directives.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE root [
+<!ENTITY writer \[dq]Blah.\[dq]>
+<!ENTITY copyright \[dq]Blah\[dq]>
+]>
+<root>
+    <item>&writer;&copyright;</item>
+</root>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE root [
+<!ENTITY writer \[dq]Blah.\[dq]>
+<!ENTITY copyright \[dq]Blah\[dq]>
+]>
+<root>
+  <item>&amp;writer;&amp;copyright;</item>
+</root>
+\f[R]
+.fi
+.SS Parse xml: skip custom dtd
+.PP
+DTDs are directives, skip over directives to skip DTDs.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE root [
+<!ENTITY writer \[dq]Blah.\[dq]>
+<!ENTITY copyright \[dq]Blah\[dq]>
+]>
+<root>
+    <item>&writer;&copyright;</item>
+</root>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq --xml-skip-directives \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<root>
+  <item>&amp;writer;&amp;copyright;</item>
+</root>
+\f[R]
+.fi
+.SS Parse xml: with comments
+.PP
+A best attempt is made to preserve comments.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<!-- before cat -->
+<cat>
+    <!-- in cat before -->
+    <x>3<!-- multi
+line comment 
+for x --></x>
+    <!-- before y -->
+    <y>
+        <!-- in y before -->
+        <d><!-- in d before -->z<!-- in d after --></d>
+        
+        <!-- in y after -->
+    </y>
+    <!-- in_cat_after -->
+</cat>
+<!-- after cat -->
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -oy \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+# before cat
+cat:
+  # in cat before
+  x: \[dq]3\[dq] # multi
+  # line comment 
+  # for x
+  # before y
+
+  y:
+    # in y before
+    # in d before
+    d: z # in d after
+    # in y after
+  # in_cat_after
+# after cat
+\f[R]
+.fi
+.SS Parse xml: keep attribute namespace
+.PP
+Defaults to true
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq --xml-keep-namespace=false \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xsi=\[dq]some-instance\[dq] schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.PP
+instead of
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.SS Parse xml: keep raw attribute namespace
+.PP
+Defaults to true
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq --xml-raw-token=false \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] some-instance:schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.PP
+instead of
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<map xmlns=\[dq]some-namespace\[dq] xmlns:xsi=\[dq]some-instance\[dq] xsi:schemaLocation=\[dq]some-url\[dq]></map>
+\f[R]
+.fi
+.SS Encode xml: simple
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat: purrs
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<cat>purrs</cat>
+\f[R]
+.fi
+.SS Encode xml: array
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+pets:
+  cat:
+    - purrs
+    - meows
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<pets>
+  <cat>purrs</cat>
+  <cat>meows</cat>
+</pets>
+\f[R]
+.fi
+.SS Encode xml: attributes
+.PP
+Fields with the matching xml-attribute-prefix are assumed to be
+attributes.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat:
+  +\[at]name: tiger
+  meows: true
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<cat name=\[dq]tiger\[dq]>
+  <meows>true</meows>
+</cat>
+\f[R]
+.fi
+.SS Encode xml: attributes with content
+.PP
+Fields with the matching xml-content-name is assumed to be content.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+cat:
+  +\[at]name: tiger
+  +content: cool
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<cat name=\[dq]tiger\[dq]>cool</cat>
+\f[R]
+.fi
+.SS Encode xml: comments
+.PP
+A best attempt is made to copy comments to xml.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
+#
+# header comment
+# above_cat
+#
+cat: # inline_cat
+  # above_array
+  array: # inline_array
+    - val1 # inline_val1
+    # above_val2
+    - val2 # inline_val2
+# below_cat
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<!--
+header comment
+above_cat
+-->
+<!-- inline_cat -->
+<cat><!-- above_array inline_array -->
+  <array>val1<!-- inline_val1 --></array>
+  <array><!-- above_val2 -->val2<!-- inline_val2 --></array>
+</cat><!-- below_cat -->
+\f[R]
+.fi
+.SS Encode: doctype and xml declaration
+.PP
+Use the special xml names to add/modify proc instructions and
+directives.
+.PP
+Given a sample.yml file of:
+.IP
+.nf
+\f[C]
++p_xml: version=\[dq]1.0\[dq]
++directive: \[aq]DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] \[aq]
+apple:
+  +p_coolioo: version=\[dq]1.0\[dq]
+  +directive: \[aq]CATYPE meow purr puss \[aq]
+  b: things
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq -o=xml sample.yml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] >
+<apple><?coolioo version=\[dq]1.0\[dq]?><!CATYPE meow purr puss >
+  <b>things</b>
+</apple>
+\f[R]
+.fi
+.SS Round trip: with comments
+.PP
+A best effort is made, but comment positions and white space are not
+preserved perfectly.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<!-- before cat -->
+<cat>
+    <!-- in cat before -->
+    <x>3<!-- multi
+line comment 
+for x --></x>
+    <!-- before y -->
+    <y>
+        <!-- in y before -->
+        <d><!-- in d before -->z<!-- in d after --></d>
+        
+        <!-- in y after -->
+    </y>
+    <!-- in_cat_after -->
+</cat>
+<!-- after cat -->
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<!-- before cat -->
+<cat><!-- in cat before -->
+  <x>3<!-- multi
+line comment 
+for x --></x><!-- before y -->
+  <y><!-- in y before
+in d before -->
+    <d>z<!-- in d after --></d><!-- in y after -->
+  </y><!-- in_cat_after -->
+</cat><!-- after cat -->
+\f[R]
+.fi
+.SS Roundtrip: with doctype and declaration
+.PP
+yq parses XML proc instructions and directives into nodes.
+Unfortunately the underlying XML parser loses whitespace information.
+.PP
+Given a sample.xml file of:
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] >
+<apple>
+  <?coolioo version=\[dq]1.0\[dq]?>
+  <!CATYPE meow purr puss >
+  <b>things</b>
+</apple>
+\f[R]
+.fi
+.PP
+then
+.IP
+.nf
+\f[C]
+yq \[aq].\[aq] sample.xml
+\f[R]
+.fi
+.PP
+will output
+.IP
+.nf
+\f[C]
+<?xml version=\[dq]1.0\[dq]?>
+<!DOCTYPE config SYSTEM \[dq]/etc/iwatch/iwatch.dtd\[dq] >
+<apple><?coolioo version=\[dq]1.0\[dq]?><!CATYPE meow purr puss >
+  <b>things</b>
+</apple>
+\f[R]
+.fi
+.SH AUTHORS
+Mike Farah.