diff --git a/.docker/docker-compose.yml b/.docker/docker-compose.yml index 2e2a41a065da226ac4dc9a3076d69be04d6f0d21..137e3776fcca7a618a7d3473c032b8fee5622ba0 100644 --- a/.docker/docker-compose.yml +++ b/.docker/docker-compose.yml @@ -8,6 +8,7 @@ volumes: upload-service-data: search-db-data: storage-service-data: + identity-service-data: services: dbrepo-metadata-db: @@ -66,7 +67,7 @@ services: MARIADB_ROOT_PASSWORD: "${AUTH_PASSWORD:-dbrepo}" healthcheck: test: mysqladmin ping --user="${AUTH_USERNAME:-root}" --password="${AUTH_PASSWORD:-dbrepo}" --silent - interval: 10s + interval: 15s timeout: 5s retries: 12 logging: @@ -76,19 +77,21 @@ services: restart: "no" container_name: dbrepo-auth-service hostname: auth-service - image: registry.datalab.tuwien.ac.at/dbrepo/auth-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/auth-service:1.4.5 healthcheck: test: curl -sSL 'http://0.0.0.0:8080/realms/dbrepo' | grep "dbrepo" || exit 1 - interval: 10s + interval: 15s timeout: 5s retries: 12 environment: AUTH_DB: "${AUTH_DB:-keycloak}" KC_DB_USERNAME: "${AUTH_USERNAME:-root}" KC_DB_PASSWORD: "${AUTH_PASSWORD:-dbrepo}" - KEYCLOAK_ADMIN: "${KEYCLOAK_ADMIN:-fda}" - KEYCLOAK_ADMIN_PASSWORD: "${KEYCLOAK_ADMIN_PASSWORD:-fda}" + KEYCLOAK_ADMIN: "${KEYCLOAK_ADMIN:-admin}" + KEYCLOAK_ADMIN_PASSWORD: "${KEYCLOAK_ADMIN_PASSWORD:-admin}" depends_on: + dbrepo-identity-service: + condition: service_healthy dbrepo-auth-db: condition: service_healthy logging: @@ -98,16 +101,14 @@ services: restart: "no" container_name: dbrepo-metadata-service hostname: metadata-service - image: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.5 volumes: - "${SHARED_VOLUME:-/tmp}:/tmp" environment: ADMIN_EMAIL: "${ADMIN_EMAIL:-noreply@localhost}" - ADMIN_PASSWORD: "${ADMIN_PASSWORD:-admin}" - ADMIN_USERNAME: "${ADMIN_USERNAME:-admin}" ANALYSE_SERVICE_ENDPOINT: "${ANALYSE_SERVICE_ENDPOINT:-http://gateway-service}" - AUTH_SERVICE_ADMIN: ${AUTH_SERVICE_ADMIN:-fda} - AUTH_SERVICE_ADMIN_PASSWORD: ${AUTH_SERVICE_ADMIN_PASSWORD:-fda} + AUTH_SERVICE_ADMIN: ${AUTH_SERVICE_ADMIN:-admin} + AUTH_SERVICE_ADMIN_PASSWORD: ${AUTH_SERVICE_ADMIN_PASSWORD:-admin} AUTH_SERVICE_CLIENT: ${AUTH_SERVICE_CLIENT:-dbrepo-client} AUTH_SERVICE_CLIENT_SECRET: ${AUTH_SERVICE_CLIENT:-MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG} AUTH_SERVICE_ENDPOINT: ${AUTH_SERVICE_ENDPOINT:-http://gateway-service/api/auth} @@ -115,11 +116,12 @@ services: BROKER_EXCHANGE_NAME: ${BROKER_EXCHANGE_NAME:-dbrepo} BROKER_QUEUE_NAME: ${BROKER_QUEUE_NAME:-dbrepo} BROKER_HOST: "${BROKER_ENDPOINT:-broker-service}" - BROKER_PASSWORD: ${BROKER_PASSWORD:-fda} + BROKER_PASSWORD: ${BROKER_PASSWORD:-admin} BROKER_PORT: ${BROKER_PORT:-5672} BROKER_SERVICE_ENDPOINT: ${BROKER_SERVICE_ENDPOINT:-http://gateway-service/admin/broker} - BROKER_USERNAME: ${BROKER_USERNAME:-fda} + BROKER_USERNAME: ${BROKER_USERNAME:-admin} BROKER_VIRTUALHOST: "${BROKER_VIRTUALHOST:-dbrepo}" + CROSSREF_ENDPOINT: "${CROSSREF_ENDPOINT:-http://data.crossref.org}" DATA_SERVICE_ENDPOINT: ${DATA_SERVICE_ENDPOINT:-http://data-service:8080} DELETED_RECORD: "${DELETED_RECORD:-persistent}" GRANULARITY: "${GRANULARITY:-YYYY-MM-DDThh:mm:ssZ}" @@ -132,13 +134,15 @@ services: METADATA_PASSWORD: "${METADATA_PASSWORD:-dbrepo}" PID_BASE: ${PID_BASE:-http://localhost/pid/} REPOSITORY_NAME: "${REPOSITORY_NAME:-Database Repository}" + ROR_ENDPOINT: "${ROR_ENDPOINT:-https://api.ror.org}" SEARCH_SERVICE_ENDPOINT: "${SEARCH_SERVICE_ENDPOINT:-http://gateway-service}" 
S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" SPARQL_CONNECTION_TIMEOUT: "${SPARQL_CONNECTION_TIMEOUT:-10000}" + SYSTEM_USERNAME: "${SYSTEM_USERNAME:-admin}" + SYSTEM_PASSWORD: "${SYSTEM_PASSWORD:-admin}" healthcheck: test: curl -sSL localhost:8080/actuator/health/liveness | grep 'UP' || exit 1 interval: 10s @@ -160,7 +164,7 @@ services: restart: "no" container_name: dbrepo-analyse-service hostname: analyse-service - image: registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.5 environment: ADMIN_PASSWORD: "${ADMIN_PASSWORD:-admin}" ADMIN_USERNAME: "${ADMIN_USERNAME:-admin}" @@ -170,9 +174,8 @@ services: GATEWAY_SERVICE_ENDPOINT: ${GATEWAY_SERVICE_ENDPOINT:-http://gateway-service} JWT_PUBKEY: "${JWT_PUBKEY:-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqqnHQ2BWWW9vDNLRCcxD++xZg/16oqMo/c1l+lcFEjjAIJjJp/HqrPYU/U9GvquGE6PbVFtTzW1KcKawOW+FJNOA3CGo8Q1TFEfz43B8rZpKsFbJKvQGVv1Z4HaKPvLUm7iMm8Hv91cLduuoWx6Q3DPe2vg13GKKEZe7UFghF+0T9u8EKzA/XqQ0OiICmsmYPbwvf9N3bCKsB/Y10EYmZRb8IhCoV9mmO5TxgWgiuNeCTtNCv2ePYqL/U0WvyGFW0reasIK8eg3KrAUj8DpyOgPOVBn3lBGf+3KFSYi+0bwZbJZWqbC/Xlk20Go1YfeJPRIt7ImxD27R/lNjgDO/MwIDAQAB}" S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" volumes: - "${SHARED_FILESYSTEM:-/tmp}:/tmp" @@ -189,13 +192,16 @@ services: container_name: dbrepo-broker-service hostname: broker-service image: docker.io/bitnami/rabbitmq:3.12-debian-12 + ports: + - 5672:5672 volumes: - ./dist/rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf + - ./dist/advanced.config:/etc/rabbitmq/advanced.config - ./dist/enabled_plugins:/etc/rabbitmq/enabled_plugins - ./dist/definitions.json:/app/definitions.json - broker-service-data:/bitnami/rabbitmq/mnesia depends_on: - dbrepo-auth-service: + dbrepo-identity-service: condition: service_healthy healthcheck: test: rabbitmq-diagnostics -q is_running | grep 'is fully booted and running' @@ -209,7 +215,7 @@ services: restart: "no" container_name: dbrepo-search-db hostname: search-db - image: registry.datalab.tuwien.ac.at/dbrepo/search-db:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/search-db:1.4.5 healthcheck: test: curl -sSL localhost:9200/_plugins/_security/health | jq .status | grep UP interval: 10s @@ -233,10 +239,8 @@ services: restart: "no" container_name: dbrepo-search-service hostname: search-service - image: registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.5 environment: - ADMIN_PASSWORD: "${ADMIN_PASSWORD:-admin}" - ADMIN_USERNAME: "${ADMIN_USERNAME:-admin}" AUTH_SERVICE_CLIENT: ${AUTH_SERVICE_CLIENT:-dbrepo-client} AUTH_SERVICE_CLIENT_SECRET: ${AUTH_SERVICE_CLIENT:-MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG} AUTH_SERVICE_ENDPOINT: ${AUTH_SERVICE_ENDPOINT:-http://auth-service:8080} @@ -252,13 +256,12 @@ services: restart: "no" container_name: dbrepo-data-db-sidecar hostname: data-db-sidecar - image: registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.4 + image: 
registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.5 environment: S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" S3_FILE_PATH: "${S3_FILE_PATH:-/tmp}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" volumes: - "${SHARED_FILESYSTEM:-/tmp}:/tmp" @@ -274,7 +277,7 @@ services: restart: "no" container_name: dbrepo-ui hostname: ui - image: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.5 depends_on: dbrepo-search-service: condition: service_started @@ -318,7 +321,7 @@ services: restart: "no" container_name: dbrepo-search-service-init hostname: search-service-init - image: registry.datalab.tuwien.ac.at/dbrepo/search-service-init:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/search-service-init:1.4.5 environment: GATEWAY_SERVICE_ENDPOINT: ${GATEWAY_SERVICE_ENDPOINT:-http://gateway-service} OPENSEARCH_HOST: ${OPENSEARCH_HOST:-search-db} @@ -341,6 +344,8 @@ services: volumes: - ./dist/s3_config.json:/app/s3_config.json - storage-service-data:/data + ports: + - "9000:9000" healthcheck: test: echo "cluster.check" | weed shell | grep "checking master.*ok" || exit 1 interval: 10s @@ -353,9 +358,10 @@ services: restart: "no" container_name: dbrepo-storage-service-init hostname: storage-service-init - image: registry.datalab.tuwien.ac.at/dbrepo/storage-service-init:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/storage-service-init:1.4.5 environment: - SEAWEEDFS_ENDPOINT: "${STORAGE_SEAWEEDFS_ENDPOINT:-storage-service:9333}" + WEED_CLUSTER_SW_MASTER: "${STORAGE_SERVICE_MASTER_ENDPOINT:-storage-service:9333}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" depends_on: dbrepo-storage-service: condition: service_healthy @@ -368,9 +374,11 @@ services: hostname: upload-service image: docker.io/tusproject/tusd:v2.4.0 command: - - "--base-path=/api/upload/files/" + - "-behind-proxy" + - "-max-size=2000000000" + - "-base-path=/api/upload/files/" - "-s3-endpoint=${STORAGE_ENDPOINT:-http://storage-service:9000}" - - "-s3-bucket=dbrepo-upload" + - "-s3-bucket=dbrepo" environment: AWS_ACCESS_KEY_ID: "${STORAGE_USERNAME:-seaweedfsadmin}" AWS_SECRET_ACCESS_KEY: "${STORAGE_PASSWORD:-seaweedfsadmin}" @@ -390,24 +398,22 @@ services: restart: "no" container_name: dbrepo-data-service hostname: data-service - image: registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.5 volumes: - "${SHARED_VOLUME:-/tmp}:/tmp" environment: - ADMIN_PASSWORD: "${ADMIN_PASSWORD:-admin}" - ADMIN_USERNAME: "${ADMIN_USERNAME:-admin}" - AUTH_SERVICE_ADMIN: ${AUTH_SERVICE_ADMIN:-fda} - AUTH_SERVICE_ADMIN_PASSWORD: ${AUTH_SERVICE_ADMIN_PASSWORD:-fda} + AUTH_SERVICE_ADMIN: ${AUTH_SERVICE_ADMIN:-admin} + AUTH_SERVICE_ADMIN_PASSWORD: ${AUTH_SERVICE_ADMIN_PASSWORD:-admin} AUTH_SERVICE_CLIENT: ${AUTH_SERVICE_CLIENT:-dbrepo-client} AUTH_SERVICE_CLIENT_SECRET: ${AUTH_SERVICE_CLIENT:-MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG} AUTH_SERVICE_ENDPOINT: ${AUTH_SERVICE_ENDPOINT:-http://auth-service:8080} BROKER_EXCHANGE_NAME: ${BROKER_EXCHANGE_NAME:-dbrepo} BROKER_QUEUE_NAME: ${BROKER_QUEUE_NAME:-dbrepo} BROKER_HOST: "${BROKER_ENDPOINT:-broker-service}" - BROKER_PASSWORD: ${BROKER_PASSWORD:-fda} + BROKER_PASSWORD: ${BROKER_PASSWORD:-admin} BROKER_PORT: ${BROKER_PORT:-5672} BROKER_SERVICE_ENDPOINT: 
${BROKER_SERVICE_ENDPOINT:-http://gateway-service/admin/broker} - BROKER_USERNAME: ${BROKER_USERNAME:-fda} + BROKER_USERNAME: ${BROKER_USERNAME:-admin} BROKER_VIRTUALHOST: "${BROKER_VIRTUALHOST:-dbrepo}" CONNECTION_TIMEOUT: ${CONNECTION_TIMEOUT:-60000} EXCHANGE_NAME: ${EXCHANGE_NAME:-dbrepo} @@ -422,11 +428,13 @@ services: REQUEUE_REJECTED: ${REQUEUE_REJECTED:-false} ROUTING_KEY: "${ROUTING_KEY:-dbrepo.#}" S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" S3_FILE_PATH: "${S3_FILE_PATH:-/tmp}" S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" + SYSTEM_USERNAME: "${SYSTEM_USERNAME:-admin}" + SYSTEM_PASSWORD: "${SYSTEM_PASSWORD:-admin}" healthcheck: test: curl -sSL localhost:8080/actuator/health/liveness | grep 'UP' || exit 1 interval: 10s diff --git a/.docs/api/analyse-service.md b/.docs/api/analyse-service.md index fe45e9492c4a7c53c024603690132e2dfa5aeec9..3f5921968c7d1ebfd7cc4563c4b3fc4dccf7b23f 100644 --- a/.docs/api/analyse-service.md +++ b/.docs/api/analyse-service.md @@ -6,7 +6,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.4`](https://hub.docker.com/r/dbrepo/analyse-service) + Image: [`registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.5`](https://hub.docker.com/r/dbrepo/analyse-service) * Ports: 5000/tcp * Prometheus: `http://<hostname>:5000/metrics` diff --git a/.docs/api/auth-service.md b/.docs/api/auth-service.md index 35c715fc1b6b6e16734a6059a9eaf388e53bb3ad..f6c32497c79727a5b4db8afaea51138e1ea2867f 100644 --- a/.docs/api/auth-service.md +++ b/.docs/api/auth-service.md @@ -6,7 +6,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/authentication-service:1.4.4`](https://hub.docker.com/r/dbrepo/authentication-service) + Image: [`quay.io/keycloak/keycloak:24.0`](quay.io/keycloak/keycloak) * Ports: 8080/tcp * UI: `http://<hostname>/api/auth/` diff --git a/.docs/api/data-db.md b/.docs/api/data-db.md index 3b2738f981eefd2749b95a40a347951ea9a0a39c..648640bb4c85413a7f5afbf65d1649a6e1706c3c 100644 --- a/.docs/api/data-db.md +++ b/.docs/api/data-db.md @@ -11,7 +11,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`dbrepo/data-db-sidecar:1.4.4`](https://hub.docker.com/r/dbrepo/data-db-sidecar) + Image: [`dbrepo/data-db-sidecar:1.4.5`](https://hub.docker.com/r/dbrepo/data-db-sidecar) * Ports: 8080/tcp diff --git a/.docs/api/data-service.md b/.docs/api/data-service.md index ab64c50d31dd29597f7c4bff956bee3cfeb66814..186732a472ba3218d0b65c3cc85e3653996040f0 100644 --- a/.docs/api/data-service.md +++ b/.docs/api/data-service.md @@ -6,7 +6,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.4`](https://hub.docker.com/r/dbrepo/data-service) + Image: [`registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.5`](https://hub.docker.com/r/dbrepo/data-service) * Ports: 9093/tcp * Info: `http://<hostname>:9093/actuator/info` diff --git a/.docs/api/metadata-service.md b/.docs/api/metadata-service.md index fa365219cc6d2c528197fc39354f5410e828166c..039f2fa097ac3ba89d56ff6d735fc453bc3bc505 100644 --- a/.docs/api/metadata-service.md +++ b/.docs/api/metadata-service.md @@ -6,7 +6,7 @@ author: Martin Weise !!! 
debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.4`](https://hub.docker.com/r/dbrepo/metadata-service) + Image: [`registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.5`](https://hub.docker.com/r/dbrepo/metadata-service) * Ports: 9099/tcp * Info: `http://<hostname>:9099/actuator/info` @@ -46,7 +46,7 @@ To activate DOI minting, pass your DataCite Fabrica credentials in the environme ```yaml title="docker-compose.yml" services: dbrepo-metadata-service: - image: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.5 environment: spring_profiles_active: doi DATACITE_URL: https://api.datacite.org diff --git a/.docs/api/python.md b/.docs/api/python.md index 3f1a8928542dd01b16e549482215f35fecf2a3ea..aa0c17fec9f2d24d7f03d3b107ddac9692df7a88 100644 --- a/.docs/api/python.md +++ b/.docs/api/python.md @@ -66,17 +66,17 @@ client.import_table_data(database_id=7, table_id=13, file_name_or_data_frame=df) ## Supported Features & Best-Practices - Manage user - account ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.4/api/#create-user-account)) + account ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.5/api/#create-user-account)) - Manage databases ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo//usage-overview/#create-database)) - Manage database access & - visibility ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.4/api/#create-database)) + visibility ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.5/api/#create-database)) - Import - dataset ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.4/api/#import-dataset)) + dataset ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.5/api/#import-dataset)) - Create persistent - identifiers ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.4/api/#assign-database-pid)) + identifiers ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.5/api/#assign-database-pid)) - Execute - queries ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.4/api/#export-subset)) + queries ([docs](https://www.ifs.tuwien.ac.at/infrastructures/dbrepo/1.4.5/api/#export-subset)) - Get data from tables/views/subsets ## Configure diff --git a/.docs/api/search-service.md b/.docs/api/search-service.md index b48be919d6acec14bbfb3783c6e085f2bcf3e1e9..5c16b499552c32498e9e618284624d53ccf84641 100644 --- a/.docs/api/search-service.md +++ b/.docs/api/search-service.md @@ -6,7 +6,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.4`](https://hub.docker.com/r/dbrepo/search-service) + Image: [`registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.5`](https://hub.docker.com/r/dbrepo/search-service) * Ports: 4000/tcp * Health: `http://<hostname>:4000/api/search/health` diff --git a/.docs/api/ui.md b/.docs/api/ui.md index e8d409b92330774560b1d712730557048fb823b7..4ac5c7bad871284cc322bf7be0c8585942f63ea5 100644 --- a/.docs/api/ui.md +++ b/.docs/api/ui.md @@ -6,7 +6,7 @@ author: Martin Weise !!! debug "Debug Information" - Image: [`registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.4`](https://hub.docker.com/r/dbrepo/ui) + Image: [`registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.5`](https://hub.docker.com/r/dbrepo/ui) * Ports: 3000/tcp @@ -35,7 +35,7 @@ if you use a Kubernetes deployment via ConfigMap and Volumes). 
```yaml title="docker-compose.yml" services: dbrepo-ui: - image: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.4 + image: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.5 volumes: - ./my_logo.png:/app/.output/public/my_logo.png ... diff --git a/.docs/installation.md b/.docs/installation.md index b6cb219cef3bbd97f5b8c7d54a91cbc52a19b8ad..bff622e419fb1e68e7928c0ad60ab9ae0d21bf28 100644 --- a/.docs/installation.md +++ b/.docs/installation.md @@ -11,7 +11,7 @@ author: Martin Weise If you have [Docker](https://docs.docker.com/engine/install/) already installed on your system, you can install DBRepo with: ```shell -curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/install.sh | bash +curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/install.sh | bash ``` Or perform a [custom install](#custom-install). @@ -47,7 +47,7 @@ are *not* recommended and not tested. In case you prefer a customized install, start by downloading the `docker-compose.yml` file used to define the services: ```bash -curl -O docker-compose.yml -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/.docker/docker-compose.yml +curl -O docker-compose.yml -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/.docker/docker-compose.yml ``` Create the folder `dist/` that hold necessary configuration files and download the Metadata Database schema and initial @@ -55,16 +55,16 @@ data to display the created Data Database container: ```bash mkdir -p dist -curl -O dist/setup-schema.sql -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-metadata-db/setup-schema.sql -curl -O dist/setup-data.sql -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-metadata-db/setup-data.sql +curl -O dist/setup-schema.sql -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-metadata-db/setup-schema.sql +curl -O dist/setup-data.sql -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-metadata-db/setup-data.sql ``` Download the Broker Service configuration files: ```bash -curl -O dist/rabbitmq.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-broker-service/rabbitmq.conf -curl -O dist/enabled_plugins -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-broker-service/enabled_plugins -curl -O dist/definitions.json -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-broker-service/definitions.json +curl -O dist/rabbitmq.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-broker-service/rabbitmq.conf +curl -O dist/enabled_plugins -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-broker-service/enabled_plugins +curl -O dist/definitions.json -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-broker-service/definitions.json ``` !!! 
warning "Default admin user credentials" @@ -76,13 +76,13 @@ curl -O dist/definitions.json -sSL https://gitlab.phaidra.org/fair-data-austria- Download the Gateway Service configuration file (or integrate it into your existing NGINX reverse proxy config): ```bash -curl -O dist/dbrepo.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-gateway-service/dbrepo.conf +curl -O dist/dbrepo.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-gateway-service/dbrepo.conf ``` Download the S3 configuration for the Storage Service: ```bash -curl -O dist/s3_config.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.4/dbrepo-storage-service/s3_config.conf +curl -O dist/s3_config.conf -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.4.5/dbrepo-storage-service/s3_config.conf ``` Continue the custom install by customizing the [User Interface](../api/ui). diff --git a/.docs/kubernetes.md b/.docs/kubernetes.md index 608502e320b56ac829d92b781ea19c8128b927c5..04799b4edf3425f5ba46a7b0a1f140e092414ae0 100644 --- a/.docs/kubernetes.md +++ b/.docs/kubernetes.md @@ -7,7 +7,7 @@ author: Martin Weise ## TL;DR To install DBRepo in your existing cluster, download the -sample [`values.yaml`](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/blob/release-1.4.4/helm/dbrepo/values.yaml) +sample [`values.yaml`](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/blob/release-1.4.5/helm/dbrepo/values.yaml) for your deployment and update the variables, especially `hostname`. ```shell @@ -15,7 +15,7 @@ helm upgrade --install dbrepo \ -n dbrepo \ "oci://registry.datalab.tuwien.ac.at/dbrepo/helm/dbrepo" \ --values ./values.yaml \ - --version "1.4.4" \ + --version "1.4.5" \ --create-namespace \ --cleanup-on-fail ``` @@ -36,7 +36,7 @@ brokerservice: The `brokerservice.auth.passwordHash` field is the RabbitMQ SHA512-hash of the `brokerservice.auth.password` field and can be obtained with -the [`generate-rabbitmq-pw.sh`](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/blob/release-1.4.4/helm/dbrepo/hack/generate-rabbitmq-pw.sh) +the [`generate-rabbitmq-pw.sh`](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/blob/release-1.4.5/helm/dbrepo/hack/generate-rabbitmq-pw.sh) script: ```console diff --git a/README.md b/README.md index 1ab1d140ed50d77dbdc7426e10fefe9e53b9eb56..e4f843507c43232f4817dbd562b49594d75d9385 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,7 @@    + <img src="./dbrepo-ui/public/logo.png" width="200" alt="DBRepo — Repository for Data in Databases" /> diff --git a/dbrepo-analyse-service/app.py b/dbrepo-analyse-service/app.py index c006e5777b5a14cc64dde335ddbdad185f162bbb..968e019e3262f2bad5eba69e451c85092fd992c3 100644 --- a/dbrepo-analyse-service/app.py +++ b/dbrepo-analyse-service/app.py @@ -186,9 +186,8 @@ app.config["AUTH_SERVICE_ENDPOINT"] = os.getenv("AUTH_SERVICE_ENDPOINT", "http:/ app.config["AUTH_SERVICE_CLIENT"] = os.getenv("AUTH_SERVICE_CLIENT", "dbrepo-client") app.config["AUTH_SERVICE_CLIENT_SECRET"] = os.getenv("AUTH_SERVICE_CLIENT_SECRET", "MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG") app.config["S3_ACCESS_KEY_ID"] = os.getenv('S3_ACCESS_KEY_ID', 'seaweedfsadmin') +app.config["S3_BUCKET"] = os.getenv('S3_BUCKET', 'dbrepo') app.config["S3_ENDPOINT"] = os.getenv('S3_ENDPOINT', 'http://localhost:9000') 
-app.config["S3_EXPORT_BUCKET"] = os.getenv('S3_EXPORT_BUCKET', 'dbrepo-download') -app.config["S3_IMPORT_BUCKET"] = os.getenv('S3_IMPORT_BUCKET', 'dbrepo-upload') app.config["S3_SECRET_ACCESS_KEY"] = os.getenv('S3_SECRET_ACCESS_KEY', 'seaweedfsadmin') app.json_encoder = LazyJSONEncoder diff --git a/dbrepo-analyse-service/clients/s3_client.py b/dbrepo-analyse-service/clients/s3_client.py index 5e8f3bb4378f7c448cf9722f63a27e91fb3e9f39..e9f58ea40727a36dbb53dea5e759a1d32aac254f 100644 --- a/dbrepo-analyse-service/clients/s3_client.py +++ b/dbrepo-analyse-service/clients/s3_client.py @@ -17,10 +17,9 @@ class S3Client: endpoint_url, aws_access_key_id) self.client = boto3.client(service_name='s3', endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) - self.bucket_exists_or_exit(current_app.config['S3_EXPORT_BUCKET']) - self.bucket_exists_or_exit(current_app.config['S3_IMPORT_BUCKET']) + self.bucket_exists_or_exit(current_app.config['S3_BUCKET']) - def upload_file(self, filename: str, path: str = "/tmp", bucket: str = "dbrepo-upload") -> bool: + def upload_file(self, filename: str, path: str = "/tmp", bucket: str = "dbrepo") -> bool: """ Uploads a file to the blob storage. Follows the official API https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-uploading-files.html. @@ -43,7 +42,7 @@ class S3Client: logging.warning(f"Failed to upload file with key {filename}") raise ConnectionRefusedError(f"Failed to upload file with key {filename}", e) - def download_file(self, filename: str, path: str = "/tmp", bucket: str = "dbrepo-download"): + def download_file(self, filename: str, path: str = "/tmp", bucket: str = "dbrepo"): """ Downloads a file from the blob storage. Follows the official API https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-example-download-file.html diff --git a/dbrepo-analyse-service/determine_dt.py b/dbrepo-analyse-service/determine_dt.py index 7c5401a20c3c37c12405c424161a5fb89e585bcd..3fcfc73e9454fc5f6c98488a6096a8cdb3859628 100644 --- a/dbrepo-analyse-service/determine_dt.py +++ b/dbrepo-analyse-service/determine_dt.py @@ -18,8 +18,8 @@ def determine_datatypes(filename, enum=False, enum_tol=0.0001, separator=None) - # Enum is not SQL standard, hence, it might not be supported by all db-engines. # However, it can be used in Postgres and MySQL. 
s3_client = S3Client() - s3_client.file_exists(current_app.config['S3_IMPORT_BUCKET'], filename) - response = s3_client.get_file(current_app.config['S3_IMPORT_BUCKET'], filename) + s3_client.file_exists(current_app.config['S3_BUCKET'], filename) + response = s3_client.get_file(current_app.config['S3_BUCKET'], filename) stream = response['Body'] if response['ContentLength'] == 0: logging.warning(f'Failed to determine data types: file {filename} has empty body') diff --git a/dbrepo-analyse-service/determine_pk.py b/dbrepo-analyse-service/determine_pk.py index 82ecca465c983346fd825c4e225ed03ec8d3212f..0e3a66ad19d47c66fa6c6fba5081a25e7e150aaf 100644 --- a/dbrepo-analyse-service/determine_pk.py +++ b/dbrepo-analyse-service/determine_pk.py @@ -16,8 +16,8 @@ def determine_pk(filename, separator=","): colindex = list(range(0, len(colnames))) s3_client = S3Client() - s3_client.file_exists('dbrepo-upload', filename) - response = s3_client.get_file('dbrepo-upload', filename) + s3_client.file_exists('dbrepo', filename) + response = s3_client.get_file('dbrepo', filename) stream = response['Body'] if response['ContentLength'] == 0: raise OSError(f'Failed to determine primary key: file {filename} has empty body') diff --git a/dbrepo-analyse-service/test/conftest.py b/dbrepo-analyse-service/test/conftest.py index 1a4775158f91ae80467542ddc8598a8c5dd2dc37..424588bdfd7479ac742401d91858864f47639a9e 100644 --- a/dbrepo-analyse-service/test/conftest.py +++ b/dbrepo-analyse-service/test/conftest.py @@ -41,9 +41,8 @@ def session(request, app_context): app.config["S3_ENDPOINT"] = endpoint client = container.get_client() # create buckets - logging.debug("[fixture] make buckets dbrepo-upload, dbrepo-download") - client.make_bucket("dbrepo-upload") - client.make_bucket("dbrepo-download") + logging.debug("[fixture] make bucket dbrepo") + client.make_bucket("dbrepo") # destructor def stop_minio(): @@ -61,17 +60,15 @@ def cleanup(request, session): :param session: / :return: """ - logging.info("[fixture] truncate buckets") - for bucket in ["dbrepo-upload", "dbrepo-download"]: - objects = [] - for obj in session.get_client().list_objects(bucket): - objects.append(DeleteObject(obj.object_name)) - logging.info(f"request to remove objects {objects}") - errors = session.get_client().remove_objects(bucket, objects) - for error in errors: - raise ConnectionError( - f"Failed to delete object with key {error.object_name} of bucket {bucket}" - ) + bucket = "dbrepo" + logging.info(f"[fixture] truncate bucket: {bucket}") + objects = [] + for obj in session.get_client().list_objects(bucket): + objects.append(DeleteObject(obj.object_name)) + logging.info(f"request to remove objects {objects}") + errors = session.get_client().remove_objects(bucket, objects) + for error in errors: + raise ConnectionError(f"Failed to delete object with key {error.object_name} of bucket: {bucket}") @pytest.fixture(scope="function") diff --git a/dbrepo-analyse-service/test/test_determine_dt.py b/dbrepo-analyse-service/test/test_determine_dt.py index e1f8dff291b2b0391a314235de35c5d1b42ba377..3d7e4f8d3bee3f60d593572d420b3243fea179a2 100644 --- a/dbrepo-analyse-service/test/test_determine_dt.py +++ b/dbrepo-analyse-service/test/test_determine_dt.py @@ -24,7 +24,7 @@ class DetermineDatatypesTest(unittest.TestCase): } # mock - S3Client().upload_file("datetime.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("datetime.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="datetime.csv", separator=",") @@ -47,7 +47,7 
@@ class DetermineDatatypesTest(unittest.TestCase): } # mock - S3Client().upload_file("datetime_tz.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("datetime_tz.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="datetime_tz.csv", separator=",") @@ -70,7 +70,7 @@ class DetermineDatatypesTest(unittest.TestCase): } # mock - S3Client().upload_file("datetime_t.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("datetime_t.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="datetime_t.csv", separator=",") @@ -94,7 +94,7 @@ class DetermineDatatypesTest(unittest.TestCase): } # mock - S3Client().upload_file("datatypes.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("datatypes.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="datatypes.csv", separator=",") @@ -117,7 +117,7 @@ class DetermineDatatypesTest(unittest.TestCase): def test_determine_datatypes_fileEmpty_succeeds(self): # mock - S3Client().upload_file("empty.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("empty.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes("empty.csv") @@ -129,7 +129,7 @@ class DetermineDatatypesTest(unittest.TestCase): def test_determine_datatypes_separatorSemicolon_succeeds(self): # mock - S3Client().upload_file("separator.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("separator.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="separator.csv", separator=";") @@ -140,7 +140,7 @@ class DetermineDatatypesTest(unittest.TestCase): def test_determine_datatypes_separatorGuess_succeeds(self): # mock - S3Client().upload_file("separator.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("separator.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="separator.csv") @@ -151,7 +151,7 @@ class DetermineDatatypesTest(unittest.TestCase): def test_determine_datatypes_separatorGuessLargeDataset_succeeds(self): # mock - S3Client().upload_file("large.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("large.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="large.csv") @@ -171,7 +171,7 @@ class DetermineDatatypesTest(unittest.TestCase): } # mock - S3Client().upload_file("novel.csv", './data/test_dt/', 'dbrepo-upload') + S3Client().upload_file("novel.csv", './data/test_dt/', 'dbrepo') # test response = determine_datatypes(filename="novel.csv", separator=";") diff --git a/dbrepo-analyse-service/test/test_determine_pk.py b/dbrepo-analyse-service/test/test_determine_pk.py index 43bcf4e00f9ff5fa1fdf392279824e63042a50cd..4e960d39c0b0bebd263c0453a94477fcae964319 100644 --- a/dbrepo-analyse-service/test/test_determine_pk.py +++ b/dbrepo-analyse-service/test/test_determine_pk.py @@ -7,7 +7,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_largeFileIdFirst_succeeds(self): # mock - S3Client().upload_file("largefile_idfirst.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("largefile_idfirst.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('largefile_idfirst.csv') @@ -16,7 +16,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_largeFileIdInBetween_succeeds(self): # mock - S3Client().upload_file("largefile_idinbtw.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("largefile_idinbtw.csv", 
'./data/test_pk/', 'dbrepo') # test response = determine_pk('largefile_idinbtw.csv') @@ -25,7 +25,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_largeFileNoPrimaryKey_fails(self): # mock - S3Client().upload_file("largefile_no_pk.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("largefile_no_pk.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('largefile_no_pk.csv') @@ -34,7 +34,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_largeFileNullInUnique_fails(self): # mock - S3Client().upload_file("largefile_nullinunique.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("largefile_nullinunique.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('largefile_nullinunique.csv') @@ -43,7 +43,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_smallFileIdFirst_fails(self): # mock - S3Client().upload_file("smallfile_idfirst.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("smallfile_idfirst.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('smallfile_idfirst.csv') @@ -52,7 +52,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_smallFileIdIntBetween_fails(self): # mock - S3Client().upload_file("smallfile_idinbtw.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("smallfile_idinbtw.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('smallfile_idinbtw.csv') @@ -61,7 +61,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_smallFileNoPrimaryKey_fails(self): # mock - S3Client().upload_file("smallfile_no_pk.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("smallfile_no_pk.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('smallfile_no_pk.csv') @@ -70,7 +70,7 @@ class DeterminePrimaryKeyTest(unittest.TestCase): # @Test def test_determine_pk_smallFileNullInUnique_fails(self): # mock - S3Client().upload_file("smallfile_nullinunique.csv", './data/test_pk/', 'dbrepo-upload') + S3Client().upload_file("smallfile_nullinunique.csv", './data/test_pk/', 'dbrepo') # test response = determine_pk('smallfile_nullinunique.csv') diff --git a/dbrepo-analyse-service/test/test_s3_client.py b/dbrepo-analyse-service/test/test_s3_client.py index 11eb115e6d87120102e6d6fbf6e20111640ed9fd..4ab03078242ae5f68bd01e23887b603c6de239f9 100644 --- a/dbrepo-analyse-service/test/test_s3_client.py +++ b/dbrepo-analyse-service/test/test_s3_client.py @@ -43,17 +43,17 @@ class S3ClientTest(unittest.TestCase): def test_download_file_succeeds(self): # mock - S3Client().upload_file(filename="testdt01.csv", path="./data/", bucket="dbrepo-upload") + S3Client().upload_file(filename="testdt01.csv", path="./data/", bucket="dbrepo") # test - S3Client().download_file(filename="testdt01.csv", bucket="dbrepo-upload") + S3Client().download_file(filename="testdt01.csv", bucket="dbrepo") # @Test def test_download_file_notFound_fails(self): # test try: - S3Client().download_file(filename="testdt01.csv", bucket="dbrepo-upload") + S3Client().download_file(filename="testdt01.csv", bucket="dbrepo") except ClientError: pass except Exception: @@ -78,10 +78,10 @@ class S3ClientTest(unittest.TestCase): def test_get_file_succeeds(self): # mock - S3Client().upload_file(filename="testdt01.csv", path="./data/", bucket="dbrepo-upload") + S3Client().upload_file(filename="testdt01.csv", path="./data/", bucket="dbrepo") # test - response = 
S3Client().get_file(bucket="dbrepo-upload", filename="testdt01.csv") + response = S3Client().get_file(bucket="dbrepo", filename="testdt01.csv") self.assertIsNotNone(response) # @Test @@ -89,7 +89,7 @@ class S3ClientTest(unittest.TestCase): # test try: - S3Client().get_file(bucket="dbrepo-upload", filename="idonotexist.csv") + S3Client().get_file(bucket="dbrepo", filename="idonotexist.csv") except ClientError: pass except Exception: @@ -101,7 +101,7 @@ class S3ClientTest(unittest.TestCase): def test_bucket_exists_succeeds(self): # test - response = S3Client().bucket_exists_or_exit("dbrepo-upload") + response = S3Client().bucket_exists_or_exit("dbrepo") self.assertIsNotNone(response) # @Test diff --git a/dbrepo-auth-service/Dockerfile b/dbrepo-auth-service/Dockerfile index e988d634127d9c0e89a8f7a4e6b3328761c0b544..8caf422498607d1106adad82aab76d53190713d0 100644 --- a/dbrepo-auth-service/Dockerfile +++ b/dbrepo-auth-service/Dockerfile @@ -1,5 +1,5 @@ ###### FIRST STAGE ###### -FROM keycloak/keycloak:21.0 as config +FROM keycloak/keycloak:24.0 as config MAINTAINER Martin Weise <martin.weise@tuwien.ac.at> # Enable health and metrics support diff --git a/dbrepo-auth-service/dbrepo-realm.json b/dbrepo-auth-service/dbrepo-realm.json index 87eb81e777ad6fef5b893cfabea4366d035ddb7a..04fb056a966ab7e3799b59db42e6c3ae6162aa67 100644 --- a/dbrepo-auth-service/dbrepo-realm.json +++ b/dbrepo-auth-service/dbrepo-realm.json @@ -37,6 +37,7 @@ "editUsernameAllowed" : false, "bruteForceProtected" : false, "permanentLockout" : false, + "maxTemporaryLockouts" : 0, "maxFailureWaitSeconds" : 900, "minimumQuickLoginWaitSeconds" : 60, "waitIncrementSeconds" : 60, @@ -1104,34 +1105,34 @@ "id" : "f2ce17fe-7b15-47a4-bbf8-86f415298fa9", "name" : "data-stewards", "path" : "/data-stewards", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-data-steward-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } }, { "id" : "124d9888-0b6e-46aa-8225-077dcedaf16e", "name" : "developers", "path" : "/developers", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-developer-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } }, { "id" : "f467c38e-9041-4faa-ae0b-39cec65ff4db", "name" : "researchers", "path" : "/researchers", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-researcher-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } }, { "id" : "2b9f94b4-d434-4a98-8eab-25678cfee983", "name" : "system", "path" : "/system", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-system-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } } ], "defaultRole" : { "id" : "abd2d9ee-ebc4-4d0a-839e-6b588a6d442a", @@ -1151,6 +1152,7 @@ "otpPolicyPeriod" : 30, "otpPolicyCodeReusable" : false, "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppGoogleName", "totpAppMicrosoftAuthenticatorName" ], + "localizationTexts" : { }, "webAuthnPolicyRpEntityName" : "keycloak", "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], "webAuthnPolicyRpId" : "", @@ -1161,6 +1163,7 @@ "webAuthnPolicyCreateTimeout" : 0, "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyExtraOrigins" : [ ], "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], "webAuthnPolicyPasswordlessRpId" : "", @@ -1171,6 +1174,7 @@ "webAuthnPolicyPasswordlessCreateTimeout" : 0, 
"webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessExtraOrigins" : [ ], "scopeMappings" : [ { "clientScope" : "rabbitmq.tag:administrator", "roles" : [ "escalated-broker-handling" ] @@ -2082,6 +2086,7 @@ "browserSecurityHeaders" : { "contentSecurityPolicyReportOnly" : "", "xContentTypeOptions" : "nosniff", + "referrerPolicy" : "no-referrer", "xRobotsTag" : "none", "xFrameOptions" : "SAMEORIGIN", "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", @@ -2138,7 +2143,7 @@ "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-full-name-mapper", "saml-user-property-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-attribute-mapper", "saml-user-property-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] } }, { "id" : "1849e52a-b8c9-44a8-af3d-ee19376a1ed1", @@ -2164,7 +2169,7 @@ "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ] } } ], "org.keycloak.storage.UserStorageProvider" : [ { @@ -2180,8 +2185,8 @@ "config" : { "ldap.attribute" : [ "createTimestamp" ], "is.mandatory.in.ldap" : [ "false" ], - "read.only" : [ "true" ], "always.read.value.from.ldap" : [ "true" ], + "read.only" : [ "true" ], "user.model.attribute" : [ "createTimestamp" ] } }, { @@ -2192,8 +2197,8 @@ "config" : { "ldap.attribute" : [ "sn" ], "is.mandatory.in.ldap" : [ "true" ], - "read.only" : [ "false" ], "always.read.value.from.ldap" : [ "true" ], + "read.only" : [ "false" ], "user.model.attribute" : [ "lastName" ] } }, { @@ -2204,8 +2209,8 @@ "config" : { "ldap.attribute" : [ "cn" ], "is.mandatory.in.ldap" : [ "true" ], - "always.read.value.from.ldap" : [ "true" ], "read.only" : [ "false" ], + "always.read.value.from.ldap" : [ "true" ], "user.model.attribute" : [ "firstName" ] } }, { @@ -2216,8 +2221,8 @@ "config" : { "ldap.attribute" : [ "mail" ], "is.mandatory.in.ldap" : [ "false" ], - "always.read.value.from.ldap" : [ "false" ], "read.only" : [ "false" ], + "always.read.value.from.ldap" : [ "false" ], "user.model.attribute" : [ "email" ] } }, { @@ -2228,8 +2233,8 @@ "config" : { "membership.attribute.type" : [ "DN" ], "group.name.ldap.attribute" : [ "cn" ], - "membership.user.ldap.attribute" : [ "uid" ], "preserve.group.inheritance" : [ "false" ], + "membership.user.ldap.attribute" : [ "uid" ], "groups.dn" : [ "ou=users,dc=dbrepo,dc=at" ], "mode" : [ "LDAP_ONLY" ], "user.roles.retrieve.strategy" : [ "LOAD_GROUPS_BY_MEMBER_ATTRIBUTE" ], @@ -2237,8 +2242,8 @@ "ignore.missing.groups" : [ "false" ], "group.object.classes" : [ "groupOfNames" ], "memberof.ldap.attribute" 
: [ "memberOf" ], - "drop.non.existing.groups.during.sync" : [ "false" ], - "groups.path" : [ "/" ] + "groups.path" : [ "/" ], + "drop.non.existing.groups.during.sync" : [ "false" ] } }, { "id" : "b6ff3285-35af-4e86-8bb4-d94b8e0d70bb", @@ -2248,8 +2253,8 @@ "config" : { "ldap.attribute" : [ "modifyTimestamp" ], "is.mandatory.in.ldap" : [ "false" ], - "read.only" : [ "true" ], "always.read.value.from.ldap" : [ "true" ], + "read.only" : [ "true" ], "user.model.attribute" : [ "modifyTimestamp" ] } }, { @@ -2259,37 +2264,37 @@ "subComponents" : { }, "config" : { "ldap.attribute" : [ "uid" ], - "attribute.force.default" : [ "false" ], "is.mandatory.in.ldap" : [ "true" ], + "attribute.force.default" : [ "false" ], "is.binary.attribute" : [ "false" ], - "read.only" : [ "false" ], "always.read.value.from.ldap" : [ "false" ], + "read.only" : [ "false" ], "user.model.attribute" : [ "username" ] } } ] }, "config" : { - "fullSyncPeriod" : [ "-1" ], "pagination" : [ "false" ], + "fullSyncPeriod" : [ "-1" ], "startTls" : [ "false" ], - "connectionPooling" : [ "true" ], "usersDn" : [ "ou=users,dc=dbrepo,dc=at" ], + "connectionPooling" : [ "true" ], "cachePolicy" : [ "DEFAULT" ], "useKerberosForPasswordAuthentication" : [ "false" ], "importEnabled" : [ "true" ], "enabled" : [ "true" ], - "changedSyncPeriod" : [ "-1" ], - "bindDn" : [ "cn=admin,dc=dbrepo,dc=at" ], "usernameLDAPAttribute" : [ "uid" ], "bindCredential" : [ "admin" ], + "bindDn" : [ "cn=admin,dc=dbrepo,dc=at" ], + "changedSyncPeriod" : [ "-1" ], "lastSync" : [ "1719252666" ], "vendor" : [ "other" ], "uuidLDAPAttribute" : [ "entryUUID" ], - "allowKerberosAuthentication" : [ "false" ], "connectionUrl" : [ "ldap://identity-service:1389" ], + "allowKerberosAuthentication" : [ "false" ], "syncRegistrations" : [ "true" ], "authType" : [ "simple" ], - "useTruststoreSpi" : [ "ldapsOnly" ], + "useTruststoreSpi" : [ "always" ], "usePasswordModifyExtendedOp" : [ "false" ], "trustEmail" : [ "false" ], "userObjectClasses" : [ "inetOrgPerson, organizationalPerson, person" ], @@ -2298,6 +2303,14 @@ "validatePasswordPolicy" : [ "false" ] } } ], + "org.keycloak.userprofile.UserProfileProvider" : [ { + "id" : "a407a1d6-a7f6-4a72-ba3a-149de03d5a43", + "providerId" : "declarative-user-profile", + "subComponents" : { }, + "config" : { + "kc.user.profile.config" : [ "{\"attributes\":[{\"name\":\"username\",\"displayName\":\"${username}\",\"validations\":{\"length\":{\"min\":3,\"max\":255},\"username-prohibited-characters\":{},\"up-username-not-idn-homograph\":{}},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"email\",\"displayName\":\"${email}\",\"validations\":{\"email\":{},\"length\":{\"max\":255}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"firstName\",\"displayName\":\"${firstName}\",\"validations\":{\"length\":{\"max\":255},\"person-name-prohibited-characters\":{}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"lastName\",\"displayName\":\"${lastName}\",\"validations\":{\"length\":{\"max\":255},\"person-name-prohibited-characters\":{}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false}],\"groups\":[{\"name\":\"user-metadata\",\"displayHeader\":\"User metadata\",\"displayDescription\":\"Attributes, which 
refer to user metadata\"}],\"unmanagedAttributePolicy\":\"ENABLED\"}" ] + } + } ], "org.keycloak.keys.KeyProvider" : [ { "id" : "2f53ccf3-37b0-4d34-83e7-ed497499ee51", "name" : "rsa-enc-generated", @@ -2326,8 +2339,8 @@ "providerId" : "hmac-generated", "subComponents" : { }, "config" : { - "kid" : [ "c8500166-5cc4-4085-ad0f-853c3b0b0233" ], - "secret" : [ "TI3xg__G2Qy8C47DracpYir2X4ItQZSrhgr5KSlwRNISDbBqZ-ky3OcAyokSXMcpweSOaCPvbivpvzJNklUBvw" ], + "kid" : [ "7f9f9054-5697-4f60-bdc8-67e3bd0f4db6" ], + "secret" : [ "1SCIY20z3AbAHCL28LuJfBU-7zfsZv5dacgliUeGdRW_WK3vH9fJUpPu1f7iDrdlhF7YQmHxLXsWjxhQId4ShI7QBdgKCArHWqi0GeH37oNXfZFg_uv-K_3JSfxfGBRu5jpRQhhSBxESZWsFVkskhxWUvNe6b5l9dFbMIif72rI" ], "priority" : [ "100" ], "algorithm" : [ "HS256" ] } @@ -2342,6 +2355,17 @@ "certificate" : [ "MIICmzCCAYMCBgGG3GWyBTANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDDAZkYnJlcG8wHhcNMjMwMzEzMTkxMzE3WhcNMzMwMzEzMTkxNDU3WjARMQ8wDQYDVQQDDAZkYnJlcG8wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqqcdDYFZZb28M0tEJzEP77FmD/Xqioyj9zWX6VwUSOMAgmMmn8eqs9hT9T0a+q4YTo9tUW1PNbUpwprA5b4Uk04DcIajxDVMUR/PjcHytmkqwVskq9AZW/Vngdoo+8tSbuIybwe/3Vwt266hbHpDcM97a+DXcYooRl7tQWCEX7RP27wQrMD9epDQ6IgKayZg9vC9/03dsIqwH9jXQRiZlFvwiEKhX2aY7lPGBaCK414JO00K/Z49iov9TRa/IYVbSt5qwgrx6DcqsBSPwOnI6A85UGfeUEZ/7coVJiL7RvBlsllapsL9eWTbQajVh94k9Ei3sibEPbtH+U2OAM78zAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAASnN1Cuif1sdfEK2kWAURSXGJCohCROLWdKFjaeHPRaEfpbFJsgxW0Yj3nwX5O3bUlOWoTyENwnXSsXMQsqnNi+At32CKaKO8+AkhAbgQL9F0B+KeJwmYv3cUj5N/LYkJjBvZBzUZ4Ugu5dcxH0k7AktLAIwimkyEnxTNolOA3UyrGGpREr8MCKWVr10RFuOpF/0CsJNNwbHXzalO9D756EUcRWZ9VSg6QVNso0YYRKTnILWDn9hcTRnqGy3SHo3anFTqQZ+BB57YbgFWy6udC0LYRB3zdp6zNti87eu/VEymiDY/mmo1AB8Tm0b6vxFz4AKcL3ax5qS6YnZ9efSzk=" ], "priority" : [ "100" ] } + }, { + "id" : "addbae10-c6ae-4735-851f-7a5ea035ce25", + "name" : "hmac-generated-hs512", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "352d0ea1-8218-42b5-ab78-e2ca56cf6a95" ], + "secret" : [ "_kr6EZOZ8IKqPWgJltHAAsQ34wCIGPs8oOQLYWwJrSIH7Qie3CEVKZnICyBP1goR-QgUtg25tR8Qu5MkvYkb8assJ8Iok5x_8iYCR4Txkf_mS-emrlAtQajlIjmOfNBtx704dTnZlP9rWzqpW6mrpeiOaiCw1K0XCpY5C_ZjXKw" ], + "priority" : [ "100" ], + "algorithm" : [ "HS512" ] + } } ] }, "internationalizationEnabled" : false, @@ -2368,35 +2392,6 @@ "flowAlias" : "Verify Existing Account by Re-authentication", "userSetupAllowed" : false } ] - }, { - "id" : "f94a4b6d-deaa-4505-be0f-544828436fa1", - "alias" : "Authentication Options", - "description" : "Authentication options.", - "providerId" : "basic-flow", - "topLevel" : false, - "builtIn" : true, - "authenticationExecutions" : [ { - "authenticator" : "basic-auth", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 10, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticator" : "basic-auth-otp", - "authenticatorFlow" : false, - "requirement" : "DISABLED", - "priority" : 20, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticator" : "auth-spnego", - "authenticatorFlow" : false, - "requirement" : "DISABLED", - "priority" : 30, - "autheticatorFlow" : false, - "userSetupAllowed" : false - } ] }, { "id" : "542ca1d7-9627-4102-b843-98837ce433fb", "alias" : "Browser - Conditional OTP", @@ -2713,28 +2708,6 @@ "flowAlias" : "Browser - Conditional OTP", "userSetupAllowed" : false } ] - }, { - "id" : "f8ba3c2e-3952-4434-98e8-b892eea90e9e", - "alias" : "http challenge", - "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", - "providerId" : "basic-flow", - 
"topLevel" : true, - "builtIn" : true, - "authenticationExecutions" : [ { - "authenticator" : "no-cookie-redirect", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 10, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticatorFlow" : true, - "requirement" : "REQUIRED", - "priority" : 20, - "autheticatorFlow" : true, - "flowAlias" : "Authentication Options", - "userSetupAllowed" : false - } ] }, { "id" : "04c2fe01-5076-4aa4-9596-4efb4004195f", "alias" : "registration", @@ -2765,13 +2738,6 @@ "priority" : 20, "autheticatorFlow" : false, "userSetupAllowed" : false - }, { - "authenticator" : "registration-profile-action", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 40, - "autheticatorFlow" : false, - "userSetupAllowed" : false }, { "authenticator" : "registration-password-action", "authenticatorFlow" : false, @@ -2916,6 +2882,14 @@ "defaultAction" : false, "priority" : 80, "config" : { } + }, { + "alias" : "delete_credential", + "name" : "Delete Credential", + "providerId" : "delete_credential", + "enabled" : true, + "defaultAction" : false, + "priority" : 100, + "config" : { } }, { "alias" : "update_user_locale", "name" : "Update User Locale", @@ -2931,6 +2905,7 @@ "resetCredentialsFlow" : "reset credentials", "clientAuthenticationFlow" : "clients", "dockerAuthenticationFlow" : "docker auth", + "firstBrokerLoginFlow" : "first broker login", "attributes" : { "cibaBackchannelTokenDeliveryMode" : "poll", "cibaAuthRequestedUserHint" : "login_hint", @@ -2950,7 +2925,7 @@ "clientSessionMaxLifespan" : "0", "shortVerificationUri" : "" }, - "keycloakVersion" : "21.0.2", + "keycloakVersion" : "24.0.5", "userManagedAccessAllowed" : false, "clientProfiles" : { "profiles" : [ ] diff --git a/dbrepo-data-db/enable_history_insert.cnf b/dbrepo-data-db/enable_history_insert.cnf deleted file mode 100644 index 7bced156c829b3dbd4d6221dfd9654045c6f501e..0000000000000000000000000000000000000000 --- a/dbrepo-data-db/enable_history_insert.cnf +++ /dev/null @@ -1 +0,0 @@ -secure_timestamp="SUPER" \ No newline at end of file diff --git a/dbrepo-data-db/sidecar/app.py b/dbrepo-data-db/sidecar/app.py index 40cb9fa4aab88e2478e9b7671043169c383e165c..955e3d9c3d6cb3b9e8140bc0773607db9118a0e0 100644 --- a/dbrepo-data-db/sidecar/app.py +++ b/dbrepo-data-db/sidecar/app.py @@ -121,10 +121,9 @@ app.config["AUTH_SERVICE_CLIENT"] = os.getenv("AUTH_SERVICE_CLIENT", "dbrepo-cli app.config["AUTH_SERVICE_CLIENT_SECRET"] = os.getenv("AUTH_SERVICE_CLIENT_SECRET", "MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG") app.config["S3_ACCESS_KEY_ID"] = os.getenv('S3_ACCESS_KEY_ID', 'seaweedfsadmin') app.config["S3_ENDPOINT"] = os.getenv('S3_ENDPOINT', 'http://localhost:9000') -app.config["S3_EXPORT_BUCKET"] = os.getenv('S3_EXPORT_BUCKET', 'dbrepo-download') app.config["S3_FILE_PATH"] = os.getenv('S3_FILE_PATH', '/tmp') app.config["S3_SECRET_ACCESS_KEY"] = os.getenv('S3_SECRET_ACCESS_KEY', 'seaweedfsadmin') -app.config["S3_IMPORT_BUCKET"] = os.getenv('S3_IMPORT_BUCKET', 'dbrepo-upload') +app.config["S3_BUCKET"] = os.getenv('S3_BUCKET', 'dbrepo') app.json_encoder = LazyJSONEncoder @@ -180,7 +179,7 @@ def import_csv(filename): auth.current_user() logging.debug('endpoint import csv, filename=%s, body=%s', filename, request) s3_client = S3Client() - response = s3_client.download_file(filename, app.config["S3_FILE_PATH"], app.config['S3_IMPORT_BUCKET']) + response = s3_client.download_file(filename, app.config["S3_FILE_PATH"], app.config['S3_BUCKET']) if response is False: return 
Response(), 400 return Response(json.dumps(response)), 202 @@ -193,7 +192,7 @@ def import_csv(filename): def import_csv(filename): logging.debug('endpoint export csv, filename=%s, body=%s', filename, request) s3_client = S3Client() - response = s3_client.upload_file(filename, app.config["S3_FILE_PATH"], app.config['S3_EXPORT_BUCKET']) + response = s3_client.upload_file(filename, app.config["S3_FILE_PATH"], app.config['S3_BUCKET']) if response is False: return Response(), 400 return Response(), 202 diff --git a/dbrepo-data-db/sidecar/clients/s3_client.py b/dbrepo-data-db/sidecar/clients/s3_client.py index 547a1c3a30d5dce07000a49852aef04673163de1..d34760f6230321a4219df43124d404c43c98e043 100644 --- a/dbrepo-data-db/sidecar/clients/s3_client.py +++ b/dbrepo-data-db/sidecar/clients/s3_client.py @@ -17,8 +17,7 @@ class S3Client: f"retrieve file from S3, endpoint_url={endpoint_url}, aws_access_key_id={aws_access_key_id}, aws_secret_access_key=(hidden)") self.client = boto3.client(service_name='s3', endpoint_url=endpoint_url, aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key) - self.bucket_exists_or_exit(current_app.config['S3_IMPORT_BUCKET']) - self.bucket_exists_or_exit(current_app.config['S3_EXPORT_BUCKET']) + self.bucket_exists_or_exit(current_app.config['S3_BUCKET']) def upload_file(self, filename, path, bucket) -> bool: """ diff --git a/dbrepo-data-service/rest-service/src/main/java/at/tuwien/handlers/ApiExceptionHandler.java b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/handlers/ApiExceptionHandler.java new file mode 100644 index 0000000000000000000000000000000000000000..cbbb4c76b0c12e7846151746093728212a4d3b8d --- /dev/null +++ b/dbrepo-data-service/rest-service/src/main/java/at/tuwien/handlers/ApiExceptionHandler.java @@ -0,0 +1,487 @@ +package at.tuwien.handlers; + +import at.tuwien.api.error.ApiErrorDto; +import at.tuwien.exception.*; +import io.swagger.v3.oas.annotations.Hidden; +import lombok.extern.log4j.Log4j2; +import org.springframework.http.HttpHeaders; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.AccessDeniedException; +import org.springframework.web.bind.annotation.ControllerAdvice; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; + +@Log4j2 +@ControllerAdvice +public class ApiExceptionHandler extends ResponseEntityExceptionHandler { + + @Hidden + @ResponseStatus(code = HttpStatus.UNAUTHORIZED) + @ExceptionHandler(AccessDeniedException.class) + public ResponseEntity<ApiErrorDto> handle(AccessDeniedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(AccessNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(AccessNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.PRECONDITION_REQUIRED) + @ExceptionHandler(AccountNotSetupException.class) + public ResponseEntity<ApiErrorDto> handle(AccountNotSetupException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_GATEWAY) + @ExceptionHandler(AuthServiceConnectionException.class) + public ResponseEntity<ApiErrorDto> 
handle(AuthServiceConnectionException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(AuthServiceException.class) + public ResponseEntity<ApiErrorDto> handle(AuthServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_GATEWAY) + @ExceptionHandler(BrokerServiceConnectionException.class) + public ResponseEntity<ApiErrorDto> handle(BrokerServiceConnectionException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(BrokerServiceException.class) + public ResponseEntity<ApiErrorDto> handle(BrokerServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(ConceptNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(ConceptNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(ContainerAlreadyExistsException.class) + public ResponseEntity<ApiErrorDto> handle(ContainerAlreadyExistsException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(ContainerNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(ContainerNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.FORBIDDEN) + @ExceptionHandler(CredentialsInvalidException.class) + public ResponseEntity<ApiErrorDto> handle(CredentialsInvalidException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(DatabaseMalformedException.class) + public ResponseEntity<ApiErrorDto> handle(DatabaseMalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(DatabaseNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(DatabaseNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(DatabaseUnavailableException.class) + public ResponseEntity<ApiErrorDto> handle(DatabaseUnavailableException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(DoiNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(DoiNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(EmailExistsException.class) + public ResponseEntity<ApiErrorDto> handle(EmailExistsException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(ExchangeNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(ExchangeNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + 
@ExceptionHandler(ExternalServiceException.class) + public ResponseEntity<ApiErrorDto> handle(ExternalServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(FilterBadRequestException.class) + public ResponseEntity<ApiErrorDto> handle(FilterBadRequestException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_ACCEPTABLE) + @ExceptionHandler(FormatNotAvailableException.class) + public ResponseEntity<ApiErrorDto> handle(FormatNotAvailableException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(IdentifierNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(IdentifierNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(IdentifierNotSupportedException.class) + public ResponseEntity<ApiErrorDto> handle(IdentifierNotSupportedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(ImageAlreadyExistsException.class) + public ResponseEntity<ApiErrorDto> handle(ImageAlreadyExistsException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(ImageInvalidException.class) + public ResponseEntity<ApiErrorDto> handle(ImageInvalidException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(ImageNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(ImageNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(LicenseNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(LicenseNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(MalformedException.class) + public ResponseEntity<ApiErrorDto> handle(MalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(MessageNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(MessageNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_GATEWAY) + @ExceptionHandler(MetadataServiceConnectionException.class) + public ResponseEntity<ApiErrorDto> handle(MetadataServiceConnectionException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(MetadataServiceException.class) + public ResponseEntity<ApiErrorDto> handle(MetadataServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.FORBIDDEN) + @ExceptionHandler(NotAllowedException.class) + public ResponseEntity<ApiErrorDto> handle(NotAllowedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + 
@ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(OntologyNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(OntologyNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(OrcidNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(OrcidNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(PaginationException.class) + public ResponseEntity<ApiErrorDto> handle(PaginationException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(QueryMalformedException.class) + public ResponseEntity<ApiErrorDto> handle(QueryMalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(QueryNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(QueryNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_IMPLEMENTED) + @ExceptionHandler(QueryNotSupportedException.class) + public ResponseEntity<ApiErrorDto> handle(QueryNotSupportedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(QueueNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(QueueNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(QueryStoreCreateException.class) + public ResponseEntity<ApiErrorDto> handle(QueryStoreCreateException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(QueryStoreGCException.class) + public ResponseEntity<ApiErrorDto> handle(QueryStoreGCException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(QueryStoreInsertException.class) + public ResponseEntity<ApiErrorDto> handle(QueryStoreInsertException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(QueryStorePersistException.class) + public ResponseEntity<ApiErrorDto> handle(QueryStorePersistException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(RemoteUnavailableException.class) + public ResponseEntity<ApiErrorDto> handle(RemoteUnavailableException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(RorNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(RorNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_GATEWAY) + @ExceptionHandler(SearchServiceConnectionException.class) + public ResponseEntity<ApiErrorDto> handle(SearchServiceConnectionException e) { + return generic_handle(e.getClass(), 
e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(SearchServiceException.class) + public ResponseEntity<ApiErrorDto> handle(SearchServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(SemanticEntityNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(SemanticEntityNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_GATEWAY) + @ExceptionHandler(DataServiceConnectionException.class) + public ResponseEntity<ApiErrorDto> handle(DataServiceConnectionException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(DataServiceException.class) + public ResponseEntity<ApiErrorDto> handle(DataServiceException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(SidecarExportException.class) + public ResponseEntity<ApiErrorDto> handle(SidecarExportException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(SidecarImportException.class) + public ResponseEntity<ApiErrorDto> handle(SidecarImportException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(SortException.class) + public ResponseEntity<ApiErrorDto> handle(SortException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(StorageNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(StorageNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.SERVICE_UNAVAILABLE) + @ExceptionHandler(StorageUnavailableException.class) + public ResponseEntity<ApiErrorDto> handle(StorageUnavailableException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(TableExistsException.class) + public ResponseEntity<ApiErrorDto> handle(TableExistsException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(TableMalformedException.class) + public ResponseEntity<ApiErrorDto> handle(TableMalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(TableSchemaException.class) + public ResponseEntity<ApiErrorDto> handle(TableSchemaException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(TableNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(TableNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(UnitNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(UnitNotFoundException e) { + return 
generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.EXPECTATION_FAILED) + @ExceptionHandler(UriMalformedException.class) + public ResponseEntity<ApiErrorDto> handle(UriMalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(UserExistsException.class) + public ResponseEntity<ApiErrorDto> handle(UserExistsException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(UserNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(UserNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.BAD_REQUEST) + @ExceptionHandler(ViewMalformedException.class) + public ResponseEntity<ApiErrorDto> handle(ViewMalformedException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.NOT_FOUND) + @ExceptionHandler(ViewNotFoundException.class) + public ResponseEntity<ApiErrorDto> handle(ViewNotFoundException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + @Hidden + @ResponseStatus(code = HttpStatus.CONFLICT) + @ExceptionHandler(ViewSchemaException.class) + public ResponseEntity<ApiErrorDto> handle(ViewSchemaException e) { + return generic_handle(e.getClass(), e.getLocalizedMessage()); + } + + private ResponseEntity<ApiErrorDto> generic_handle(Class<?> exceptionClass, String message) { + final HttpHeaders headers = new HttpHeaders(); + headers.set("Content-Type", "application/problem+json"); + final ResponseStatus annotation = exceptionClass.getAnnotation(ResponseStatus.class); + final ApiErrorDto response = ApiErrorDto.builder() + .status(annotation.code()) + .message(message) + .code(annotation.reason()) + .build(); + return new ResponseEntity<>(response, headers, response.getStatus()); + } + +} diff --git a/dbrepo-data-service/rest-service/src/main/resources/application-local.yml b/dbrepo-data-service/rest-service/src/main/resources/application-local.yml index fc5445ce78e9a1f29ae6fd0f3d2d918dcb30a277..43cea9b9a538c7331562e7a068444d72826235a4 100644 --- a/dbrepo-data-service/rest-service/src/main/resources/application-local.yml +++ b/dbrepo-data-service/rest-service/src/main/resources/application-local.yml @@ -55,8 +55,7 @@ dbrepo: s3: accessKeyId: seaweedfsadmin secretAccessKey: seaweedfsadmin - importBucket: dbrepo-upload - exportBucket: dbrepo-download + bucket: dbrepo system: username: admin password: admin diff --git a/dbrepo-data-service/rest-service/src/main/resources/application.yml b/dbrepo-data-service/rest-service/src/main/resources/application.yml index 2043395f307070c0e95b9d442d5baa9b7bb58951..c07689e9d47e70b39d00c1511e7c0dd996837bc7 100644 --- a/dbrepo-data-service/rest-service/src/main/resources/application.yml +++ b/dbrepo-data-service/rest-service/src/main/resources/application.yml @@ -56,8 +56,7 @@ dbrepo: s3: accessKeyId: "${S3_ACCESS_KEY_ID:seaweedfsadmin}" secretAccessKey: "${S3_SECRET_ACCESS_KEY:seaweedfsadmin}" - importBucket: "${S3_IMPORT_BUCKET:dbrepo-upload}" - exportBucket: "${S3_EXPORT_BUCKET:dbrepo-download}" + bucket: "${S3_BUCKET:dbrepo}" filePath: "${S3_FILE_PATH:/tmp}" system: username: "${SYSTEM_USERNAME:admin}" diff --git a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java 
b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java index 7b3cc4703748b070bdab83d2e46b9bbd804dc266..d3432ad78b3ec1587619ff4cdf7ca2a8427f887b 100644 --- a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java +++ b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java @@ -60,14 +60,14 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { public void beforeEach() throws SQLException { genesis(); /* s3 */ - if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3ImportBucket()))) { + if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) { s3Client.createBucket(CreateBucketRequest.builder() - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build()); } - if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3ExportBucket()))) { + if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) { s3Client.createBucket(CreateBucketRequest.builder() - .bucket(s3Config.getS3ExportBucket()) + .bucket(s3Config.getS3Bucket()) .build()); } } @@ -78,11 +78,11 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* mock */ s3Client.putObject(PutObjectRequest.builder() .key("s3key") - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), RequestBody.fromFile(new File("src/test/resources/csv/weather_aus.csv"))); /* test */ - final InputStream response = storageService.getObject(s3Config.getS3ImportBucket(), "s3key"); + final InputStream response = storageService.getObject(s3Config.getS3Bucket(), "s3key"); assertNotNull(response); } @@ -91,7 +91,7 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* test */ assertThrows(StorageNotFoundException.class, () -> { - storageService.getObject(s3Config.getS3ImportBucket(), "i_do_not_exist"); + storageService.getObject(s3Config.getS3Bucket(), "i_do_not_exist"); }); } @@ -110,11 +110,11 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* mock */ s3Client.putObject(PutObjectRequest.builder() .key("s3key") - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), RequestBody.fromFile(new File("src/test/resources/csv/weather_aus.csv"))); /* test */ - final byte[] response = storageService.getBytes(s3Config.getS3ImportBucket(), "s3key"); + final byte[] response = storageService.getBytes(s3Config.getS3Bucket(), "s3key"); assertNotNull(response); } @@ -124,7 +124,7 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* mock */ s3Client.putObject(PutObjectRequest.builder() .key("s3key") - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), RequestBody.fromFile(new File("src/test/resources/csv/weather_aus.csv"))); /* test */ @@ -137,7 +137,7 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* test */ assertThrows(StorageNotFoundException.class, () -> { - storageService.getBytes(s3Config.getS3ImportBucket(), "i_do_not_exist"); + storageService.getBytes(s3Config.getS3Bucket(), "i_do_not_exist"); }); } @@ -147,11 +147,11 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* mock */ s3Client.putObject(PutObjectRequest.builder() .key("s3key") - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), 
RequestBody.fromFile(new File("src/test/resources/csv/weather_aus.csv"))); /* test */ - final ExportResourceDto response = storageService.getResource(s3Config.getS3ImportBucket(), "s3key"); + final ExportResourceDto response = storageService.getResource(s3Config.getS3Bucket(), "s3key"); assertEquals("s3key", response.getFilename()); assertNotNull(response.getResource()); } @@ -161,7 +161,7 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* test */ assertThrows(StorageNotFoundException.class, () -> { - storageService.getBytes(s3Config.getS3ImportBucket(), "i_do_not_exist"); + storageService.getBytes(s3Config.getS3Bucket(), "i_do_not_exist"); }); } diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/config/S3Config.java b/dbrepo-data-service/services/src/main/java/at/tuwien/config/S3Config.java index 8adaf38d19ba81ae61a54a57988dd1c08910a09b..10630c717fe153f230703d045b6a2bda63706b6a 100644 --- a/dbrepo-data-service/services/src/main/java/at/tuwien/config/S3Config.java +++ b/dbrepo-data-service/services/src/main/java/at/tuwien/config/S3Config.java @@ -27,11 +27,8 @@ public class S3Config { @Value("${dbrepo.s3.secretAccessKey}") private String s3SecretAccessKey; - @Value("${dbrepo.s3.importBucket}") - private String s3ImportBucket; - - @Value("${dbrepo.s3.exportBucket}") - private String s3ExportBucket; + @Value("${dbrepo.s3.bucket}") + private String s3Bucket; @Value("${dbrepo.s3.filePath}") private String s3FilePath; diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java b/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java index c8b49fd4cb5b91637b14d2325806254bd1bda527..9a748d96dcfadcaa5c3b669cfb290b6803100db0 100644 --- a/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java +++ b/dbrepo-data-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java @@ -14,9 +14,6 @@ import software.amazon.awssdk.services.s3.model.*; import java.io.IOException; import java.io.InputStream; -import java.time.ZonedDateTime; -import java.util.LinkedList; -import java.util.List; @Log4j2 @Service @@ -50,7 +47,7 @@ public class StorageServiceS3Impl implements StorageService { @Override public byte[] getBytes(String key) throws StorageNotFoundException, StorageUnavailableException { - return getBytes(s3Config.getS3ImportBucket(), key); + return getBytes(s3Config.getS3Bucket(), key); } @Override @@ -66,7 +63,7 @@ public class StorageServiceS3Impl implements StorageService { @Override public ExportResourceDto getResource(String key) throws StorageNotFoundException, StorageUnavailableException { - return getResource(s3Config.getS3ExportBucket(), key); + return getResource(s3Config.getS3Bucket(), key); } @Override diff --git a/dbrepo-metadata-service/rest-service/src/main/resources/application-local.yml b/dbrepo-metadata-service/rest-service/src/main/resources/application-local.yml index 63675da5656eb62b90b79ad2c9f2c99895e7e7c3..e2c9df6a59d8454661ca42b72617f2b67d6c1492 100644 --- a/dbrepo-metadata-service/rest-service/src/main/resources/application-local.yml +++ b/dbrepo-metadata-service/rest-service/src/main/resources/application-local.yml @@ -58,8 +58,7 @@ dbrepo: s3: accessKeyId: seaweedfsadmin secretAccessKey: seaweedfsadmin - importBucket: dbrepo-upload - exportBucket: dbrepo-download + bucket: dbrepo system: username: admin password: admin @@ -70,6 +69,8 @@ dbrepo: brokerService: http://localhost/admin/broker authService: 
http://localhost/api/auth storageService: http://localhost/api/storage + rorService: https://api.ror.org + crossRefService: http://data.crossref.org pid: base: http://localhost/pid/ jwt: diff --git a/dbrepo-metadata-service/rest-service/src/main/resources/application.yml b/dbrepo-metadata-service/rest-service/src/main/resources/application.yml index 9398db2b546026d422f85037c2e2b7eb8d4773b8..6cd249d7bfe5fb1691a68cb110fc1234cd02048e 100644 --- a/dbrepo-metadata-service/rest-service/src/main/resources/application.yml +++ b/dbrepo-metadata-service/rest-service/src/main/resources/application.yml @@ -60,8 +60,7 @@ dbrepo: s3: accessKeyId: "${S3_ACCESS_KEY_ID:seaweedfsadmin}" secretAccessKey: "${S3_SECRET_ACCESS_KEY:seaweedfsadmin}" - importBucket: "${S3_IMPORT_BUCKET:dbrepo-upload}" - exportBucket: "${S3_EXPORT_BUCKET:dbrepo-download}" + bucket: "${S3_BUCKET:dbrepo}" system: username: "${SYSTEM_USERNAME:admin}" password: "${SYSTEM_PASSWORD:admin}" @@ -72,6 +71,8 @@ dbrepo: brokerService: "${BROKER_SERVICE_ENDPOINT:http://gateway-service/admin/broker}" authService: "${AUTH_SERVICE_ENDPOINT:http://gateway-service/api/auth}" storageService: "${S3_ENDPOINT:http://gateway-service/api/storage}" + rorService: "${ROR_ENDPOINT:https://api.ror.org}" + crossRefService: "${CROSSREF_ENDPOINT:http://data.crossref.org}" pid: base: "${PID_BASE:http://localhost/pid/}" jwt: diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/gateway/KeycloakGatewayUnitTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/gateway/KeycloakGatewayUnitTest.java index f60211d5902972641a8e2473fd5fd76b289ee571..3c2ef1340ef8381a46c7277213d72311b5b357d6 100644 --- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/gateway/KeycloakGatewayUnitTest.java +++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/gateway/KeycloakGatewayUnitTest.java @@ -39,67 +39,12 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { @Autowired private KeycloakGatewayImpl keycloakGateway; - @Test - public void obtainToken_succeeds() throws AuthServiceException, AuthServiceConnectionException, - CredentialsInvalidException { - - /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) - .thenReturn(ResponseEntity.status(HttpStatus.OK) - .body(TOKEN_DTO)); - - /* test */ - keycloakGateway.obtainToken(); - } - - @Test - public void obtainToken_connection_fails() { - - /* mock */ - doThrow(HttpServerErrorException.BadGateway.class) - .when(keycloakRestTemplate) - .exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class)); - - /* test */ - assertThrows(AuthServiceConnectionException.class, () -> { - keycloakGateway.obtainToken(); - }); - } - - @Test - public void obtainToken_unauthorized_fails() { - - /* mock */ - doThrow(HttpClientErrorException.Unauthorized.class) - .when(keycloakRestTemplate) - .exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class)); - - /* test */ - assertThrows(CredentialsInvalidException.class, () -> { - keycloakGateway.obtainToken(); - }); - } - - @Test - public void obtainToken_unexpected_fails() { - - /* mock */ - doThrow(HttpClientErrorException.BadRequest.class) - .when(keycloakRestTemplate) - .exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class)); - - /* test */ - assertThrows(AuthServiceException.class, () -> { - keycloakGateway.obtainToken(); - }); - } - @Test public void createUser_succeeds() throws 
UserExistsException, EmailExistsException, AuthServiceException, - AuthServiceConnectionException, CredentialsInvalidException { + AuthServiceConnectionException { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(Void.class))) @@ -114,7 +59,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void createUser_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(Void.class))) @@ -131,7 +76,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void createUser_sameUsername_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.Conflict.class) @@ -148,7 +93,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void createUser_connection_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpServerErrorException.class) @@ -165,7 +110,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void deleteUser_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.DELETE), any(HttpEntity.class), eq(Void.class))) @@ -180,10 +125,10 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { @Test public void deleteUser_succeeds() throws UserNotFoundException, AuthServiceException, - AuthServiceConnectionException, CredentialsInvalidException { + AuthServiceConnectionException { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.DELETE), any(HttpEntity.class), eq(Void.class))) @@ -198,7 +143,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void deleteUser_notFound_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + 
when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.NotFound.class) @@ -215,7 +160,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void deleteUser_unexpected_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.Conflict.class) @@ -232,7 +177,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void deleteUser_connection_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpServerErrorException.class) @@ -246,11 +191,10 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { } @Test - public void updateUserCredentials_succeeds() throws AuthServiceException, AuthServiceConnectionException, - CredentialsInvalidException { + public void updateUserCredentials_succeeds() throws AuthServiceException, AuthServiceConnectionException { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.PUT), any(HttpEntity.class), eq(Void.class))) @@ -265,7 +209,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void updateUserCredentials_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.PUT), any(HttpEntity.class), eq(Void.class))) @@ -282,7 +226,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void updateUserCredentials_connection_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpServerErrorException.class) @@ -299,7 +243,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void updateUserCredentials_unexpected_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.Conflict.class) @@ -316,7 +260,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findByUsername_notFound_fails() 
{ /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.GET), any(HttpEntity.class), eq(UserDto[].class))) @@ -333,7 +277,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findByUsername_connection_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpServerErrorException.class) @@ -350,7 +294,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findByUsername_unexpected_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.Conflict.class) @@ -364,11 +308,10 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { } @Test - public void findById_succeeds() throws UserNotFoundException, AuthServiceException, AuthServiceConnectionException, - CredentialsInvalidException { + public void findById_succeeds() throws UserNotFoundException, AuthServiceException, AuthServiceConnectionException { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.GET), any(HttpEntity.class), eq(UserDto.class))) @@ -384,7 +327,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findById_notFound_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpClientErrorException.NotFound.class) @@ -401,7 +344,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findById_connection_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); doThrow(HttpServerErrorException.class) @@ -418,7 +361,7 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { public void findById_unexpected_fails() { /* mock */ - when(keycloakRestTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) + when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) .thenReturn(ResponseEntity.status(HttpStatus.OK) .body(TOKEN_DTO)); 
doThrow(HttpClientErrorException.Conflict.class) @@ -501,8 +444,8 @@ public class KeycloakGatewayUnitTest extends AbstractUnitTest { } @Test - public void obtainUserToken_succeeds() throws AuthServiceConnectionException, CredentialsInvalidException, - AccountNotSetupException { + public void obtainUserToken_succeeds() throws AuthServiceConnectionException, + AccountNotSetupException, CredentialsInvalidException { /* mock */ when(restTemplate.exchange(anyString(), eq(HttpMethod.POST), any(HttpEntity.class), eq(TokenDto.class))) diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationIntegrationTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationPrivilegedIntegrationMvcTest.java similarity index 96% rename from dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationIntegrationTest.java rename to dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationPrivilegedIntegrationMvcTest.java index c1ca4ab2f2bb168a77051a269c6a3ef79fd9d63b..5b2e607ec1ba9c6fe33055d4533042da4e88b2f9 100644 --- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationIntegrationTest.java +++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/mvc/AuthenticationPrivilegedIntegrationMvcTest.java @@ -40,7 +40,7 @@ import static org.springframework.test.web.servlet.result.MockMvcResultMatchers. @AutoConfigureMockMvc @Testcontainers @SpringBootTest -public class AuthenticationIntegrationTest extends AbstractUnitTest { +public class AuthenticationPrivilegedIntegrationMvcTest extends AbstractUnitTest { @Autowired private MockMvc mockMvc; @@ -64,7 +64,7 @@ public class AuthenticationIntegrationTest extends AbstractUnitTest { private DatabaseRepository databaseRepository; @Container - private static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:21.0") + private static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:24.0") .withImagePullPolicy(PullPolicy.alwaysPull()) .withAdminUsername("admin") .withAdminPassword("admin") diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/AuthenticationServiceIntegrationTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/AuthenticationServiceIntegrationTest.java index 8b7be04cb6b4c2b383e54c02c18966855eeeca25..334b9776d6b792d255c036707b63456dd8961a70 100644 --- a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/AuthenticationServiceIntegrationTest.java +++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/AuthenticationServiceIntegrationTest.java @@ -32,7 +32,7 @@ public class AuthenticationServiceIntegrationTest extends AbstractUnitTest { private KeycloakGateway keycloakGateway; @Container - private static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:21.0") + private static KeycloakContainer keycloakContainer = new KeycloakContainer("quay.io/keycloak/keycloak:24.0") .withImagePullPolicy(PullPolicy.alwaysPull()) .withAdminUsername("admin") .withAdminPassword("admin") diff --git a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java index aa3a460b999c4e1dbfe78d9e9b50f3d0053e1bf7..5ef5f9742bbfdcc8c5ab21360ca926fd61261244 100644 --- 
a/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java +++ b/dbrepo-metadata-service/rest-service/src/test/java/at/tuwien/service/StorageServiceIntegrationTest.java @@ -56,14 +56,14 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { public void beforeEach() throws SQLException { genesis(); /* s3 */ - if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3ImportBucket()))) { + if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) { s3Client.createBucket(CreateBucketRequest.builder() - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build()); } - if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3ExportBucket()))) { + if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) { s3Client.createBucket(CreateBucketRequest.builder() - .bucket(s3Config.getS3ExportBucket()) + .bucket(s3Config.getS3Bucket()) .build()); } } @@ -73,14 +73,14 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { final String key = "s3key"; /* mock */ - log.trace("mock object with key {} to bucket {}", key, s3Config.getS3ImportBucket()); + log.trace("mock object with key {} to bucket {}", key, s3Config.getS3Bucket()); s3Client.putObject(PutObjectRequest.builder() .key(key) - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), RequestBody.fromFile(new File("src/test/resources/csv/keyboard.csv"))); /* test */ - final InputStream response = storageService.getObject(s3Config.getS3ImportBucket(), key); + final InputStream response = storageService.getObject(s3Config.getS3Bucket(), key); assertNotNull(response); } @@ -89,7 +89,7 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { /* test */ assertThrows(StorageNotFoundException.class, () -> { - storageService.getObject(s3Config.getS3ImportBucket(), "i_do_not_exist"); + storageService.getObject(s3Config.getS3Bucket(), "i_do_not_exist"); }); } @@ -107,10 +107,10 @@ public class StorageServiceIntegrationTest extends AbstractUnitTest { final String key = "s3key"; /* mock */ - log.trace("mock object with key {} to bucket {}", key, s3Config.getS3ImportBucket()); + log.trace("mock object with key {} to bucket {}", key, s3Config.getS3Bucket()); s3Client.putObject(PutObjectRequest.builder() .key(key) - .bucket(s3Config.getS3ImportBucket()) + .bucket(s3Config.getS3Bucket()) .build(), RequestBody.fromFile(new File("src/test/resources/csv/keyboard.csv"))); /* test */ diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/auth/InternalRequestInterceptor.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/auth/InternalRequestInterceptor.java index 835b7245d1ef2ca017375990dd77c20693f3ef6f..5a7288cf55ead160c3b57436742f93693723592f 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/auth/InternalRequestInterceptor.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/auth/InternalRequestInterceptor.java @@ -38,6 +38,7 @@ public class InternalRequestInterceptor implements ClientHttpRequestInterceptor gatewayConfig.getSystemPassword()); headers.setBearerAuth(token.getAccessToken()); log.trace("set bearer token for internal user: {}", gatewayConfig.getSystemUsername()); + log.trace("set access token: {}", token.getAccessToken()); return execution.execute(request, body); } catch 
(AuthServiceConnectionException | CredentialsInvalidException | AccountNotSetupException e) { log.error("Failed to obtain token for internal user: {}", gatewayConfig.getSystemUsername()); diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/GatewayConfig.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/GatewayConfig.java index d7fc192bb6c795fb9425ff99c318f2027fe8ff8d..0bcace730e7753b1dd21d2e1d4a91f47bda6b9f3 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/GatewayConfig.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/GatewayConfig.java @@ -27,6 +27,12 @@ public class GatewayConfig { @Value("${dbrepo.endpoints.searchService}") private String searchEndpoint; + @Value("${dbrepo.endpoints.rorService}") + private String rorEndpoint; + + @Value("${dbrepo.endpoints.crossRefService}") + private String crossRefEndpoint; + @Value("${spring.rabbitmq.username}") private String brokerUsername; diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/KeycloakConfig.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/KeycloakConfig.java index e422223e06489ff144b148b8210913c1bdff3423..a24bbf41b8168b6e5c9ffef70c1b7bbbd6f5c674 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/KeycloakConfig.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/KeycloakConfig.java @@ -37,7 +37,7 @@ public class KeycloakConfig { final RestTemplate restTemplate = new RestTemplate(); restTemplate.setUriTemplateHandler(new DefaultUriBuilderFactory(keycloakEndpoint)); restTemplate.getInterceptors() - .add(new KeycloakInterceptor(keycloakUsername, keycloakPassword, keycloakEndpoint)); + .add(new KeycloakInterceptor(restTemplate(), keycloakUsername, keycloakPassword, keycloakEndpoint)); return restTemplate; } } diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/S3Config.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/S3Config.java index 763505b933dd62259b95745e2059dea0c3edc9c6..6c82c29e25711de3e3a47ebcc9f86a359d31f6be 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/S3Config.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/config/S3Config.java @@ -27,11 +27,8 @@ public class S3Config { @Value("${dbrepo.s3.secretAccessKey}") private String s3SecretAccessKey; - @Value("${dbrepo.s3.importBucket}") - private String s3ImportBucket; - - @Value("${dbrepo.s3.exportBucket}") - private String s3ExportBucket; + @Value("${dbrepo.s3.bucket}") + private String s3Bucket; @Bean public S3Client s3client() { diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/KeycloakGateway.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/KeycloakGateway.java index b6ee01d4917d36cf205cad35e79350f66403d143..71e30fb8606afc9c1fcef0c6fea3518bd7143d60 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/KeycloakGateway.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/KeycloakGateway.java @@ -23,14 +23,15 @@ public interface KeycloakGateway { * @throws UserExistsException The user already exists at the Authentication Service. * @throws EmailExistsException The user email already exists in the metadata database. 
*/ - void createUser(UserCreateDto data) throws AuthServiceException, AuthServiceConnectionException, EmailExistsException, UserExistsException, CredentialsInvalidException; + void createUser(UserCreateDto data) throws AuthServiceException, AuthServiceConnectionException, + EmailExistsException, UserExistsException; /** * Deletes a user at the Authentication Service with given user id. * * @param id The user id. */ - void deleteUser(UUID id) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException, CredentialsInvalidException; + void deleteUser(UUID id) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException; /** * Update the credentials for a given user. @@ -38,7 +39,8 @@ public interface KeycloakGateway { * @param id The user id. * @param password The user credential. */ - void updateUserCredentials(UUID id, UserPasswordDto password) throws AuthServiceException, AuthServiceConnectionException, CredentialsInvalidException; + void updateUserCredentials(UUID id, UserPasswordDto password) throws AuthServiceException, + AuthServiceConnectionException; /** * Finds a user in the metadata database by given username. @@ -46,8 +48,8 @@ public interface KeycloakGateway { * @param username The user username. * @return The updated user. */ - UserDto findByUsername(String username) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException, CredentialsInvalidException; + UserDto findByUsername(String username) throws AuthServiceException, AuthServiceConnectionException, + UserNotFoundException; - UserDto findById(UUID id) throws AuthServiceException, AuthServiceConnectionException, - UserNotFoundException, CredentialsInvalidException; + UserDto findById(UUID id) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException; } diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/ApiTemplateInterceptorImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/ApiTemplateInterceptorImpl.java index 27517359c2a3c25e3ee057b1890b82ac805ad863..8a2bf70637a25822c526da93ac89467660dd02e0 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/ApiTemplateInterceptorImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/ApiTemplateInterceptorImpl.java @@ -2,12 +2,14 @@ package at.tuwien.gateway.impl; import at.tuwien.gateway.ApiTemplateInterceptor; import org.springframework.http.HttpRequest; +import org.springframework.http.MediaType; import org.springframework.http.client.ClientHttpRequestExecution; import org.springframework.http.client.ClientHttpRequestInterceptor; import org.springframework.http.client.ClientHttpResponse; import org.springframework.stereotype.Service; import java.io.IOException; +import java.util.List; @Service public class ApiTemplateInterceptorImpl implements ApiTemplateInterceptor, ClientHttpRequestInterceptor { @@ -15,8 +17,8 @@ public class ApiTemplateInterceptorImpl implements ApiTemplateInterceptor, Clien @Override public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution) throws IOException { - request.getHeaders().set("Content-Type", "application/json"); - request.getHeaders().set("Accept", "application/json"); + request.getHeaders().setAccept(List.of(MediaType.APPLICATION_JSON)); + request.getHeaders().setContentType(MediaType.APPLICATION_JSON); return execution.execute(request, body); } } diff --git 
a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/BrokerServiceGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/BrokerServiceGatewayImpl.java index 0313ca26edc598812ffc9f176737de1e05f4c1ec..307c166febb3634a08991c7b5d571bcf7bf072e1 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/BrokerServiceGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/BrokerServiceGatewayImpl.java @@ -31,10 +31,11 @@ public class BrokerServiceGatewayImpl implements BrokerServiceGateway { @Override public void grantTopicPermission(String username, ExchangeUpdatePermissionsDto data) throws BrokerServiceConnectionException, BrokerServiceException { - final String url = "/api/topic-permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + final String path = "/api/topic-permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + log.trace("grant topic permission at endpoint {} with path {}", rabbitConfig.getBrokerEndpoint(), path); final ResponseEntity<Void> response; try { - response = restTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>(data), Void.class); + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to grant topic permissions: {}", e.getMessage()); throw new BrokerServiceConnectionException("Failed to grant topic permissions: " + e.getMessage()); @@ -51,10 +52,11 @@ public class BrokerServiceGatewayImpl implements BrokerServiceGateway { @Override public void grantVirtualHostPermission(String username, GrantVirtualHostPermissionsDto data) throws BrokerServiceConnectionException, BrokerServiceException { - final String url = "/api/permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + final String path = "/api/permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + log.trace("grant virtual host permission at endpoint {} with path {}", rabbitConfig.getBrokerEndpoint(), path); final ResponseEntity<Void> response; try { - response = restTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>(data), Void.class); + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to grant virtual host permissions: {}", e.getMessage()); throw new BrokerServiceConnectionException("Failed to grant virtual host permissions: " + e.getMessage()); @@ -71,10 +73,11 @@ public class BrokerServiceGatewayImpl implements BrokerServiceGateway { @Override public void grantExchangePermission(String username, GrantExchangePermissionsDto data) throws BrokerServiceConnectionException, BrokerServiceException { - final String url = "/api/topic-permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + final String path = "/api/topic-permissions/" + rabbitConfig.getVirtualHost() + "/" + username; + log.trace("grant exchange permission at endpoint {} with path {}", rabbitConfig.getBrokerEndpoint(), path); final ResponseEntity<Void> response; try { - response = restTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>(data), Void.class); + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to grant exchange permissions: {}", e.getMessage()); throw new BrokerServiceConnectionException("Failed to grant exchange permissions: " + e.getMessage()); diff --git 
a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/CrossrefGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/CrossrefGatewayImpl.java index 9b675cba348f7cc142958f9ee1cd93de198c2055..542d9c981d82f6b8a300808ed94ca40034c95851 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/CrossrefGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/CrossrefGatewayImpl.java @@ -1,6 +1,7 @@ package at.tuwien.gateway.impl; import at.tuwien.api.crossref.CrossrefDto; +import at.tuwien.config.GatewayConfig; import at.tuwien.exception.DoiNotFoundException; import at.tuwien.gateway.CrossrefGateway; import lombok.extern.log4j.Log4j2; @@ -19,24 +20,24 @@ import org.springframework.web.client.RestTemplate; public class CrossrefGatewayImpl implements CrossrefGateway { private final RestTemplate restTemplate; + private final GatewayConfig gatewayConfig; @Autowired - public CrossrefGatewayImpl(RestTemplate restTemplate) { + public CrossrefGatewayImpl(RestTemplate restTemplate, GatewayConfig gatewayConfig) { this.restTemplate = restTemplate; + this.gatewayConfig = gatewayConfig; } @Override public CrossrefDto findById(String id) throws DoiNotFoundException { - final HttpHeaders headers = new HttpHeaders(); - headers.set("Accept", "application/json"); - final String url = "http://data.crossref.org/fundingdata/funder/" + id; + final String path = "/fundingdata/funder/" + id; + log.trace("find crossref metadata by id from endpoint {} with path {}", gatewayConfig.getCrossRefEndpoint(), path); final ResponseEntity<CrossrefDto> response; try { - log.trace("find crossref doi from url {}", url); - response = restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), CrossrefDto.class); + response = restTemplate.exchange(gatewayConfig.getCrossRefEndpoint() + path, HttpMethod.GET, HttpEntity.EMPTY, CrossrefDto.class); } catch (HttpServerErrorException e) { - log.error("Failed to retrieve CrossRef metadata from URL {}: {}", url, e.getMessage()); - throw new DoiNotFoundException("Failed to retrieve CrossRef metadata from URL " + url + ": " + e.getMessage()); + log.error("Failed to retrieve crossref metadata: {}", e.getMessage()); + throw new DoiNotFoundException("Failed to retrieve crossref metadata: " + e.getMessage()); } return response.getBody(); } diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/DataServiceGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/DataServiceGatewayImpl.java index 4a3f12f759d603a9c76ea37ae223a43e05cf897f..886911d9f4a770fe8c620d7cc882cd7b4da05c55 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/DataServiceGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/DataServiceGatewayImpl.java @@ -8,6 +8,7 @@ import at.tuwien.api.database.table.TableCreateDto; import at.tuwien.api.database.table.TableDto; import at.tuwien.api.database.table.TableStatisticDto; import at.tuwien.api.user.internal.UpdateUserPasswordDto; +import at.tuwien.config.GatewayConfig; import at.tuwien.exception.*; import at.tuwien.gateway.DataServiceGateway; import lombok.extern.log4j.Log4j2; @@ -27,18 +28,22 @@ import java.util.UUID; public class DataServiceGatewayImpl implements DataServiceGateway { private final RestTemplate restTemplate; + private final GatewayConfig gatewayConfig; - public 
DataServiceGatewayImpl(@Qualifier("dataServiceRestTemplate") RestTemplate restTemplate) { + public DataServiceGatewayImpl(@Qualifier("dataServiceRestTemplate") RestTemplate restTemplate, + GatewayConfig gatewayConfig) { this.restTemplate = restTemplate; + this.gatewayConfig = gatewayConfig; } @Override public void createAccess(Long databaseId, UUID userId, AccessTypeDto access) throws DataServiceConnectionException, DataServiceException, DatabaseNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/access/" + userId; + final String path = "/api/database/" + databaseId + "/access/" + userId; + log.trace("create access at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.POST, + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(UpdateDatabaseAccessDto.builder().type(access).build()), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to create access: {}", e.getMessage()); @@ -60,9 +65,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void updateAccess(Long databaseId, UUID userId, AccessTypeDto access) throws DataServiceConnectionException, DataServiceException, AccessNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/access/" + userId; + final String path = "/api/database/" + databaseId + "/access/" + userId; + log.trace("update access at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.PUT, + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(UpdateDatabaseAccessDto.builder().type(access).build()), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to update access: {}", e.getMessage()); @@ -84,9 +90,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void deleteAccess(Long databaseId, UUID userId) throws DataServiceConnectionException, DataServiceException, AccessNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/access/" + userId; + final String path = "/api/database/" + databaseId + "/access/" + userId; + log.trace("delete access at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); + response = restTemplate.exchange(path, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); } catch (HttpServerErrorException e) { log.error("Failed to delete access: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to delete access: " + e.getMessage(), e); @@ -107,9 +114,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public DatabaseDto createDatabase(CreateDatabaseDto data) throws DataServiceConnectionException, DataServiceException, DatabaseNotFoundException { final ResponseEntity<DatabaseDto> response; - final String url = "/api/database"; + final String path = "/api/database"; + log.trace("create database at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(data), DatabaseDto.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(data), DatabaseDto.class); } catch (HttpServerErrorException e) { log.error("Failed to create database: {}", e.getMessage()); throw 
new DataServiceConnectionException("Failed to create database: " + e.getMessage(), e); @@ -131,9 +139,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void updateDatabase(Long databaseId, UpdateUserPasswordDto data) throws DataServiceConnectionException, DataServiceException, DatabaseNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId; + final String path = "/api/database/" + databaseId; + log.trace("update database at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>(data), Void.class); + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to update user password in database: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to update user password in database: " + e.getMessage(), e); @@ -154,9 +163,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void createTable(Long databaseId, TableCreateDto data) throws DataServiceConnectionException, DataServiceException, DatabaseNotFoundException, TableExistsException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/table"; + final String path = "/api/database/" + databaseId + "/table"; + log.trace("create table at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(data), Void.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to create table: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to create table: " + e.getMessage(), e); @@ -180,9 +190,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void deleteTable(Long databaseId, Long tableId) throws DataServiceConnectionException, DataServiceException, TableNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/table/" + tableId; + final String path = "/api/database/" + databaseId + "/table/" + tableId; + log.trace("delete table at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); + response = restTemplate.exchange(path, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); } catch (HttpServerErrorException e) { log.error("Failed to delete table: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to delete table: " + e.getMessage(), e); @@ -202,9 +213,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { @Override public ViewDto createView(Long databaseId, ViewCreateDto data) throws DataServiceConnectionException, DataServiceException { final ResponseEntity<ViewDto> response; - final String url = "/api/database/" + databaseId + "/view"; + final String path = "/api/database/" + databaseId + "/view"; + log.trace("create view at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(data), ViewDto.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(data), ViewDto.class); } catch (HttpServerErrorException e) { log.error("Failed to create view: {}", 
e.getMessage()); throw new DataServiceConnectionException("Failed to create view: " + e.getMessage(), e); @@ -227,9 +239,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public void deleteView(Long databaseId, Long viewId) throws DataServiceConnectionException, DataServiceException, ViewNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/database/" + databaseId + "/view/" + viewId; + final String path = "/api/database/" + databaseId + "/view/" + viewId; + log.trace("delete view at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); + response = restTemplate.exchange(path, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); } catch (HttpServerErrorException e) { log.error("Failed to delete view: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to delete view: " + e.getMessage(), e); @@ -250,9 +263,9 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public QueryDto findQuery(Long databaseId, Long queryId) throws DataServiceConnectionException, DataServiceException, QueryNotFoundException { final ResponseEntity<QueryDto> response; - final String url = "/api/database/" + databaseId + "/subset/" + queryId; + final String path = "/api/database/" + databaseId + "/subset/" + queryId; try { - response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, QueryDto.class); + response = restTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, QueryDto.class); } catch (HttpServerErrorException e) { log.error("Failed to find query: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to find query", e); @@ -277,9 +290,9 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public ExportResourceDto exportQuery(Long databaseId, Long queryId) throws DataServiceConnectionException, DataServiceException, QueryNotFoundException { final ResponseEntity<ExportResourceDto> response; - final String url = "/api/database/" + databaseId + "/subset/" + queryId; + final String path = "/api/database/" + databaseId + "/subset/" + queryId; try { - response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, ExportResourceDto.class); + response = restTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, ExportResourceDto.class); } catch (HttpServerErrorException e) { log.error("Failed to export query: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to export query: " + e.getMessage(), e); @@ -301,9 +314,9 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public List<TableDto> getTableSchemas(Long databaseId) throws DataServiceConnectionException, DataServiceException, TableNotFoundException { final ResponseEntity<TableDto[]> response; - final String url = "/api/database/" + databaseId + "/table"; + final String path = "/api/database/" + databaseId + "/table"; try { - response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, TableDto[].class); + response = restTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, TableDto[].class); } catch (HttpServerErrorException e) { log.error("Failed to get table schemas: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to get table schemas: " + e.getMessage(), e); @@ -331,9 +344,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { public List<ViewDto> getViewSchemas(Long databaseId) throws 
DataServiceConnectionException, DataServiceException, ViewNotFoundException { final ResponseEntity<ViewDto[]> response; - final String url = "/api/database/" + databaseId + "/view"; + final String path = "/api/database/" + databaseId + "/view"; + log.trace("get view schemas at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, ViewDto[].class); + response = restTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, ViewDto[].class); } catch (HttpServerErrorException e) { log.error("Failed to get view schemas: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to get view schemas: " + e.getMessage(), e); @@ -360,9 +374,10 @@ public class DataServiceGatewayImpl implements DataServiceGateway { @Override public TableStatisticDto getTableStatistics(Long databaseId, Long tableId) throws DataServiceConnectionException, DataServiceException, TableNotFoundException { final ResponseEntity<TableStatisticDto> response; - final String url = "/api/database/" + databaseId + "/table/" + tableId + "/statistic"; + final String path = "/api/database/" + databaseId + "/table/" + tableId + "/statistic"; + log.trace("get table statistics at endpoint {} with path {}", gatewayConfig.getDataEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, TableStatisticDto.class); + response = restTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, TableStatisticDto.class); } catch (HttpServerErrorException e) { log.error("Failed to analyse table statistic: {}", e.getMessage()); throw new DataServiceConnectionException("Failed to analyse table statistic: " + e.getMessage(), e); diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/KeycloakGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/KeycloakGatewayImpl.java index d05243f9a5bf81a752c085184b754acae1405ad3..38045e0399f00e0f4427a26aa85b4fe1274eeaa1 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/KeycloakGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/KeycloakGatewayImpl.java @@ -16,6 +16,7 @@ import org.springframework.util.MultiValueMap; import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.HttpServerErrorException; import org.springframework.web.client.RestTemplate; +import org.springframework.web.util.DefaultUriBuilderFactory; import java.util.UUID; @@ -31,39 +32,13 @@ public class KeycloakGatewayImpl implements KeycloakGateway { public KeycloakGatewayImpl(@Qualifier("restTemplate") RestTemplate restTemplate, @Qualifier("keycloakRestTemplate") RestTemplate keycloakRestTemplate, KeycloakConfig keycloakConfig, MetadataMapper metadataMapper) { + restTemplate.setUriTemplateHandler(new DefaultUriBuilderFactory(keycloakConfig.getKeycloakEndpoint())); this.restTemplate = restTemplate; this.keycloakRestTemplate = keycloakRestTemplate; this.keycloakConfig = keycloakConfig; this.metadataMapper = metadataMapper; } - public TokenDto obtainToken() throws AuthServiceConnectionException, AuthServiceException, - CredentialsInvalidException { - final HttpHeaders headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED); - final MultiValueMap<String, String> payload = new LinkedMultiValueMap<>(); - payload.add("username", keycloakConfig.getKeycloakUsername()); - payload.add("password", 
keycloakConfig.getKeycloakPassword()); - payload.add("grant_type", "password"); - payload.add("client_id", "admin-cli"); - final String url = keycloakConfig.getKeycloakEndpoint() + "/realms/master/protocol/openid-connect/token"; - log.trace("request admin token from url: {}", url); - final ResponseEntity<TokenDto> response; - try { - response = keycloakRestTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); - } catch (HttpServerErrorException e) { - log.error("Failed to obtain admin token: {}", e.getMessage()); - throw new AuthServiceConnectionException("Service unavailable", e); - } catch (HttpClientErrorException.Unauthorized e) { - log.error("Failed to obtain admin token: invalid credentials: {}", e.getMessage(), e); - throw new CredentialsInvalidException("Invalid credentials: " + e.getMessage(), e); - } catch (HttpClientErrorException.BadRequest e) { - log.error("Failed to obtain admin token: unexpected response: {}", e.getMessage(), e); - throw new AuthServiceException("Unexpected response: " + e.getMessage(), e); - } - return response.getBody(); - } - @Override public TokenDto obtainUserToken(String username, String password) throws AuthServiceConnectionException, CredentialsInvalidException, AccountNotSetupException { @@ -76,11 +51,11 @@ public class KeycloakGatewayImpl implements KeycloakGateway { payload.add("scope", "openid roles"); payload.add("client_id", keycloakConfig.getKeycloakClient()); payload.add("client_secret", keycloakConfig.getKeycloakClientSecret()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/realms/dbrepo/protocol/openid-connect/token"; - log.trace("request admin token from url: {}", url); + final String path = "/realms/dbrepo/protocol/openid-connect/token"; + log.trace("obtain user token at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<TokenDto> response; try { - response = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); } catch (HttpServerErrorException e) { log.error("Failed to obtain user token: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -109,11 +84,11 @@ public class KeycloakGatewayImpl implements KeycloakGateway { payload.add("grant_type", "refresh_token"); payload.add("client_id", keycloakConfig.getKeycloakClient()); payload.add("client_secret", keycloakConfig.getKeycloakClientSecret()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/realms/dbrepo/protocol/openid-connect/token"; - log.trace("request user token from url: {}", url); + final String path = "/realms/dbrepo/protocol/openid-connect/token"; + log.trace("refresh user token at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<TokenDto> response; try { - response = restTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); } catch (HttpServerErrorException e) { log.error("Failed to refresh user token: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -133,15 +108,12 @@ public class KeycloakGatewayImpl implements KeycloakGateway { @Override public void createUser(UserCreateDto data) throws AuthServiceException, AuthServiceConnectionException, - 
EmailExistsException, UserExistsException, CredentialsInvalidException { - /* obtain admin token */ - final HttpHeaders headers = new HttpHeaders(); - headers.set("Authorization", "Bearer " + obtainToken().getAccessToken()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/admin/realms/dbrepo/users"; - log.debug("create user at url {}", url); + EmailExistsException, UserExistsException { + final String path = "/admin/realms/dbrepo/users"; + log.trace("create user at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<Void> response; try { - response = keycloakRestTemplate.exchange(url, HttpMethod.POST, new HttpEntity<>(data, headers), Void.class); + response = keycloakRestTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(data), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to create user: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -164,16 +136,12 @@ public class KeycloakGatewayImpl implements KeycloakGateway { } @Override - public void deleteUser(UUID id) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException, - CredentialsInvalidException { - /* obtain admin token */ - final HttpHeaders headers = new HttpHeaders(); - headers.set("Authorization", "Bearer " + obtainToken().getAccessToken()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/admin/realms/dbrepo/users/" + id; - log.debug("delete user at url {}", url); + public void deleteUser(UUID id) throws AuthServiceException, AuthServiceConnectionException, UserNotFoundException { + final String path = "/admin/realms/dbrepo/users/" + id; + log.trace("delete user at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<Void> response; try { - response = keycloakRestTemplate.exchange(url, HttpMethod.DELETE, new HttpEntity<>(null, headers), Void.class); + response = keycloakRestTemplate.exchange(path, HttpMethod.DELETE, HttpEntity.EMPTY, Void.class); } catch (HttpServerErrorException e) { log.error("Failed to delete user: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -193,16 +161,13 @@ public class KeycloakGatewayImpl implements KeycloakGateway { @Override public void updateUserCredentials(UUID id, UserPasswordDto data) throws AuthServiceException, - AuthServiceConnectionException, CredentialsInvalidException { - /* obtain admin token */ - final HttpHeaders headers = new HttpHeaders(); - headers.set("Authorization", "Bearer " + obtainToken().getAccessToken()); + AuthServiceConnectionException { final UpdateCredentialsDto payload = metadataMapper.passwordToUpdateCredentialsDto(data.getPassword()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/admin/realms/dbrepo/users/" + id; - log.debug("update user credentials at url {}", url); + final String path = "/admin/realms/dbrepo/users/" + id; + log.trace("update user credentials at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<Void> response; try { - response = keycloakRestTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>(payload, headers), Void.class); + response = keycloakRestTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>(payload), Void.class); } catch (HttpServerErrorException e) { log.error("Failed to update user credentials: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -219,15 +184,12 @@ public class 
KeycloakGatewayImpl implements KeycloakGateway { @Override public UserDto findByUsername(String username) throws AuthServiceException, AuthServiceConnectionException, - UserNotFoundException, CredentialsInvalidException { - /* obtain admin token */ - final HttpHeaders headers = new HttpHeaders(); - headers.set("Authorization", "Bearer " + obtainToken().getAccessToken()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/admin/realms/dbrepo/users/?username=" + username; - log.debug("find user from url {}", url); + UserNotFoundException { + final String path = "/admin/realms/dbrepo/users/?username=" + username; + log.trace("find user by username at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<UserDto[]> response; try { - response = keycloakRestTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), UserDto[].class); + response = keycloakRestTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, UserDto[].class); } catch (HttpServerErrorException e) { log.error("Failed to find user: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); @@ -245,15 +207,12 @@ public class KeycloakGatewayImpl implements KeycloakGateway { @Override public UserDto findById(UUID id) throws AuthServiceException, AuthServiceConnectionException, - UserNotFoundException, CredentialsInvalidException { - /* obtain admin token */ - final HttpHeaders headers = new HttpHeaders(); - headers.set("Authorization", "Bearer " + obtainToken().getAccessToken()); - final String url = keycloakConfig.getKeycloakEndpoint() + "/admin/realms/dbrepo/users/" + id; - log.debug("find user from url {}", url); + UserNotFoundException { + final String path = "/admin/realms/dbrepo/users/" + id; + log.trace("find user by id at endpoint {} with path {}", keycloakConfig.getKeycloakEndpoint(), path); final ResponseEntity<UserDto> response; try { - response = keycloakRestTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), UserDto.class); + response = keycloakRestTemplate.exchange(path, HttpMethod.GET, HttpEntity.EMPTY, UserDto.class); } catch (HttpServerErrorException e) { log.error("Failed to find user: {}", e.getMessage()); throw new AuthServiceConnectionException("Service unavailable", e); diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/OrcidGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/OrcidGatewayImpl.java index 15b73eb1931a2397d4dba3550479a839b23ec834..7634e421fe2cda96ebaaa13d029282d682c5d240 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/OrcidGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/OrcidGatewayImpl.java @@ -27,15 +27,13 @@ public class OrcidGatewayImpl implements OrcidGateway { @Override public OrcidDto findByUrl(String url) throws OrcidNotFoundException { - final HttpHeaders headers = new HttpHeaders(); - headers.set("Accept", "application/json"); + log.trace("find orcid by url at endpoint {}", url); final ResponseEntity<OrcidDto> response; try { - log.debug("find orcid from url {}", url); - response = restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), OrcidDto.class); + response = restTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, OrcidDto.class); } catch (HttpServerErrorException e) { - log.error("Failed to retrieve ORCID metadata from URL {}: {}", url, e.getMessage()); - throw new 
OrcidNotFoundException("Failed to retrieve ORCID metadata from URL " + url + ": " + e.getMessage()); + log.error("Failed to retrieve orcid metadata: {}", e.getMessage()); + throw new OrcidNotFoundException("Failed to retrieve orcid metadata: " + e.getMessage()); } return response.getBody(); } diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/RorGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/RorGatewayImpl.java index 7a5a64e8e24faa22f0051cdef8b94ded85ab5da3..37ed982f9e4489431e9e86b7fb5c5ec61fed57ec 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/RorGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/RorGatewayImpl.java @@ -1,6 +1,7 @@ package at.tuwien.gateway.impl; import at.tuwien.api.ror.RorDto; +import at.tuwien.config.GatewayConfig; import at.tuwien.exception.RorNotFoundException; import at.tuwien.gateway.RorGateway; import lombok.extern.log4j.Log4j2; @@ -19,24 +20,24 @@ import org.springframework.web.client.RestTemplate; public class RorGatewayImpl implements RorGateway { private final RestTemplate restTemplate; + private final GatewayConfig gatewayConfig; @Autowired - public RorGatewayImpl(RestTemplate restTemplate) { + public RorGatewayImpl(RestTemplate restTemplate, GatewayConfig gatewayConfig) { this.restTemplate = restTemplate; + this.gatewayConfig = gatewayConfig; } @Override public RorDto findById(String id) throws RorNotFoundException { - final HttpHeaders headers = new HttpHeaders(); - headers.set("Accept", "application/json"); - final String url = "https://api.ror.org/organizations/" + id; + final String path = "/organizations/" + id; + log.trace("find ror by id at endpoint {} with path {}", gatewayConfig.getRorEndpoint(), path); final ResponseEntity<RorDto> response; try { - log.trace("find ror from url {}", url); - response = restTemplate.exchange(url, HttpMethod.GET, new HttpEntity<>(null, headers), RorDto.class); + response = restTemplate.exchange(gatewayConfig.getRorEndpoint() + path, HttpMethod.GET, HttpEntity.EMPTY, RorDto.class); } catch (HttpServerErrorException e) { - log.error("Failed to retrieve ROR metadata from URL {}: {}", url, e.getMessage()); - throw new RorNotFoundException("Failed to retrieve ROR metadata from URL " + url + ": " + e.getMessage(), e); + log.error("Failed to retrieve ror metadata: {}", e.getMessage()); + throw new RorNotFoundException("Failed to retrieve ror metadata: " + e.getMessage(), e); } return response.getBody(); } diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/SearchServiceGatewayImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/SearchServiceGatewayImpl.java index deba8360f21d2ac982c00dd959617f4c49f7059e..d97483beb1cac9e590b43a2cdf409067f8eb4d74 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/SearchServiceGatewayImpl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/gateway/impl/SearchServiceGatewayImpl.java @@ -1,6 +1,7 @@ package at.tuwien.gateway.impl; import at.tuwien.api.database.DatabaseDto; +import at.tuwien.config.GatewayConfig; import at.tuwien.entities.database.Database; import at.tuwien.exception.*; import at.tuwien.gateway.SearchServiceGateway; @@ -20,12 +21,14 @@ import org.springframework.web.client.RestTemplate; public class SearchServiceGatewayImpl implements SearchServiceGateway { private final RestTemplate restTemplate; + private final GatewayConfig 
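The Crossref, ROR, data and search gateways above now read their base endpoints from a shared GatewayConfig instead of hard-coding URLs. The class itself is not part of this excerpt; a minimal sketch of what it could look like, with assumed property names (gateway.endpoints.*) and defaults taken from the compose environment (CROSSREF_ENDPOINT, ROR_ENDPOINT, DATA_SERVICE_ENDPOINT, SEARCH_SERVICE_ENDPOINT), is:

import lombok.Getter;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;

// Illustrative sketch only: the real GatewayConfig is not shown in this patch,
// so the property keys below are assumptions; only the getter names appear in the diff.
@Getter
@Configuration
public class GatewayConfig {

    @Value("${gateway.endpoints.crossref:http://data.crossref.org}")
    private String crossRefEndpoint;

    @Value("${gateway.endpoints.ror:https://api.ror.org}")
    private String rorEndpoint;

    @Value("${gateway.endpoints.data:http://data-service:8080}")
    private String dataEndpoint;

    @Value("${gateway.endpoints.search:http://gateway-service}")
    private String searchEndpoint;
}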
gatewayConfig; private final MetadataMapper metadataMapper; @Autowired public SearchServiceGatewayImpl(@Qualifier("searchServiceRestTemplate") RestTemplate restTemplate, - MetadataMapper metadataMapper) { + GatewayConfig gatewayConfig, MetadataMapper metadataMapper) { this.restTemplate = restTemplate; + this.gatewayConfig = gatewayConfig; this.metadataMapper = metadataMapper; } @@ -35,9 +38,10 @@ public class SearchServiceGatewayImpl implements SearchServiceGateway { final HttpHeaders headers = new HttpHeaders(); headers.set("Accept", "application/json"); headers.set("Content-Type", "application/json"); - final String url = "/api/search/database/" + database.getId(); + final String path = "/api/search/database/" + database.getId(); + log.trace("update database at endpoint {} with path {}", gatewayConfig.getSearchEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.PUT, new HttpEntity<>( + response = restTemplate.exchange(path, HttpMethod.PUT, new HttpEntity<>( metadataMapper.customDatabaseToDatabaseDto(database), headers), DatabaseDto.class); } catch (ResourceAccessException | HttpServerErrorException.ServiceUnavailable | HttpServerErrorException.InternalServerError e) { @@ -60,9 +64,10 @@ public class SearchServiceGatewayImpl implements SearchServiceGateway { @Override public void delete(Long databaseId) throws SearchServiceConnectionException, SearchServiceException, DatabaseNotFoundException { final ResponseEntity<Void> response; - final String url = "/api/search/database/" + databaseId; + final String path = "/api/search/database/" + databaseId; + log.trace("delete database at endpoint {} with path {}", gatewayConfig.getSearchEndpoint(), path); try { - response = restTemplate.exchange(url, HttpMethod.DELETE, new HttpEntity<>(null), Void.class); + response = restTemplate.exchange(path, HttpMethod.DELETE, new HttpEntity<>(null), Void.class); } catch (ResourceAccessException | HttpServerErrorException.ServiceUnavailable | HttpServerErrorException.InternalServerError e) { log.error("Failed to delete database: {}", e.getMessage()); diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/interceptor/KeycloakInterceptor.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/interceptor/KeycloakInterceptor.java index 78fb5adc61fd2420cfc62e72cb4aa4c700c3b82b..f73f8bd0dad15596ffecea3cbe9f4a411efa9b35 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/interceptor/KeycloakInterceptor.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/interceptor/KeycloakInterceptor.java @@ -11,6 +11,7 @@ import org.springframework.util.MultiValueMap; import org.springframework.web.client.HttpServerErrorException; import org.springframework.web.client.ResourceAccessException; import org.springframework.web.client.RestTemplate; +import org.springframework.web.util.DefaultUriBuilderFactory; import java.io.IOException; @@ -20,17 +21,20 @@ public class KeycloakInterceptor implements ClientHttpRequestInterceptor { private final String adminUsername; private final String adminPassword; private final String keycloakEndpoint; + private final RestTemplate restTemplate; - public KeycloakInterceptor(String adminUsername, String adminPassword, String keycloakEndpoint) { + public KeycloakInterceptor(RestTemplate restTemplate, String adminUsername, String adminPassword, + String keycloakEndpoint) { this.adminUsername = adminUsername; this.adminPassword = adminPassword; this.keycloakEndpoint = keycloakEndpoint; + this.restTemplate = restTemplate; } 
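The recurring url-to-path change throughout these gateways only works because each injected RestTemplate already carries its service's base URL, so relative paths resolve against it. A minimal sketch of such wiring, assuming a hypothetical configuration class and reusing the getDataEndpoint() getter named in the diff:

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.DefaultUriBuilderFactory;

// Sketch only: binds a base URL to the template so callers can pass relative paths.
@Configuration
public class DataServiceRestTemplateConfig {

    @Bean("dataServiceRestTemplate")
    public RestTemplate dataServiceRestTemplate(GatewayConfig gatewayConfig) {
        final RestTemplate restTemplate = new RestTemplate();
        // assumption: getDataEndpoint() returns e.g. http://data-service:8080
        restTemplate.setUriTemplateHandler(new DefaultUriBuilderFactory(gatewayConfig.getDataEndpoint()));
        return restTemplate;
    }
}

// usage: restTemplate.exchange("/api/database/" + databaseId, HttpMethod.GET, HttpEntity.EMPTY, DatabaseDto.class)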
@Override public ClientHttpResponse intercept(HttpRequest request, byte[] body, ClientHttpRequestExecution execution) throws IOException { - final RestTemplate restTemplate = new RestTemplate(); + restTemplate.setUriTemplateHandler(new DefaultUriBuilderFactory(keycloakEndpoint)); final HttpHeaders headers = new HttpHeaders(); headers.setContentType(MediaType.APPLICATION_FORM_URLENCODED); final MultiValueMap<String, String> payload = new LinkedMultiValueMap<>(); @@ -38,10 +42,11 @@ public class KeycloakInterceptor implements ClientHttpRequestInterceptor { payload.add("password", adminPassword); payload.add("grant_type", "password"); payload.add("client_id", "admin-cli"); + final String path = "/realms/master/protocol/openid-connect/token"; + log.trace("obtain admin token at endpoint {} with path {}", keycloakEndpoint, path); final ResponseEntity<TokenDto> response; try { - response = restTemplate.exchange(keycloakEndpoint + "/realms/master/protocol/openid-connect/token", - HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); + response = restTemplate.exchange(path, HttpMethod.POST, new HttpEntity<>(payload, headers), TokenDto.class); } catch (ResourceAccessException | HttpServerErrorException.ServiceUnavailable e) { log.error("Failed to obtain admin token: {}", e.getMessage()); return execution.execute(request, body); diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java index 9ad86b7f905c75a174d5cf031c2f666f460e16d8..aef3213ccf2cac81ef70ad4a053c803cde16002d 100644 --- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java +++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/StorageServiceS3Impl.java @@ -46,8 +46,8 @@ public class StorageServiceS3Impl implements StorageService { @Override public byte[] getBytes(String key) throws StorageNotFoundException, StorageUnavailableException { - log.trace("get bytes with key {} from bucket {}", key, s3Config.getS3ImportBucket()); - return getBytes(s3Config.getS3ImportBucket(), key); + log.trace("get bytes with key {} from bucket {}", key, s3Config.getS3Bucket()); + return getBytes(s3Config.getS3Bucket(), key); } @Override diff --git a/dbrepo-storage-service/init/create-buckets.sh b/dbrepo-storage-service/init/create-buckets.sh index 28fdb5b5b9f79427b650203989a84c5f06c71210..316a4cf11ee824101e7c23d688387dabf143bff7 100644 --- a/dbrepo-storage-service/init/create-buckets.sh +++ b/dbrepo-storage-service/init/create-buckets.sh @@ -1,11 +1,11 @@ #!/bin/bash +S3_BUCKET=${S3_BUCKET:-dbrepo} + function log { echo "$(date '+%Y-%m-%d %H:%M:%S') $1" } -log "SeaweedFS master is set to ${SEAWEEDFS_ENDPOINT}" -log "Starting to create buckets dbrepo-upload, dbrepo-download" -echo "s3.bucket.create -name dbrepo-upload" | weed shell -master="${SEAWEEDFS_ENDPOINT}" -log "Created bucket dbrepo-upload" -echo "s3.bucket.create -name dbrepo-download" | weed shell -master="${SEAWEEDFS_ENDPOINT}" -log "Created bucket dbrepo-download" \ No newline at end of file +log "SeaweedFS master is set to ${WEED_CLUSTER_SW_MASTER}" +log "Starting to create bucket ${S3_BUCKET}" +echo "s3.bucket.create -name ${S3_BUCKET}" | weed shell -master="${WEED_CLUSTER_SW_MASTER}" +log "Created bucket ${S3_BUCKET}" diff --git a/dbrepo-ui/bun.lockb b/dbrepo-ui/bun.lockb new file mode 100755 index 
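On the storage side, the former dbrepo-upload/dbrepo-download pair collapses into one configurable bucket (S3_BUCKET, default dbrepo). A minimal sketch of reading an object from such a bucket with the AWS SDK v2 against an S3-compatible SeaweedFS endpoint; class and parameter names are illustrative and not taken from the patch:

import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.S3Configuration;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import java.net.URI;

// Sketch only: fetches the raw bytes of one object from a single configurable bucket.
public class SingleBucketReader {

    public static byte[] getBytes(String endpoint, String accessKey, String secretKey,
                                  String bucket, String key) {
        try (S3Client s3 = S3Client.builder()
                .endpointOverride(URI.create(endpoint))
                .region(Region.US_EAST_1) // required by the SDK, ignored by SeaweedFS
                .credentialsProvider(StaticCredentialsProvider.create(
                        AwsBasicCredentials.create(accessKey, secretKey)))
                // path-style addressing is the safe choice for S3-compatible stores
                .serviceConfiguration(S3Configuration.builder().pathStyleAccessEnabled(true).build())
                .build()) {
            return s3.getObjectAsBytes(GetObjectRequest.builder()
                            .bucket(bucket)
                            .key(key)
                            .build())
                    .asByteArray();
        }
    }
}

With the compose defaults this would be called with http://storage-service:9000 as the endpoint, seaweedfsadmin credentials, and dbrepo as the bucket.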
0000000000000000000000000000000000000000..34e1f3dd06864bd64d099c2212a7b45cb5140d8f Binary files /dev/null and b/dbrepo-ui/bun.lockb differ diff --git a/dbrepo-ui/components/table/BlobUpload.vue b/dbrepo-ui/components/table/BlobUpload.vue index 9f31278951a0df955bf553b488f577b0d1e180ae..4973c6b76eabdf4535426a54c35b766aa632a1f7 100644 --- a/dbrepo-ui/components/table/BlobUpload.vue +++ b/dbrepo-ui/components/table/BlobUpload.vue @@ -31,20 +31,20 @@ export default { if (!this.file || this.file.length === 0) { return } + console.debug('upload file', this.file) const uploadService = useUploadService() - uploadService.create(this.file[0]) + uploadService.create(this.file) .then((filename) => { console.debug('uploaded file', filename) this.filename = filename this.value = filename this.$emit('blob', { column: this.column, s3key: filename }) }) - .catch(({code}) => { + .catch((error) => { + console.error('Failed to upload dataset', error) const toast = useToastInstance() - if (typeof code !== 'string') { - return - } - toast.error(this.$t(code)) + toast.error(this.$t('error.upload.dataset')) + this.loading = false }) } } diff --git a/dbrepo-ui/components/table/TableImport.vue b/dbrepo-ui/components/table/TableImport.vue index b9162dd4d93377508e956968211518de1f6945cf..955a29cc75d187c2350c81307f4eee0bbe2355fc 100644 --- a/dbrepo-ui/components/table/TableImport.vue +++ b/dbrepo-ui/components/table/TableImport.vue @@ -174,7 +174,7 @@ <v-row> <v-col cols="8"> <v-file-input - v-model="fileModel" + v-model="file" accept=".csv,.tsv" :show-size="1000" counter @@ -279,8 +279,7 @@ export default { step: 1, validStep1: false, validStep2: false, - fileModel: null, - previousFile: null, + file: null, loading: false, rowCount: null, suggestedAnalyseSeparator: null, @@ -328,10 +327,10 @@ export default { return this.cacheStore.getTable }, isAnalyseAllowed () { - if (!this.fileModel || this.fileModel.length === 0) { - return true + if (!this.file || this.file.length === 0) { + return false } - return this.previousFile !== this.fileModel[0] + return true }, hasCompatibleSchema () { if (this.create) { @@ -422,15 +421,16 @@ export default { }, uploadAndAnalyse() { this.loading = true - this.previousFile = this.fileModel[0] + console.debug('upload file', this.file) const uploadService = useUploadService() - return uploadService.create(this.previousFile) + return uploadService.create(this.file) .then((s3key) => { const toast = useToastInstance() toast.success(this.$t('success.upload.dataset')) this.analyse(s3key) }) - .catch(() => { + .catch((error) => { + console.error('Failed to upload dataset', error) const toast = useToastInstance() toast.error(this.$t('error.upload.dataset')) this.loading = false diff --git a/dbrepo-ui/composables/upload-service.ts b/dbrepo-ui/composables/upload-service.ts index 63db245c2dff91e1166ea8f02fe354b50e57589c..bddae2692cf80ad2f116d058010b157740a39360 100644 --- a/dbrepo-ui/composables/upload-service.ts +++ b/dbrepo-ui/composables/upload-service.ts @@ -3,8 +3,8 @@ import * as tus from 'tus-js-client' export const useUploadService = (): any => { function create (data: File) { const config = useRuntimeConfig() + const endpoint = config.public.upload.client return new Promise<string>((resolve, reject) => { - const endpoint = `${config.public.api.client}/api/upload/files` if (!tus.isSupported) { console.error('Your browser does not support uploads!') return @@ -12,10 +12,6 @@ export const useUploadService = (): any => { const uploadClient: tus.Upload = new tus.Upload(data, { endpoint, 
retryDelays: [0, 3000, 5000, 10000, 20000], - metadata: { - filename: data.name, - filetype: data.type - }, onError (error) { console.error('Failed to upload:', error) reject(error) diff --git a/dbrepo-ui/nuxt.config.ts b/dbrepo-ui/nuxt.config.ts index 7dbf4914d2f76a75282017a38c93fb1e0a6607eb..7103e352a30f87450788b3e4cb7e50c820370f23 100644 --- a/dbrepo-ui/nuxt.config.ts +++ b/dbrepo-ui/nuxt.config.ts @@ -76,6 +76,9 @@ export default defineNuxtConfig({ client: 'http://localhost', server: 'http://gateway-service', }, + upload: { + client: 'http://localhost/api/upload/files' + }, database: { unsupported: '*,AVG,BIT_AND,BIT_OR,BIT_XOR,COUNT,COUNTDISTINCT,GROUP_CONCAT,JSON_ARRAYAGG,JSON_OBJECTAGG,MAX,MIN,STD,STDDEV,STDDEV_POP,STDDEV_SAMP,SUM,VARIANCE,VAR_POP,VAR_SAMP,--', image: { @@ -102,12 +105,6 @@ export default defineNuxtConfig({ text: 'Keycloak Admin', href: '/api/auth/' } - }, - keycloak: { - client: { - id: 'dbrepo-client', - secret: 'MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG' - } } } }, diff --git a/dbrepo-ui/pages/database/[database_id]/settings.vue b/dbrepo-ui/pages/database/[database_id]/settings.vue index 063f9a5339c56d561f32f91f75facfe64ecbeecd..b7cca1b7aa02c4367c064a9033fc37b2416a2c08 100644 --- a/dbrepo-ui/pages/database/[database_id]/settings.vue +++ b/dbrepo-ui/pages/database/[database_id]/settings.vue @@ -31,7 +31,7 @@ <v-row dense> <v-col md="8"> <v-file-input - v-model="fileModel" + v-model="file" accept="image/*" :hint="$t('pages.database.subpages.settings.image.hint')" persistent-hint @@ -229,7 +229,7 @@ export default { loadingSchema: false, validUpload: false, loadingDeleteImage: false, - fileModel: null, + file: null, loadingUsers: false, editAccessDialog: false, editVisibilityDialog: false, @@ -366,10 +366,10 @@ export default { return this.roles.includes('modify-database-image') }, databaseImage () { - if (!this.fileModel) { + if (!this.file) { return null } - return URL.createObjectURL(this.fileModel[0]) + return URL.createObjectURL(this.file[0]) }, maxWidth () { return this.$config.public.database.image.width @@ -431,8 +431,9 @@ export default { }, uploadFile () { this.loadingUpload = true + console.debug('upload file', this.file) const uploadService = useUploadService() - uploadService.create(this.fileModel[0]) + uploadService.create(this.file) .then((s3key) => { console.debug('uploaded image', s3key) const toast = useToastInstance() @@ -440,6 +441,12 @@ export default { this.modifyImage.key = s3key this.loadingUpload = false }) + .catch((error) => { + console.error('Failed to upload dataset', error) + const toast = useToastInstance() + toast.error(this.$t('error.upload.dataset')) + this.loading = false + }) .finally(() => { this.loadingUpload = false }) diff --git a/docker-compose.yml b/docker-compose.yml index b67e23fc62ca4aec283723ce2d5495b79ea59803..85da1f0b551f6ba0bd97779e5713cf1476451d0f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -39,9 +39,8 @@ services: hostname: data-db image: docker.io/bitnami/mariadb:11.1.3-debian-11-r6 volumes: - - ./dbrepo-data-db/enable_history_insert.cnf:/opt/bitnami/mariadb/conf.default/enable_history_insert.cnf - - "${SHARED_VOLUME:-/tmp}:/tmp" - data-db-data:/bitnami/mariadb + - "${SHARED_VOLUME:-/tmp}:/tmp" ports: - "3307:3306" environment: @@ -68,7 +67,7 @@ services: MARIADB_ROOT_PASSWORD: "${AUTH_PASSWORD:-dbrepo}" healthcheck: test: mysqladmin ping --user="${AUTH_USERNAME:-root}" --password="${AUTH_PASSWORD:-dbrepo}" --silent - interval: 10s + interval: 15s timeout: 5s retries: 12 logging: @@ -86,7 +85,7 @@ services: 
network: host healthcheck: test: curl -sSL 'http://0.0.0.0:8080/realms/dbrepo' | grep "dbrepo" || exit 1 - interval: 10s + interval: 15s timeout: 5s retries: 12 environment: @@ -132,6 +131,7 @@ services: BROKER_SERVICE_ENDPOINT: ${BROKER_SERVICE_ENDPOINT:-http://gateway-service/admin/broker} BROKER_USERNAME: ${BROKER_USERNAME:-admin} BROKER_VIRTUALHOST: "${BROKER_VIRTUALHOST:-dbrepo}" + CROSSREF_ENDPOINT: "${CROSSREF_ENDPOINT:-http://data.crossref.org}" DATA_SERVICE_ENDPOINT: ${DATA_SERVICE_ENDPOINT:-http://data-service:8080} DELETED_RECORD: "${DELETED_RECORD:-persistent}" GRANULARITY: "${GRANULARITY:-YYYY-MM-DDThh:mm:ssZ}" @@ -144,11 +144,11 @@ services: METADATA_PASSWORD: "${METADATA_PASSWORD:-dbrepo}" PID_BASE: ${PID_BASE:-http://localhost/pid/} REPOSITORY_NAME: "${REPOSITORY_NAME:-Database Repository}" + ROR_ENDPOINT: "${ROR_ENDPOINT:-https://api.ror.org}" SEARCH_SERVICE_ENDPOINT: "${SEARCH_SERVICE_ENDPOINT:-http://gateway-service}" S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" SPARQL_CONNECTION_TIMEOUT: "${SPARQL_CONNECTION_TIMEOUT:-10000}" SYSTEM_USERNAME: "${SYSTEM_USERNAME:-admin}" @@ -187,9 +187,8 @@ services: GATEWAY_SERVICE_ENDPOINT: ${GATEWAY_SERVICE_ENDPOINT:-http://gateway-service} JWT_PUBKEY: "${JWT_PUBKEY:-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqqnHQ2BWWW9vDNLRCcxD++xZg/16oqMo/c1l+lcFEjjAIJjJp/HqrPYU/U9GvquGE6PbVFtTzW1KcKawOW+FJNOA3CGo8Q1TFEfz43B8rZpKsFbJKvQGVv1Z4HaKPvLUm7iMm8Hv91cLduuoWx6Q3DPe2vg13GKKEZe7UFghF+0T9u8EKzA/XqQ0OiICmsmYPbwvf9N3bCKsB/Y10EYmZRb8IhCoV9mmO5TxgWgiuNeCTtNCv2ePYqL/U0WvyGFW0reasIK8eg3KrAUj8DpyOgPOVBn3lBGf+3KFSYi+0bwZbJZWqbC/Xlk20Go1YfeJPRIt7ImxD27R/lNjgDO/MwIDAQAB}" S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" volumes: - "${SHARED_FILESYSTEM:-/tmp}:/tmp" @@ -287,10 +286,9 @@ services: - "3305:8080" environment: S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" S3_FILE_PATH: "${S3_FILE_PATH:-/tmp}" - S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" volumes: - "${SHARED_FILESYSTEM:-/tmp}:/tmp" @@ -439,7 +437,8 @@ services: context: ./dbrepo-storage-service/init network: host environment: - SEAWEEDFS_ENDPOINT: "${STORAGE_SEAWEEDFS_ENDPOINT:-storage-service:9333}" + WEED_CLUSTER_SW_MASTER: "${STORAGE_SERVICE_MASTER_ENDPOINT:-storage-service:9333}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" depends_on: dbrepo-storage-service: condition: service_healthy @@ -452,9 +451,11 @@ services: hostname: upload-service image: docker.io/tusproject/tusd:v2.4.0 command: - - "--base-path=/api/upload/files/" + - "-behind-proxy" + - "-max-size=2000000000" + - "-base-path=/api/upload/files/" - "-s3-endpoint=${STORAGE_ENDPOINT:-http://storage-service:9000}" - - "-s3-bucket=dbrepo-upload" + - "-s3-bucket=dbrepo" environment: AWS_ACCESS_KEY_ID: "${STORAGE_USERNAME:-seaweedfsadmin}" 
AWS_SECRET_ACCESS_KEY: "${STORAGE_PASSWORD:-seaweedfsadmin}" @@ -509,8 +510,8 @@ services: REQUEUE_REJECTED: ${REQUEUE_REJECTED:-false} ROUTING_KEY: "${ROUTING_KEY:-dbrepo.#}" S3_ACCESS_KEY_ID: "${S3_ACCESS_KEY_ID:-seaweedfsadmin}" + S3_BUCKET: "${S3_BUCKET:-dbrepo}" S3_ENDPOINT: "${S3_ENDPOINT:-http://storage-service:9000}" - S3_EXPORT_BUCKET: "${S3_EXPORT_BUCKET:-dbrepo-download}" S3_FILE_PATH: "${S3_FILE_PATH:-/tmp}" S3_IMPORT_BUCKET: "${S3_IMPORT_BUCKET:-dbrepo-upload}" S3_SECRET_ACCESS_KEY: "${S3_SECRET_ACCESS_KEY:-seaweedfsadmin}" diff --git a/helm/dbrepo/Chart.lock b/helm/dbrepo/Chart.lock index aa7d0b93cc0303567b1782ff85ce7b2ff6055341..45878cdfc0597e70b690912b03e500643e617367 100644 --- a/helm/dbrepo/Chart.lock +++ b/helm/dbrepo/Chart.lock @@ -4,24 +4,24 @@ dependencies: version: 1.2.2 - name: keycloak repository: https://charts.bitnami.com/bitnami - version: 17.3.3 + version: 21.6.1 - name: mariadb repository: https://charts.bitnami.com/bitnami version: 14.1.4 -- name: mariadb +- name: mariadb-galera repository: https://charts.bitnami.com/bitnami - version: 14.1.4 + version: 10.1.3 - name: rabbitmq repository: https://charts.bitnami.com/bitnami version: 14.0.0 - name: seaweedfs - repository: https://seaweedfs.github.io/seaweedfs/helm - version: 3.59.4 + repository: https://charts.bitnami.com/bitnami + version: 0.4.5 - name: tusd repository: https://charts.sagikazarmark.dev version: 0.1.2 - name: openldap-stack-ha repository: https://jp-gouin.github.io/helm-openldap/ version: 4.2.5 -digest: sha256:148c7e81b148258da485bc471e36de0f52b8abb1811f26557b1c11e5756e88c7 -generated: "2024-06-26T07:56:42.532498605+02:00" +digest: sha256:bc81f32931159cbea98f6da5f58ff3425a5cd03183506ab218120be136486468 +generated: "2024-07-12T06:33:37.323435643+02:00" diff --git a/helm/dbrepo/Chart.yaml b/helm/dbrepo/Chart.yaml index 28ce12c838d2b92e5e6d383fa0309eea8d72d59f..f32287b93cd1ba982cef7af817c95a70f0af7604 100644 --- a/helm/dbrepo/Chart.yaml +++ b/helm/dbrepo/Chart.yaml @@ -1,3 +1,5 @@ +annotations: + licenses: Apache-2.0 apiVersion: v2 name: dbrepo description: Helm Chart for installing DBRepo @@ -21,17 +23,17 @@ dependencies: condition: searchdb.enabled - name: keycloak alias: authservice - version: 17.3.3 + version: 21.6.1 # app version: 24.0.5 repository: https://charts.bitnami.com/bitnami condition: authservice.enabled - name: mariadb alias: datadb - version: 14.1.4 + version: 14.1.4 # app version: 11.1.3 repository: https://charts.bitnami.com/bitnami condition: datadb.enabled - - name: mariadb + - name: mariadb-galera alias: metadatadb - version: 14.1.4 + version: 10.1.3 # app version: 11.1.3 repository: https://charts.bitnami.com/bitnami condition: metadatadb.enabled - name: rabbitmq @@ -41,8 +43,8 @@ dependencies: condition: brokerservice.enabled - name: seaweedfs alias: storageservice - version: 3.59.4 - repository: https://seaweedfs.github.io/seaweedfs/helm + version: 0.4.5 + repository: https://charts.bitnami.com/bitnami condition: storageservice.enabled - name: tusd alias: uploadservice diff --git a/helm/dbrepo/README.md b/helm/dbrepo/README.md index b310705b11665d8906a4093a2cc1ff73376527fd..294fda6477f278e1cc6bf71e0002ca7cfbfc9916 100644 --- a/helm/dbrepo/README.md +++ b/helm/dbrepo/README.md @@ -64,18 +64,19 @@ The command removes all the Kubernetes components associated with the chart and ### Metadata Database -| Name | Description | Value | -| ------------------------------------- | ---------------------------------------------------------------- | ------------- | -| 
`metadatadb.enabled` | Enable the Metadata Database. | `true` | -| `metadatadb.host` | The hostname for the microservices. | `metadata-db` | -| `metadatadb.auth.root` | The root username. | `root` | -| `metadatadb.auth.rootPassword` | The root user password. | `dbrepo` | -| `metadatadb.auth.database` | The database name. | `dbrepo` | -| `metadatadb.auth.replicationUser` | The database replication username. | `replication` | -| `metadatadb.auth.replicationPassword` | The database replication user password | `replication` | -| `metadatadb.jdbcExtraArgs` | The extra arguments for JDBC connections in the microservices. | `""` | -| `metadatadb.extraInitDbScripts` | Additional init.db scripts that are executed on the first start. | `{}` | -| `metadatadb.secondary.replicaCount` | The number of replicas of the secondary database pods. | `2` | +| Name | Description | Value | +| ---------------------------------------- | ---------------------------------------------------------------- | ------------- | +| `metadatadb.enabled` | Enable the Metadata Database. | `true` | +| `metadatadb.host` | The hostname for the microservices. | `metadata-db` | +| `metadatadb.rootUser.user` | The root username. | `root` | +| `metadatadb.rootUser.password` | The root user password. | `dbrepo` | +| `metadatadb.db.name` | The database name. | `dbrepo` | +| `metadatadb.galera.mariabackup.user` | The database backup username. | `backup` | +| `metadatadb.galera.mariabackup.password` | The database backup user password | `backup` | +| `metadatadb.jdbcExtraArgs` | The extra arguments for JDBC connections in the microservices. | `""` | +| `metadatadb.initdbScripts` | Additional init.db scripts that are executed on the first start. | `{}` | +| `metadatadb.replicaCount` | The number of cluster nodes, should be uneven i.e. 2n+1 | `3` | +| `metadatadb.persistence.enabled` | Enable persistent storage. | `true` | ### Auth Service @@ -144,69 +145,72 @@ The command removes all the Kubernetes components associated with the chart and ### Analyse Service -| Name | Description | Value | -| ------------------------------------------------------------------ | ----------------------------------------------------------- | ------------------------------- | -| `analyseservice.enabled` | Enable the Broker Service. | `true` | -| `analyseservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | -| `analyseservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | -| `analyseservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | -| `analyseservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | -| `analyseservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | -| `analyseservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | -| `analyseservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | -| `analyseservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `""` | -| `analyseservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | -| `analyseservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | -| `analyseservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | -| `analyseservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | -| `analyseservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | -| `analyseservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | -| `analyseservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | -| `analyseservice.endpoint` | The url of the endpoint. | `http://analyse-service` | -| `analyseservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storageservice-s3:9000` | -| `analyseservice.replicaCount` | The number of replicas. | `2` | +| Name | Description | Value | +| ------------------------------------------------------------------ | ----------------------------------------------------------- | -------------------------------- | +| `analyseservice.enabled` | Enable the Broker Service. | `true` | +| `analyseservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | +| `analyseservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | +| `analyseservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | +| `analyseservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | +| `analyseservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | +| `analyseservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | +| `analyseservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | +| `analyseservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `{}` | +| `analyseservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | +| `analyseservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | +| `analyseservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | +| `analyseservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | +| `analyseservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | +| `analyseservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | +| `analyseservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | +| `analyseservice.endpoint` | The url of the endpoint. | `http://analyse-service` | +| `analyseservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storage-service-s3:8333` | +| `analyseservice.replicaCount` | The number of replicas. | `2` | ### Metadata Service -| Name | Description | Value | -| ------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | ------------------------------- | -| `metadataservice.enabled` | Enable the Broker Service. | `true` | -| `metadataservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | -| `metadataservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | -| `metadataservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | -| `metadataservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | -| `metadataservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | -| `metadataservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | -| `metadataservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | -| `metadataservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `""` | -| `metadataservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | -| `metadataservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | -| `metadataservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | -| `metadataservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | -| `metadataservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | -| `metadataservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | -| `metadataservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | -| `metadataservice.endpoint` | The Metadata Service endpoint. | `http://metadata-service` | -| `metadataservice.admin.email` | The OAI-PMH exposed e-mail for contacting the metadata records responsible person. | `noreply@example.com` | -| `metadataservice.deletedRecord` | The OAI-PMH exposed delete policy. | `permanent` | -| `metadataservice.repositoryName` | The OAI-PMH exposed repository name. | `Database Repository` | -| `metadataservice.granularity` | The OAI-PMH exposed record granularity. | `YYYY-MM-DDThh:mm:ssZ` | -| `metadataservice.datacite.enabled` | If set to true, the service mints DOIs instead of local PIDs. | `false` | -| `metadataservice.datacite.url` | The DataCite api endpoint url. | `https://api.datacite.org` | -| `metadataservice.datacite.prefix` | The DataCite prefix. | `""` | -| `metadataservice.datacite.username` | The DataCite api username. | `""` | -| `metadataservice.datacite.password` | The DataCite api user password. | `""` | -| `metadataservice.sparql.connectionTimeout` | The connection timeout for sparql queries fetching remote data in ms. | `10000` | -| `metadataservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storageservice-s3:9000` | -| `metadataservice.s3.auth.username` | The S3-capable endpoint username (or access key id). | `seaweedfsadmin` | -| `metadataservice.s3.auth.password` | The S3-capable endpoint user password (or access key secret). | `seaweedfsadmin` | -| `metadataservice.replicaCount` | The number of replicas. | `2` | +| Name | Description | Value | +| ------------------------------------------------------------------- | ---------------------------------------------------------------------------------- | -------------------------------- | +| `metadataservice.enabled` | Enable the Broker Service. | `true` | +| `metadataservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | +| `metadataservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | +| `metadataservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | +| `metadataservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | +| `metadataservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | +| `metadataservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | +| `metadataservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | +| `metadataservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `{}` | +| `metadataservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | +| `metadataservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | +| `metadataservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | +| `metadataservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | +| `metadataservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | +| `metadataservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | +| `metadataservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | +| `metadataservice.endpoint` | The Metadata Service endpoint. | `http://metadata-service` | +| `metadataservice.crossref.endpoint` | The CrossRef endpoint. | `http://data.crossref.org` | +| `metadataservice.ror.endpoint` | The ROR endpoint. | `https://api.ror.org` | +| `metadataservice.admin.email` | The OAI-PMH exposed e-mail for contacting the metadata records responsible person. | `noreply@example.com` | +| `metadataservice.deletedRecord` | The OAI-PMH exposed delete policy. | `permanent` | +| `metadataservice.repositoryName` | The OAI-PMH exposed repository name. | `Database Repository` | +| `metadataservice.granularity` | The OAI-PMH exposed record granularity. | `YYYY-MM-DDThh:mm:ssZ` | +| `metadataservice.datacite.enabled` | If set to true, the service mints DOIs instead of local PIDs. | `false` | +| `metadataservice.datacite.url` | The DataCite api endpoint url. | `https://api.datacite.org` | +| `metadataservice.datacite.prefix` | The DataCite prefix. | `""` | +| `metadataservice.datacite.username` | The DataCite api username. | `""` | +| `metadataservice.datacite.password` | The DataCite api user password. | `""` | +| `metadataservice.sparql.connectionTimeout` | The connection timeout for sparql queries fetching remote data in ms. | `10000` | +| `metadataservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storage-service-s3:8333` | +| `metadataservice.s3.auth.username` | The S3-capable endpoint username (or access key id). | `seaweedfsadmin` | +| `metadataservice.s3.auth.password` | The S3-capable endpoint user password (or access key secret). | `seaweedfsadmin` | +| `metadataservice.replicaCount` | The number of replicas. 
| `2` | ### Data Service | Name | Description | Value | | --------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ | --------------------------------------------------------------------------------------------------------------------------- | | `dataservice.enabled` | Enable the Broker Service. | `true` | +| `dataservice.endpoint` | Absolute URL to the data service in the form of http://host:port | `http://data-service` | | `dataservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. | `false` | | `dataservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | | `dataservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | @@ -214,7 +218,7 @@ The command removes all the Kubernetes components associated with the chart and | `dataservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | | `dataservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | | `dataservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | -| `dataservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `""` | +| `dataservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `{}` | | `dataservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | | `dataservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | | `dataservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | @@ -232,7 +236,8 @@ The command removes all the Kubernetes components associated with the chart and | `dataservice.rabbitmq.requeueRejected` | If set to true, rejected tuples will be re-queued. | `false` | | `dataservice.rabbitmq.consumer.username` | The username for the consumer to read tuples from the broker service. In many cases this value is equal to `identityservice.users`. | `admin` | | `dataservice.rabbitmq.consumer.password` | The user password for the consumer to read tuples from the broker service. In many cases this value is equal to `identityservice.userPasswords`. | `admin` | -| `dataservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storageservice-s3:9000` | +| `dataservice.s3.endpoint` | The S3-capable endpoint the microservice connects to. | `http://storage-service-s3:8333` | +| `dataservice.s3.bucket` | The S3 bucket name. | `dbrepo` | | `dataservice.s3.auth.username` | The S3-capable endpoint username (or access key id). | `seaweedfsadmin` | | `dataservice.s3.auth.password` | The S3-capable endpoint user password (or access key secret). | `seaweedfsadmin` | | `dataservice.s3.filePath` | The local location to download/upload files from/to S3-capable endpoint. | `/s3` | @@ -240,31 +245,38 @@ The command removes all the Kubernetes components associated with the chart and ### Search Service -| Name | Description | Value | -| ----------------------------------------------------------------- | ----------------------------------------------------------- | ---------------- | -| `searchservice.enabled` | Enable the Broker Service. | `true` | -| `searchservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | -| `searchservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | -| `searchservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | -| `searchservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | -| `searchservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | -| `searchservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | -| `searchservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | -| `searchservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `""` | -| `searchservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | -| `searchservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | -| `searchservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | -| `searchservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | -| `searchservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `true` | -| `searchservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | -| `searchservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | -| `searchservice.replicaCount` | The number of replicas. | `2` | +| Name | Description | Value | +| ----------------------------------------------------------------- | ------------------------------------------------------------------ | ----------------------- | +| `searchservice.enabled` | Enable the Broker Service. | `true` | +| `searchservice.endpoint` | Absolute URL to the search service in the form of http://host:port | `http://search-service` | +| `searchservice.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | +| `searchservice.podSecurityContext.enabled` | Enable pods' Security Context | `true` | +| `searchservice.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | +| `searchservice.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | +| `searchservice.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | +| `searchservice.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | +| `searchservice.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | +| `searchservice.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `{}` | +| `searchservice.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | +| `searchservice.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | +| `searchservice.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | +| `searchservice.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | +| `searchservice.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `true` | +| `searchservice.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | +| `searchservice.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | +| `searchservice.replicaCount` | The number of replicas. | `2` | ### Storage Service -| Name | Description | Value | -| ------------------------ | --------------------------- | ------ | -| `storageservice.enabled` | Enable the Storage Service. | `true` | +| Name | Description | Value | +| --------------------------------------------- | -------------------------------------------------------------------------------------- | ---------------- | +| `storageservice.enabled` | Enable the Storage Service. | `true` | +| `storageservice.filer.enabled` | Enable the storage service filer which is required for S3. | `true` | +| `storageservice.s3.replicaCount` | The number of replicas. | `2` | +| `storageservice.s3.bucket` | The S3-bucket name. | `dbrepo` | +| `storageservice.s3.auth.enabled` | Enable the S3 service. | `true` | +| `storageservice.s3.auth.adminAccessKeyId` | The S3 access key id for the admin user. In some systems this is named `username`. | `seaweedfsadmin` | +| `storageservice.s3.auth.adminSecretAccessKey` | The S3 secret access key for the admin user. In some systems this is named `password`. | `seaweedfsadmin` | ### Identity Service @@ -283,39 +295,40 @@ The command removes all the Kubernetes components associated with the chart and ### User Interface -| Name | Description | Value | -| ------------------------------------------------------ | ---------------------------------------------------------------------------- | ----------------------- | -| `ui.enabled` | Enable the Broker Service. | `true` | -| `ui.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | -| `ui.podSecurityContext.enabled` | Enable pods' Security Context | `true` | -| `ui.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | -| `ui.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | -| `ui.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | -| `ui.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | -| `ui.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | -| `ui.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `""` | -| `ui.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | -| `ui.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | -| `ui.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | -| `ui.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | -| `ui.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | -| `ui.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | -| `ui.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | -| `ui.public.api.client` | The endpoint for the client api. | `""` | -| `ui.public.api.server` | The endpoint for the server api. | `""` | -| `ui.public.title` | The user interface title. | `Database Repository` | -| `ui.public.logo` | The user interface logo. | `/logo.svg` | -| `ui.public.icon` | The user interface icon. | `/favicon.ico` | -| `ui.public.touch` | The user interface apple touch icon. | `/apple-touch-icon.png` | -| `ui.public.broker.host` | The displayed broker hostname. | `example.com` | -| `ui.public.broker.port.5671` | Enable display of the broker 5671 port and mark it as secure (SSL/TLS). | `true` | -| `ui.public.broker.port.5672` | Enable display of the broker 5672 port and mark it as insecure (no SSL/TLS). | `false` | -| `ui.public.broker.extra` | Extra metadata displayed. | `""` | -| `ui.public.database.extra` | Extra metadata displayed. | `128.130.0.0/15` | -| `ui.public.pid.default.publisher` | The default dataset publisher for persisted identifiers. | `Example University` | -| `ui.public.doi.enabled` | Enable the display that DOIs are minted. | `false` | -| `ui.public.doi.endpoint` | The DOI proxy. | `https://doi.org` | -| `ui.replicaCount` | The number of replicas. | `2` | +| Name | Description | Value | +| ------------------------------------------------------ | ---------------------------------------------------------------------------------------------------- | ----------------------- | +| `ui.enabled` | Enable the Broker Service. | `true` | +| `ui.image.debug` | Set the logging level to `trace`. Otherwise, set to `info`. 
| `false` | +| `ui.podSecurityContext.enabled` | Enable pods' Security Context | `true` | +| `ui.podSecurityContext.fsGroupChangePolicy` | Set filesystem group change policy | `Always` | +| `ui.podSecurityContext.sysctls` | Set kernel settings using the sysctl interface | `[]` | +| `ui.podSecurityContext.supplementalGroups` | Set filesystem extra groups | `[]` | +| `ui.podSecurityContext.fsGroup` | Set RabbitMQ pod's Security Context fsGroup | `1001` | +| `ui.containerSecurityContext.enabled` | Enabled containers' Security Context | `true` | +| `ui.containerSecurityContext.seLinuxOptions` | Set SELinux options in container | `{}` | +| `ui.containerSecurityContext.runAsUser` | Set RabbitMQ containers' Security Context runAsUser | `1001` | +| `ui.containerSecurityContext.runAsGroup` | Set RabbitMQ containers' Security Context runAsGroup | `1001` | +| `ui.containerSecurityContext.runAsNonRoot` | Set RabbitMQ container's Security Context runAsNonRoot | `true` | +| `ui.containerSecurityContext.allowPrivilegeEscalation` | Set container's privilege escalation | `false` | +| `ui.containerSecurityContext.readOnlyRootFilesystem` | Set container's Security Context readOnlyRootFilesystem | `false` | +| `ui.containerSecurityContext.capabilities.drop` | Set container's Security Context runAsNonRoot | `["ALL"]` | +| `ui.containerSecurityContext.seccompProfile.type` | Set container's Security Context seccomp profile | `RuntimeDefault` | +| `ui.public.api.client` | The endpoint for the client api. Defaults to the value of `gateway`. | `""` | +| `ui.public.api.server` | The endpoint for the server api. Defaults to the value of `gateway`. | `""` | +| `ui.public.upload.client` | The endpoint for the upload client. Defaults to the value of `gateway` and path `/api/upload/files`. | `""` | +| `ui.public.title` | The user interface title. | `Database Repository` | +| `ui.public.logo` | The user interface logo. | `/logo.svg` | +| `ui.public.icon` | The user interface icon. | `/favicon.ico` | +| `ui.public.touch` | The user interface apple touch icon. | `/apple-touch-icon.png` | +| `ui.public.broker.host` | The displayed broker hostname. | `example.com` | +| `ui.public.broker.port.5671` | Enable display of the broker 5671 port and mark it as secure (SSL/TLS). | `true` | +| `ui.public.broker.port.5672` | Enable display of the broker 5672 port and mark it as insecure (no SSL/TLS). | `false` | +| `ui.public.broker.extra` | Extra metadata displayed. | `""` | +| `ui.public.database.extra` | Extra metadata displayed. | `128.130.0.0/15` | +| `ui.public.pid.default.publisher` | The default dataset publisher for persisted identifiers. | `Example University` | +| `ui.public.doi.enabled` | Enable the display that DOIs are minted. | `false` | +| `ui.public.doi.endpoint` | The DOI proxy. | `https://doi.org` | +| `ui.replicaCount` | The number of replicas. 
| `2` | ### Ingress diff --git a/helm/dbrepo/charts/keycloak-17.3.3.tgz b/helm/dbrepo/charts/keycloak-17.3.3.tgz deleted file mode 100644 index 2a4298f589c5b38c7dca1c7ec3efaf011c450676..0000000000000000000000000000000000000000 Binary files a/helm/dbrepo/charts/keycloak-17.3.3.tgz and /dev/null differ diff --git a/helm/dbrepo/charts/keycloak-21.6.1.tgz b/helm/dbrepo/charts/keycloak-21.6.1.tgz new file mode 100644 index 0000000000000000000000000000000000000000..6479f5943846dee589d3ec90bbda649a8d7b72fe Binary files /dev/null and b/helm/dbrepo/charts/keycloak-21.6.1.tgz differ diff --git a/helm/dbrepo/charts/mariadb-galera-10.1.3.tgz b/helm/dbrepo/charts/mariadb-galera-10.1.3.tgz new file mode 100644 index 0000000000000000000000000000000000000000..c906aaf7634b20f0eaf9358b435b01086bdc4f55 Binary files /dev/null and b/helm/dbrepo/charts/mariadb-galera-10.1.3.tgz differ diff --git a/helm/dbrepo/charts/seaweedfs-0.4.5.tgz b/helm/dbrepo/charts/seaweedfs-0.4.5.tgz new file mode 100644 index 0000000000000000000000000000000000000000..7beeabf35c0dbf671f5771d117bbdcc78443dd9e Binary files /dev/null and b/helm/dbrepo/charts/seaweedfs-0.4.5.tgz differ diff --git a/helm/dbrepo/charts/seaweedfs-3.59.4.tgz b/helm/dbrepo/charts/seaweedfs-3.59.4.tgz deleted file mode 100644 index 61cb40d6deedbf5a4cc0efabd8dee6f13f7341de..0000000000000000000000000000000000000000 Binary files a/helm/dbrepo/charts/seaweedfs-3.59.4.tgz and /dev/null differ diff --git a/helm/dbrepo/templates/analyse-secret.yaml b/helm/dbrepo/templates/analyse-secret.yaml index 0f75fb143764bceaf2d5aeb3e35bff53d11c3843..2b8b78dbd5a14ed01f5f21d0e3aabaa25b518c36 100644 --- a/helm/dbrepo/templates/analyse-secret.yaml +++ b/helm/dbrepo/templates/analyse-secret.yaml @@ -16,9 +16,8 @@ stringData: GATEWAY_SERVICE_ENDPOINT: "{{ .Values.gateway }}" JWT_PUBKEY: "{{ .Values.authservice.jwt.pubkey }}" LOG_LEVEL: "{{ ternary "DEBUG" "INFO" .Values.analyseservice.image.debug }}" - S3_ACCESS_KEY_ID: "{{ .Values.storageservice.s3.auth.username }}" + S3_ACCESS_KEY_ID: "{{ .Values.storageservice.s3.auth.adminAccessKeyId }}" + S3_BUCKET: "{{ .Values.storageservice.s3.bucket }}" S3_ENDPOINT: "{{ .Values.analyseservice.s3.endpoint }}" - S3_EXPORT_BUCKET: "{{ .Values.storageservice.s3.bucket.export }}" - S3_IMPORT_BUCKET: "{{ .Values.storageservice.s3.bucket.import }}" - S3_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.password }}" + S3_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.adminSecretAccessKey }}" {{- end }} diff --git a/helm/dbrepo/templates/auth-configmap.yaml b/helm/dbrepo/templates/auth-configmap.yaml index 843916e90158599d42c8f4951ad4b7c225cd3981..0ef8e90bf95eb5575f2f4fd1628fcf6b8c16e120 100644 --- a/helm/dbrepo/templates/auth-configmap.yaml +++ b/helm/dbrepo/templates/auth-configmap.yaml @@ -47,6 +47,7 @@ data: "editUsernameAllowed" : false, "bruteForceProtected" : false, "permanentLockout" : false, + "maxTemporaryLockouts" : 0, "maxFailureWaitSeconds" : 900, "minimumQuickLoginWaitSeconds" : 60, "waitIncrementSeconds" : 60, @@ -76,6 +77,17 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } + }, { + "id" : "7ee1c424-11b0-46a9-b0ed-725e9b7fc40c", + "name" : "default-system-roles", + "description" : "${default-system-roles}", + "composite" : true, + "composites" : { + "realm" : [ "delete-database-view", "update-semantic-unit", "export-query-data", "check-foreign-database-access", "default-data-steward-roles", "execute-query", "default-user-handling", "delete-table-data", "find-query", 
"list-database-views", "persist-query", "update-search-index", "delete-database-access", "view-table-history", "create-ontology", "update-ontology", "modify-user-theme", "default-system-roles", "create-semantic-concept", "default-container-handling", "create-container", "create-table", "default-broker-handling", "default-maintenance-handling", "execute-semantic-query", "uma_authorization", "table-semantic-analyse", "list-containers", "check-database-access", "escalated-query-handling", "delete-identifier", "modify-database-owner", "list-tables", "export-table-data", "create-database-access", "delete-container", "re-execute-query", "create-semantic-unit", "escalated-identifier-handling", "system", "update-table-statistic", "escalated-semantics-handling", "default-database-handling", "delete-ontology", "find-database", "find-database-view", "update-semantic-concept", "find-user", "import-database-data", "publish-identifier", "default-roles-dbrepo", "find-foreign-user", "create-database", "create-maintenance-message", "find-maintenance-message", "escalated-container-handling", "default-researcher-roles", "default-identifier-handling", "escalated-user-handling", "modify-user-information", "create-database-view", "update-maintenance-message", "delete-foreign-table", "offline_access", "modify-foreign-table-column-semantics", "delete-maintenance-message", "find-container", "insert-table-data", "modify-identifier-metadata", "modify-database-image", "escalated-broker-handling", "modify-table-column-semantics", "escalated-database-handling", "default-semantics-handling", "update-database-access", "default-query-handling", "find-table", "list-queries", "default-developer-roles", "create-identifier", "escalated-table-handling", "find-identifier", "view-database-view-data", "view-table-data", "list-licenses", "default-table-handling", "list-identifiers", "create-foreign-identifier", "list-databases", "list-ontologies", "modify-database-visibility", "list-maintenance-messages", "delete-table" ] + }, + "clientRole" : false, + "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", + "attributes" : { } }, { "id" : "143ba359-5fa2-451e-8296-43ecf20bb251", "name" : "update-semantic-concept", @@ -114,6 +126,14 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } + }, { + "id" : "74648f9a-777e-4ef9-b97b-4c5d749d862f", + "name" : "update-search-index", + "description" : "${update-search-index}", + "composite" : false, + "clientRole" : false, + "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", + "attributes" : { } }, { "id" : "22492b64-c633-48a0-9678-b28669f2885b", "name" : "execute-semantic-query", @@ -141,14 +161,6 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } - }, { - "id" : "79534da1-4c85-409e-810e-a7ce6d632b09", - "name" : "system", - "description" : "${system}", - "composite" : false, - "clientRole" : false, - "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", - "attributes" : { } }, { "id" : "b0d66d3d-59b4-4aae-aa66-e3d5a49f28e3", "name" : "view-database-view-data", @@ -399,6 +411,14 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } + }, { + "id" : "b05e9b2b-748d-490b-949b-e78655bf7805", + "name" : "check-foreign-database-access", + "description" : "${check-foreign-database-access}", + "composite" : false, + "clientRole" : false, + "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", + "attributes" : { } }, { "id" : 
"c047d521-cec3-4444-86c4-aef098489b7b", "name" : "delete-maintenance-message", @@ -407,6 +427,14 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } + }, { + "id" : "88f82262-be80-4d18-9fb4-5529da031f33", + "name" : "system", + "description" : "${system}", + "composite" : false, + "clientRole" : false, + "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", + "attributes" : { } }, { "id" : "e14ab76b-1c24-484d-ae2d-478b8457edea", "name" : "list-licenses", @@ -656,6 +684,14 @@ data: "clientRole" : false, "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", "attributes" : { } + }, { + "id" : "0c487c93-448f-4a82-8b9f-ebd8a0904bf8", + "name" : "find-foreign-user", + "description" : "${find-foreign-user}", + "composite" : false, + "clientRole" : false, + "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0", + "attributes" : { } }, { "id" : "cf9735a9-fb70-4cc5-b5f4-75afc4e5654b", "name" : "modify-identifier-metadata", @@ -1079,26 +1115,34 @@ data: "id" : "f2ce17fe-7b15-47a4-bbf8-86f415298fa9", "name" : "data-stewards", "path" : "/data-stewards", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-data-steward-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } }, { "id" : "124d9888-0b6e-46aa-8225-077dcedaf16e", "name" : "developers", "path" : "/developers", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-developer-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } }, { "id" : "f467c38e-9041-4faa-ae0b-39cec65ff4db", "name" : "researchers", "path" : "/researchers", + "subGroups" : [ ], "attributes" : { }, "realmRoles" : [ "default-researcher-roles" ], - "clientRoles" : { }, - "subGroups" : [ ] + "clientRoles" : { } + }, { + "id" : "2b9f94b4-d434-4a98-8eab-25678cfee983", + "name" : "system", + "path" : "/system", + "subGroups" : [ ], + "attributes" : { }, + "realmRoles" : [ "default-system-roles" ], + "clientRoles" : { } } ], "defaultRole" : { "id" : "abd2d9ee-ebc4-4d0a-839e-6b588a6d442a", @@ -1117,7 +1161,8 @@ data: "otpPolicyLookAheadWindow" : 1, "otpPolicyPeriod" : 30, "otpPolicyCodeReusable" : false, - "otpSupportedApplications" : [ "totpAppMicrosoftAuthenticatorName", "totpAppFreeOTPName", "totpAppGoogleName" ], + "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppGoogleName", "totpAppMicrosoftAuthenticatorName" ], + "localizationTexts" : { }, "webAuthnPolicyRpEntityName" : "keycloak", "webAuthnPolicySignatureAlgorithms" : [ "ES256" ], "webAuthnPolicyRpId" : "", @@ -1128,6 +1173,7 @@ data: "webAuthnPolicyCreateTimeout" : 0, "webAuthnPolicyAvoidSameAuthenticatorRegister" : false, "webAuthnPolicyAcceptableAaguids" : [ ], + "webAuthnPolicyExtraOrigins" : [ ], "webAuthnPolicyPasswordlessRpEntityName" : "keycloak", "webAuthnPolicyPasswordlessSignatureAlgorithms" : [ "ES256" ], "webAuthnPolicyPasswordlessRpId" : "", @@ -1138,6 +1184,7 @@ data: "webAuthnPolicyPasswordlessCreateTimeout" : 0, "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister" : false, "webAuthnPolicyPasswordlessAcceptableAaguids" : [ ], + "webAuthnPolicyPasswordlessExtraOrigins" : [ ], "scopeMappings" : [ { "clientScope" : "rabbitmq.tag:administrator", "roles" : [ "escalated-broker-handling" ] @@ -2049,6 +2096,7 @@ data: "browserSecurityHeaders" : { "contentSecurityPolicyReportOnly" : "", "xContentTypeOptions" : "nosniff", + "referrerPolicy" : "no-referrer", "xRobotsTag" : "none", "xFrameOptions" : "SAMEORIGIN", "contentSecurityPolicy" : "frame-src 'self'; frame-ancestors 'self'; 
object-src 'none';", @@ -2105,7 +2153,7 @@ data: "subType" : "anonymous", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "oidc-full-name-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "saml-role-list-mapper" ] + "allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-user-attribute-mapper", "saml-user-property-mapper", "saml-role-list-mapper", "oidc-usermodel-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper" ] } }, { "id" : "1849e52a-b8c9-44a8-af3d-ee19376a1ed1", @@ -2131,7 +2179,7 @@ data: "subType" : "authenticated", "subComponents" : { }, "config" : { - "allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-role-list-mapper" ] + "allowed-protocol-mapper-types" : [ "saml-user-property-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "oidc-address-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-attribute-mapper" ] } } ], "org.keycloak.storage.UserStorageProvider" : [ { @@ -2147,8 +2195,8 @@ data: "config" : { "ldap.attribute" : [ "createTimestamp" ], "is.mandatory.in.ldap" : [ "false" ], - "read.only" : [ "true" ], "always.read.value.from.ldap" : [ "true" ], + "read.only" : [ "true" ], "user.model.attribute" : [ "createTimestamp" ] } }, { @@ -2171,8 +2219,8 @@ data: "config" : { "ldap.attribute" : [ "cn" ], "is.mandatory.in.ldap" : [ "true" ], - "always.read.value.from.ldap" : [ "true" ], "read.only" : [ "false" ], + "always.read.value.from.ldap" : [ "true" ], "user.model.attribute" : [ "firstName" ] } }, { @@ -2183,8 +2231,8 @@ data: "config" : { "ldap.attribute" : [ "mail" ], "is.mandatory.in.ldap" : [ "false" ], - "always.read.value.from.ldap" : [ "false" ], "read.only" : [ "false" ], + "always.read.value.from.ldap" : [ "false" ], "user.model.attribute" : [ "email" ] } }, { @@ -2197,13 +2245,13 @@ data: "group.name.ldap.attribute" : [ "cn" ], "preserve.group.inheritance" : [ "false" ], "membership.user.ldap.attribute" : [ "uid" ], - "groups.dn" : [ "cn=system,ou=users,{{ .Values.identityservice.global.ldapDomain }}" ], + "groups.dn" : [ "ou=users,{{ .Values.identityservice.global.ldapDomain }}" ], "mode" : [ "LDAP_ONLY" ], "user.roles.retrieve.strategy" : [ "LOAD_GROUPS_BY_MEMBER_ATTRIBUTE" ], - "ignore.missing.groups" : [ "false" ], "membership.ldap.attribute" : [ "member" ], - "memberof.ldap.attribute" : [ "memberOf" ], + "ignore.missing.groups" : [ "false" ], "group.object.classes" : [ "groupOfNames" ], + "memberof.ldap.attribute" : [ "memberOf" ], "groups.path" : [ "/" ], "drop.non.existing.groups.during.sync" : [ "false" ] } @@ -2226,8 +2274,8 @@ data: "subComponents" : { }, "config" : { "ldap.attribute" : [ "uid" ], - "attribute.force.default" : [ "false" ], "is.mandatory.in.ldap" : [ "true" ], + "attribute.force.default" : [ "false" ], "is.binary.attribute" : [ "false" ], "always.read.value.from.ldap" : [ "false" ], "read.only" : [ "false" ], @@ -2236,27 +2284,27 @@ data: } ] }, "config" : { - "fullSyncPeriod" : [ "-1" ], "pagination" : [ "false" ], + "fullSyncPeriod" : [ "-1" ], "startTls" : [ "false" ], - "usersDn" : [ "ou=users,{{ 
.Values.identityservice.global.ldapDomain }}" ], + "usersDn" : [ "ou=users,dc=dbrepo,dc=at" ], "connectionPooling" : [ "true" ], "cachePolicy" : [ "DEFAULT" ], "useKerberosForPasswordAuthentication" : [ "false" ], "importEnabled" : [ "true" ], "enabled" : [ "true" ], "usernameLDAPAttribute" : [ "uid" ], - "bindDn" : [ "cn={{ .Values.identityservice.global.adminUser }},{{ .Values.identityservice.global.ldapDomain }}" ], "bindCredential" : [ "{{ .Values.identityservice.global.adminPassword }}" ], + "bindDn" : [ "cn={{ .Values.identityservice.global.adminUser }},{{ .Values.identityservice.global.ldapDomain }}" ], "changedSyncPeriod" : [ "-1" ], "lastSync" : [ "1719252666" ], "vendor" : [ "other" ], "uuidLDAPAttribute" : [ "entryUUID" ], - "allowKerberosAuthentication" : [ "false" ], "connectionUrl" : [ "ldap://identity-service:389" ], + "allowKerberosAuthentication" : [ "false" ], "syncRegistrations" : [ "true" ], "authType" : [ "simple" ], - "useTruststoreSpi" : [ "ldapsOnly" ], + "useTruststoreSpi" : [ "always" ], "usePasswordModifyExtendedOp" : [ "false" ], "trustEmail" : [ "false" ], "userObjectClasses" : [ "inetOrgPerson, organizationalPerson, person" ], @@ -2265,6 +2313,14 @@ data: "validatePasswordPolicy" : [ "false" ] } } ], + "org.keycloak.userprofile.UserProfileProvider" : [ { + "id" : "a407a1d6-a7f6-4a72-ba3a-149de03d5a43", + "providerId" : "declarative-user-profile", + "subComponents" : { }, + "config" : { + "kc.user.profile.config" : [ "{\"attributes\":[{\"name\":\"username\",\"displayName\":\"${username}\",\"validations\":{\"length\":{\"min\":3,\"max\":255},\"username-prohibited-characters\":{},\"up-username-not-idn-homograph\":{}},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"email\",\"displayName\":\"${email}\",\"validations\":{\"email\":{},\"length\":{\"max\":255}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"firstName\",\"displayName\":\"${firstName}\",\"validations\":{\"length\":{\"max\":255},\"person-name-prohibited-characters\":{}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false},{\"name\":\"lastName\",\"displayName\":\"${lastName}\",\"validations\":{\"length\":{\"max\":255},\"person-name-prohibited-characters\":{}},\"required\":{\"roles\":[\"user\"]},\"permissions\":{\"view\":[\"admin\",\"user\"],\"edit\":[\"admin\",\"user\"]},\"multivalued\":false}],\"groups\":[{\"name\":\"user-metadata\",\"displayHeader\":\"User metadata\",\"displayDescription\":\"Attributes, which refer to user metadata\"}],\"unmanagedAttributePolicy\":\"ENABLED\"}" ] + } + } ], "org.keycloak.keys.KeyProvider" : [ { "id" : "2f53ccf3-37b0-4d34-83e7-ed497499ee51", "name" : "rsa-enc-generated", @@ -2293,8 +2349,8 @@ data: "providerId" : "hmac-generated", "subComponents" : { }, "config" : { - "kid" : [ "c8500166-5cc4-4085-ad0f-853c3b0b0233" ], - "secret" : [ "TI3xg__G2Qy8C47DracpYir2X4ItQZSrhgr5KSlwRNISDbBqZ-ky3OcAyokSXMcpweSOaCPvbivpvzJNklUBvw" ], + "kid" : [ "7f9f9054-5697-4f60-bdc8-67e3bd0f4db6" ], + "secret" : [ "1SCIY20z3AbAHCL28LuJfBU-7zfsZv5dacgliUeGdRW_WK3vH9fJUpPu1f7iDrdlhF7YQmHxLXsWjxhQId4ShI7QBdgKCArHWqi0GeH37oNXfZFg_uv-K_3JSfxfGBRu5jpRQhhSBxESZWsFVkskhxWUvNe6b5l9dFbMIif72rI" ], "priority" : [ "100" ], "algorithm" : [ "HS256" ] } @@ -2309,12 +2365,23 @@ data: "certificate" : [ 
"MIICmzCCAYMCBgGG3GWyBTANBgkqhkiG9w0BAQsFADARMQ8wDQYDVQQDDAZkYnJlcG8wHhcNMjMwMzEzMTkxMzE3WhcNMzMwMzEzMTkxNDU3WjARMQ8wDQYDVQQDDAZkYnJlcG8wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqqcdDYFZZb28M0tEJzEP77FmD/Xqioyj9zWX6VwUSOMAgmMmn8eqs9hT9T0a+q4YTo9tUW1PNbUpwprA5b4Uk04DcIajxDVMUR/PjcHytmkqwVskq9AZW/Vngdoo+8tSbuIybwe/3Vwt266hbHpDcM97a+DXcYooRl7tQWCEX7RP27wQrMD9epDQ6IgKayZg9vC9/03dsIqwH9jXQRiZlFvwiEKhX2aY7lPGBaCK414JO00K/Z49iov9TRa/IYVbSt5qwgrx6DcqsBSPwOnI6A85UGfeUEZ/7coVJiL7RvBlsllapsL9eWTbQajVh94k9Ei3sibEPbtH+U2OAM78zAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAASnN1Cuif1sdfEK2kWAURSXGJCohCROLWdKFjaeHPRaEfpbFJsgxW0Yj3nwX5O3bUlOWoTyENwnXSsXMQsqnNi+At32CKaKO8+AkhAbgQL9F0B+KeJwmYv3cUj5N/LYkJjBvZBzUZ4Ugu5dcxH0k7AktLAIwimkyEnxTNolOA3UyrGGpREr8MCKWVr10RFuOpF/0CsJNNwbHXzalO9D756EUcRWZ9VSg6QVNso0YYRKTnILWDn9hcTRnqGy3SHo3anFTqQZ+BB57YbgFWy6udC0LYRB3zdp6zNti87eu/VEymiDY/mmo1AB8Tm0b6vxFz4AKcL3ax5qS6YnZ9efSzk=" ], "priority" : [ "100" ] } + }, { + "id" : "addbae10-c6ae-4735-851f-7a5ea035ce25", + "name" : "hmac-generated-hs512", + "providerId" : "hmac-generated", + "subComponents" : { }, + "config" : { + "kid" : [ "352d0ea1-8218-42b5-ab78-e2ca56cf6a95" ], + "secret" : [ "_kr6EZOZ8IKqPWgJltHAAsQ34wCIGPs8oOQLYWwJrSIH7Qie3CEVKZnICyBP1goR-QgUtg25tR8Qu5MkvYkb8assJ8Iok5x_8iYCR4Txkf_mS-emrlAtQajlIjmOfNBtx704dTnZlP9rWzqpW6mrpeiOaiCw1K0XCpY5C_ZjXKw" ], + "priority" : [ "100" ], + "algorithm" : [ "HS512" ] + } } ] }, "internationalizationEnabled" : false, "supportedLocales" : [ ], "authenticationFlows" : [ { - "id" : "df1ebc5f-2037-43f5-9915-71eb4cd0ed7e", + "id" : "259dd7b6-01b7-433a-bda4-028857151ecd", "alias" : "Account verification options", "description" : "Method with which to verity the existing account", "providerId" : "basic-flow", @@ -2336,36 +2403,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "76ef2d26-2756-4ce1-904b-4be58e99b576", - "alias" : "Authentication Options", - "description" : "Authentication options.", - "providerId" : "basic-flow", - "topLevel" : false, - "builtIn" : true, - "authenticationExecutions" : [ { - "authenticator" : "basic-auth", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 10, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticator" : "basic-auth-otp", - "authenticatorFlow" : false, - "requirement" : "DISABLED", - "priority" : 20, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticator" : "auth-spnego", - "authenticatorFlow" : false, - "requirement" : "DISABLED", - "priority" : 30, - "autheticatorFlow" : false, - "userSetupAllowed" : false - } ] - }, { - "id" : "b0d74a54-cea7-48f2-a4c9-f35204488da6", + "id" : "542ca1d7-9627-4102-b843-98837ce433fb", "alias" : "Browser - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2387,7 +2425,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "07b964c7-4527-4071-9f7a-e50d6321d951", + "id" : "4f153b98-6851-440b-a022-0a14e67a9b2f", "alias" : "Direct Grant - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2409,7 +2447,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "be69fd2d-1bf5-429e-9833-a76232a23904", + "id" : "3d791b35-d35c-40b2-bb3e-e806d72b27ee", "alias" : "First broker login - Conditional OTP", "description" : "Flow to determine if the OTP is required for the authentication", "providerId" : "basic-flow", @@ -2431,7 +2469,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : 
"e9d23d2a-d857-4547-a419-2fd850ed58e5", + "id" : "9b746104-9371-4c3f-b69f-9322cead1b08", "alias" : "Handle Existing Account", "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider", "providerId" : "basic-flow", @@ -2453,7 +2491,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "75e1f120-8a28-4cc0-af60-26fa9d865975", + "id" : "7a164efe-c97b-4fbb-950d-7745359ba9a4", "alias" : "Reset - Conditional OTP", "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.", "providerId" : "basic-flow", @@ -2475,7 +2513,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "eeb37a0b-2f2f-47f5-9ee6-3da2c8b48ec0", + "id" : "4fdc5e1b-1b55-4662-8360-67d75fa22677", "alias" : "User creation or linking", "description" : "Flow for the existing/non-existing user alternatives", "providerId" : "basic-flow", @@ -2498,7 +2536,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "8637f64c-8b45-48b0-b3ba-c6e93225cce4", + "id" : "75893341-c338-44d8-ae27-a3fc7bfe8f2d", "alias" : "Verify Existing Account by Re-authentication", "description" : "Reauthentication of existing account", "providerId" : "basic-flow", @@ -2520,7 +2558,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "7ad56360-b344-4f26-9dea-1a718ed99d4e", + "id" : "89626b76-f4cf-4c46-934c-4408c225a44b", "alias" : "browser", "description" : "browser based authentication", "providerId" : "basic-flow", @@ -2556,7 +2594,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "c6817917-1d21-4693-9171-b2e3dfde9582", + "id" : "4112115a-e7a7-44c2-9af5-65d538e4ba0d", "alias" : "clients", "description" : "Base authentication for clients", "providerId" : "client-flow", @@ -2592,7 +2630,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "7cd02437-6d05-486d-a7fe-4d1762895ded", + "id" : "f82a9b0a-2c0a-4cb1-96b2-6c78b0b1f14f", "alias" : "direct grant", "description" : "OpenID Connect Resource Owner Grant", "providerId" : "basic-flow", @@ -2621,7 +2659,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "eb1d9721-b4a0-40a5-9236-b4fd95ca9024", + "id" : "3614e155-e8ce-4958-98fb-a27e4706cc70", "alias" : "docker auth", "description" : "Used by Docker clients to authenticate against the IDP", "providerId" : "basic-flow", @@ -2636,7 +2674,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "219415d8-3bab-47a6-9d0c-8c1061ffb68a", + "id" : "506f9b96-5002-47c0-96e3-3830a0fcfa26", "alias" : "first broker login", "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", "providerId" : "basic-flow", @@ -2659,7 +2697,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "ccbf8944-bd32-4aa1-a6f8-93513a3fa5a4", + "id" : "4b7a7e91-36db-4b27-8e2d-01a04a822980", "alias" : "forms", "description" : "Username, password, otp and other auth forms.", "providerId" : "basic-flow", @@ -2681,29 +2719,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "805f972b-75ca-48c0-a390-752b32c0688a", - "alias" : "http challenge", - "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes", - "providerId" : "basic-flow", - "topLevel" : true, - "builtIn" : true, - "authenticationExecutions" : [ { - "authenticator" : "no-cookie-redirect", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 10, - "autheticatorFlow" : false, - "userSetupAllowed" : false - }, { - "authenticatorFlow" : true, - "requirement" : "REQUIRED", - "priority" : 20, 
- "autheticatorFlow" : true, - "flowAlias" : "Authentication Options", - "userSetupAllowed" : false - } ] - }, { - "id" : "2b15383f-ded1-4fb6-afdc-0c19f65dacc7", + "id" : "04c2fe01-5076-4aa4-9596-4efb4004195f", "alias" : "registration", "description" : "registration flow", "providerId" : "basic-flow", @@ -2719,7 +2735,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "1c18c3c7-a191-426b-84a4-1ffec96562cc", + "id" : "d12f77e1-7733-44a2-98ff-fd75c784d721", "alias" : "registration form", "description" : "registration form", "providerId" : "form-flow", @@ -2732,13 +2748,6 @@ data: "priority" : 20, "autheticatorFlow" : false, "userSetupAllowed" : false - }, { - "authenticator" : "registration-profile-action", - "authenticatorFlow" : false, - "requirement" : "REQUIRED", - "priority" : 40, - "autheticatorFlow" : false, - "userSetupAllowed" : false }, { "authenticator" : "registration-password-action", "authenticatorFlow" : false, @@ -2755,7 +2764,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "cab07ead-2a48-4b0c-8916-2f89abe55720", + "id" : "91f6048c-a376-4809-8f37-c8d7a517830c", "alias" : "reset credentials", "description" : "Reset credentials for a user if they forgot their password or something", "providerId" : "basic-flow", @@ -2791,7 +2800,7 @@ data: "userSetupAllowed" : false } ] }, { - "id" : "6e158077-d221-4695-b0d3-9528c5ba6bfd", + "id" : "7b8fb487-53b8-4533-a696-76bc05256cb1", "alias" : "saml ecp", "description" : "SAML ECP Profile Authentication Flow", "providerId" : "basic-flow", @@ -2807,13 +2816,13 @@ data: } ] } ], "authenticatorConfig" : [ { - "id" : "fcb6cb09-fec5-4390-800c-00a3d49525ec", + "id" : "48372696-0579-45e5-b074-5e8dbdbbe7d6", "alias" : "create unique user config", "config" : { "require.password.update.after.registration" : "false" } }, { - "id" : "68f9e765-81d4-47cd-b111-94d6723883c5", + "id" : "08df3b83-e522-42a7-9e24-9028b960bf39", "alias" : "review profile config", "config" : { "update.profile.on.first.login" : "missing" @@ -2883,6 +2892,14 @@ data: "defaultAction" : false, "priority" : 80, "config" : { } + }, { + "alias" : "delete_credential", + "name" : "Delete Credential", + "providerId" : "delete_credential", + "enabled" : true, + "defaultAction" : false, + "priority" : 100, + "config" : { } }, { "alias" : "update_user_locale", "name" : "Update User Locale", @@ -2898,6 +2915,7 @@ data: "resetCredentialsFlow" : "reset credentials", "clientAuthenticationFlow" : "clients", "dockerAuthenticationFlow" : "docker auth", + "firstBrokerLoginFlow" : "first broker login", "attributes" : { "cibaBackchannelTokenDeliveryMode" : "poll", "cibaAuthRequestedUserHint" : "login_hint", @@ -2917,7 +2935,7 @@ data: "clientSessionMaxLifespan" : "0", "shortVerificationUri" : "" }, - "keycloakVersion" : "21.0.2", + "keycloakVersion" : "24.0.5", "userManagedAccessAllowed" : false, "clientProfiles" : { "profiles" : [ ] @@ -2926,4 +2944,4 @@ data: "policies" : [ ] } } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/helm/dbrepo/templates/data-db-secret.yaml b/helm/dbrepo/templates/data-db-secret.yaml index 7b42140e581c604847929e3f9e32ad116fd27b51..100c1ce82c5ce7aa3da112a83a1af4b8825105ef 100644 --- a/helm/dbrepo/templates/data-db-secret.yaml +++ b/helm/dbrepo/templates/data-db-secret.yaml @@ -6,7 +6,7 @@ metadata: name: data-db-secret namespace: {{ .Values.namespace }} stringData: - S3_ACCESS_KEY_ID: "{{ .Values.storageservice.s3.auth.username }}" - S3_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.password }}" + S3_ACCESS_KEY_ID: "{{ 
.Values.storageservice.s3.auth.adminAccessKeyId }}" + S3_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.adminSecretAccessKey }}" S3_STORAGE_ENDPOINT: "{{ .Values.analyseservice.s3.endpoint }}" {{- end }} diff --git a/helm/dbrepo/templates/data-secret.yaml b/helm/dbrepo/templates/data-secret.yaml index 0a8a4ec51a46638c7440a6615818f4afa343b869..4bcaf0846854e692ce32a8253aaba69d4b6db6b5 100644 --- a/helm/dbrepo/templates/data-secret.yaml +++ b/helm/dbrepo/templates/data-secret.yaml @@ -6,20 +6,18 @@ metadata: namespace: {{ .Values.namespace }} stringData: ADMIN_EMAIL: "{{ .Values.metadataservice.admin.email }}" - ADMIN_USERNAME: "{{ .Values.identityservice.users }}" - ADMIN_PASSWORD: "{{ .Values.identityservice.userPasswords }}" AUTH_SERVICE_ADMIN: "{{ .Values.authservice.auth.adminUser }}" AUTH_SERVICE_ADMIN_PASSWORD: "{{ .Values.authservice.auth.adminPassword }}" AUTH_SERVICE_CLIENT: "{{ .Values.authservice.client.id }}" AUTH_SERVICE_CLIENT_SECRET: "{{ .Values.authservice.client.secret }}" - AUTH_SERVICE_HOST: "{{ .Values.authservice.endpoint }}" + AUTH_SERVICE_ENDPOINT: "{{ .Values.authservice.endpoint }}" BROKER_EXCHANGE_NAME: "{{ .Values.brokerservice.exchangeName }}" BROKER_HOST: "{{ .Values.brokerservice.host }}" BROKER_QUEUE_NAME: "{{ .Values.brokerservice.queueName }}" BROKER_PASSWORD: "{{ .Values.dataservice.rabbitmq.consumer.password }}" BROKER_PORT: "{{ .Values.brokerservice.port }}" BROKER_ROUTING_KEY: "{{ .Values.brokerservice.routingKey }}" - BROKER_SERVICE_ENDPOINT: "{{ .Values.brokerservice.url }}" + BROKER_SERVICE_ENDPOINT: "{{ .Values.brokerservice.endpoint }}" BROKER_USERNAME: "{{ .Values.dataservice.rabbitmq.consumer.username }}" BROKER_VIRTUALHOST: "{{ .Values.brokerservice.virtualHost }}" CONNECTION_TIMEOUT: "{{ .Values.brokerservice.connectionTimeout }}" @@ -37,7 +35,8 @@ stringData: REQUEUE_REJECTED: "{{ .Values.dataservice.rabbitmq.requeueRejected }}" S3_ACCESS_KEY_ID: "{{ .Values.dataservice.s3.auth.username }}" S3_ENDPOINT: "{{ .Values.dataservice.s3.endpoint }}" - S3_EXPORT_BUCKET: "{{ .Values.dataservice.s3.bucket.export }}" S3_FILE_PATH: "{{ .Values.dataservice.s3.filePath }}" S3_SECRET_ACCESS_KEY: "{{ .Values.dataservice.s3.auth.password }}" - S3_IMPORT_BUCKET: "{{ .Values.dataservice.s3.bucket.import }}" + S3_BUCKET: "{{ .Values.dataservice.s3.bucket }}" + SYSTEM_USERNAME: "{{ .Values.identityservice.users }}" + SYSTEM_PASSWORD: "{{ .Values.identityservice.userPasswords }}" diff --git a/helm/dbrepo/templates/metadata-configmap.yaml b/helm/dbrepo/templates/metadata-configmap.yaml index 7965f0a3855c991291a30dcef43294226971aeeb..9fd137bb39201b6149f5cce21c5c37c0f7ad6b20 100644 --- a/helm/dbrepo/templates/metadata-configmap.yaml +++ b/helm/dbrepo/templates/metadata-configmap.yaml @@ -12,7 +12,7 @@ data: 02-setup-data.sql: | BEGIN; INSERT INTO `mdb_containers` (name, internal_name, image_id, host, port, sidecar_host, sidecar_port, privileged_username, privileged_password) - VALUES ('MariaDB 11.1.3', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db', 80, 'root', 'dbrepo'); + VALUES ('MariaDB 11.1.3', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db', 8080, 'root', 'dbrepo'); COMMIT; 01-setup-schema.sql: | BEGIN; diff --git a/helm/dbrepo/templates/metadata-secret.yaml b/helm/dbrepo/templates/metadata-secret.yaml index edf451c22ad38613107d79b00d87fc8e5d2d6bf2..ac9fbb32fe149c8d64942ea08424bf7344bd4bf4 100644 --- a/helm/dbrepo/templates/metadata-secret.yaml +++ b/helm/dbrepo/templates/metadata-secret.yaml @@ -7,8 +7,6 @@ metadata: namespace: {{ .Values.namespace }} 
stringData: ADMIN_EMAIL: "{{ .Values.metadataservice.admin.email }}" - ADMIN_USERNAME: "{{ .Values.identityservice.users }}" - ADMIN_PASSWORD: "{{ .Values.identityservice.userPasswords }}" ANALYSE_SERVICE_ENDPOINT: "{{ .Values.analyseservice.endpoint }}" AUTH_SERVICE_ADMIN: "{{ .Values.authservice.auth.adminUser }}" AUTH_SERVICE_ADMIN_PASSWORD: "{{ .Values.authservice.auth.adminPassword }}" @@ -24,6 +22,7 @@ stringData: BROKER_SERVICE_ENDPOINT: "{{ .Values.brokerservice.endpoint }}" BROKER_USERNAME: "{{ .Values.brokerservice.auth.username }}" BROKER_VIRTUALHOST: "{{ .Values.brokerservice.virtualHost }}" + CROSSREF_ENDPOINT: "{{ .Values.metadataservice.crossref.endpoint}}" DATA_SERVICE_ENDPOINT: "{{ .Values.dataservice.endpoint }}" DATACITE_URL: "{{ .Values.metadataservice.datacite.url }}" DATACITE_PREFIX: "{{ .Values.metadataservice.datacite.prefix | toString }}" @@ -33,18 +32,20 @@ stringData: GRANULARITY: "{{ .Values.metadataservice.granularity }}" JWT_PUBKEY: "{{ .Values.authservice.jwt.pubkey }}" LOG_LEVEL: "{{ ternary "trace" "info" .Values.metadataservice.image.debug }}" - METADATA_DB: "{{ .Values.metadatadb.auth.database }}" + METADATA_DB: "{{ .Values.metadatadb.db.name }}" METADATA_HOST: "{{ .Values.metadatadb.host }}" METADATA_JDBC_EXTRA_ARGS: "{{ .Values.metadatadb.jdbcExtraArgs }}" - METADATA_USERNAME: "{{ .Values.metadatadb.auth.root }}" - METADATA_PASSWORD: "{{ .Values.metadatadb.auth.rootPassword }}" + METADATA_USERNAME: "{{ .Values.metadatadb.rootUser.user }}" + METADATA_PASSWORD: "{{ .Values.metadatadb.rootUser.password }}" PID_BASE: "{{ $pidBase }}" REPOSITORY_NAME: "{{ .Values.metadataservice.repositoryName }}" + ROR_ENDPOINT: "{{ .Values.metadataservice.ror.endpoint}}" SEARCH_SERVICE_ENDPOINT: "{{ .Values.searchservice.endpoint }}" SPARQL_CONNECTION_TIMEOUT: "{{ .Values.metadataservice.sparql.connectionTimeout }}" SPRING_PROFILES_ACTIVE: "{{ ternary "doi" "" .Values.metadataservice.datacite.enabled }}" S3_ENDPOINT: "{{ .Values.metadataservice.s3.endpoint }}" S3_ACCESS_KEY_ID: "{{ .Values.metadataservice.s3.auth.username }}" + S3_BUCKET: "{{ .Values.metadataservice.s3.bucket }}" S3_SECRET_ACCESS_KEY: "{{ .Values.metadataservice.s3.auth.password }}" - S3_IMPORT_BUCKET: "{{ .Values.metadataservice.s3.bucket.import }}" - S3_EXPORT_BUCKET: "{{ .Values.metadataservice.s3.bucket.export }}" + SYSTEM_USERNAME: "{{ .Values.identityservice.users }}" + SYSTEM_PASSWORD: "{{ .Values.identityservice.userPasswords }}" diff --git a/helm/dbrepo/templates/search-secret.yaml b/helm/dbrepo/templates/search-secret.yaml index 52f33a640c6e56e5cc82ae6073c35aaee841d17c..41665ac2bc1614653262f93cf28882a55638e4ec 100644 --- a/helm/dbrepo/templates/search-secret.yaml +++ b/helm/dbrepo/templates/search-secret.yaml @@ -1,3 +1,4 @@ +{{- if .Values.searchservice.enabled }} --- apiVersion: v1 kind: Secret @@ -18,4 +19,5 @@ stringData: OPENSEARCH_HOST: "{{ .Values.searchdb.host }}" OPENSEARCH_PORT: "{{ .Values.searchdb.port }}" OPENSEARCH_USERNAME: "{{ .Values.searchdb.security.adminUsername }}" - OPENSEARCH_PASSWORD: "{{ .Values.searchdb.security.adminPassword }}" \ No newline at end of file + OPENSEARCH_PASSWORD: "{{ .Values.searchdb.security.adminPassword }}" +{{- end }} diff --git a/helm/dbrepo/templates/storage-job.yaml b/helm/dbrepo/templates/storage-job.yaml index da30b885eb8eac16ae459a12c6dd71897d645004..4062aa8efeba0282954623e814bf4757e44a5f88 100644 --- a/helm/dbrepo/templates/storage-job.yaml +++ b/helm/dbrepo/templates/storage-job.yaml @@ -3,12 +3,12 @@ apiVersion: batch/v1 kind: Job metadata: 
- name: storageservice-create-buckets-job + name: storage-service-create-buckets-job namespace: {{ .Values.namespace }} spec: template: metadata: - name: storageservice-create-buckets-job + name: storage-service-create-buckets-job spec: restartPolicy: OnFailure containers: @@ -18,11 +18,9 @@ spec: - name: WEED_CLUSTER_DEFAULT value: "sw" - name: WEED_CLUSTER_SW_MASTER - value: "storageservice-master.{{ .Release.Name }}:9333" - - name: SEAWEEDFS_ENDPOINT - value: "storageservice-master.{{ .Release.Name }}:9333" + value: "storage-service-master:9333" - name: WEED_CLUSTER_SW_FILER - value: "storageservice-filer-client.{{ .Release.Name }}:8888" + value: "storage-service-filer:8888" - name: POD_IP valueFrom: fieldRef: @@ -35,8 +33,13 @@ spec: valueFrom: fieldRef: fieldPath: metadata.namespace + - name: S3_BUCKET + valueFrom: + secretKeyRef: + name: storage-service-secret + key: S3_BUCKET - name: SEAWEEDFS_FULLNAME - value: "storageservice" + value: "storage-service" volumeMounts: - name: config-users mountPath: /etc/sw @@ -50,5 +53,5 @@ spec: - name: config-users secret: defaultMode: 420 - secretName: seaweedfs-s3-secret + secretName: storage-service-secret {{- end }} diff --git a/helm/dbrepo/templates/storage-secret.yaml b/helm/dbrepo/templates/storage-secret.yaml index faaf302f7dc36910ff82bc605eaf63da8b662149..77bf426717fc50fe7b468196c61218388c60bf6b 100644 --- a/helm/dbrepo/templates/storage-secret.yaml +++ b/helm/dbrepo/templates/storage-secret.yaml @@ -3,21 +3,19 @@ apiVersion: v1 kind: Secret metadata: - name: seaweedfs-s3-secret + name: storage-service-secret namespace: {{ .Values.namespace }} - labels: - app.kubernetes.io/name: seaweedfs - app.kubernetes.io/component: s3 stringData: - seaweedfs_s3_config: | + S3_BUCKET: "{{ .Values.storageservice.s3.bucket }}" + config.json: | { "identities": [ { "name": "admin", "credentials": [ { - "accessKey": "{{ .Values.storageservice.s3.auth.username }}", - "secretKey": "{{ .Values.storageservice.s3.auth.password }}" + "accessKey": "{{ .Values.storageservice.s3.auth.adminAccessKeyId }}", + "secretKey": "{{ .Values.storageservice.s3.auth.adminSecretAccessKey }}" } ], "actions": [ @@ -30,4 +28,4 @@ stringData: } ] } -{{- end }} +{{- end }} \ No newline at end of file diff --git a/helm/dbrepo/templates/ui-deployment.yaml b/helm/dbrepo/templates/ui-deployment.yaml index 64cea9bf103dd3c66446ba353528b9ddb96b42a7..8c72c35f0d395230bcf3b9ace4a76a7b721bcfe7 100644 --- a/helm/dbrepo/templates/ui-deployment.yaml +++ b/helm/dbrepo/templates/ui-deployment.yaml @@ -35,92 +35,9 @@ spec: ports: - containerPort: 3000 protocol: TCP - env: - - name: NUXT_PUBLIC_API_SERVER - valueFrom: - secretKeyRef: - name: ui-secret - key: public-api-server - - name: NUXT_PUBLIC_API_CLIENT - valueFrom: - secretKeyRef: - name: ui-secret - key: public-api-server - - name: NUXT_PUBLIC_TITLE - valueFrom: - secretKeyRef: - name: ui-secret - key: public-title - - name: NUXT_PUBLIC_LOGO - valueFrom: - secretKeyRef: - name: ui-secret - key: public-logo - - name: NUXT_PUBLIC_ICON - valueFrom: - secretKeyRef: - name: ui-secret - key: public-icon - - name: NUXT_PUBLIC_TOUCH - valueFrom: - secretKeyRef: - name: ui-secret - key: public-touch - - name: NUXT_PUBLIC_BROKER_HOST - valueFrom: - secretKeyRef: - name: ui-secret - key: public-broker-host - - name: NUXT_PUBLIC_BROKER_PORT - valueFrom: - secretKeyRef: - name: ui-secret - key: public-broker-port - - name: NUXT_PUBLIC_BROKER_EXTRA - valueFrom: - secretKeyRef: - name: ui-secret - key: public-broker-extra - - name: NUXT_PUBLIC_DATABASE_EXTRA 
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-database-extra
-            - name: NUXT_PUBLIC_LINKS_KEYCLOAK_HREF
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-links-keycloak-href
-            - name: NUXT_PUBLIC_LINKS_KEYCLOAK_TEXT
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-links-keycloak-text
-            - name: NUXT_PUBLIC_LINKS_RABBITMQ_HREF
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-links-rabbitmq-href
-            - name: NUXT_PUBLIC_LINKS_RABBITMQ_TEXT
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-links-rabbitmq-text
-            - name: NUXT_PUBLIC_PID_DEFAULT_PUBLISHER
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-pid-default-publisher
-            - name: NUXT_PUBLIC_DOI_ENABLED
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-doi-enabled
-            - name: NUXT_PUBLIC_DOI_ENDPOINT
-              valueFrom:
-                secretKeyRef:
-                  name: ui-secret
-                  key: public-doi-endpoint
+          envFrom:
+            - secretRef:
+                name: ui-secret
           volumeMounts:
 {{- if .Values.ui.extraVolumeMounts }}
 {{- .Values.ui.extraVolumeMounts | toYaml | nindent 12 }}
diff --git a/helm/dbrepo/templates/ui-secret.yaml b/helm/dbrepo/templates/ui-secret.yaml
index ddb0f902ecd7d67d3534d335ca180009e101cea1..af933da2e36e6af80711e46692757f4c650202f5 100644
--- a/helm/dbrepo/templates/ui-secret.yaml
+++ b/helm/dbrepo/templates/ui-secret.yaml
@@ -1,4 +1,5 @@
-{{ $api := printf "https://%s" .Values.hostname }}
+{{- if .Values.ui.enabled }}
+{{ $uploadEndpoint := printf "%s/api/upload/files" .Values.gateway }}
 ---
 apiVersion: v1
 kind: Secret
@@ -6,20 +7,22 @@ metadata:
   name: ui-secret
   namespace: {{ .Values.namespace }}
 stringData:
-  public-api-client: "{{ .Values.ui.public.api.client | default $api }}"
-  public-api-server: "{{ .Values.ui.public.api.server | default $api }}"
-  public-title: "{{ .Values.ui.public.title }}"
-  public-logo: "{{ .Values.ui.public.logo }}"
-  public-icon: "{{ .Values.ui.public.icon }}"
-  public-touch: "{{ .Values.ui.public.touch }}"
-  public-broker-host: "{{ .Values.ui.public.broker.host }}"
-  public-broker-port: {{ .Values.ui.public.broker.port | toJson | quote }}
-  public-broker-extra: "{{ .Values.ui.public.broker.extra }}"
-  public-database-extra: "{{ .Values.ui.public.database.extra }}"
-  public-links-keycloak-text: "{{ .Values.ui.public.links.keycloak.text }}"
-  public-links-keycloak-href: "{{ .Values.ui.public.links.keycloak.href }}"
-  public-links-rabbitmq-text: "{{ .Values.ui.public.links.rabbitmq.text }}"
-  public-links-rabbitmq-href: "{{ .Values.ui.public.links.rabbitmq.href }}"
-  public-pid-default-publisher: "{{ .Values.ui.public.pid.default.publisher }}"
-  public-doi-enabled: "{{ .Values.ui.public.doi.enabled }}"
-  public-doi-endpoint: "{{ .Values.ui.public.doi.endpoint }}"
\ No newline at end of file
+  NUXT_PUBLIC_API_CLIENT: "{{ .Values.ui.public.api.client | default .Values.gateway }}"
+  NUXT_PUBLIC_API_SERVER: "{{ .Values.ui.public.api.server | default .Values.gateway }}"
+  NUXT_PUBLIC_TITLE: "{{ .Values.ui.public.title }}"
+  NUXT_PUBLIC_LOGO: "{{ .Values.ui.public.logo }}"
+  NUXT_PUBLIC_ICON: "{{ .Values.ui.public.icon }}"
+  NUXT_PUBLIC_TOUCH: "{{ .Values.ui.public.touch }}"
+  NUXT_PUBLIC_BROKER_HOST: "{{ .Values.ui.public.broker.host }}"
+  NUXT_PUBLIC_BROKER_PORT: {{ .Values.ui.public.broker.port | toJson | quote }}
+  NUXT_PUBLIC_BROKER_EXTRA: "{{ .Values.ui.public.broker.extra }}"
+  NUXT_PUBLIC_DATABASE_EXTRA: "{{ .Values.ui.public.database.extra }}"
+  NUXT_PUBLIC_DOI_ENABLED: "{{ .Values.ui.public.doi.enabled }}"
+  NUXT_PUBLIC_DOI_ENDPOINT: "{{ .Values.ui.public.doi.endpoint }}"
+  NUXT_PUBLIC_LINKS_KEYCLOAK_HREF: "{{ .Values.ui.public.links.keycloak.href }}"
+  NUXT_PUBLIC_LINKS_KEYCLOAK_TEXT: "{{ .Values.ui.public.links.keycloak.text }}"
+  NUXT_PUBLIC_LINKS_RABBITMQ_HREF: "{{ .Values.ui.public.links.rabbitmq.href }}"
+  NUXT_PUBLIC_LINKS_RABBITMQ_TEXT: "{{ .Values.ui.public.links.rabbitmq.text }}"
+  NUXT_PUBLIC_PID_DEFAULT_PUBLISHER: "{{ .Values.ui.public.pid.default.publisher }}"
+  NUXT_PUBLIC_UPLOAD_CLIENT: "{{ .Values.ui.public.upload.client | default $uploadEndpoint }}"
+{{- end }}
diff --git a/helm/dbrepo/templates/upload-secret.yaml b/helm/dbrepo/templates/upload-secret.yaml
index fe415fe2be44e4730a07397e0735c2b798f21615..489b99743cbaaa9a816ce4d2efe7302c4b0aa5e0 100644
--- a/helm/dbrepo/templates/upload-secret.yaml
+++ b/helm/dbrepo/templates/upload-secret.yaml
@@ -6,7 +6,7 @@ metadata:
   name: upload-service-secret
   namespace: {{ .Values.namespace }}
 stringData:
-  AWS_ACCESS_KEY_ID: "{{ .Values.storageservice.s3.auth.username }}"
-  AWS_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.password }}"
+  AWS_ACCESS_KEY_ID: "{{ .Values.storageservice.s3.auth.adminAccessKeyId }}"
+  AWS_SECRET_ACCESS_KEY: "{{ .Values.storageservice.s3.auth.adminSecretAccessKey }}"
   AWS_REGION: "default"
 {{- end }}
\ No newline at end of file
diff --git a/helm/dbrepo/values.schema.json b/helm/dbrepo/values.schema.json
index 0e1d72462caa8ecaa85d9eacd79822cb19f89dd1..843f7e799100a7b8a5041c96b8914275326ea2b7 100644
--- a/helm/dbrepo/values.schema.json
+++ b/helm/dbrepo/values.schema.json
@@ -35,7 +35,8 @@
          "type": "integer"
        },
        "seLinuxOptions": {
-          "type": "string"
+          "properties": {},
+          "type": "object"
        },
        "seccompProfile": {
          "properties": {
@@ -425,9 +426,6 @@
        "enabled": {
          "type": "boolean"
        },
-        "extraFlags": {
-          "type": "string"
-        },
        "fullnameOverride": {
          "type": "string"
        },
@@ -703,7 +701,8 @@
          "type": "integer"
        },
        "seLinuxOptions": {
-          "type": "string"
+          "properties": {},
+          "type": "object"
        },
        "seccompProfile": {
          "properties": {
@@ -733,6 +732,9 @@
        "enabled": {
          "type": "boolean"
        },
+        "endpoint": {
+          "type": "string"
+        },
        "grant": {
          "properties": {
            "read": {
@@ -820,15 +822,7 @@
          "type": "object"
        },
        "bucket": {
-          "properties": {
-            "export": {
-              "type": "string"
-            },
-            "import": {
-              "type": "string"
-            }
-          },
-          "type": "object"
+          "type": "string"
        },
        "endpoint": {
          "type": "string"
@@ -1037,21 +1031,9 @@
    },
    "metadatadb": {
      "properties": {
-        "auth": {
+        "db": {
          "properties": {
-            "database": {
-              "type": "string"
-            },
-            "replicationPassword": {
-              "type": "string"
-            },
-            "replicationUser": {
-              "type": "string"
-            },
-            "root": {
-              "type": "string"
-            },
-            "rootPassword": {
+            "name": {
              "type": "string"
            }
          },
@@ -1060,16 +1042,32 @@
        "enabled": {
          "type": "boolean"
        },
-        "extraInitDbScripts": {
-          "properties": {},
-          "type": "object"
-        },
        "fullnameOverride": {
          "type": "string"
        },
+        "galera": {
+          "properties": {
+            "mariabackup": {
+              "properties": {
+                "password": {
+                  "type": "string"
+                },
+                "user": {
+                  "type": "string"
+                }
+              },
+              "type": "object"
+            }
+          },
+          "type": "object"
+        },
        "host": {
          "type": "string"
        },
+        "initdbScripts": {
+          "properties": {},
+          "type": "object"
+        },
        "initdbScriptsConfigMap": {
          "type": "string"
        },
@@ -1084,10 +1082,24 @@
        },
        "type": "object"
      },
-      "secondary": {
+      "persistence": {
        "properties": {
-          "replicaCount": {
-            "type": "integer"
+          "enabled": {
+            "type": "boolean"
+          }
+        },
+        "type": "object"
+      },
+      "replicaCount": {
+        "type": "integer"
+      },
+      "rootUser": {
+        "properties": {
+          "password": {
+            "type": "string"
+          },
+          "user": {
+            "type": "string"
          }
}, "type": "object" @@ -1137,7 +1149,8 @@ "type": "integer" }, "seLinuxOptions": { - "type": "string" + "properties": {}, + "type": "object" }, "seccompProfile": { "properties": { @@ -1150,6 +1163,14 @@ }, "type": "object" }, + "crossref": { + "properties": { + "endpoint": { + "type": "string" + } + }, + "type": "object" + }, "datacite": { "properties": { "enabled": { @@ -1249,6 +1270,14 @@ }, "type": "object" }, + "ror": { + "properties": { + "endpoint": { + "type": "string" + } + }, + "type": "object" + }, "s3": { "properties": { "auth": { @@ -1263,15 +1292,7 @@ "type": "object" }, "bucket": { - "properties": { - "export": { - "type": "string" - }, - "import": { - "type": "string" - } - }, - "type": "object" + "type": "string" }, "endpoint": { "type": "string" @@ -1364,7 +1385,8 @@ "type": "integer" }, "seLinuxOptions": { - "type": "string" + "properties": {}, + "type": "object" }, "seccompProfile": { "properties": { @@ -1380,6 +1402,9 @@ "enabled": { "type": "boolean" }, + "endpoint": { + "type": "string" + }, "image": { "properties": { "debug": { @@ -1470,44 +1495,15 @@ }, "filer": { "properties": { - "enablePVC": { - "type": "boolean" - }, "enabled": { "type": "boolean" - }, - "replicas": { - "type": "integer" - }, - "s3": { - "properties": { - "allowEmptyFolder": { - "type": "boolean" - }, - "enableAuth": { - "type": "boolean" - }, - "enabled": { - "type": "boolean" - }, - "existingConfigSecret": { - "type": "string" - }, - "port": { - "type": "integer" - }, - "skipAuthSecretCreation": { - "type": "boolean" - } - }, - "type": "object" - }, - "storage": { - "type": "string" } }, "type": "object" }, + "fullnameOverride": { + "type": "string" + }, "init": { "properties": { "image": { @@ -1519,6 +1515,17 @@ }, "type": "object" }, + "mariadb": { + "properties": { + "enabled": { + "type": "boolean" + }, + "fullnameOverride": { + "type": "string" + } + }, + "type": "object" + }, "master": { "properties": { "enabled": { @@ -1531,46 +1538,26 @@ "properties": { "auth": { "properties": { - "password": { + "adminAccessKeyId": { "type": "string" }, - "username": { - "type": "string" - } - }, - "type": "object" - }, - "bucket": { - "properties": { - "export": { + "adminSecretAccessKey": { "type": "string" }, - "import": { - "type": "string" + "enabled": { + "type": "boolean" } }, "type": "object" }, - "enableAuth": { - "type": "boolean" + "bucket": { + "type": "string" }, "enabled": { "type": "boolean" }, - "existingConfigSecret": { - "type": "string" - }, - "metricsPort": { - "type": "integer" - }, - "port": { - "type": "integer" - }, - "replicas": { + "replicaCount": { "type": "integer" - }, - "skipAuthSecretCreation": { - "type": "boolean" } }, "type": "object" @@ -1579,9 +1566,6 @@ "properties": { "enabled": { "type": "boolean" - }, - "replicas": { - "type": "integer" } }, "type": "object" @@ -1626,7 +1610,8 @@ "type": "integer" }, "seLinuxOptions": { - "type": "string" + "properties": {}, + "type": "object" }, "seccompProfile": { "properties": { @@ -1787,6 +1772,14 @@ }, "touch": { "type": "string" + }, + "upload": { + "properties": { + "client": { + "type": "string" + } + }, + "type": "object" } }, "type": "object" diff --git a/helm/dbrepo/values.yaml b/helm/dbrepo/values.yaml index d910ff084fb38d08dde46d4f06c69c68450dc38d..f96cf27b47bb0492479bd7ae45e2389870c8f9ef 100644 --- a/helm/dbrepo/values.yaml +++ b/helm/dbrepo/values.yaml @@ -35,17 +35,20 @@ metadatadb: fullnameOverride: metadata-db ## @param metadatadb.host The hostname for the microservices. 
   host: metadata-db
-  auth:
-    ## @param metadatadb.auth.root The root username.
-    root: root
-    ## @param metadatadb.auth.rootPassword The root user password.
-    rootPassword: dbrepo
-    ## @param metadatadb.auth.database The database name.
-    database: dbrepo
-    ## @param metadatadb.auth.replicationUser The database replication username.
-    replicationUser: replication
-    ## @param metadatadb.auth.replicationPassword The database replication user password
-    replicationPassword: replication
+  rootUser:
+    ## @param metadatadb.rootUser.user The root username.
+    user: root
+    ## @param metadatadb.rootUser.password The root user password.
+    password: dbrepo
+  db:
+    ## @param metadatadb.db.name The database name.
+    name: dbrepo
+  galera:
+    mariabackup:
+      ## @param metadatadb.galera.mariabackup.user The database backup username.
+      user: backup
+      ## @param metadatadb.galera.mariabackup.password The database backup user password.
+      password: backup
   ## @param metadatadb.jdbcExtraArgs The extra arguments for JDBC connections in the microservices.
   jdbcExtraArgs: ""
   metrics:
@@ -53,16 +56,18 @@ metadatadb:
     enabled: false
   ## @skip metadatadb.initdbScriptsConfigMap The initial database scripts.
   initdbScriptsConfigMap: metadata-db-setup
-  ## @param metadatadb.extraInitDbScripts Additional init.db scripts that are executed on the first start.
-  extraInitDbScripts: { }
+  ## @param metadatadb.initdbScripts Additional init.db scripts that are executed on the first start.
+  initdbScripts: { }
  #    03-additional-data.sql: |
  #      BEGIN;
  #      INSERT INTO `mdb_containers` (name, internal_name, image_id, host, port, sidecar_host, sidecar_port, privileged_username, privileged_password)
  #      VALUES ('MariaDB Galera TEST', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db', 80, 'root', 'dbrepo');
  #      COMMIT;
-  secondary:
-    ## @param metadatadb.secondary.replicaCount The number of replicas of the secondary database pods.
-    replicaCount: 2
+  ## @param metadatadb.replicaCount The number of cluster nodes; must be an odd number, i.e. 2n+1.
+  replicaCount: 3
+  persistence:
+    ## @param metadatadb.persistence.enabled Enable persistent storage.
+    enabled: true
 
 ## @section Auth Service
 
@@ -131,8 +136,6 @@ datadb:
   image:
     ## @param datadb.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
     debug: false
-  ## @skip datadb.extraFlags
-  extraFlags: "--character-set-server=utf8mb4 --collation-server=utf8mb4_general_ci"
   auth:
     ## @param datadb.auth.rootPassword The root user password.
     rootPassword: dbrepo
@@ -153,7 +156,7 @@ datadb:
       protocol: TCP
   sidecars:
     - name: sidecar
-      image: registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.4
+      image: registry.datalab.tuwien.ac.at/dbrepo/data-db-sidecar:1.4.5
       imagePullPolicy: Always
       securityContext:
         runAsUser: 1001
@@ -248,9 +251,11 @@ uploadservice:
         - ALL
   ## @skip uploadservice.containerArgs
   containerArgs:
-    - "--base-path=/api/upload/files/"
-    - "-s3-endpoint=http://storageservice-s3:9000"
-    - "-s3-bucket=dbrepo-upload"
+    - "-behind-proxy"
+    - "-max-size=2000000000"
+    - "-base-path=/api/upload/files/"
+    - "-s3-endpoint=http://storage-service-s3:8333"
+    - "-s3-bucket=dbrepo"
   ## @skip uploadservice.envFrom
   envFrom:
     - secretRef:
@@ -337,7 +342,7 @@ analyseservice:
   enabled: true
   image:
     ## @skip analyseservice.image.name
-    name: registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.4
+    name: registry.datalab.tuwien.ac.at/dbrepo/analyse-service:1.4.5
     ## @skip analyseservice.image.pullPolicy
     pullPolicy: Always
     ## @param analyseservice.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
@@ -358,7 +363,7 @@ analyseservice:
     ## @param analyseservice.containerSecurityContext.enabled Enabled containers' Security Context
     enabled: true
     ## @param analyseservice.containerSecurityContext.seLinuxOptions Set SELinux options in container
-    seLinuxOptions: ""
+    seLinuxOptions: { }
     ## @param analyseservice.containerSecurityContext.runAsUser Set RabbitMQ containers' Security Context runAsUser
     runAsUser: 1001
     ## @param analyseservice.containerSecurityContext.runAsGroup Set RabbitMQ containers' Security Context runAsGroup
@@ -388,7 +393,7 @@ analyseservice:
   endpoint: http://analyse-service
   s3:
     ## @param analyseservice.s3.endpoint The S3-capable endpoint the microservice connects to.
-    endpoint: http://storageservice-s3:9000
+    endpoint: http://storage-service-s3:8333
   ## @param analyseservice.replicaCount The number of replicas.
   replicaCount: 2
 
@@ -399,7 +404,7 @@ metadataservice:
   enabled: true
   image:
     ## @skip metadataservice.image.name
-    name: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.4
+    name: registry.datalab.tuwien.ac.at/dbrepo/metadata-service:1.4.5
    ## @skip metadataservice.image.pullPolicy
    pullPolicy: Always
    ## @param metadataservice.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
@@ -420,7 +425,7 @@ metadataservice:
     ## @param metadataservice.containerSecurityContext.enabled Enabled containers' Security Context
     enabled: true
     ## @param metadataservice.containerSecurityContext.seLinuxOptions Set SELinux options in container
-    seLinuxOptions: ""
+    seLinuxOptions: { }
     ## @param metadataservice.containerSecurityContext.runAsUser Set RabbitMQ containers' Security Context runAsUser
     runAsUser: 1001
     ## @param metadataservice.containerSecurityContext.runAsGroup Set RabbitMQ containers' Security Context runAsGroup
@@ -447,6 +452,12 @@ metadataservice:
       memory: 2048Mi
   ## @param metadataservice.endpoint The Metadata Service endpoint.
   endpoint: http://metadata-service
+  crossref:
+    ## @param metadataservice.crossref.endpoint The CrossRef endpoint.
+    endpoint: http://data.crossref.org
+  ror:
+    ## @param metadataservice.ror.endpoint The ROR endpoint.
+    endpoint: https://api.ror.org
   admin:
     ## @param metadataservice.admin.email The OAI-PMH exposed e-mail for contacting the metadata records responsible person.
     email: noreply@example.com
@@ -472,11 +483,9 @@ metadataservice:
     connectionTimeout: 10000
   s3:
     ## @param metadataservice.s3.endpoint The S3-capable endpoint the microservice connects to.
-    endpoint: http://storageservice-s3:9000
+    endpoint: http://storage-service-s3:8333
     ## @skip metadataservice.s3.bucket
-    bucket:
-      import: dbrepo-upload
-      export: dbrepo-download
+    bucket: dbrepo
     auth:
       ## @param metadataservice.s3.auth.username The S3-capable endpoint username (or access key id).
       username: seaweedfsadmin
@@ -490,9 +499,11 @@
 dataservice:
   ## @param dataservice.enabled Enable the Broker Service.
   enabled: true
+  ## @param dataservice.endpoint Absolute URL to the data service in the form of http://host:port
+  endpoint: http://data-service
   image:
     ## @skip dataservice.image.name
-    name: registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.4
+    name: registry.datalab.tuwien.ac.at/dbrepo/data-service:1.4.5
     ## @skip dataservice.image.pullPolicy
     pullPolicy: Always
     ## @param dataservice.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
@@ -513,7 +524,7 @@ dataservice:
     ## @param dataservice.containerSecurityContext.enabled Enabled containers' Security Context
     enabled: true
     ## @param dataservice.containerSecurityContext.seLinuxOptions Set SELinux options in container
-    seLinuxOptions: ""
+    seLinuxOptions: { }
     ## @param dataservice.containerSecurityContext.runAsUser Set RabbitMQ containers' Security Context runAsUser
     runAsUser: 1001
     ## @param dataservice.containerSecurityContext.runAsGroup Set RabbitMQ containers' Security Context runAsGroup
@@ -557,11 +568,9 @@ dataservice:
       password: admin
   s3:
     ## @param dataservice.s3.endpoint The S3-capable endpoint the microservice connects to.
-    endpoint: http://storageservice-s3:9000
-    ## @skip dataservice.s3.bucket
-    bucket:
-      import: dbrepo-upload
-      export: dbrepo-download
+    endpoint: http://storage-service-s3:8333
+    ## @param dataservice.s3.bucket The S3 bucket name.
+    bucket: dbrepo
     auth:
       ## @param dataservice.s3.auth.username The S3-capable endpoint username (or access key id).
       username: seaweedfsadmin
@@ -577,9 +586,11 @@
 searchservice:
   ## @param searchservice.enabled Enable the Broker Service.
   enabled: true
+  ## @param searchservice.endpoint Absolute URL to the search service in the form of http://host:port
+  endpoint: http://search-service
   image:
     ## @skip searchservice.image.name
-    name: registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.4
+    name: registry.datalab.tuwien.ac.at/dbrepo/search-service:1.4.5
     ## @skip searchservice.image.pullPolicy
     pullPolicy: Always
     ## @param searchservice.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
@@ -600,7 +611,7 @@ searchservice:
     ## @param searchservice.containerSecurityContext.enabled Enabled containers' Security Context
     enabled: true
     ## @param searchservice.containerSecurityContext.seLinuxOptions Set SELinux options in container
-    seLinuxOptions: ""
+    seLinuxOptions: { }
     ## @param searchservice.containerSecurityContext.runAsUser Set RabbitMQ containers' Security Context runAsUser
     runAsUser: 1001
     ## @param searchservice.containerSecurityContext.runAsGroup Set RabbitMQ containers' Security Context runAsGroup
@@ -628,7 +639,7 @@ searchservice:
   ## @skip searchservice.init
   init:
     image:
-      name: registry.datalab.tuwien.ac.at/dbrepo/search-service-init:1.4.4
+      name: registry.datalab.tuwien.ac.at/dbrepo/search-service-init:1.4.5
      pullPolicy: Always
   ## @param searchservice.replicaCount The number of replicas.
   replicaCount: 2
@@ -638,44 +649,39 @@
 storageservice:
   ## @param storageservice.enabled Enable the Storage Service.
   enabled: true
-  ## @skip storageservice.master
+  ## @skip storageservice.fullnameOverride
+  fullnameOverride: storage-service
+  mariadb:
+    ## @skip storageservice.mariadb.fullnameOverride
+    fullnameOverride: storage-service-db
+    ## @skip storageservice.mariadb.enabled
+    enabled: true
   master:
+    ## @skip storageservice.master.enabled
     enabled: true
-  ## @skip storageservice.filer
   filer:
+    ## @param storageservice.filer.enabled Enable the storage service filer which is required for S3.
     enabled: true
-    replicas: 1
-    enablePVC: false
-    storage: 25Gi
-    s3:
-      enabled: true
-      allowEmptyFolder: true
-      port: 9000
-      enableAuth: true
-      skipAuthSecretCreation: true
-      existingConfigSecret: seaweedfs-s3-secret
-  ## @skip storageservice.volume
   volume:
-    enabled: true
-    replicas: 1
-  ## @skip storageservice.s3
+    ## @skip storageservice.volume.enabled
+    enabled: false
   s3:
+    ## @skip storageservice.s3.enabled
     enabled: true
-    replicas: 2
-    port: 9000
-    metricsPort: 9091
-    enableAuth: true
-    skipAuthSecretCreation: true
-    existingConfigSecret: seaweedfs-s3-secret
-    bucket:
-      import: dbrepo-upload
-      export: dbrepo-download
+    ## @param storageservice.s3.replicaCount The number of replicas.
+    replicaCount: 2
+    ## @param storageservice.s3.bucket The S3 bucket name.
+    bucket: dbrepo
     auth:
-      username: seaweedfsadmin
-      password: seaweedfsadmin
+      ## @param storageservice.s3.auth.enabled Enable S3 authentication.
+      enabled: true
+      ## @param storageservice.s3.auth.adminAccessKeyId The S3 access key id for the admin user. In some systems this is named `username`.
+      adminAccessKeyId: seaweedfsadmin
+      ## @param storageservice.s3.auth.adminSecretAccessKey The S3 secret access key for the admin user. In some systems this is named `password`.
+      adminSecretAccessKey: seaweedfsadmin
   ## @skip storageservice.init
   init:
-    image: registry.datalab.tuwien.ac.at/dbrepo/storage-service-init:1.4.4
+    image: registry.datalab.tuwien.ac.at/dbrepo/storage-service-init:1.4.5
     pullPolicy: Always
 
 ## @section Identity Service
@@ -738,7 +744,7 @@ ui:
   enabled: true
   image:
     ## @skip ui.image.name
-    name: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.4
+    name: registry.datalab.tuwien.ac.at/dbrepo/ui:1.4.5
     ## @skip ui.image.pullPolicy
     pullPolicy: Always
     ## @param ui.image.debug Set the logging level to `trace`. Otherwise, set to `info`.
@@ -759,7 +765,7 @@ ui:
     ## @param ui.containerSecurityContext.enabled Enabled containers' Security Context
     enabled: true
     ## @param ui.containerSecurityContext.seLinuxOptions Set SELinux options in container
-    seLinuxOptions: ""
+    seLinuxOptions: { }
     ## @param ui.containerSecurityContext.runAsUser Set RabbitMQ containers' Security Context runAsUser
     runAsUser: 1001
     ## @param ui.containerSecurityContext.runAsGroup Set RabbitMQ containers' Security Context runAsGroup
@@ -786,10 +792,13 @@ ui:
       memory: 2048Mi
   public:
     api:
-      ## @param ui.public.api.client The endpoint for the client api.
+      ## @param ui.public.api.client The endpoint for the client api. Defaults to the value of `gateway`.
       client: ""
-      ## @param ui.public.api.server The endpoint for the server api.
+      ## @param ui.public.api.server The endpoint for the server api. Defaults to the value of `gateway`.
       server: ""
+    upload:
+      ## @param ui.public.upload.client The endpoint for the upload client. Defaults to the value of `gateway` and path `/api/upload/files`.
+      client: ""
     ## @param ui.public.title The user interface title.
     title: "Database Repository"
     ## @param ui.public.logo The user interface logo.
diff --git a/install.sh b/install.sh
index 32d30096f8a2cfc9c098bd76e26f0080468ca7ca..1c10f8e7cbafe9df4bfbb07d104bf96f857e3046 100644
--- a/install.sh
+++ b/install.sh
@@ -64,6 +64,7 @@ curl -sSL -o ./dist/2_setup-data.sql "https://gitlab.phaidra.org/fair-data-austr
 curl -sSL -o ./dist/rabbitmq.conf "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-broker-service/rabbitmq.conf"
 curl -sSL -o ./dist/enabled_plugins "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-broker-service/enabled_plugins"
 curl -sSL -o ./dist/definitions.json "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-broker-service/definitions.json"
+curl -sSL -o ./dist/advanced.config "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-broker-service/advanced.config"
 curl -sSL -o ./dist/dbrepo.conf "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-gateway-service/dbrepo.conf"
 curl -sSL -o ./dist/opensearch_dashboards.yml "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-search-db/opensearch_dashboards.yml"
 curl -sSL -o ./dist/dbrepo.config.json "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-${VERSION}/dbrepo-ui/dbrepo.config.json"
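
Note for operators upgrading an existing Helm deployment: the values renamed above (metadatadb.rootUser.*, the single storageservice.s3.bucket, the adminAccessKeyId/adminSecretAccessKey credentials, ui.public.upload.client) are the keys a custom values file now has to set. The override below is a minimal sketch that only uses key paths appearing in this diff; the credentials are illustrative placeholders, not suggested defaults.

# custom-values.yaml -- illustrative override for the renamed 1.4.5 keys (secrets are placeholders)
metadatadb:
  rootUser:
    user: root
    password: <metadata-db-root-password>
  # must be an odd number of cluster nodes, e.g. 3
  replicaCount: 3
storageservice:
  s3:
    bucket: dbrepo
    auth:
      enabled: true
      adminAccessKeyId: <s3-access-key-id>
      adminSecretAccessKey: <s3-secret-access-key>
ui:
  public:
    upload:
      # an empty value falls back to <gateway>/api/upload/files
      client: ""

Such a file would be passed to the usual helm upgrade invocation with --values custom-values.yaml; the chart reference itself is not part of this diff and is assumed to match the existing installation.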