diff --git a/.docs/changelog.md b/.docs/changelog.md index 723b48b8f18da1b1bca79ecf04a832d1435a925e..5dfa786e8b364deed00ec23a1b7b0af6826beb20 100644 --- a/.docs/changelog.md +++ b/.docs/changelog.md @@ -7,7 +7,7 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). -## [Unreleased] +## [v1.9.0](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/tags/v1.9.0) - 2025-05-30 #### Fixes diff --git a/.docs/installation.md b/.docs/installation.md index 79530f31ee8ee69ffdab2c0078921948229a6db2..785a96794f3775c478bb3e6aec88170f7a573011 100644 --- a/.docs/installation.md +++ b/.docs/installation.md @@ -11,7 +11,7 @@ author: Martin Weise If you have [Docker](https://docs.docker.com/engine/install/) already installed on your system, you can install DBRepo with: ```shell -curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/install.sh | bash +curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/install.sh | bash ``` !!! bug "Default installation security disclaimer" @@ -35,7 +35,7 @@ SSL/TLS certificate is recommended. Follow the [secure installation](#secure-ins Execute the installation script to download only the environment and save it to `dist`. ```shell -curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/install.sh | DOWNLOAD_ONLY=1 bash +curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/install.sh | DOWNLOAD_ONLY=1 bash ``` ### Static Configuration @@ -79,7 +79,7 @@ the variable `IDENTITY_SERVICE_ADMIN_PASSWORD` in `.env`. 
Update the client secret of the `dbrepo-client`: ```bash -curl -sSL "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/.scripts/reg-client-secret.sh" | bash +curl -sSL "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/.scripts/reg-client-secret.sh" | bash ``` Also, update the JWT key according to the diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index ebe349d31f5c7a0863c8e5d451eb09d6d44cfd8e..c44a99dcc51602ba4ac09e2c30d9eabb8f39c4ab 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -166,6 +166,7 @@ build-metadata-service: paths: - ./dbrepo-metadata-service/oai/target/classes - ./dbrepo-metadata-service/repositories/target/classes + - ./dbrepo-metadata-service/repositories/target/generated-sources - ./dbrepo-metadata-service/rest-service/target/classes - ./dbrepo-metadata-service/services/target/classes expire_in: 1 days @@ -229,9 +231,11 @@ build-data-service: artifacts: when: always paths: - - ./dbrepo-data-service/services/target/classes - - ./dbrepo-data-service/rest-service/target/classes - ./dbrepo-data-service/querystore/target/classes + - ./dbrepo-data-service/rest-service/target/classes + - ./dbrepo-data-service/rest-service/target/test-classes + - ./dbrepo-data-service/services/target/classes + - ./dbrepo-data-service/services/target/generated-sources expire_in: 1 days build-ui: @@ -597,7 +601,8 @@ release-docs: stage: release image: docker.io/python:${PYTHON_VERSION}-alpine${ALPINE_VERSION} only: - - tags + refs: + - /^release-.*/ before_script: - "apk add --no-cache alpine-sdk bash sed wget openssh jq curl" - "pip install pipenv" diff --git a/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java b/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java index 
e9f51a83f4fb29b09e0aa79543e11f74c4f65dbb..6493be6af5252f19803edb704de4a093f8454725 100644 --- a/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java +++ b/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java @@ -84,7 +84,6 @@ public class StorageServiceIntegrationTest extends BaseTest { @BeforeEach public void beforeEach() throws SQLException, InterruptedException { /* s3 */ - Thread.sleep(1000) /* wait for test container some more */; if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) { log.warn("Bucket {} not found", s3Config.getS3Bucket()); s3Client.createBucket(CreateBucketRequest.builder() diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java index e1b235d8a970025971fbd92876c0169981f57688..22887c8d10d2049943468db739d0c5ed4b051044 100644 --- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java +++ b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java @@ -35,6 +35,8 @@ public class StorageServiceS3Impl implements StorageService { private final S3Client s3Client; private final SparkSession sparkSession; + private static final String S3_KEY = "s3_key"; + @Autowired public StorageServiceS3Impl(S3Config s3Config, S3Client s3Client, SparkSession sparkSession) { this.s3Config = s3Config; @@ -98,7 +100,6 @@ public class StorageServiceS3Impl implements StorageService { } @Override - // TODO should be export to S3 -> load from S3 public ExportResourceDto transformDataset(Dataset<Row> dataset) throws StorageUnavailableException { final List<Map<String, String>> inMemory = dataset.collectAsList() .stream() @@ -146,7 +147,7 @@ 
public class StorageServiceS3Impl implements StorageService { final String path = "s3a://" + s3Config.getS3Bucket() + "/" + key; log.atDebug() .setMessage("read dataset " + key + " using header: " + withHeader) - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .addKeyValue("s3_bucket", s3Config.getS3Bucket()) .addKeyValue("header", withHeader) .log(); @@ -163,7 +164,7 @@ public class StorageServiceS3Impl implements StorageService { if (exception.getSimpleMessage().contains("PATH_NOT_FOUND")) { log.atError() .setMessage("Failed to find dataset " + key + " in storage service") - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new StorageNotFoundException("Failed to find dataset in storage service: " + e.getMessage()); @@ -171,7 +172,7 @@ public class StorageServiceS3Impl implements StorageService { if (exception.getSimpleMessage().contains("UNRESOLVED_COLUMN")) { log.atError() .setMessage("Failed to resolve column from dataset in database") - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new TableMalformedException("Failed to resolve column from dataset in database: " + e.getMessage()); @@ -179,14 +180,14 @@ public class StorageServiceS3Impl implements StorageService { } else if (e instanceof IllegalArgumentException) { log.atError() .setMessage("Failed to map columns: " + e.getMessage()) - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new MalformedException("Failed to map columns: " + e.getMessage()); } log.atError() .setMessage("Failed to connect to storage service") - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new StorageUnavailableException("Failed to connect to storage service: " + e.getMessage()); @@ -220,14 +221,14 @@ public class StorageServiceS3Impl implements StorageService { if (e instanceof ExtendedAnalysisException exception) { log.atError() .setMessage("Failed to resolve column from dataset in 
database") - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new TableMalformedException("Failed to resolve column from dataset in database: " + exception.getSimpleMessage()); } log.atError() .setMessage("Failed to select columns from dataset") - .addKeyValue("s3_key", key) + .addKeyValue(S3_KEY, key) .setCause(e) .log(); throw new MalformedException("Failed to select columns from dataset: " + e.getMessage());