Skip to content
Snippets Groups Projects
Verified Commit 86009366 authored by Martin Weise's avatar Martin Weise
Browse files

Updated the changes from release branch

parent 93a12caa
No related branches found
No related tags found
1 merge request: !422 "Fixed a library issue where the value could not be empty"
......@@ -7,7 +7,7 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [v1.9.0](https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/tags/v1.9.0) - 2025-05-30
#### Fixes
......
......@@ -11,7 +11,7 @@ author: Martin Weise
If you have [Docker](https://docs.docker.com/engine/install/) already installed on your system, you can install DBRepo with:
```shell
curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/install.sh | bash
curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/install.sh | bash
```
!!! bug "Default installation security disclaimer"
......@@ -35,7 +35,7 @@ SSL/TLS certificate is recommended. Follow the [secure installation](#secure-ins
Execute the installation script to download only the environment and save it to `dist`.
```shell
curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/install.sh | DOWNLOAD_ONLY=1 bash
curl -sSL https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/install.sh | DOWNLOAD_ONLY=1 bash
```
### Static Configuration
......@@ -79,7 +79,7 @@ the variable `IDENTITY_SERVICE_ADMIN_PASSWORD` in `.env`.
Update the client secret of the `dbrepo-client`:
```bash
curl -sSL "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.8/.scripts/reg-client-secret.sh" | bash
curl -sSL "https://gitlab.phaidra.org/fair-data-austria-db-repository/fda-services/-/raw/release-1.9/.scripts/reg-client-secret.sh" | bash
```
Also, update the JWT key according to the
......
......@@ -166,6 +166,8 @@ build-metadata-service:
paths:
- ./dbrepo-metadata-service/oai/target/classes
- ./dbrepo-metadata-service/repositories/target/classes
- ./dbrepo-metadata-service/repositories/target/classes
- ./dbrepo-metadata-service/repositories/target/generated-sources
- ./dbrepo-metadata-service/rest-service/target/classes
- ./dbrepo-metadata-service/services/target/classes
expire_in: 1 days
......@@ -229,9 +231,11 @@ build-data-service:
artifacts:
when: always
paths:
- ./dbrepo-data-service/services/target/classes
- ./dbrepo-data-service/rest-service/target/classes
- ./dbrepo-data-service/querystore/target/classes
- ./dbrepo-data-service/rest-service/target/classes
- ./dbrepo-data-service/rest-service/target/test-classes
- ./dbrepo-data-service/services/target/classes
- ./dbrepo-data-service/services/target/generated-sources
expire_in: 1 days
build-ui:
......@@ -597,7 +601,8 @@ release-docs:
stage: release
image: docker.io/python:${PYTHON_VERSION}-alpine${ALPINE_VERSION}
only:
- tags
refs:
- /^release-.*/
before_script:
- "apk add --no-cache alpine-sdk bash sed wget openssh jq curl"
- "pip install pipenv"
......
......@@ -84,7 +84,6 @@ public class StorageServiceIntegrationTest extends BaseTest {
@BeforeEach
public void beforeEach() throws SQLException, InterruptedException {
/* s3 */
Thread.sleep(1000) /* wait for test container some more */;
if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) {
log.warn("Bucket {} not found", s3Config.getS3Bucket());
s3Client.createBucket(CreateBucketRequest.builder()
......
......@@ -35,6 +35,8 @@ public class StorageServiceS3Impl implements StorageService {
private final S3Client s3Client;
private final SparkSession sparkSession;
private static final String S3_KEY = "s3_key";
@Autowired
public StorageServiceS3Impl(S3Config s3Config, S3Client s3Client, SparkSession sparkSession) {
this.s3Config = s3Config;
......@@ -98,7 +100,6 @@ public class StorageServiceS3Impl implements StorageService {
}
@Override
// TODO should be export to S3 -> load from S3
public ExportResourceDto transformDataset(Dataset<Row> dataset) throws StorageUnavailableException {
final List<Map<String, String>> inMemory = dataset.collectAsList()
.stream()
......@@ -146,7 +147,7 @@ public class StorageServiceS3Impl implements StorageService {
final String path = "s3a://" + s3Config.getS3Bucket() + "/" + key;
log.atDebug()
.setMessage("read dataset " + key + " using header: " + withHeader)
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.addKeyValue("s3_bucket", s3Config.getS3Bucket())
.addKeyValue("header", withHeader)
.log();
......@@ -163,7 +164,7 @@ public class StorageServiceS3Impl implements StorageService {
if (exception.getSimpleMessage().contains("PATH_NOT_FOUND")) {
log.atError()
.setMessage("Failed to find dataset " + key + " in storage service")
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new StorageNotFoundException("Failed to find dataset in storage service: " + e.getMessage());
......@@ -171,7 +172,7 @@ public class StorageServiceS3Impl implements StorageService {
if (exception.getSimpleMessage().contains("UNRESOLVED_COLUMN")) {
log.atError()
.setMessage("Failed to resolve column from dataset in database")
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new TableMalformedException("Failed to resolve column from dataset in database: " + e.getMessage());
......@@ -179,14 +180,14 @@ public class StorageServiceS3Impl implements StorageService {
} else if (e instanceof IllegalArgumentException) {
log.atError()
.setMessage("Failed to map columns: " + e.getMessage())
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new MalformedException("Failed to map columns: " + e.getMessage());
}
log.atError()
.setMessage("Failed to connect to storage service")
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new StorageUnavailableException("Failed to connect to storage service: " + e.getMessage());
......@@ -220,14 +221,14 @@ public class StorageServiceS3Impl implements StorageService {
if (e instanceof ExtendedAnalysisException exception) {
log.atError()
.setMessage("Failed to resolve column from dataset in database")
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new TableMalformedException("Failed to resolve column from dataset in database: " + exception.getSimpleMessage());
}
log.atError()
.setMessage("Failed to select columns from dataset")
.addKeyValue("s3_key", key)
.addKeyValue(S3_KEY, key)
.setCause(e)
.log();
throw new MalformedException("Failed to select columns from dataset: " + e.getMessage());
......
0% — Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment