From db374682f5d0f6adb4b10ab18a4fde75d45edf3c Mon Sep 17 00:00:00 2001
From: Martin Weise <martin.weise@tuwien.ac.at>
Date: Fri, 30 May 2025 10:57:10 +0200
Subject: [PATCH] Use a constant for the s3_key log attribute

Replace the repeated "s3_key" string literal in StorageServiceS3Impl log
statements with a single S3_KEY constant, drop a stale TODO, and remove the
fixed one-second sleep from the StorageServiceIntegrationTest setup.

Signed-off-by: Martin Weise <martin.weise@tuwien.ac.at>
---
 .../service/StorageServiceIntegrationTest.java  |  1 -
 .../service/impl/StorageServiceS3Impl.java      | 17 +++++++++--------
 2 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java b/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java
index e9f51a83f4..6493be6af5 100644
--- a/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java
+++ b/dbrepo-data-service/rest-service/src/test/java/at/ac/tuwien/ifs/dbrepo/service/StorageServiceIntegrationTest.java
@@ -84,7 +84,6 @@ public class StorageServiceIntegrationTest extends BaseTest {
     @BeforeEach
     public void beforeEach() throws SQLException, InterruptedException {
         /* s3 */
-        Thread.sleep(1000) /* wait for test container some more */;
         if (s3Client.listBuckets().buckets().stream().noneMatch(b -> b.name().equals(s3Config.getS3Bucket()))) {
             log.warn("Bucket {} not found", s3Config.getS3Bucket());
             s3Client.createBucket(CreateBucketRequest.builder()
diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java
index e1b235d8a9..22887c8d10 100644
--- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java
+++ b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ifs/dbrepo/service/impl/StorageServiceS3Impl.java
@@ -35,6 +35,8 @@ public class StorageServiceS3Impl implements StorageService {
     private final S3Client s3Client;
     private final SparkSession sparkSession;
 
+    private static final String S3_KEY = "s3_key";
+
     @Autowired
     public StorageServiceS3Impl(S3Config s3Config, S3Client s3Client, SparkSession sparkSession) {
         this.s3Config = s3Config;
@@ -98,7 +100,6 @@ public class StorageServiceS3Impl implements StorageService {
     }
 
     @Override
-    // TODO should be export to S3 -> load from S3
     public ExportResourceDto transformDataset(Dataset<Row> dataset) throws StorageUnavailableException {
         final List<Map<String, String>> inMemory = dataset.collectAsList()
                 .stream()
@@ -146,7 +147,7 @@ public class StorageServiceS3Impl implements StorageService {
         final String path = "s3a://" + s3Config.getS3Bucket() + "/" + key;
         log.atDebug()
                 .setMessage("read dataset " + key + " using header: " + withHeader)
-                .addKeyValue("s3_key", key)
+                .addKeyValue(S3_KEY, key)
                 .addKeyValue("s3_bucket", s3Config.getS3Bucket())
                 .addKeyValue("header", withHeader)
                 .log();
@@ -163,7 +164,7 @@ public class StorageServiceS3Impl implements StorageService {
                 if (exception.getSimpleMessage().contains("PATH_NOT_FOUND")) {
                     log.atError()
                             .setMessage("Failed to find dataset " + key + " in storage service")
-                            .addKeyValue("s3_key", key)
+                            .addKeyValue(S3_KEY, key)
                             .setCause(e)
                             .log();
                     throw new StorageNotFoundException("Failed to find dataset in storage service: " + e.getMessage());
@@ -171,7 +172,7 @@ public class StorageServiceS3Impl implements StorageService {
                 if (exception.getSimpleMessage().contains("UNRESOLVED_COLUMN")) {
                     log.atError()
                             .setMessage("Failed to resolve column from dataset in database")
-                            .addKeyValue("s3_key", key)
+                            .addKeyValue(S3_KEY, key)
                             .setCause(e)
                             .log();
                     throw new TableMalformedException("Failed to resolve column from dataset in database: " + e.getMessage());
@@ -179,14 +180,14 @@ public class StorageServiceS3Impl implements StorageService {
             } else if (e instanceof IllegalArgumentException) {
                 log.atError()
                         .setMessage("Failed to map columns: " + e.getMessage())
-                        .addKeyValue("s3_key", key)
+                        .addKeyValue(S3_KEY, key)
                         .setCause(e)
                         .log();
                 throw new MalformedException("Failed to map columns: " + e.getMessage());
             }
             log.atError()
                     .setMessage("Failed to connect to storage service")
-                    .addKeyValue("s3_key", key)
+                    .addKeyValue(S3_KEY, key)
                     .setCause(e)
                     .log();
             throw new StorageUnavailableException("Failed to connect to storage service: " + e.getMessage());
@@ -220,14 +221,14 @@ public class StorageServiceS3Impl implements StorageService {
             if (e instanceof ExtendedAnalysisException exception) {
                 log.atError()
                         .setMessage("Failed to resolve column from dataset in database")
-                        .addKeyValue("s3_key", key)
+                        .addKeyValue(S3_KEY, key)
                         .setCause(e)
                         .log();
                 throw new TableMalformedException("Failed to resolve column from dataset in database: " + exception.getSimpleMessage());
             }
             log.atError()
                     .setMessage("Failed to select columns from dataset")
-                    .addKeyValue("s3_key", key)
+                    .addKeyValue(S3_KEY, key)
                     .setCause(e)
                     .log();
             throw new MalformedException("Failed to select columns from dataset: " + e.getMessage());
-- 
GitLab