diff --git a/dbrepo-auth-service/listeners/target/create-event-listener.jar b/dbrepo-auth-service/listeners/target/create-event-listener.jar index e6ee56c50203b9ba4e75d7964141c3c353ff933d..6ba9f58a962c7d786d8731418b1312432088baca 100644 Binary files a/dbrepo-auth-service/listeners/target/create-event-listener.jar and b/dbrepo-auth-service/listeners/target/create-event-listener.jar differ diff --git a/dbrepo-data-service/init.Dockerfile b/dbrepo-data-service/init.Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..4cef42722f2a1695ed318f0f459080dbf76a1957 --- /dev/null +++ b/dbrepo-data-service/init.Dockerfile @@ -0,0 +1,42 @@ +###### FIRST STAGE ###### +FROM dbrepo-core:build AS dependency +LABEL org.opencontainers.image.authors="martin.weise@tuwien.ac.at" + +###### SECOND STAGE ###### +FROM maven:3-amazoncorretto-17 AS build +LABEL org.opencontainers.image.authors="martin.weise@tuwien.ac.at" + +COPY ./pom.xml ./ + +RUN mvn -fn dependency:go-offline + +COPY --from=dependency /root/.m2/repository/at/ac/tuwien/ifs/dbrepo /root/.m2/repository/at/ac/tuwien/ifs/dbrepo + +COPY ./querystore ./querystore +COPY ./report ./report +COPY ./rest-service ./rest-service +COPY ./services ./services + +# Make sure it compiles +RUN mvn -fn clean package -DskipTests + +###### THIRD STAGE ###### +FROM amazoncorretto:17-alpine3.19 AS runtime +LABEL org.opencontainers.image.authors="martin.weise@tuwien.ac.at" + +RUN apk add --no-cache curl bash jq + +WORKDIR /app + +RUN adduser -D dbrepo --uid 1001 + +USER 1001 + +COPY --from=build --chown=1001 ./rest-service/target/rest-service-*.jar ./data-service-init.jar + +# non-root port +EXPOSE 8080 + +ENV JAVA_OPTS="-XX:+IgnoreUnrecognizedVMOptions --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.lang.invoke=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED 
--add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/jdk.internal.ref=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/sun.nio.cs=ALL-UNNAMED --add-opens=java.base/sun.security.action=ALL-UNNAMED --add-opens=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED -Djdk.reflect.useDirectMethodHandle=false -Dio.netty.tryReflectionSetAccessible=true" + +ENTRYPOINT exec java $JAVA_OPTS -jar ./data-service-init.jar init \ No newline at end of file diff --git a/dbrepo-data-service/rest-service/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/DataServiceApplication.java b/dbrepo-data-service/rest-service/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/DataServiceApplication.java index 8430543899ef67fa7daa0e22f57a4f0eb58eb7d4..ec763b9d4ac3efea27e3664fa9bdb40d8b32733b 100644 --- a/dbrepo-data-service/rest-service/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/DataServiceApplication.java +++ b/dbrepo-data-service/rest-service/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/DataServiceApplication.java @@ -1,17 +1,87 @@ package at.ac.tuwien.ac.at.ifs.dbrepo; +import at.ac.tuwien.ac.at.ifs.dbrepo.gateway.MetadataServiceGateway; +import at.ac.tuwien.ac.at.ifs.dbrepo.service.CacheService; +import at.ac.tuwien.ac.at.ifs.dbrepo.service.ContainerService; +import at.ac.tuwien.ac.at.ifs.dbrepo.service.DatabaseService; +import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseBriefDto; +import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseDto; +import at.ac.tuwien.ifs.dbrepo.core.api.database.ViewDto; +import at.ac.tuwien.ifs.dbrepo.core.api.database.table.TableDto; +import at.ac.tuwien.ifs.dbrepo.core.exception.*; import lombok.extern.log4j.Log4j2; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.CommandLineRunner; import 
org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.context.ApplicationContext; import org.springframework.scheduling.annotation.EnableScheduling; +import java.sql.SQLException; +import java.util.List; + @Log4j2 @EnableScheduling @SpringBootApplication -public class DataServiceApplication { +public class DataServiceApplication implements CommandLineRunner { + + private final CacheService cacheService; + private final DatabaseService databaseService; + private final ContainerService containerService; + private final ApplicationContext applicationContext; + private final MetadataServiceGateway metadataServiceGateway; + + @Autowired + public DataServiceApplication(CacheService cacheService, DatabaseService databaseService, + ContainerService containerService, ApplicationContext applicationContext, + MetadataServiceGateway metadataServiceGateway) { + this.cacheService = cacheService; + this.databaseService = databaseService; + this.containerService = containerService; + this.applicationContext = applicationContext; + this.metadataServiceGateway = metadataServiceGateway; + } public static void main(String[] args) { SpringApplication.run(DataServiceApplication.class, args); } + @Override + public void run(String... 
args) throws MetadataServiceException, RemoteUnavailableException, SQLException, + DatabaseNotFoundException, QueryStoreCreateException, TableNotFoundException, DatabaseMalformedException, + ViewNotFoundException { + if (args.length == 0) { + return; + } + for (DatabaseBriefDto d : metadataServiceGateway.getDatabases()) { + final DatabaseDto database = cacheService.getDatabase(d.getId()); + containerService.createQueryStore(database.getContainer(), database.getInternalName()); + final List<TableDto> tables = databaseService.exploreTables(database); + if (database.getTables().size() != tables.size()) { + final List<TableDto> missingTables = tables.stream() + .filter(table -> database.getTables() + .stream() + .noneMatch(t -> table.getInternalName().equals(t.getInternalName()))) + .toList(); + if (!missingTables.isEmpty()) { + log.warn("Failed to obtain metadata on table(s): {}", missingTables); + metadataServiceGateway.updateTableSchemas(d.getId()); + } + } + final List<ViewDto> views = databaseService.exploreViews(database); + if (database.getViews().size() != views.size()) { + final List<ViewDto> missingViews = views.stream() + .filter(view -> database.getViews() + .stream() + .noneMatch(v -> view.getInternalName().equals(v.getInternalName()))) + .toList(); + if (!missingViews.isEmpty()) { + log.warn("Failed to obtain metadata on view(s): {}", missingViews); + metadataServiceGateway.updateViewSchemas(d.getId()); + } + } + } + log.info("Finished initialization"); + SpringApplication.exit(applicationContext, () -> 0); + } } diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/MetadataServiceGateway.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/MetadataServiceGateway.java index 01634718c749778a65f046066e5cb8d1ac879a7a..6a6acdba049fd61db07c024bbfc6163dc268f202 100644 --- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/MetadataServiceGateway.java +++ 
b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/MetadataServiceGateway.java @@ -2,6 +2,7 @@ package at.ac.tuwien.ac.at.ifs.dbrepo.gateway; import at.ac.tuwien.ifs.dbrepo.core.api.container.ContainerDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseAccessDto; +import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseBriefDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.ViewDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.table.TableDto; @@ -12,7 +13,7 @@ import jakarta.validation.constraints.NotNull; import java.util.List; import java.util.UUID; -// todo ? + public interface MetadataServiceGateway { /** @@ -102,6 +103,19 @@ public interface MetadataServiceGateway { List<IdentifierBriefDto> getIdentifiers(@NotNull UUID databaseId, UUID subsetId) throws MetadataServiceException, RemoteUnavailableException, DatabaseNotFoundException; + /** + * Get a list of databases. + * + * @return The list of databases. + * @throws RemoteUnavailableException The remote service is not available and invalid data was returned. + * @throws MetadataServiceException The remote service returned invalid data. + */ + List<DatabaseBriefDto> getDatabases() throws MetadataServiceException, RemoteUnavailableException; + + DatabaseBriefDto updateTableSchemas(UUID databaseId) throws MetadataServiceException, RemoteUnavailableException; + + DatabaseBriefDto updateViewSchemas(UUID databaseId) throws MetadataServiceException, RemoteUnavailableException; + /** * Update the table statistics in the metadata service. 
* diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/impl/MetadataServiceGatewayImpl.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/impl/MetadataServiceGatewayImpl.java index 292b5bba4a023d4f21d43d043346dfeba4875add..a30c782c9dba8a4a662f5f11be2ca530bfe725e0 100644 --- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/impl/MetadataServiceGatewayImpl.java +++ b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/gateway/impl/MetadataServiceGatewayImpl.java @@ -2,6 +2,7 @@ package at.ac.tuwien.ac.at.ifs.dbrepo.gateway.impl; import at.ac.tuwien.ifs.dbrepo.core.api.container.ContainerDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseAccessDto; +import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseBriefDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.DatabaseDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.ViewDto; import at.ac.tuwien.ifs.dbrepo.core.api.database.table.TableDto; @@ -269,6 +270,72 @@ public class MetadataServiceGatewayImpl implements MetadataServiceGateway { return List.of(response.getBody()); } + @Override + public List<DatabaseBriefDto> getDatabases() throws MetadataServiceException, RemoteUnavailableException { + final ResponseEntity<DatabaseBriefDto[]> response; + final String url = "/api/database"; + log.debug("get databases from metadata service: {}", url); + try { + response = internalRestTemplate.exchange(url, HttpMethod.GET, HttpEntity.EMPTY, DatabaseBriefDto[].class); + } catch (ResourceAccessException | HttpServerErrorException e) { + log.error("Failed to find databases: {}", e.getMessage()); + throw new RemoteUnavailableException("Failed to find databases: " + e.getMessage(), e); + } + if (!response.getStatusCode().equals(HttpStatus.OK)) { + log.error("Failed to find databases: service responded unsuccessful: {}", response.getStatusCode()); + throw new 
MetadataServiceException("Failed to find databases: service responded unsuccessful: " + response.getStatusCode()); + } + if (response.getBody() == null) { + log.error("Failed to find databases: body is null"); + throw new MetadataServiceException("Failed to find databases: body is null"); + } + return List.of(response.getBody()); + } + + @Override + public DatabaseBriefDto updateTableSchemas(UUID databaseId) throws MetadataServiceException, RemoteUnavailableException { + final ResponseEntity<DatabaseBriefDto> response; + final String url = "/api/database/" + databaseId + "/metadata/table"; + log.debug("update table schemas in metadata service: {}", url); + try { + response = internalRestTemplate.exchange(url, HttpMethod.PUT, HttpEntity.EMPTY, DatabaseBriefDto.class); + } catch (ResourceAccessException | HttpServerErrorException e) { + log.error("Failed to update table schemas: {}", e.getMessage()); + throw new RemoteUnavailableException("Failed to update table schemas: " + e.getMessage(), e); + } + if (!response.getStatusCode().equals(HttpStatus.OK)) { + log.error("Failed to update table schemas: service responded unsuccessful: {}", response.getStatusCode()); + throw new MetadataServiceException("Failed to update table schemas: service responded unsuccessful: " + response.getStatusCode()); + } + if (response.getBody() == null) { + log.error("Failed to update table schemas: body is null"); + throw new MetadataServiceException("Failed to update table schemas: body is null"); + } + return response.getBody(); + } + + @Override + public DatabaseBriefDto updateViewSchemas(UUID databaseId) throws MetadataServiceException, RemoteUnavailableException { + final ResponseEntity<DatabaseBriefDto> response; + final String url = "/api/database/" + databaseId + "/metadata/view"; + log.debug("update view schemas in metadata service: {}", url); + try { + response = internalRestTemplate.exchange(url, HttpMethod.PUT, HttpEntity.EMPTY, DatabaseBriefDto.class); + } catch 
(ResourceAccessException | HttpServerErrorException e) { + log.error("Failed to update view schemas: {}", e.getMessage()); + throw new RemoteUnavailableException("Failed to update view schemas: " + e.getMessage(), e); + } + if (!response.getStatusCode().equals(HttpStatus.OK)) { + log.error("Failed to update view schemas: service responded unsuccessful: {}", response.getStatusCode()); + throw new MetadataServiceException("Failed to update view schemas: service responded unsuccessful: " + response.getStatusCode()); + } + if (response.getBody() == null) { + log.error("Failed to update view schemas: body is null"); + throw new MetadataServiceException("Failed to update view schemas: body is null"); + } + return response.getBody(); + } + @Override public void updateTableStatistics(UUID databaseId, UUID tableId, String authorization) throws TableNotFoundException, MetadataServiceException, RemoteUnavailableException { diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/mapper/MariaDbMapper.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/mapper/MariaDbMapper.java index 7895c902e70964fb6aee68927bd40a6b798e58a8..2abcc51ff305b49583937ffb89b38b83832c165a 100644 --- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/mapper/MariaDbMapper.java +++ b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/mapper/MariaDbMapper.java @@ -124,25 +124,25 @@ public interface MariaDbMapper { } default String queryStoreCreateTableRawQuery() { - final String statement = "CREATE TABLE `qs_queries` ( `id` VARCHAR(36) NOT NULL PRIMARY KEY DEFAULT UUID(), `created` datetime NOT NULL DEFAULT NOW(), `executed` datetime NOT NULL default now(), `created_by` VARCHAR(36), `query` text NOT NULL, `query_normalized` text NOT NULL, `is_persisted` boolean NOT NULL, `query_hash` VARCHAR(255) NOT NULL, `result_hash` VARCHAR(255), `result_number` bigint) WITH SYSTEM VERSIONING;"; + final String 
statement = "CREATE TABLE IF NOT EXISTS `qs_queries` ( `id` VARCHAR(36) NOT NULL PRIMARY KEY DEFAULT UUID(), `created` datetime NOT NULL DEFAULT NOW(), `executed` datetime NOT NULL default now(), `created_by` VARCHAR(36), `query` text NOT NULL, `query_normalized` text NOT NULL, `is_persisted` boolean NOT NULL, `query_hash` VARCHAR(255) NOT NULL, `result_hash` VARCHAR(255), `result_number` bigint) WITH SYSTEM VERSIONING;"; log.trace("mapped create query store table statement: {}", statement); return statement; } default String queryStoreCreateHashTableProcedureRawQuery() { - final String statement = "CREATE PROCEDURE hash_table(IN name VARCHAR(255), OUT hash VARCHAR(255), OUT count BIGINT) BEGIN DECLARE _sql TEXT; SELECT CONCAT('SELECT SHA2(GROUP_CONCAT(CONCAT_WS(\\'\\',', GROUP_CONCAT(CONCAT('`', column_name, '`') ORDER BY column_name), ') SEPARATOR \\',\\'), 256) AS hash, COUNT(*) AS count FROM `', name, '` INTO @hash, @count;') FROM `information_schema`.`columns` WHERE `table_schema` = DATABASE() AND `table_name` = name INTO _sql; PREPARE stmt FROM _sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; SET hash = @hash; SET count = @count; END;"; + final String statement = "CREATE OR REPLACE PROCEDURE hash_table(IN name VARCHAR(255), OUT hash VARCHAR(255), OUT count BIGINT) BEGIN DECLARE _sql TEXT; SELECT CONCAT('SELECT SHA2(GROUP_CONCAT(CONCAT_WS(\\'\\',', GROUP_CONCAT(CONCAT('`', column_name, '`') ORDER BY column_name), ') SEPARATOR \\',\\'), 256) AS hash, COUNT(*) AS count FROM `', name, '` INTO @hash, @count;') FROM `information_schema`.`columns` WHERE `table_schema` = DATABASE() AND `table_name` = name INTO _sql; PREPARE stmt FROM _sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; SET hash = @hash; SET count = @count; END;"; log.trace("mapped create query store hash_table procedure statement: {}", statement); return statement; } default String queryStoreCreateStoreQueryProcedureRawQuery() { - final String statement = "CREATE PROCEDURE store_query(IN query TEXT, IN executed 
DATETIME, OUT queryId VARCHAR(36)) BEGIN DECLARE _queryhash VARCHAR(255) DEFAULT SHA2(query, 256); DECLARE _username VARCHAR(255) DEFAULT REGEXP_REPLACE(current_user(), '@.*', ''); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;"; + final String statement = "CREATE OR REPLACE PROCEDURE store_query(IN query TEXT, IN executed DATETIME, OUT queryId VARCHAR(36)) BEGIN DECLARE _queryhash VARCHAR(255) DEFAULT SHA2(query, 256); DECLARE _username VARCHAR(255) DEFAULT REGEXP_REPLACE(current_user(), '@.*', ''); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, 
false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;"; log.trace("mapped create query store store_query procedure statement: {}", statement); return statement; } default String queryStoreCreateInternalStoreQueryProcedureRawQuery() { - final String statement = "CREATE DEFINER = 'root' PROCEDURE _store_query(IN _username VARCHAR(255), IN query TEXT, IN executed DATETIME, OUT queryId VARCHAR(36)) BEGIN DECLARE _queryhash VARCHAR(255) DEFAULT SHA2(query, 256); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, 
executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;"; + final String statement = "CREATE OR REPLACE DEFINER = 'root' PROCEDURE _store_query(IN _username VARCHAR(255), IN query TEXT, IN executed DATETIME, OUT queryId VARCHAR(36)) BEGIN DECLARE _queryhash VARCHAR(255) DEFAULT SHA2(query, 256); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;"; log.trace("mapped create query store _store_query procedure statement: {}", statement); return statement; } diff --git a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/service/impl/ContainerServiceMariaDbImpl.java b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/service/impl/ContainerServiceMariaDbImpl.java index 
8a61a05af51f4a1834bd384e0928933de510b75d..2747b1cdc4b429c620030bea6f8eaa581cc7737d 100644 --- a/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/service/impl/ContainerServiceMariaDbImpl.java +++ b/dbrepo-data-service/services/src/main/java/at/ac/tuwien/ac/at/ifs/dbrepo/service/impl/ContainerServiceMariaDbImpl.java @@ -94,6 +94,6 @@ public class ContainerServiceMariaDbImpl extends DataConnector implements Contai } finally { dataSource.close(); } - log.info("Created query store in database with name {}", databaseName); + log.info("Created or replaced query store and procedures in database: {}", databaseName); } } diff --git a/docker-compose.yml b/docker-compose.yml index e0be02996d38a2559b941c793caa56e668f51eef..196ee28f06ab875f6fa878051855ffa446de1248 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -636,6 +636,37 @@ services: logging: driver: json-file + dbrepo-data-service-init: + restart: "no" + init: true + container_name: dbrepo-data-service-init + hostname: data-service-init + image: dbrepo-data-service-init:latest + build: + context: ./dbrepo-data-service + dockerfile: init.Dockerfile + network: host + environment: + AUTH_SERVICE_CLIENT: "${AUTH_SERVICE_CLIENT:-dbrepo-client}" + AUTH_SERVICE_CLIENT_SECRET: "${AUTH_SERVICE_CLIENT_SECRET:-MUwRc7yfXSJwX8AdRMWaQC3Nep1VjwgG}" + AUTH_SERVICE_ENDPOINT: "${AUTH_SERVICE_ENDPOINT:-http://auth-service:8080}" + METADATA_SERVICE_ENDPOINT: "${METADATA_SERVICE_ENDPOINT:-http://metadata-service:8080}" + GRANT_DEFAULT_READ: "${GRANT_DEFAULT_READ:-SELECT}" + GRANT_DEFAULT_WRITE: "${GRANT_DEFAULT_WRITE:-SELECT, CREATE, CREATE VIEW, CREATE ROUTINE, CREATE TEMPORARY TABLES, LOCK TABLES, INDEX, TRIGGER, INSERT, UPDATE, DELETE}" + JWT_PUBKEY: 
"${JWT_PUBKEY:-MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqqnHQ2BWWW9vDNLRCcxD++xZg/16oqMo/c1l+lcFEjjAIJjJp/HqrPYU/U9GvquGE6PbVFtTzW1KcKawOW+FJNOA3CGo8Q1TFEfz43B8rZpKsFbJKvQGVv1Z4HaKPvLUm7iMm8Hv91cLduuoWx6Q3DPe2vg13GKKEZe7UFghF+0T9u8EKzA/XqQ0OiICmsmYPbwvf9N3bCKsB/Y10EYmZRb8IhCoV9mmO5TxgWgiuNeCTtNCv2ePYqL/U0WvyGFW0reasIK8eg3KrAUj8DpyOgPOVBn3lBGf+3KFSYi+0bwZbJZWqbC/Xlk20Go1YfeJPRIt7ImxD27R/lNjgDO/MwIDAQAB}" + LOG_LEVEL: ${LOG_LEVEL:-info} + SYSTEM_USERNAME: "${SYSTEM_USERNAME:-admin}" + SYSTEM_PASSWORD: "${SYSTEM_PASSWORD:-admin}" + deploy: + <<: *resources-large + depends_on: + dbrepo-data-db: + condition: service_healthy + dbrepo-metadata-service: + condition: service_healthy + logging: + driver: json-file + dbrepo-dashboard-ui: restart: "no" container_name: dbrepo-dashboard-ui diff --git a/helm/dbrepo/files/create-event-listener.jar b/helm/dbrepo/files/create-event-listener.jar index e6ee56c50203b9ba4e75d7964141c3c353ff933d..6ba9f58a962c7d786d8731418b1312432088baca 100644 Binary files a/helm/dbrepo/files/create-event-listener.jar and b/helm/dbrepo/files/create-event-listener.jar differ