diff --git a/.dbrepo2/deploy b/.dbrepo2/deploy deleted file mode 100755 index 2cda49aec781a27fc0a36336a6e3990003bb863e..0000000000000000000000000000000000000000 --- a/.dbrepo2/deploy +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -# set version -TAG=latest - -# tear everything down -echo "Removing all data ..." -/bin/bash teardown -docker system prune -f --volumes || true - -echo "Pulling new images ..." -docker pull "dbrepo/analyse-service:${TAG}" -docker pull "dbrepo/authentication-service:${TAG}" -docker pull "dbrepo/metadata-db:${TAG}" -docker pull "dbrepo/ui:${TAG}" -docker pull "dbrepo/ui-proxy:${TAG}" -docker pull "dbrepo/identifier-service:${TAG}" -docker pull "dbrepo/container-service:${TAG}" -docker pull "dbrepo/database-service:${TAG}" -docker pull "dbrepo/discovery-service:${TAG}" -docker pull "dbrepo/gateway-service:${TAG}" -docker pull "dbrepo/query-service:${TAG}" -docker pull "dbrepo/table-service:${TAG}" -docker pull "dbrepo/units-service:${TAG}" -docker pull "dbrepo/broker-service:${TAG}" -docker pull "dbrepo/metadata-service:${TAG}" -echo "Pulled new images" - -# deploy dbrepo -echo "Deploy DBRepo ..." -docker compose -f ./docker-compose.dbrepo2.yml up -d - -# clone tuw specific deployment -git -C "/home/demo/dbrepo-tuw" pull || git clone ssh://git@gitlab.tuwien.ac.at:822/martin.weise/dbrepo-tuw.git "/home/demo/dbrepo-tuw" -cd /home/demo/dbrepo-tuw && git checkout dev - -# build tuw specific deployment -docker compose -f /home/demo/dbrepo-tuw/docker-compose.dbrepo2.yml build || exit 3 -docker compose -f /home/demo/dbrepo-tuw/docker-compose.dbrepo2.yml up -d || exit 4 -echo "Deployed TU Wien specific deployment" \ No newline at end of file diff --git a/.dbrepo2/known_hosts b/.dbrepo2/known_hosts deleted file mode 100644 index d17a72e2de908c97eb551254d51770e6725332b8..0000000000000000000000000000000000000000 --- a/.dbrepo2/known_hosts +++ /dev/null @@ -1,6 +0,0 @@ -gitlab.tuwien.ac.at ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKslN24njaMeH0xyVBO/lzeQ5/X/6UFkiz2qdcWihjT28slO/6pFBAxSzPUTBeVJWxKJgX9N+Hm65S9/Z2521E8Y9F56dcrOgeQBZ4GbLPSbEZIFN/71VL4cOh6tLahNZsAwd6y2Zi5XTAqho31VWfdCZkegZB3dNzLfOuC0t3KpxEAmWus5j8InNYDTBIy6U351/3h3oy32EdLvLpaHP+oy+QFAqXOlcXlKwHTfv9SSchQRLfWzrL4hVvohbCwMDBBvIP8J0WQ9mV95xfcwuipMWP3TksDGbst2MQ6HRXZ+yfie9Vgzg+++AjpHXQCMdalEmedNxCmxbHWYJxFHUV -gitlab.tuwien.ac.at ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBC+2QClFaM3ZNQ6nl3nHkZdFSyNP26uHPzJEUBu9J4yC1ON9GPHb2P3rC//wS819e3LWP4PXb8ug/EyEjWN30MA= -gitlab.tuwien.ac.at ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILTzOKU3zEYNo1xy8rxZsa8D0/y4EN4oB0E7wgWCkOpZ -dbrepo2.ec.tuwien.ac.at ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC5GFfX4xmYeQdQQL81heBjKb4d1M8xjOjF4lpfNmc3/wyCPtP/dt4r3nkcOW3X14yPximy8BlArsPk4ul14BWNI9yrU5VA1FLlWq9Og6SfMJ1EAUNsePDd3qCpr/ljlydS3kWCVrXD1bNO0kQ3/36e9l64BDvZXyhI3n9DvkPO93n9xJ151IC9IwjnSzJQPghboUDRiYwT2B9wt+uC+k7tTV5tQH5kJk8fEa8nMyHOc8aD2miHubrcQJrYO/fv+vVxVWJKtSdz22wuvFnj8SfGClxbT8cfMo8X+LpIQOH8VYwIZVznuSlnNYOOYO6EGYyRnqud6oz0B5RUbwyeBhtAgZW1C1OXO9DVDLnazzFxJIzwhwyZCPurpLpP7bd6P+oFWy1A7bxIhfvdduEDE80vEOPrBl44TDe6RJR6QILtdUn9rrvcV/kgfj04zkQJjqMvX2pdCpdMIU1Pm+NQ53k3oOap7j9UHnpWX7C3mk76ueQddQxWZUhGsrFUAYSJfus= -dbrepo2.ec.tuwien.ac.at ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBOGTeKw2aKnykIUmvLiBNaGbq3xlSEsnD1M1HiZsPJ9ZtfSV12y9F0yutV4j68Rb+eHbyOxoyVekfl19ODDvXLM= -dbrepo2.ec.tuwien.ac.at ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPpSoshRRtQBj4ebWeaWoeGr1XFIlx3L+BXV69fafBGr \ No newline at end of file diff --git a/.dbrepo2/teardown b/.dbrepo2/teardown 
deleted file mode 100755 index 6e818f5ad3acd1eb58002c9c8851b932b159c7c7..0000000000000000000000000000000000000000 --- a/.dbrepo2/teardown +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -echo "=== [ Stopping dbrepo-* ] ===" -docker container stop $(docker container ls -aq -f name=^/dbrepo-.*) || true -echo "=== [ Removing dbrepo-* ] ===" -docker container rm $(docker container ls -aq -f name=^/dbrepo-.*) || true -docker volume rm $(docker volume ls -q -f name=^dbrepo-.*) || true -docker network rm $(docker network ls -q -f name=^dbrepo-.*) || true -echo "=== [ Stopping * ] ===" -docker container stop $(docker container ls -aq -f name=.*-service) || true -docker container stop ui ui-proxy metadata-db || true -echo "=== [ Removing * ] ===" -docker container rm $(docker container ls -aq -f name=.*-service) || true -docker container rm ui ui-proxy metadata-db || true -docker volume rm $(docker volume ls -q) || true -docker network rm core public userdb || true -echo "=== [ Stopping fda-* ] ===" -docker container stop $(docker container ls -aq -f name=^/fda-.*) || true -echo "=== [ Removing fda-* ] ===" -docker container rm $(docker container ls -aq -f name=^/fda-.*) || true -docker volume rm $(docker volume ls -q -f name=^fda-.*) || true -docker network rm $(docker network ls -q -f name=^fda-.*) || true -echo "=== [ Stopping tuw-* ] ===" -docker container stop $(docker container ls -aq -f name=^/tuw-.*) || true -echo "=== [ Removing tuw-* ] ===" -docker container rm $(docker container ls -aq -f name=^/tuw-.*) || true -docker volume rm $(docker volume ls -q -f name=^tuw-.*) || true -docker network rm $(docker network ls -q -f name=^tuw-.*) || true \ No newline at end of file diff --git a/.fda-deployment/clean b/.fda-deployment/clean deleted file mode 100755 index 03506fc6ef3a480eea7269f90c2f8a99209a1bc0..0000000000000000000000000000000000000000 --- a/.fda-deployment/clean +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -docker container stop $(docker container ls -aq) || true -docker container rm $(docker container ls -aq) || true -docker volume rm $(docker volume ls -q) || true diff --git a/.fda-deployment/clean-tmp b/.fda-deployment/clean-tmp deleted file mode 100755 index ac34df7e9c815917e5908b129f3502c18e97e2f4..0000000000000000000000000000000000000000 --- a/.fda-deployment/clean-tmp +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/bash -FILES=$(cd /tmp && find -exec basename '{}' ';' 2>/dev/null | egrep '^.{32}$' | egrep "^([a-z0-9]+)$") -for file in $FILES; do - rm -f /tmp/$file - echo "Removed /tmp/${file}" -done \ No newline at end of file diff --git a/.fda-deployment/crontab b/.fda-deployment/crontab deleted file mode 100644 index bbc97b1a429e174b4a42e99503e70098f1acb441..0000000000000000000000000000000000000000 --- a/.fda-deployment/crontab +++ /dev/null @@ -1,2 +0,0 @@ -0 2 * * * /usr/bin/make -C /home/rocky/fda-services teardown -1 2 * * * /usr/bin/make -C /home/rocky/fda-services run \ No newline at end of file diff --git a/.fda-deployment/fda-authentication-service/install_cert b/.fda-deployment/fda-authentication-service/install_cert deleted file mode 100755 index f52db602f44a1c08cb4cb4e8e8cc6e17e45d190b..0000000000000000000000000000000000000000 --- a/.fda-deployment/fda-authentication-service/install_cert +++ /dev/null @@ -1,52 +0,0 @@ -#!/bin/bash -TMP_CERT_LOCATION="/root/keys" -TMP_SAML_LOCATION="/root/keys" -KEY_STORE_LOCATION="/tmp/dbrepo.jks" -KEY_STORE_PASS="dbrepo" -CERT_LOCATION="/etc/letsencrypt/live/dbrepo.ossdip.at" -SAML_KEY="/root/keys/saml_sign.key" 
-SAML_PUB="/root/keys/saml_sign.cer" -TU_SAML_CERT="./fda-authentication-service/rest-service/src/main/resources/saml/tu.crt" - -# PLACE -sudo mkdir -p "${TMP_CERT_LOCATION}" -sudo mkdir -p "${TMP_SAML_LOCATION}" - -# REQUEST -sudo certbot certonly --standalone --preferred-challenges http -d dbrepo.ossdip.at \ - -m martin.weise@tuwien.ac.at --agree-tos --keep-until-expiring - -# CONVERT PKCS12 -sudo openssl pkcs12 -export -out "${TMP_SAML_LOCATION}/saml.p12" -in "${SAML_PUB}" \ - -inkey "${SAML_KEY}" -passout "pass:${KEY_STORE_PASS}" - -# CONVERT PKCS12 -sudo openssl pkcs12 -export -out "${TMP_CERT_LOCATION}/cert.p12" -in "${CERT_LOCATION}/cert.pem" \ - -inkey "${CERT_LOCATION}/privkey.pem" -passout "pass:${KEY_STORE_PASS}" - -# FIX PERMISSIONS -sudo chmod -R 644 "${TMP_CERT_LOCATION}" -sudo chmod -R 644 "${TMP_SAML_LOCATION}" - -# IMPORT SSL SIGN PRIVKEY -sudo keytool -noprompt -importkeystore -deststorepass "${KEY_STORE_PASS}" -destkeypass "${KEY_STORE_PASS}" \ - -destkeystore "${KEY_STORE_LOCATION}" -srckeystore "${TMP_CERT_LOCATION}/cert.p12" -srcstoretype PKCS12 \ - -srcstorepass "${KEY_STORE_PASS}" -alias 1 -destalias ssl - -# IMPORT SAML MESSAGE SIGN PRIVKEY -sudo keytool -noprompt -importkeystore -deststorepass "${KEY_STORE_PASS}" -destkeypass "${KEY_STORE_PASS}" \ - -destkeystore "${KEY_STORE_LOCATION}" -srckeystore "${TMP_SAML_LOCATION}" -srcstoretype PKCS12 \ - -srcstorepass "${KEY_STORE_PASS}" -alias 1 -destalias saml - -# IMPORT METADATA VERIFICATION PUBKEY -sudo keytool -noprompt -importcert -file "${TU_SAML_CERT}" -storepass "${KEY_STORE_PASS}" \ - -keystore "${KEY_STORE_LOCATION}" -alias tu - -# OWNERSHIP -sudo chown centos:docker "${TMP_CERT_LOCATION}" -sudo chown centos:docker "${TMP_SAML_LOCATION}" -sudo chown centos:docker "${KEY_STORE_LOCATION}" - -# TRUST LET'S ENCRYPT -sudo keytool -noprompt -import -alias letsencrypt -keystore "${KEY_STORE_LOCATION}" -storepass "${KEY_STORE_PASS}" \ - -file "${CERT_LOCATION}/chain.pem" diff --git a/.fda-deployment/fda-authentication-service/install_smtp b/.fda-deployment/fda-authentication-service/install_smtp deleted file mode 100755 index 22b0afaa76093e4c311b2451e365733963b15b49..0000000000000000000000000000000000000000 --- a/.fda-deployment/fda-authentication-service/install_smtp +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -sudo cat /root/smtp.secret >> ./.env diff --git a/.fda-deployment/fda-ui/install_cert b/.fda-deployment/fda-ui/install_cert deleted file mode 100755 index 92553123718052ab77978aea4d8ad3a699c0c816..0000000000000000000000000000000000000000 --- a/.fda-deployment/fda-ui/install_cert +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -CA_PATH="/etc/letsencrypt/live/dbrepo.ossdip.at" - -sudo certbot certonly --standalone --preferred-challenges http -d dbrepo.ossdip.at --agree-tos --keep-until-expiring - -KEY=$(sudo sed -E ':a;N;$!ba;s/\r{0,1}\n/\\n/g' "${CA_PATH}/privkey.pem") -CERT=$(sudo sed -E ':a;N;$!ba;s/\r{0,1}\n/\\n/g' "${CA_PATH}/cert.pem") - -cat << EOF -UI_KEY="${KEY}" -UI_CERT="${CERT}" -EOF \ No newline at end of file diff --git a/.fda-deployment/hosts b/.fda-deployment/hosts deleted file mode 100644 index 40da214c6af6d90f416cca86829a26ae06edec66..0000000000000000000000000000000000000000 --- a/.fda-deployment/hosts +++ /dev/null @@ -1,7 +0,0 @@ -# FDA USERDB -172.28.0.2 fda-userdb-weather-aus -172.28.0.3 fda-userdb-infection -172.28.0.4 fda-userdb-air -172.28.0.5 fda-userdb-u01 -172.28.0.6 fda-userdb-u02 -172.28.0.7 fda-userdb-u03 \ No newline at end of file diff --git a/.gitlab/jacoco2cobertura.py 
b/.gitlab/jacoco2cobertura.py deleted file mode 100755 index 76ad75a70583ae4689952ef037ecb241eec48897..0000000000000000000000000000000000000000 --- a/.gitlab/jacoco2cobertura.py +++ /dev/null @@ -1,155 +0,0 @@ -#!/usr/bin/env python3 -import sys -import xml.etree.ElementTree as ET -import re -import os.path - -# branch-rate="0.0" complexity="0.0" line-rate="1.0" -# branch="true" hits="1" number="86" - -def find_lines(j_package, filename): - """Return all <line> elements for a given source file in a package.""" - lines = list() - sourcefiles = j_package.findall("sourcefile") - for sourcefile in sourcefiles: - if sourcefile.attrib.get("name") == os.path.basename(filename): - lines = lines + sourcefile.findall("line") - return lines - -def line_is_after(jm, start_line): - return int(jm.attrib.get('line', 0)) > start_line - -def method_lines(jmethod, jmethods, jlines): - """Filter the lines from the given set of jlines that apply to the given jmethod.""" - start_line = int(jmethod.attrib.get('line', 0)) - larger = list(int(jm.attrib.get('line', 0)) for jm in jmethods if line_is_after(jm, start_line)) - end_line = min(larger) if len(larger) else 99999999 - - for jline in jlines: - if start_line <= int(jline.attrib['nr']) < end_line: - yield jline - -def convert_lines(j_lines, into): - """Convert the JaCoCo <line> elements into Cobertura <line> elements, add them under the given element.""" - c_lines = ET.SubElement(into, 'lines') - for jline in j_lines: - mb = int(jline.attrib['mb']) - cb = int(jline.attrib['cb']) - ci = int(jline.attrib['ci']) - - cline = ET.SubElement(c_lines, 'line') - cline.set('number', jline.attrib['nr']) - cline.set('hits', '1' if ci > 0 else '0') # Probably not true but no way to know from JaCoCo XML file - - if mb + cb > 0: - percentage = str(int(100 * (float(cb) / (float(cb) + float(mb))))) + '%' - cline.set('branch', 'true') - cline.set('condition-coverage', percentage + ' (' + str(cb) + '/' + str(cb + mb) + ')') - - cond = ET.SubElement(ET.SubElement(cline, 'conditions'), 'condition') - cond.set('number', '0') - cond.set('type', 'jump') - cond.set('coverage', percentage) - else: - cline.set('branch', 'false') - -def guess_filename(path_to_class): - m = re.match('([^$]*)', path_to_class) - return (m.group(1) if m else path_to_class) + '.java' - -def add_counters(source, target): - target.set('line-rate', counter(source, 'LINE')) - target.set('branch-rate', counter(source, 'BRANCH')) - target.set('complexity', counter(source, 'COMPLEXITY', sum)) - -def fraction(covered, missed): - return covered / (covered + missed) - -def sum(covered, missed): - return covered + missed - -def counter(source, type, operation=fraction): - cs = source.findall('counter') - c = next((ct for ct in cs if ct.attrib.get('type') == type), None) - - if c is not None: - covered = float(c.attrib['covered']) - missed = float(c.attrib['missed']) - - return str(operation(covered, missed)) - else: - return '0.0' - -def convert_method(j_method, j_lines): - c_method = ET.Element('method') - c_method.set('name', j_method.attrib['name']) - c_method.set('signature', j_method.attrib['desc']) - - add_counters(j_method, c_method) - convert_lines(j_lines, c_method) - - return c_method - -def convert_class(j_class, j_package): - c_class = ET.Element('class') - c_class.set('name', j_class.attrib['name'].replace('/', '.')) - c_class.set('filename', guess_filename(j_class.attrib['name'])) - - all_j_lines = list(find_lines(j_package, c_class.attrib['filename'])) - - c_methods = ET.SubElement(c_class, 'methods') - 
all_j_methods = list(j_class.findall('method')) - for j_method in all_j_methods: - j_method_lines = method_lines(j_method, all_j_methods, all_j_lines) - c_methods.append(convert_method(j_method, j_method_lines)) - - add_counters(j_class, c_class) - convert_lines(all_j_lines, c_class) - - return c_class - -def convert_package(j_package): - c_package = ET.Element('package') - c_package.attrib['name'] = j_package.attrib['name'].replace('/', '.') - - c_classes = ET.SubElement(c_package, 'classes') - for j_class in j_package.findall('class'): - c_classes.append(convert_class(j_class, j_package)) - - add_counters(j_package, c_package) - - return c_package - -def convert_root(source, target, source_roots): - target.set('timestamp', str(int(source.find('sessioninfo').attrib['start']) / 1000)) - - sources = ET.SubElement(target, 'sources') - for s in source_roots: - ET.SubElement(sources, 'source').text = s - - packages = ET.SubElement(target, 'packages') - for package in source.findall('package'): - packages.append(convert_package(package)) - - add_counters(source, target) - -def jacoco2cobertura(filename, source_roots): - if filename == '-': - root = ET.fromstring(sys.stdin.read()) - else: - tree = ET.parse(filename) - root = tree.getroot() - - into = ET.Element('coverage') - convert_root(root, into, source_roots) - print(ET.tostring(into, encoding='utf8', method='xml').decode("utf-8")) - -if __name__ == '__main__': - if len(sys.argv) < 2: - print("Usage: cover2cover.py FILENAME [SOURCE_ROOTS]") - sys.exit(1) - - filename = sys.argv[1] - source_roots = sys.argv[2:] if 2 < len(sys.argv) else '.' - - jacoco2cobertura(filename, source_roots) \ No newline at end of file diff --git a/.gitlab/tests/test_containers.py b/.gitlab/tests/test_containers.py deleted file mode 100644 index dd4e1a97cc9cd2ea9a3ff9740b8720f600e62a35..0000000000000000000000000000000000000000 --- a/.gitlab/tests/test_containers.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/bin/env python3 -from api_container.api.container_endpoint_api import ContainerEndpointApi -from api_database.api.database_endpoint_api import DatabaseEndpointApi -import time - -container = ContainerEndpointApi() -database = DatabaseEndpointApi() - -def create_container(): - response = container.create1({ - "name": "Pilot Factory Data", - "repository": "mariadb", - "tag": "10.5" - }) - print("created container with id %d" % response.id) - return response - - -def start_container(container_id): - response = container.modify({ - "action": "start" - }, container_id) - print("... 
starting") - time.sleep(5) - print("started container with id %d" % response.id) - return response - - -def create_database(container_id, is_public=True): - response = database.create({ - "name": "Pilot Factory Data", - "is_public": is_public - }, container_id) - print("created database with id %d" % response.id) - return response - -token = "" # keep - -from api_authentication.api.authentication_endpoint_api import AuthenticationEndpointApi -from api_authentication.api.user_endpoint_api import UserEndpointApi -import uuid - -authentication = AuthenticationEndpointApi() -user = UserEndpointApi() - -def create_user(username): - response = user.register({ - "username": username, - "password": username, - "email": username + "@gmail.com" - }) - print("created user with id %d" % response.id) - return response - - -def auth_user(username): - response = authentication.authenticate_user1({ - "username": username, - "password": username - }) - print("authenticated user with id %d" % response.id) - token = response.token - container.api_client.default_headers = {"Authorization": "Bearer " + token} - database.api_client.default_headers = {"Authorization": "Bearer " + token} - return response - - -def find_database(container_id, database_id): - response = database.find_by_id(container_id, database_id) - print("found database with id %d" % response.id) - return response - - -def update_database(container_id, database_id): - response = database.update({ - "description": "This dataset includes daily values from 1983 to the current day, divided into annual files. This includes the maximum hourly average and the number of times the hourly average limit value for ozone was exceeded and the daily averages for sulfur dioxide (SO2), carbon monoxide (CO), nitrogen oxide (NOx), nitrogen monoxide (NO), nitrogen dioxide (NO2), particulate matter (PM10 and PM2.5). ) and particle number (PN), provided that they are of sufficient quality. 
The values of the completed day for the current year are updated every 30 minutes after midnight (UTC+1).", - "publisher": "Technical University of Vienna", - "license": { - "identifier": "CC0-1.0", - "uri": "https://creativecommons.org/publicdomain/zero/1.0/legalcode" - }, - "language": "en", - "publication_year": 2022 - }, container_id, database_id) - print("updated database with id %d" % response.id) - return response - - -def test_containers(): - username = str(uuid.uuid1()).replace("-", "") - uid = create_user(username).id - auth_user(username) - # container 1 - cid = create_container().id - start_container(cid) - dbid = create_database(cid).id - update_database(cid, dbid) diff --git a/.gitlab/tests/test_query.py b/.gitlab/tests/test_query.py deleted file mode 100644 index 283f311b6554b76393568be0efaf1b44f24077a1..0000000000000000000000000000000000000000 --- a/.gitlab/tests/test_query.py +++ /dev/null @@ -1,284 +0,0 @@ -#!/bin/env python3 - -import time -import os -import shutil -import uuid - -from api_authentication.api.authentication_endpoint_api import AuthenticationEndpointApi -from api_authentication.api.user_endpoint_api import UserEndpointApi -from api_container.api.container_endpoint_api import ContainerEndpointApi -from api_database.api.database_endpoint_api import DatabaseEndpointApi -from api_table.api.table_endpoint_api import TableEndpointApi -from api_query.api.table_data_endpoint_api import TableDataEndpointApi -from api_query.api.query_endpoint_api import QueryEndpointApi -from api_query.api.table_history_endpoint_api import TableHistoryEndpointApi -from api_identifier.api.identifier_endpoint_api import IdentifierEndpointApi -from api_identifier.api.persistence_endpoint_api import PersistenceEndpointApi -from api_query.api.view_endpoint_api import ViewEndpointApi -from api_query.rest import ApiException - -authentication = AuthenticationEndpointApi() -user = UserEndpointApi() -container = ContainerEndpointApi() -database = DatabaseEndpointApi() -table = TableEndpointApi() -query = QueryEndpointApi() -history = TableHistoryEndpointApi() -data = TableDataEndpointApi() -identifier = IdentifierEndpointApi() -persistence = PersistenceEndpointApi() -view = ViewEndpointApi() - -token = "" # keep - - -def create_user(username): - response = user.register({ - "username": username, - "password": username, - "email": username + "@gmail.com" - }) - print("created user with id %d" % response.id) - return response - - -def update_password(user_id, password): - response = user.update_password({ - "password": password - }, user_id) - print("updated password for user with id %d" % user_id) - return response - - -def auth_user(username): - response = authentication.authenticate_user1({ - "username": username, - "password": username - }) - print("authenticated user with id %d" % response.id) - token = response.token - container.api_client.default_headers = {"Authorization": "Bearer " + token} - database.api_client.default_headers = {"Authorization": "Bearer " + token} - table.api_client.default_headers = {"Authorization": "Bearer " + token} - data.api_client.default_headers = {"Authorization": "Bearer " + token} - query.api_client.default_headers = {"Authorization": "Bearer " + token} - identifier.api_client.default_headers = {"Authorization": "Bearer " + token} - user.api_client.default_headers = {"Authorization": "Bearer " + token} - persistence.api_client.default_headers = {"Authorization": "Bearer " + token} - history.api_client.default_headers = {"Authorization": "Bearer " + token} 
- view.api_client.default_headers = {"Authorization": "Bearer " + token} - return response - - -def create_container(): - response = container.create1({ - "name": "Pilot Factory Data", - "repository": "mariadb", - "tag": "10.5" - }) - print("created container with id %d" % response.id) - return response - - -def start_container(container_id): - response = container.modify({ - "action": "start" - }, container_id) - print("... starting") - time.sleep(5) - print("started container with id %d" % response.id) - return response - - -def create_database(container_id, is_public=True): - response = database.create({ - "name": "Pilot Factory Data", - "is_public": is_public - }, container_id) - print("created database with id %d" % response.id) - return response - - -def find_database(container_id, database_id): - response = database.find_by_id(container_id, database_id) - print("found database with id %d" % response.id) - return response - - -def update_database(container_id, database_id): - response = database.update({ - "description": "This dataset includes daily values from 1983 to the current day, divided into annual files. This includes the maximum hourly average and the number of times the hourly average limit value for ozone was exceeded and the daily averages for sulfur dioxide (SO2), carbon monoxide (CO), nitrogen oxide (NOx), nitrogen monoxide (NO), nitrogen dioxide (NO2), particulate matter (PM10 and PM2.5). ) and particle number (PN), provided that they are of sufficient quality. The values of the completed day for the current year are updated every 30 minutes after midnight (UTC+1).", - "publisher": "Technical University of Vienna", - "license": { - "identifier": "CC0-1.0", - "uri": "https://creativecommons.org/publicdomain/zero/1.0/legalcode" - }, - "language": "en", - "publication_year": 2022 - }, container_id, database_id) - print("updated database with id %d" % response.id) - return response - - -def create_table(container_id, database_id, columns=None): - if columns is None: - columns = [{ - "name": "Date", - "type": "date", - "dfid": 1, - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Location", - "type": "string", - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Parameter", - "type": "string", - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Interval", - "type": "string", - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Unit", - "type": "string", - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Value", - "type": "decimal", - "unique": False, - "primary_key": False, - "null_allowed": True, - }, { - "name": "Status", - "type": "string", - "unique": False, - "primary_key": False, - "null_allowed": True, - }] - response = table.create({ - "name": "Airquality " + str(uuid.uuid1()), - "description": "Airquality in Zürich, Switzerland", - "columns": columns - }, "Bearer " + token, container_id, database_id) - print("created table with id %d" % response.id) - return response - - -def find_table(container_id, database_id, table_id): - response = table.find_by_id(container_id, database_id, table_id) - print("found table with id %d" % response.id) - return response - - -def fill_table(container_id, database_id, table_id): - response = data.import_csv({ - "location": "/path/to/data.csv", - "quote": "\"", - "null_element": "NA" - "separator": ",", - }, container_id, database_id, table_id) - print("filled table with 
id %d" % table_id) - return response - - -def create_query(container_id, database_id, statement, page=0, size=3): - response = query.execute({ - "statement": statement - }, container_id, database_id, page=page, size=size) - print("executed query with id %d" % response.id) - return response - - -def delete_tuple(container_id, database_id, table_id, keys): - response = data.delete(keys, container_id, database_id, table_id) - print("deleted tuples for table with id %d" % table_id) - return response - - -def download_query_data(container_id, database_id, query_id): - response = query.export1(container_id, database_id, query_id) - print("downloaded query data for query with id %d" % query_id) - return response - - -def list_views(container_id, database_id): - response = view.find_all(container_id, database_id) - print("list views for database with id %d" % database_id) - return response - - -def create_view(container_id, database_id, table_name): - response = view.create({ - "name": "Air Quality " + str(uuid.uuid1()), - "query": "SELECT `date`, `parameter`, `value` FROM `" + table_name + "` WHERE `date` = '2021-10-02T14:00'", - "is_public": True - }, container_id, database_id) - print("created view with id %d" % response.id) - return response - - -def data_view(container_id, database_id, view_id): - response = view.data(container_id, database_id, view_id) - print("retrieved data for view with id %d" % response.id) - return response - - -def test_identifiers(): - # - # create 1 user and 2 containers (public, private) - # - username = str(uuid.uuid1()).replace("-", "") - uid = create_user(username).id - auth_user(username) - # container 1 - cid = create_container().id - start_container(cid) - dbid = create_database(cid).id - update_database(cid, dbid) - tid = create_table(cid, dbid).id - tname = find_table(cid, dbid, tid).internal_name - fill_table(cid, dbid, tid) - create_query(cid, dbid, "select `id` from `" + tname + "`") - create_query(cid, dbid, "select `date` from `" + tname + "`") - qid = create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "`").id - create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "` order by `date` asc") - create_query(cid, dbid, "select t.`date`, t.location, t.status from `" + tname + "` t group by t.`date` order by t.`date` asc") - create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "` group by `date`, `location` asc") - download_query_data(cid, dbid, qid) - # container 2 (=private) - cid = create_container().id - start_container(cid) - dbid = create_database(cid, False).id - update_database(cid, dbid) - tid = create_table(cid, dbid).id - tname = find_table(cid, dbid, tid).internal_name - fill_table(cid, dbid, tid) - qid = create_query(cid, dbid, "select `id` from `" + tname + "`").id - qid = create_query(cid, dbid, "select `id` from `" + tname + "`").id - vid = create_view(cid, dbid, tname).id - data_view(cid, dbid, vid) - list_views(cid, dbid) - for i in range(5, 10): - delete_tuple(cid, dbid, tid, { - "keys": { - "id": i - } - }) - time.sleep(1) - delete_tuple(cid, dbid, tid, { - "keys": { - "location": "Schimmelstrasse" - } - }) diff --git a/.junit/hosts b/.junit/hosts new file mode 100644 index 0000000000000000000000000000000000000000..701ef56fa315ea16163432c842816c946150d97f --- /dev/null +++ b/.junit/hosts @@ -0,0 +1,10 @@ +########################################################### +# PLACE IN /etc/hosts # +########################################################### + +172.30.0.5 
dbrepo-userdb-u01 +172.30.0.6 dbrepo-userdb-u02 +172.30.0.7 dbrepo-userdb-u03 +172.30.0.8 dbrepo-userdb-u04 +172.31.0.2 dbrepo-broker-service +172.31.0.3 dbrepo-search-service \ No newline at end of file diff --git a/docker-compose.dbrepo1.yml b/docker-compose.dbrepo1.yml index dfd09f5097236500634958c7072211892968336c..01db36561d40b6d328fade93b271caa6666d750d 100644 --- a/docker-compose.dbrepo1.yml +++ b/docker-compose.dbrepo1.yml @@ -31,7 +31,7 @@ networks: services: - metadata-db: + dbrepo-metadata-db: restart: on-failure container_name: dbrepo-metadata-db hostname: metadata-db @@ -60,7 +60,7 @@ services: logging: driver: json-file - gateway-service: + dbrepo-gateway-service: restart: on-failure container_name: dbrepo-gateway-service hostname: gateway-service @@ -72,12 +72,12 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - database-service: + dbrepo-database-service: restart: on-failure container_name: dbrepo-database-service hostname: database-service @@ -90,16 +90,16 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - container-service: + dbrepo-container-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - container-service: + dbrepo-container-service: restart: on-failure container_name: dbrepo-container-service hostname: container-service @@ -111,12 +111,12 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - authentication-service: + dbrepo-authentication-service: restart: on-failure container_name: dbrepo-authentication-service hostname: authentication-service @@ -126,16 +126,16 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - query-service: + dbrepo-query-service: restart: on-failure container_name: dbrepo-query-service hostname: query-service @@ -148,14 +148,14 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - table-service: + dbrepo-table-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - table-service: + dbrepo-table-service: restart: on-failure container_name: dbrepo-table-service hostname: table-service @@ -169,16 +169,16 @@ services: - /var/run/docker.sock:/var/run/docker.sock - ${SHARED_FILESYSTEM}:/tmp depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy - search-service: + dbrepo-search-service: condition: service_started - broker-service: + dbrepo-broker-service: condition: service_started logging: driver: json-file - identifier-service: + dbrepo-identifier-service: restart: on-failure container_name: dbrepo-identifier-service hostname: identifier-service @@ -188,16 +188,16 @@ services: env_file: - .env depends_on: - query-service: + dbrepo-query-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy volumes: - ${SHARED_FILESYSTEM}:/tmp logging: driver: json-file - fda-metadata-service: + 
dbrepo-metadata-service: restart: on-failure container_name: dbrepo-metadata-service hostname: metadata-service @@ -207,12 +207,12 @@ services: env_file: - .env depends_on: - metadata-db: + dbrepo-metadata-db: condition: service_started logging: driver: json-file - analyse-service: + dbrepo-analyse-service: restart: on-failure container_name: dbrepo-analyse-service hostname: analyse-service @@ -226,12 +226,12 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - semantics-service: + dbrepo-semantics-service: restart: on-failure container_name: dbrepo-semantics-service hostname: semantics-service @@ -244,14 +244,14 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - broker-service: + dbrepo-broker-service: restart: on-failure container_name: dbrepo-broker-service hostname: broker-service @@ -264,14 +264,14 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy volumes: - broker-service-data:/var/lib/rabbitmq/ logging: driver: json-file - search-service: + dbrepo-search-service: restart: always container_name: dbrepo-search-service hostname: search-service @@ -279,7 +279,7 @@ services: networks: core: depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy ports: - 9200:9200 @@ -290,7 +290,7 @@ services: logging: driver: json-file - ui: + dbrepo-ui: restart: on-failure container_name: dbrepo-ui hostname: ui @@ -303,9 +303,9 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - identifier-service: + dbrepo-identifier-service: condition: service_healthy - database-service: + dbrepo-database-service: condition: service_healthy logging: driver: json-file diff --git a/docker-compose.dbrepo2.yml b/docker-compose.dbrepo2.yml index dbd34f265e1c072684a60209978b9304a1b04fd4..7c169dd1cecfd8f1c9674ea6fb5c1be5a738c07c 100644 --- a/docker-compose.dbrepo2.yml +++ b/docker-compose.dbrepo2.yml @@ -31,7 +31,7 @@ networks: services: - metadata-db: + dbrepo-metadata-db: restart: on-failure container_name: dbrepo-metadata-db hostname: metadata-db @@ -48,7 +48,7 @@ services: logging: driver: json-file - discovery-service: + dbrepo-discovery-service: restart: on-failure container_name: dbrepo-discovery-service hostname: discovery-service @@ -60,7 +60,7 @@ services: logging: driver: json-file - gateway-service: + dbrepo-gateway-service: restart: on-failure container_name: dbrepo-gateway-service hostname: gateway-service @@ -72,12 +72,12 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - database-service: + dbrepo-database-service: restart: on-failure container_name: dbrepo-database-service hostname: database-service @@ -90,16 +90,16 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - container-service: + dbrepo-container-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - container-service: + dbrepo-container-service: restart: on-failure container_name: 
dbrepo-container-service hostname: container-service @@ -111,12 +111,12 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - authentication-service: + dbrepo-authentication-service: restart: on-failure container_name: dbrepo-authentication-service hostname: authentication-service @@ -126,16 +126,16 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - query-service: + dbrepo-query-service: restart: on-failure container_name: dbrepo-query-service hostname: query-service @@ -148,14 +148,14 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - table-service: + dbrepo-table-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - table-service: + dbrepo-table-service: restart: on-failure container_name: dbrepo-table-service hostname: table-service @@ -169,16 +169,16 @@ services: - /var/run/docker.sock:/var/run/docker.sock - ${SHARED_FILESYSTEM}:/tmp depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy - search-service: + dbrepo-search-service: condition: service_started - broker-service: + dbrepo-broker-service: condition: service_started logging: driver: json-file - identifier-service: + dbrepo-identifier-service: restart: on-failure container_name: dbrepo-identifier-service hostname: identifier-service @@ -188,16 +188,16 @@ services: env_file: - .env depends_on: - query-service: + dbrepo-query-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy volumes: - ${SHARED_FILESYSTEM}:/tmp logging: driver: json-file - metadata-service: + dbrepo-metadata-service: restart: on-failure container_name: dbrepo-metadata-service hostname: metadata-service @@ -207,12 +207,12 @@ services: env_file: - .env depends_on: - metadata-db: + dbrepo-metadata-db: condition: service_started logging: driver: json-file - analyse-service: + dbrepo-analyse-service: restart: on-failure container_name: dbrepo-analyse-service hostname: analyse-service @@ -226,12 +226,12 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - semantics-service: + dbrepo-semantics-service: restart: on-failure container_name: dbrepo-semantics-service hostname: semantics-service @@ -244,14 +244,14 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - broker-service: + dbrepo-broker-service: restart: on-failure container_name: dbrepo-broker-service hostname: broker-service @@ -264,14 +264,14 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy volumes: - broker-service-data:/var/lib/rabbitmq/ logging: driver: json-file - search-service: + dbrepo-search-service: restart: always container_name: dbrepo-search-service hostname: search-service @@ -279,7 +279,7 @@ 
services: networks: core: depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy env_file: - .env @@ -288,7 +288,7 @@ services: logging: driver: json-file - ui: + dbrepo-ui: restart: on-failure container_name: dbrepo-ui hostname: ui @@ -301,9 +301,9 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - identifier-service: + dbrepo-identifier-service: condition: service_healthy - database-service: + dbrepo-database-service: condition: service_healthy logging: driver: json-file diff --git a/docker-compose.prod.yml b/docker-compose.prod.yml index 78fc30f91141e52b6d647c1e901aba2e987e7ca6..8984baecab55922994fb89672234edc276ec221e 100644 --- a/docker-compose.prod.yml +++ b/docker-compose.prod.yml @@ -31,7 +31,7 @@ networks: services: - metadata-db: + dbrepo-metadata-db: restart: on-failure container_name: dbrepo-metadata-db hostname: metadata-db @@ -48,7 +48,7 @@ services: logging: driver: json-file - discovery-service: + dbrepo-discovery-service: restart: on-failure container_name: dbrepo-discovery-service hostname: discovery-service @@ -60,7 +60,7 @@ services: logging: driver: json-file - gateway-service: + dbrepo-gateway-service: restart: on-failure container_name: dbrepo-gateway-service hostname: gateway-service @@ -72,12 +72,12 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - database-service: + dbrepo-database-service: restart: on-failure container_name: dbrepo-database-service hostname: database-service @@ -90,16 +90,16 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - container-service: + dbrepo-container-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - container-service: + dbrepo-container-service: restart: on-failure container_name: dbrepo-container-service hostname: container-service @@ -111,12 +111,12 @@ services: volumes: - /var/run/docker.sock:/var/run/docker.sock depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - authentication-service: + dbrepo-authentication-service: restart: on-failure container_name: dbrepo-authentication-service hostname: authentication-service @@ -126,16 +126,16 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - broker-service: + dbrepo-broker-service: condition: service_started - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - query-service: + dbrepo-query-service: restart: on-failure container_name: dbrepo-query-service hostname: query-service @@ -148,14 +148,14 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - table-service: + dbrepo-table-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy logging: driver: json-file - table-service: + dbrepo-table-service: restart: on-failure container_name: dbrepo-table-service hostname: table-service @@ -169,16 +169,16 @@ services: - /var/run/docker.sock:/var/run/docker.sock - ${SHARED_FILESYSTEM}:/tmp depends_on: - authentication-service: + dbrepo-authentication-service: condition: service_healthy - search-service: + dbrepo-search-service: condition: service_started - broker-service: + dbrepo-broker-service: 
condition: service_started logging: driver: json-file - identifier-service: + dbrepo-identifier-service: restart: on-failure container_name: dbrepo-identifier-service hostname: identifier-service @@ -188,16 +188,16 @@ services: env_file: - .env depends_on: - query-service: + dbrepo-query-service: condition: service_healthy - authentication-service: + dbrepo-authentication-service: condition: service_healthy volumes: - ${SHARED_FILESYSTEM}:/tmp logging: driver: json-file - metadata-service: + dbrepo-metadata-service: restart: on-failure container_name: dbrepo-metadata-service hostname: metadata-service @@ -207,12 +207,12 @@ services: env_file: - .env depends_on: - metadata-db: + dbrepo-metadata-db: condition: service_started logging: driver: json-file - analyse-service: + dbrepo-analyse-service: restart: on-failure container_name: dbrepo-analyse-service hostname: analyse-service @@ -226,12 +226,12 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy logging: driver: json-file - semantics-service: + dbrepo-semantics-service: restart: on-failure container_name: dbrepo-semantics-service hostname: semantics-service @@ -244,14 +244,14 @@ services: - ${SHARED_FILESYSTEM}:/tmp - /var/run/docker.sock:/var/run/docker.sock depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy - metadata-db: + dbrepo-metadata-db: condition: service_healthy logging: driver: json-file - broker-service: + dbrepo-broker-service: restart: on-failure container_name: dbrepo-broker-service hostname: broker-service @@ -263,14 +263,14 @@ services: env_file: - .env depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy volumes: - broker-service-data:/var/lib/rabbitmq/ logging: driver: json-file - search-service: + dbrepo-search-service: restart: always container_name: dbrepo-search-service hostname: search-service @@ -278,7 +278,7 @@ services: networks: core: depends_on: - discovery-service: + dbrepo-discovery-service: condition: service_healthy env_file: - .env @@ -287,7 +287,7 @@ services: logging: driver: json-file - ui: + dbrepo-ui: restart: on-failure container_name: dbrepo-ui hostname: ui @@ -302,9 +302,9 @@ services: volumes: - ${SHARED_FILESYSTEM}:/tmp depends_on: - identifier-service: + dbrepo-identifier-service: condition: service_healthy - database-service: + dbrepo-database-service: condition: service_healthy logging: driver: json-file diff --git a/docker-compose.yml b/docker-compose.yml index fd864bd166bfed2443e866de714ba9c4ce269c49..955a59d41ca1896c6915aaf8aa4cf3916567f141 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,27 +10,26 @@ volumes: broker-service-data: networks: - public: - name: public + userdb: + name: userdb driver: bridge ipam: config: - - subnet: 172.29.0.0/16 - userdb: - name: userdb + - subnet: 172.30.0.0/16 + public: + name: public driver: bridge ipam: config: - - subnet: 172.28.0.0/16 + - subnet: 172.31.0.0/16 core: name: core driver: bridge ipam: config: - - subnet: 172.27.0.0/16 + - subnet: 172.32.0.0/16 services: - fda-metadata-db: restart: on-failure container_name: dbrepo-metadata-db diff --git a/fda-database-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java b/fda-database-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java index 3d66daf81fedcf29e85981f04471dbde8226497e..e32140f007c003e33e6a48b9224080ea216d3d58 100644 --- 
a/fda-database-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java +++ b/fda-database-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java @@ -330,8 +330,8 @@ public abstract class BaseUnitTest { .build(); public final static Long CONTAINER_BROKER_ID = 5L; - public final static String CONTAINER_BROKER_NAME = "broker-service"; - public final static String CONTAINER_BROKER_INTERNAL_NAME = "broker-service"; + public final static String CONTAINER_BROKER_NAME = "dbrepo-broker-service"; + public final static String CONTAINER_BROKER_INTERNAL_NAME = "dbrepo-broker-service"; public final static String CONTAINER_BROKER_IP = "172.31.0.2"; public final static String CONTAINER_BROKER_HASH = "deadbeef"; public final static Instant CONTAINER_BROKER_CREATED = Instant.now().minus(1, HOURS); diff --git a/fda-database-service/rest-service/src/test/resources/application.properties b/fda-database-service/rest-service/src/test/resources/application.properties index 33b8c25e25a0fe7db1a47bc8349d3d31e834e179..ddfddd8b33f6b4807c1047703d873571b4f89c24 100644 --- a/fda-database-service/rest-service/src/test/resources/application.properties +++ b/fda-database-service/rest-service/src/test/resources/application.properties @@ -22,8 +22,8 @@ logging.level.root=error logging.level.at.tuwien.=info # rabbitmq -fda.gateway.endpoint=http://broker-service:15672 -spring.rabbitmq.host=broker-service +fda.gateway.endpoint=http://dbrepo-broker-service:15672 +spring.rabbitmq.host=dbrepo-broker-service # elastic fda.elastic.endpoint=search-mock-service:9200 \ No newline at end of file diff --git a/fda-database-service/rest-service/src/test/resources/nginx/nginx.conf b/fda-database-service/rest-service/src/test/resources/nginx/nginx.conf index f4ada65dc83ba0cb149a1136ef36bbaf87fff513..6604865024d8e32d66f23a8f49fd0bca5e2c3e70 100644 --- a/fda-database-service/rest-service/src/test/resources/nginx/nginx.conf +++ b/fda-database-service/rest-service/src/test/resources/nginx/nginx.conf @@ -39,7 +39,7 @@ http { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - proxy_pass http://broker-service:15672; + proxy_pass http://dbrepo-broker-service:15672; proxy_read_timeout 90; } } diff --git a/fda-table-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java b/fda-table-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java index 6afb2e340d88ec03ba1df7bf09e9b758a479c76b..d971603dd8dd5f1e3c387b75759ca34359923590 100644 --- a/fda-table-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java +++ b/fda-table-service/rest-service/src/test/java/at/tuwien/BaseUnitTest.java @@ -146,7 +146,7 @@ public abstract class BaseUnitTest { public final static String CONTAINER_1_HASH = "deadbeef"; public final static ContainerImage CONTAINER_1_IMAGE = IMAGE_1; public final static String CONTAINER_1_NAME = "u01"; - public final static String CONTAINER_1_INTERNALNAME = "fda-userdb-u01"; + public final static String CONTAINER_1_INTERNALNAME = "dbrepo-userdb-u01"; public final static String CONTAINER_1_IP = "172.30.0.5"; public final static Instant CONTAINER_1_CREATED = Instant.now().minus(1, HOURS); @@ -154,7 +154,7 @@ public abstract class BaseUnitTest { public final static String CONTAINER_2_HASH = "deadbeef"; public final static ContainerImage CONTAINER_2_IMAGE = IMAGE_1; public final static String CONTAINER_2_NAME = "u02"; - public final static String CONTAINER_2_INTERNALNAME = "fda-userdb-u02"; + public final static String 
CONTAINER_2_INTERNALNAME = "dbrepo-userdb-u02"; public final static String CONTAINER_2_IP = "172.30.0.6"; public final static Instant CONTAINER_2_CREATED = Instant.now().minus(1, HOURS); @@ -162,7 +162,7 @@ public abstract class BaseUnitTest { public final static String CONTAINER_3_HASH = "deadbeef"; public final static ContainerImage CONTAINER_3_IMAGE = IMAGE_1; public final static String CONTAINER_3_NAME = "u03"; - public final static String CONTAINER_3_INTERNALNAME = "fda-userdb-u03"; + public final static String CONTAINER_3_INTERNALNAME = "dbrepo-userdb-u03"; public final static String CONTAINER_3_IP = "172.30.0.7"; public final static Instant CONTAINER_3_CREATED = Instant.now().minus(1, HOURS);
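Taken together, the compose changes above apply one consistent rename across docker-compose.dbrepo1.yml, docker-compose.dbrepo2.yml and docker-compose.prod.yml: the service key and container_name gain the dbrepo- prefix, the hostname keeps the short form, and every depends_on entry is updated to the prefixed key. A minimal sketch of the resulting pattern, using only keys that already appear in the hunks above (all other settings assumed unchanged):

services:
  dbrepo-broker-service:
    restart: on-failure
    container_name: dbrepo-broker-service   # service key and container name carry the dbrepo- prefix
    hostname: broker-service                # short hostname is kept, as in the diff
    env_file:
      - .env
    depends_on:
      dbrepo-discovery-service:             # depends_on now references the prefixed service key
        condition: service_healthy
    volumes:
      - broker-service-data:/var/lib/rabbitmq/
    logging:
      driver: json-file

Configuration that addresses containers by name follows the same rename, e.g. spring.rabbitmq.host=dbrepo-broker-service and fda.gateway.endpoint=http://dbrepo-broker-service:15672 in the database-service test properties, and proxy_pass http://dbrepo-broker-service:15672 in the test nginx.conf.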
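The docker-compose.yml network renumbering and the new .junit/hosts file line up as follows; this is a sketch assembled from those two hunks (note that 172.32.0.0/16 lies outside the private RFC 1918 range 172.16.0.0/12, unlike the other two subnets, which may be unintentional):

networks:
  userdb:
    name: userdb
    driver: bridge
    ipam:
      config:
        - subnet: 172.30.0.0/16   # dbrepo-userdb-u01..u04 at 172.30.0.5-172.30.0.8 per .junit/hosts
  public:
    name: public
    driver: bridge
    ipam:
      config:
        - subnet: 172.31.0.0/16   # dbrepo-broker-service 172.31.0.2, dbrepo-search-service 172.31.0.3
  core:
    name: core
    driver: bridge
    ipam:
      config:
        - subnet: 172.32.0.0/16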