Unverified commit a4598bfb authored by Martin Weise

Hotfix

- Removed unused files
- Added hosts file
- Added the dbrepo- prefix to service names in the docker compose files
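For reference only (not part of this commit): the prefixing applied in the compose diffs below is mechanical, and a rename like it could be sketched roughly as follows, assuming GNU sed and the compose file name used in the deploy script below:
sed -i -E 's/^([[:space:]]*)((analyse|authentication|broker|container|database|discovery|gateway|identifier|metadata|query|search|semantics|table)-service|metadata-db|ui):/\1dbrepo-\2:/' docker-compose.dbrepo2.yml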
parent 720ab656
1 merge request: !144 Revert
Showing changed files with 134 additions and 819 deletions
#!/bin/bash
# set version
TAG=latest
# tear everything down
echo "Removing all data ..."
/bin/bash teardown
docker system prune -f --volumes || true
echo "Pulling new images ..."
docker pull "dbrepo/analyse-service:${TAG}"
docker pull "dbrepo/authentication-service:${TAG}"
docker pull "dbrepo/metadata-db:${TAG}"
docker pull "dbrepo/ui:${TAG}"
docker pull "dbrepo/ui-proxy:${TAG}"
docker pull "dbrepo/identifier-service:${TAG}"
docker pull "dbrepo/container-service:${TAG}"
docker pull "dbrepo/database-service:${TAG}"
docker pull "dbrepo/discovery-service:${TAG}"
docker pull "dbrepo/gateway-service:${TAG}"
docker pull "dbrepo/query-service:${TAG}"
docker pull "dbrepo/table-service:${TAG}"
docker pull "dbrepo/units-service:${TAG}"
docker pull "dbrepo/broker-service:${TAG}"
docker pull "dbrepo/metadata-service:${TAG}"
echo "Pulled new images"
# deploy dbrepo
echo "Deploy DBRepo ..."
docker compose -f ./docker-compose.dbrepo2.yml up -d
# clone tuw specific deployment
git -C "/home/demo/dbrepo-tuw" pull || git clone ssh://git@gitlab.tuwien.ac.at:822/martin.weise/dbrepo-tuw.git "/home/demo/dbrepo-tuw"
cd /home/demo/dbrepo-tuw && git checkout dev
# build tuw specific deployment
docker compose -f /home/demo/dbrepo-tuw/docker-compose.dbrepo2.yml build || exit 3
docker compose -f /home/demo/dbrepo-tuw/docker-compose.dbrepo2.yml up -d || exit 4
echo "Deployed TU Wien specific deployment"
gitlab.tuwien.ac.at ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKslN24njaMeH0xyVBO/lzeQ5/X/6UFkiz2qdcWihjT28slO/6pFBAxSzPUTBeVJWxKJgX9N+Hm65S9/Z2521E8Y9F56dcrOgeQBZ4GbLPSbEZIFN/71VL4cOh6tLahNZsAwd6y2Zi5XTAqho31VWfdCZkegZB3dNzLfOuC0t3KpxEAmWus5j8InNYDTBIy6U351/3h3oy32EdLvLpaHP+oy+QFAqXOlcXlKwHTfv9SSchQRLfWzrL4hVvohbCwMDBBvIP8J0WQ9mV95xfcwuipMWP3TksDGbst2MQ6HRXZ+yfie9Vgzg+++AjpHXQCMdalEmedNxCmxbHWYJxFHUV
gitlab.tuwien.ac.at ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBC+2QClFaM3ZNQ6nl3nHkZdFSyNP26uHPzJEUBu9J4yC1ON9GPHb2P3rC//wS819e3LWP4PXb8ug/EyEjWN30MA=
gitlab.tuwien.ac.at ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAILTzOKU3zEYNo1xy8rxZsa8D0/y4EN4oB0E7wgWCkOpZ
dbrepo2.ec.tuwien.ac.at ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC5GFfX4xmYeQdQQL81heBjKb4d1M8xjOjF4lpfNmc3/wyCPtP/dt4r3nkcOW3X14yPximy8BlArsPk4ul14BWNI9yrU5VA1FLlWq9Og6SfMJ1EAUNsePDd3qCpr/ljlydS3kWCVrXD1bNO0kQ3/36e9l64BDvZXyhI3n9DvkPO93n9xJ151IC9IwjnSzJQPghboUDRiYwT2B9wt+uC+k7tTV5tQH5kJk8fEa8nMyHOc8aD2miHubrcQJrYO/fv+vVxVWJKtSdz22wuvFnj8SfGClxbT8cfMo8X+LpIQOH8VYwIZVznuSlnNYOOYO6EGYyRnqud6oz0B5RUbwyeBhtAgZW1C1OXO9DVDLnazzFxJIzwhwyZCPurpLpP7bd6P+oFWy1A7bxIhfvdduEDE80vEOPrBl44TDe6RJR6QILtdUn9rrvcV/kgfj04zkQJjqMvX2pdCpdMIU1Pm+NQ53k3oOap7j9UHnpWX7C3mk76ueQddQxWZUhGsrFUAYSJfus=
dbrepo2.ec.tuwien.ac.at ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBOGTeKw2aKnykIUmvLiBNaGbq3xlSEsnD1M1HiZsPJ9ZtfSV12y9F0yutV4j68Rb+eHbyOxoyVekfl19ODDvXLM=
dbrepo2.ec.tuwien.ac.at ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPpSoshRRtQBj4ebWeaWoeGr1XFIlx3L+BXV69fafBGr
#!/bin/bash
echo "=== [ Stopping dbrepo-* ] ==="
docker container stop $(docker container ls -aq -f name=^/dbrepo-.*) || true
echo "=== [ Removing dbrepo-* ] ==="
docker container rm $(docker container ls -aq -f name=^/dbrepo-.*) || true
docker volume rm $(docker volume ls -q -f name=^dbrepo-.*) || true
docker network rm $(docker network ls -q -f name=^dbrepo-.*) || true
echo "=== [ Stopping * ] ==="
docker container stop $(docker container ls -aq -f name=.*-service) || true
docker container stop ui ui-proxy metadata-db || true
echo "=== [ Removing * ] ==="
docker container rm $(docker container ls -aq -f name=.*-service) || true
docker container rm ui ui-proxy metadata-db || true
docker volume rm $(docker volume ls -q) || true
docker network rm core public userdb || true
echo "=== [ Stopping fda-* ] ==="
docker container stop $(docker container ls -aq -f name=^/fda-.*) || true
echo "=== [ Removing fda-* ] ==="
docker container rm $(docker container ls -aq -f name=^/fda-.*) || true
docker volume rm $(docker volume ls -q -f name=^fda-.*) || true
docker network rm $(docker network ls -q -f name=^fda-.*) || true
echo "=== [ Stopping tuw-* ] ==="
docker container stop $(docker container ls -aq -f name=^/tuw-.*) || true
echo "=== [ Removing tuw-* ] ==="
docker container rm $(docker container ls -aq -f name=^/tuw-.*) || true
docker volume rm $(docker volume ls -q -f name=^tuw-.*) || true
docker network rm $(docker network ls -q -f name=^tuw-.*) || true
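# Optional verification sketch (not part of the original script): confirm nothing with the known prefixes is left behind.
docker ps -a --format '{{.Names}}' | grep -E '^(dbrepo|fda|tuw)-' || echo "no matching containers left"
docker volume ls --format '{{.Name}}' | grep -E '^(dbrepo|fda|tuw)-' || echo "no matching volumes left"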
#!/bin/bash
docker container stop $(docker container ls -aq) || true
docker container rm $(docker container ls -aq) || true
docker volume rm $(docker volume ls -q) || true
#!/bin/bash
FILES=$(cd /tmp && find -exec basename '{}' ';' 2>/dev/null | egrep '^.{32}$' | egrep "^([a-z0-9]+)$")
for file in $FILES; do
rm -f /tmp/$file
echo "Removed /tmp/${file}"
done
0 2 * * * /usr/bin/make -C /home/rocky/fda-services teardown
1 2 * * * /usr/bin/make -C /home/rocky/fda-services run
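# Installation sketch (not part of the original file; assumes the two entries above are saved as /home/rocky/fda-services/crontab):
# sudo crontab -u rocky /home/rocky/fda-services/crontab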
#!/bin/bash
TMP_CERT_LOCATION="/root/keys"
TMP_SAML_LOCATION="/root/keys"
KEY_STORE_LOCATION="/tmp/dbrepo.jks"
KEY_STORE_PASS="dbrepo"
CERT_LOCATION="/etc/letsencrypt/live/dbrepo.ossdip.at"
SAML_KEY="/root/keys/saml_sign.key"
SAML_PUB="/root/keys/saml_sign.cer"
TU_SAML_CERT="./fda-authentication-service/rest-service/src/main/resources/saml/tu.crt"
# CREATE KEY DIRECTORIES
sudo mkdir -p "${TMP_CERT_LOCATION}"
sudo mkdir -p "${TMP_SAML_LOCATION}"
# REQUEST
sudo certbot certonly --standalone --preferred-challenges http -d dbrepo.ossdip.at \
-m martin.weise@tuwien.ac.at --agree-tos --keep-until-expiring
# CONVERT SAML SIGNING KEY TO PKCS12
sudo openssl pkcs12 -export -out "${TMP_SAML_LOCATION}/saml.p12" -in "${SAML_PUB}" \
-inkey "${SAML_KEY}" -passout "pass:${KEY_STORE_PASS}"
# CONVERT TLS CERTIFICATE TO PKCS12
sudo openssl pkcs12 -export -out "${TMP_CERT_LOCATION}/cert.p12" -in "${CERT_LOCATION}/cert.pem" \
-inkey "${CERT_LOCATION}/privkey.pem" -passout "pass:${KEY_STORE_PASS}"
# FIX PERMISSIONS
sudo chmod -R 644 "${TMP_CERT_LOCATION}"
sudo chmod -R 644 "${TMP_SAML_LOCATION}"
# IMPORT SSL SIGN PRIVKEY
sudo keytool -noprompt -importkeystore -deststorepass "${KEY_STORE_PASS}" -destkeypass "${KEY_STORE_PASS}" \
-destkeystore "${KEY_STORE_LOCATION}" -srckeystore "${TMP_CERT_LOCATION}/cert.p12" -srcstoretype PKCS12 \
-srcstorepass "${KEY_STORE_PASS}" -alias 1 -destalias ssl
# IMPORT SAML MESSAGE SIGN PRIVKEY
sudo keytool -noprompt -importkeystore -deststorepass "${KEY_STORE_PASS}" -destkeypass "${KEY_STORE_PASS}" \
-destkeystore "${KEY_STORE_LOCATION}" -srckeystore "${TMP_SAML_LOCATION}" -srcstoretype PKCS12 \
-srcstorepass "${KEY_STORE_PASS}" -alias 1 -destalias saml
# IMPORT METADATA VERIFICATION PUBKEY
sudo keytool -noprompt -importcert -file "${TU_SAML_CERT}" -storepass "${KEY_STORE_PASS}" \
-keystore "${KEY_STORE_LOCATION}" -alias tu
# OWNERSHIP
sudo chown centos:docker "${TMP_CERT_LOCATION}"
sudo chown centos:docker "${TMP_SAML_LOCATION}"
sudo chown centos:docker "${KEY_STORE_LOCATION}"
# TRUST LET'S ENCRYPT
sudo keytool -noprompt -import -alias letsencrypt -keystore "${KEY_STORE_LOCATION}" -storepass "${KEY_STORE_PASS}" \
-file "${CERT_LOCATION}/chain.pem"
#!/bin/bash
sudo cat /root/smtp.secret >> ./.env
#!/bin/bash
CA_PATH="/etc/letsencrypt/live/dbrepo.ossdip.at"
sudo certbot certonly --standalone --preferred-challenges http -d dbrepo.ossdip.at --agree-tos --keep-until-expiring
KEY=$(sudo sed -E ':a;N;$!ba;s/\r{0,1}\n/\\n/g' "${CA_PATH}/privkey.pem")
CERT=$(sudo sed -E ':a;N;$!ba;s/\r{0,1}\n/\\n/g' "${CA_PATH}/cert.pem")
cat << EOF
UI_KEY="${KEY}"
UI_CERT="${CERT}"
EOF
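# One way to consume the printed variables (an assumption about the intended workflow; the script name is hypothetical):
./print-ssl-env.sh >> .env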
# FDA USERDB
172.28.0.2 fda-userdb-weather-aus
172.28.0.3 fda-userdb-infection
172.28.0.4 fda-userdb-air
172.28.0.5 fda-userdb-u01
172.28.0.6 fda-userdb-u02
172.28.0.7 fda-userdb-u03
#!/usr/bin/env python3
import sys
import xml.etree.ElementTree as ET
import re
import os.path
# branch-rate="0.0" complexity="0.0" line-rate="1.0"
# branch="true" hits="1" number="86"
def find_lines(j_package, filename):
"""Return all <line> elements for a given source file in a package."""
lines = list()
sourcefiles = j_package.findall("sourcefile")
for sourcefile in sourcefiles:
if sourcefile.attrib.get("name") == os.path.basename(filename):
lines = lines + sourcefile.findall("line")
return lines
def line_is_after(jm, start_line):
return int(jm.attrib.get('line', 0)) > start_line
def method_lines(jmethod, jmethods, jlines):
"""Filter the lines from the given set of jlines that apply to the given jmethod."""
start_line = int(jmethod.attrib.get('line', 0))
larger = list(int(jm.attrib.get('line', 0)) for jm in jmethods if line_is_after(jm, start_line))
end_line = min(larger) if len(larger) else 99999999
for jline in jlines:
if start_line <= int(jline.attrib['nr']) < end_line:
yield jline
def convert_lines(j_lines, into):
"""Convert the JaCoCo <line> elements into Cobertura <line> elements, add them under the given element."""
c_lines = ET.SubElement(into, 'lines')
for jline in j_lines:
mb = int(jline.attrib['mb'])
cb = int(jline.attrib['cb'])
ci = int(jline.attrib['ci'])
cline = ET.SubElement(c_lines, 'line')
cline.set('number', jline.attrib['nr'])
cline.set('hits', '1' if ci > 0 else '0') # Probably not true but no way to know from JaCoCo XML file
if mb + cb > 0:
percentage = str(int(100 * (float(cb) / (float(cb) + float(mb))))) + '%'
cline.set('branch', 'true')
cline.set('condition-coverage', percentage + ' (' + str(cb) + '/' + str(cb + mb) + ')')
cond = ET.SubElement(ET.SubElement(cline, 'conditions'), 'condition')
cond.set('number', '0')
cond.set('type', 'jump')
cond.set('coverage', percentage)
else:
cline.set('branch', 'false')
def guess_filename(path_to_class):
m = re.match('([^$]*)', path_to_class)
return (m.group(1) if m else path_to_class) + '.java'
def add_counters(source, target):
target.set('line-rate', counter(source, 'LINE'))
target.set('branch-rate', counter(source, 'BRANCH'))
target.set('complexity', counter(source, 'COMPLEXITY', sum))
def fraction(covered, missed):
return covered / (covered + missed)
def sum(covered, missed):
return covered + missed
def counter(source, type, operation=fraction):
cs = source.findall('counter')
c = next((ct for ct in cs if ct.attrib.get('type') == type), None)
if c is not None:
covered = float(c.attrib['covered'])
missed = float(c.attrib['missed'])
return str(operation(covered, missed))
else:
return '0.0'
def convert_method(j_method, j_lines):
c_method = ET.Element('method')
c_method.set('name', j_method.attrib['name'])
c_method.set('signature', j_method.attrib['desc'])
add_counters(j_method, c_method)
convert_lines(j_lines, c_method)
return c_method
def convert_class(j_class, j_package):
c_class = ET.Element('class')
c_class.set('name', j_class.attrib['name'].replace('/', '.'))
c_class.set('filename', guess_filename(j_class.attrib['name']))
all_j_lines = list(find_lines(j_package, c_class.attrib['filename']))
c_methods = ET.SubElement(c_class, 'methods')
all_j_methods = list(j_class.findall('method'))
for j_method in all_j_methods:
j_method_lines = method_lines(j_method, all_j_methods, all_j_lines)
c_methods.append(convert_method(j_method, j_method_lines))
add_counters(j_class, c_class)
convert_lines(all_j_lines, c_class)
return c_class
def convert_package(j_package):
c_package = ET.Element('package')
c_package.attrib['name'] = j_package.attrib['name'].replace('/', '.')
c_classes = ET.SubElement(c_package, 'classes')
for j_class in j_package.findall('class'):
c_classes.append(convert_class(j_class, j_package))
add_counters(j_package, c_package)
return c_package
def convert_root(source, target, source_roots):
target.set('timestamp', str(int(source.find('sessioninfo').attrib['start']) / 1000))
sources = ET.SubElement(target, 'sources')
for s in source_roots:
ET.SubElement(sources, 'source').text = s
packages = ET.SubElement(target, 'packages')
for package in source.findall('package'):
packages.append(convert_package(package))
add_counters(source, target)
def jacoco2cobertura(filename, source_roots):
if filename == '-':
root = ET.fromstring(sys.stdin.read())
else:
tree = ET.parse(filename)
root = tree.getroot()
into = ET.Element('coverage')
convert_root(root, into, source_roots)
print(ET.tostring(into, encoding='utf8', method='xml').decode("utf-8"))
if __name__ == '__main__':
if len(sys.argv) < 2:
print("Usage: cover2cover.py FILENAME [SOURCE_ROOTS]")
sys.exit(1)
filename = sys.argv[1]
source_roots = sys.argv[2:] if len(sys.argv) > 2 else ['.']
jacoco2cobertura(filename, source_roots)
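# Typical invocation (paths are assumed; target/site/jacoco/jacoco.xml is the default JaCoCo report location in a Maven build):
# python3 cover2cover.py target/site/jacoco/jacoco.xml src/main/java > cobertura.xml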
#!/usr/bin/env python3
from api_container.api.container_endpoint_api import ContainerEndpointApi
from api_database.api.database_endpoint_api import DatabaseEndpointApi
import time
container = ContainerEndpointApi()
database = DatabaseEndpointApi()
def create_container():
response = container.create1({
"name": "Pilot Factory Data",
"repository": "mariadb",
"tag": "10.5"
})
print("created container with id %d" % response.id)
return response
def start_container(container_id):
response = container.modify({
"action": "start"
}, container_id)
print("... starting")
time.sleep(5)
print("started container with id %d" % response.id)
return response
def create_database(container_id, is_public=True):
response = database.create({
"name": "Pilot Factory Data",
"is_public": is_public
}, container_id)
print("created database with id %d" % response.id)
return response
token = "" # keep
from api_authentication.api.authentication_endpoint_api import AuthenticationEndpointApi
from api_authentication.api.user_endpoint_api import UserEndpointApi
import uuid
authentication = AuthenticationEndpointApi()
user = UserEndpointApi()
def create_user(username):
response = user.register({
"username": username,
"password": username,
"email": username + "@gmail.com"
})
print("created user with id %d" % response.id)
return response
def auth_user(username):
response = authentication.authenticate_user1({
"username": username,
"password": username
})
print("authenticated user with id %d" % response.id)
token = response.token
container.api_client.default_headers = {"Authorization": "Bearer " + token}
database.api_client.default_headers = {"Authorization": "Bearer " + token}
return response
def find_database(container_id, database_id):
response = database.find_by_id(container_id, database_id)
print("found database with id %d" % response.id)
return response
def update_database(container_id, database_id):
response = database.update({
"description": "This dataset includes daily values from 1983 to the current day, divided into annual files. This includes the maximum hourly average and the number of times the hourly average limit value for ozone was exceeded and the daily averages for sulfur dioxide (SO2), carbon monoxide (CO), nitrogen oxide (NOx), nitrogen monoxide (NO), nitrogen dioxide (NO2), particulate matter (PM10 and PM2.5). ) and particle number (PN), provided that they are of sufficient quality. The values of the completed day for the current year are updated every 30 minutes after midnight (UTC+1).",
"publisher": "Technical University of Vienna",
"license": {
"identifier": "CC0-1.0",
"uri": "https://creativecommons.org/publicdomain/zero/1.0/legalcode"
},
"language": "en",
"publication_year": 2022
}, container_id, database_id)
print("updated database with id %d" % response.id)
return response
def test_containers():
username = str(uuid.uuid1()).replace("-", "")
uid = create_user(username).id
auth_user(username)
# container 1
cid = create_container().id
start_container(cid)
dbid = create_database(cid).id
update_database(cid, dbid)
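# The functions above follow the pytest naming convention and could be run with, e.g. (module file name assumed):
# python3 -m pytest -q test_containers.py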
#!/usr/bin/env python3
import time
import os
import shutil
import uuid
from api_authentication.api.authentication_endpoint_api import AuthenticationEndpointApi
from api_authentication.api.user_endpoint_api import UserEndpointApi
from api_container.api.container_endpoint_api import ContainerEndpointApi
from api_database.api.database_endpoint_api import DatabaseEndpointApi
from api_table.api.table_endpoint_api import TableEndpointApi
from api_query.api.table_data_endpoint_api import TableDataEndpointApi
from api_query.api.query_endpoint_api import QueryEndpointApi
from api_query.api.table_history_endpoint_api import TableHistoryEndpointApi
from api_identifier.api.identifier_endpoint_api import IdentifierEndpointApi
from api_identifier.api.persistence_endpoint_api import PersistenceEndpointApi
from api_query.api.view_endpoint_api import ViewEndpointApi
from api_query.rest import ApiException
authentication = AuthenticationEndpointApi()
user = UserEndpointApi()
container = ContainerEndpointApi()
database = DatabaseEndpointApi()
table = TableEndpointApi()
query = QueryEndpointApi()
history = TableHistoryEndpointApi()
data = TableDataEndpointApi()
identifier = IdentifierEndpointApi()
persistence = PersistenceEndpointApi()
view = ViewEndpointApi()
token = "" # keep
def create_user(username):
response = user.register({
"username": username,
"password": username,
"email": username + "@gmail.com"
})
print("created user with id %d" % response.id)
return response
def update_password(user_id, password):
response = user.update_password({
"password": password
}, user_id)
print("updated password for user with id %d" % user_id)
return response
def auth_user(username):
response = authentication.authenticate_user1({
"username": username,
"password": username
})
print("authenticated user with id %d" % response.id)
global token  # update the module-level token so create_table() sends a valid Authorization header
token = response.token
container.api_client.default_headers = {"Authorization": "Bearer " + token}
database.api_client.default_headers = {"Authorization": "Bearer " + token}
table.api_client.default_headers = {"Authorization": "Bearer " + token}
data.api_client.default_headers = {"Authorization": "Bearer " + token}
query.api_client.default_headers = {"Authorization": "Bearer " + token}
identifier.api_client.default_headers = {"Authorization": "Bearer " + token}
user.api_client.default_headers = {"Authorization": "Bearer " + token}
persistence.api_client.default_headers = {"Authorization": "Bearer " + token}
history.api_client.default_headers = {"Authorization": "Bearer " + token}
view.api_client.default_headers = {"Authorization": "Bearer " + token}
return response
def create_container():
response = container.create1({
"name": "Pilot Factory Data",
"repository": "mariadb",
"tag": "10.5"
})
print("created container with id %d" % response.id)
return response
def start_container(container_id):
response = container.modify({
"action": "start"
}, container_id)
print("... starting")
time.sleep(5)
print("started container with id %d" % response.id)
return response
def create_database(container_id, is_public=True):
response = database.create({
"name": "Pilot Factory Data",
"is_public": is_public
}, container_id)
print("created database with id %d" % response.id)
return response
def find_database(container_id, database_id):
response = database.find_by_id(container_id, database_id)
print("found database with id %d" % response.id)
return response
def update_database(container_id, database_id):
response = database.update({
"description": "This dataset includes daily values from 1983 to the current day, divided into annual files. This includes the maximum hourly average and the number of times the hourly average limit value for ozone was exceeded and the daily averages for sulfur dioxide (SO2), carbon monoxide (CO), nitrogen oxide (NOx), nitrogen monoxide (NO), nitrogen dioxide (NO2), particulate matter (PM10 and PM2.5). ) and particle number (PN), provided that they are of sufficient quality. The values of the completed day for the current year are updated every 30 minutes after midnight (UTC+1).",
"publisher": "Technical University of Vienna",
"license": {
"identifier": "CC0-1.0",
"uri": "https://creativecommons.org/publicdomain/zero/1.0/legalcode"
},
"language": "en",
"publication_year": 2022
}, container_id, database_id)
print("updated database with id %d" % response.id)
return response
def create_table(container_id, database_id, columns=None):
if columns is None:
columns = [{
"name": "Date",
"type": "date",
"dfid": 1,
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Location",
"type": "string",
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Parameter",
"type": "string",
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Interval",
"type": "string",
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Unit",
"type": "string",
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Value",
"type": "decimal",
"unique": False,
"primary_key": False,
"null_allowed": True,
}, {
"name": "Status",
"type": "string",
"unique": False,
"primary_key": False,
"null_allowed": True,
}]
response = table.create({
"name": "Airquality " + str(uuid.uuid1()),
"description": "Airquality in Zürich, Switzerland",
"columns": columns
}, "Bearer " + token, container_id, database_id)
print("created table with id %d" % response.id)
return response
def find_table(container_id, database_id, table_id):
response = table.find_by_id(container_id, database_id, table_id)
print("found table with id %d" % response.id)
return response
def fill_table(container_id, database_id, table_id):
response = data.import_csv({
"location": "/path/to/data.csv",
"quote": "\"",
"null_element": "NA"
"separator": ",",
}, container_id, database_id, table_id)
print("filled table with id %d" % table_id)
return response
def create_query(container_id, database_id, statement, page=0, size=3):
response = query.execute({
"statement": statement
}, container_id, database_id, page=page, size=size)
print("executed query with id %d" % response.id)
return response
def delete_tuple(container_id, database_id, table_id, keys):
response = data.delete(keys, container_id, database_id, table_id)
print("deleted tuples for table with id %d" % table_id)
return response
def download_query_data(container_id, database_id, query_id):
response = query.export1(container_id, database_id, query_id)
print("downloaded query data for query with id %d" % query_id)
return response
def list_views(container_id, database_id):
response = view.find_all(container_id, database_id)
print("list views for database with id %d" % database_id)
return response
def create_view(container_id, database_id, table_name):
response = view.create({
"name": "Air Quality " + str(uuid.uuid1()),
"query": "SELECT `date`, `parameter`, `value` FROM `" + table_name + "` WHERE `date` = '2021-10-02T14:00'",
"is_public": True
}, container_id, database_id)
print("created view with id %d" % response.id)
return response
def data_view(container_id, database_id, view_id):
response = view.data(container_id, database_id, view_id)
print("retrieved data for view with id %d" % response.id)
return response
def test_identifiers():
#
# create 1 user and 2 containers (public, private)
#
username = str(uuid.uuid1()).replace("-", "")
uid = create_user(username).id
auth_user(username)
# container 1
cid = create_container().id
start_container(cid)
dbid = create_database(cid).id
update_database(cid, dbid)
tid = create_table(cid, dbid).id
tname = find_table(cid, dbid, tid).internal_name
fill_table(cid, dbid, tid)
create_query(cid, dbid, "select `id` from `" + tname + "`")
create_query(cid, dbid, "select `date` from `" + tname + "`")
qid = create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "`").id
create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "` order by `date` asc")
create_query(cid, dbid, "select t.`date`, t.location, t.status from `" + tname + "` t group by t.`date` order by t.`date` asc")
create_query(cid, dbid, "select `date`, `location`, `status` from `" + tname + "` group by `date`, `location` asc")
download_query_data(cid, dbid, qid)
# container 2 (=private)
cid = create_container().id
start_container(cid)
dbid = create_database(cid, False).id
update_database(cid, dbid)
tid = create_table(cid, dbid).id
tname = find_table(cid, dbid, tid).internal_name
fill_table(cid, dbid, tid)
qid = create_query(cid, dbid, "select `id` from `" + tname + "`").id
qid = create_query(cid, dbid, "select `id` from `" + tname + "`").id
vid = create_view(cid, dbid, tname).id
data_view(cid, dbid, vid)
list_views(cid, dbid)
for i in range(5, 10):
delete_tuple(cid, dbid, tid, {
"keys": {
"id": i
}
})
time.sleep(1)
delete_tuple(cid, dbid, tid, {
"keys": {
"location": "Schimmelstrasse"
}
})
###########################################################
# PLACE IN /etc/hosts #
###########################################################
172.30.0.5 dbrepo-userdb-u01
172.30.0.6 dbrepo-userdb-u02
172.30.0.7 dbrepo-userdb-u03
172.30.0.8 dbrepo-userdb-u04
172.31.0.2 dbrepo-broker-service
172.31.0.3 dbrepo-search-service
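# A sketch for applying these entries (the file name "hosts" is taken from the commit message; the path is assumed):
cat ./hosts | sudo tee -a /etc/hosts > /dev/null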
@@ -31,7 +31,7 @@ networks:
services:
metadata-db:
dbrepo-metadata-db:
restart: on-failure
container_name: dbrepo-metadata-db
hostname: metadata-db
@@ -60,7 +60,7 @@ services:
logging:
driver: json-file
gateway-service:
dbrepo-gateway-service:
restart: on-failure
container_name: dbrepo-gateway-service
hostname: gateway-service
@@ -72,12 +72,12 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
database-service:
dbrepo-database-service:
restart: on-failure
container_name: dbrepo-database-service
hostname: database-service
@@ -90,16 +90,16 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
container-service:
dbrepo-container-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
container-service:
dbrepo-container-service:
restart: on-failure
container_name: dbrepo-container-service
hostname: container-service
@@ -111,12 +111,12 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
authentication-service:
dbrepo-authentication-service:
restart: on-failure
container_name: dbrepo-authentication-service
hostname: authentication-service
@@ -126,16 +126,16 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
query-service:
dbrepo-query-service:
restart: on-failure
container_name: dbrepo-query-service
hostname: query-service
@@ -148,14 +148,14 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
table-service:
dbrepo-table-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
table-service:
dbrepo-table-service:
restart: on-failure
container_name: dbrepo-table-service
hostname: table-service
@@ -169,16 +169,16 @@ services:
- /var/run/docker.sock:/var/run/docker.sock
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
search-service:
dbrepo-search-service:
condition: service_started
broker-service:
dbrepo-broker-service:
condition: service_started
logging:
driver: json-file
identifier-service:
dbrepo-identifier-service:
restart: on-failure
container_name: dbrepo-identifier-service
hostname: identifier-service
@@ -188,16 +188,16 @@ services:
env_file:
- .env
depends_on:
query-service:
dbrepo-query-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
volumes:
- ${SHARED_FILESYSTEM}:/tmp
logging:
driver: json-file
fda-metadata-service:
dbrepo-metadata-service:
restart: on-failure
container_name: dbrepo-metadata-service
hostname: metadata-service
@@ -207,12 +207,12 @@ services:
env_file:
- .env
depends_on:
metadata-db:
dbrepo-metadata-db:
condition: service_started
logging:
driver: json-file
analyse-service:
dbrepo-analyse-service:
restart: on-failure
container_name: dbrepo-analyse-service
hostname: analyse-service
@@ -226,12 +226,12 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
semantics-service:
dbrepo-semantics-service:
restart: on-failure
container_name: dbrepo-semantics-service
hostname: semantics-service
@@ -244,14 +244,14 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
broker-service:
dbrepo-broker-service:
restart: on-failure
container_name: dbrepo-broker-service
hostname: broker-service
@@ -264,14 +264,14 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
volumes:
- broker-service-data:/var/lib/rabbitmq/
logging:
driver: json-file
search-service:
dbrepo-search-service:
restart: always
container_name: dbrepo-search-service
hostname: search-service
@@ -279,7 +279,7 @@ services:
networks:
core:
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
ports:
- 9200:9200
@@ -290,7 +290,7 @@ services:
logging:
driver: json-file
ui:
dbrepo-ui:
restart: on-failure
container_name: dbrepo-ui
hostname: ui
@@ -303,9 +303,9 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
identifier-service:
dbrepo-identifier-service:
condition: service_healthy
database-service:
dbrepo-database-service:
condition: service_healthy
logging:
driver: json-file
@@ -31,7 +31,7 @@ networks:
services:
metadata-db:
dbrepo-metadata-db:
restart: on-failure
container_name: dbrepo-metadata-db
hostname: metadata-db
@@ -48,7 +48,7 @@ services:
logging:
driver: json-file
discovery-service:
dbrepo-discovery-service:
restart: on-failure
container_name: dbrepo-discovery-service
hostname: discovery-service
@@ -60,7 +60,7 @@ services:
logging:
driver: json-file
gateway-service:
dbrepo-gateway-service:
restart: on-failure
container_name: dbrepo-gateway-service
hostname: gateway-service
@@ -72,12 +72,12 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
database-service:
dbrepo-database-service:
restart: on-failure
container_name: dbrepo-database-service
hostname: database-service
@@ -90,16 +90,16 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
container-service:
dbrepo-container-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
container-service:
dbrepo-container-service:
restart: on-failure
container_name: dbrepo-container-service
hostname: container-service
@@ -111,12 +111,12 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
authentication-service:
dbrepo-authentication-service:
restart: on-failure
container_name: dbrepo-authentication-service
hostname: authentication-service
@@ -126,16 +126,16 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
query-service:
dbrepo-query-service:
restart: on-failure
container_name: dbrepo-query-service
hostname: query-service
@@ -148,14 +148,14 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
table-service:
dbrepo-table-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
table-service:
dbrepo-table-service:
restart: on-failure
container_name: dbrepo-table-service
hostname: table-service
@@ -169,16 +169,16 @@ services:
- /var/run/docker.sock:/var/run/docker.sock
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
search-service:
dbrepo-search-service:
condition: service_started
broker-service:
dbrepo-broker-service:
condition: service_started
logging:
driver: json-file
identifier-service:
dbrepo-identifier-service:
restart: on-failure
container_name: dbrepo-identifier-service
hostname: identifier-service
@@ -188,16 +188,16 @@ services:
env_file:
- .env
depends_on:
query-service:
dbrepo-query-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
volumes:
- ${SHARED_FILESYSTEM}:/tmp
logging:
driver: json-file
metadata-service:
dbrepo-metadata-service:
restart: on-failure
container_name: dbrepo-metadata-service
hostname: metadata-service
@@ -207,12 +207,12 @@ services:
env_file:
- .env
depends_on:
metadata-db:
dbrepo-metadata-db:
condition: service_started
logging:
driver: json-file
analyse-service:
dbrepo-analyse-service:
restart: on-failure
container_name: dbrepo-analyse-service
hostname: analyse-service
@@ -226,12 +226,12 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
semantics-service:
dbrepo-semantics-service:
restart: on-failure
container_name: dbrepo-semantics-service
hostname: semantics-service
@@ -244,14 +244,14 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
broker-service:
dbrepo-broker-service:
restart: on-failure
container_name: dbrepo-broker-service
hostname: broker-service
@@ -264,14 +264,14 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
volumes:
- broker-service-data:/var/lib/rabbitmq/
logging:
driver: json-file
search-service:
dbrepo-search-service:
restart: always
container_name: dbrepo-search-service
hostname: search-service
@@ -279,7 +279,7 @@ services:
networks:
core:
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
env_file:
- .env
@@ -288,7 +288,7 @@ services:
logging:
driver: json-file
ui:
dbrepo-ui:
restart: on-failure
container_name: dbrepo-ui
hostname: ui
@@ -301,9 +301,9 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
identifier-service:
dbrepo-identifier-service:
condition: service_healthy
database-service:
dbrepo-database-service:
condition: service_healthy
logging:
driver: json-file
@@ -31,7 +31,7 @@ networks:
services:
metadata-db:
dbrepo-metadata-db:
restart: on-failure
container_name: dbrepo-metadata-db
hostname: metadata-db
@@ -48,7 +48,7 @@ services:
logging:
driver: json-file
discovery-service:
dbrepo-discovery-service:
restart: on-failure
container_name: dbrepo-discovery-service
hostname: discovery-service
@@ -60,7 +60,7 @@ services:
logging:
driver: json-file
gateway-service:
dbrepo-gateway-service:
restart: on-failure
container_name: dbrepo-gateway-service
hostname: gateway-service
@@ -72,12 +72,12 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
database-service:
dbrepo-database-service:
restart: on-failure
container_name: dbrepo-database-service
hostname: database-service
@@ -90,16 +90,16 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
container-service:
dbrepo-container-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
container-service:
dbrepo-container-service:
restart: on-failure
container_name: dbrepo-container-service
hostname: container-service
@@ -111,12 +111,12 @@ services:
volumes:
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
authentication-service:
dbrepo-authentication-service:
restart: on-failure
container_name: dbrepo-authentication-service
hostname: authentication-service
@@ -126,16 +126,16 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
broker-service:
dbrepo-broker-service:
condition: service_started
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
query-service:
dbrepo-query-service:
restart: on-failure
container_name: dbrepo-query-service
hostname: query-service
@@ -148,14 +148,14 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
table-service:
dbrepo-table-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
logging:
driver: json-file
table-service:
dbrepo-table-service:
restart: on-failure
container_name: dbrepo-table-service
hostname: table-service
@@ -169,16 +169,16 @@ services:
- /var/run/docker.sock:/var/run/docker.sock
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
search-service:
dbrepo-search-service:
condition: service_started
broker-service:
dbrepo-broker-service:
condition: service_started
logging:
driver: json-file
identifier-service:
dbrepo-identifier-service:
restart: on-failure
container_name: dbrepo-identifier-service
hostname: identifier-service
@@ -188,16 +188,16 @@ services:
env_file:
- .env
depends_on:
query-service:
dbrepo-query-service:
condition: service_healthy
authentication-service:
dbrepo-authentication-service:
condition: service_healthy
volumes:
- ${SHARED_FILESYSTEM}:/tmp
logging:
driver: json-file
metadata-service:
dbrepo-metadata-service:
restart: on-failure
container_name: dbrepo-metadata-service
hostname: metadata-service
@@ -207,12 +207,12 @@ services:
env_file:
- .env
depends_on:
metadata-db:
dbrepo-metadata-db:
condition: service_started
logging:
driver: json-file
analyse-service:
dbrepo-analyse-service:
restart: on-failure
container_name: dbrepo-analyse-service
hostname: analyse-service
@@ -226,12 +226,12 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
logging:
driver: json-file
semantics-service:
dbrepo-semantics-service:
restart: on-failure
container_name: dbrepo-semantics-service
hostname: semantics-service
@@ -244,14 +244,14 @@ services:
- ${SHARED_FILESYSTEM}:/tmp
- /var/run/docker.sock:/var/run/docker.sock
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
metadata-db:
dbrepo-metadata-db:
condition: service_healthy
logging:
driver: json-file
broker-service:
dbrepo-broker-service:
restart: on-failure
container_name: dbrepo-broker-service
hostname: broker-service
@@ -263,14 +263,14 @@ services:
env_file:
- .env
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
volumes:
- broker-service-data:/var/lib/rabbitmq/
logging:
driver: json-file
search-service:
dbrepo-search-service:
restart: always
container_name: dbrepo-search-service
hostname: search-service
@@ -278,7 +278,7 @@ services:
networks:
core:
depends_on:
discovery-service:
dbrepo-discovery-service:
condition: service_healthy
env_file:
- .env
@@ -287,7 +287,7 @@ services:
logging:
driver: json-file
ui:
dbrepo-ui:
restart: on-failure
container_name: dbrepo-ui
hostname: ui
@@ -302,9 +302,9 @@ services:
volumes:
- ${SHARED_FILESYSTEM}:/tmp
depends_on:
identifier-service:
dbrepo-identifier-service:
condition: service_healthy
database-service:
dbrepo-database-service:
condition: service_healthy
logging:
driver: json-file
@@ -10,27 +10,26 @@ volumes:
broker-service-data:
networks:
public:
name: public
userdb:
name: userdb
driver: bridge
ipam:
config:
- subnet: 172.29.0.0/16
userdb:
name: userdb
- subnet: 172.30.0.0/16
public:
name: public
driver: bridge
ipam:
config:
- subnet: 172.28.0.0/16
- subnet: 172.31.0.0/16
core:
name: core
driver: bridge
ipam:
config:
- subnet: 172.27.0.0/16
- subnet: 172.32.0.0/16
services:
fda-metadata-db:
restart: on-failure
container_name: dbrepo-metadata-db
......
@@ -330,8 +330,8 @@ public abstract class BaseUnitTest {
.build();
public final static Long CONTAINER_BROKER_ID = 5L;
public final static String CONTAINER_BROKER_NAME = "broker-service";
public final static String CONTAINER_BROKER_INTERNAL_NAME = "broker-service";
public final static String CONTAINER_BROKER_NAME = "dbrepo-broker-service";
public final static String CONTAINER_BROKER_INTERNAL_NAME = "dbrepo-broker-service";
public final static String CONTAINER_BROKER_IP = "172.31.0.2";
public final static String CONTAINER_BROKER_HASH = "deadbeef";
public final static Instant CONTAINER_BROKER_CREATED = Instant.now().minus(1, HOURS);
......
@@ -22,8 +22,8 @@ logging.level.root=error
logging.level.at.tuwien.=info
# rabbitmq
fda.gateway.endpoint=http://broker-service:15672
spring.rabbitmq.host=broker-service
fda.gateway.endpoint=http://dbrepo-broker-service:15672
spring.rabbitmq.host=dbrepo-broker-service
# elastic
fda.elastic.endpoint=search-mock-service:9200