diff --git a/.docs/system-services-authentication.md b/.docs/system-services-authentication.md
index 64bc61120d1ca7c6e79cdcf8a85ad6d7fb7de390..a1d25539fa3bcc8732a5e4c49517c27d27e1404b 100644
--- a/.docs/system-services-authentication.md
+++ b/.docs/system-services-authentication.md
@@ -91,6 +91,7 @@ public ResponseEntity<DatabaseBriefDto> create(@NotNull Long containerId,
 | `find-tables`                   | Can list a specific table in a database              |
 | `list-tables`                   | Can list all tables                                  |
 | `modify-table-column-semantics` | Can modify the column semantics of a specific column |
+| `delete-table`                  | Can delete tables owned by the user in a database    |
 
 ### Default Query Handling
 
@@ -168,9 +169,9 @@ public ResponseEntity<DatabaseBriefDto> create(@NotNull Long containerId,
 
 ### Escalated Table Handling
 
-| Name           | Description                          |
-|----------------|--------------------------------------|
-| `delete-table` | Can delete any table in any database |
+| Name                   | Description                          |
+|------------------------|--------------------------------------|
+| `delete-foreign-table` | Can delete any table in any database |
 
 ### Escalated Query Handling
 
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f0feb7e23396e18bc323014992004a7b03bfbad1..13d9a94dd11fad3a7a5229f80494eeb072608d1e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -439,8 +439,8 @@ release-latest:
   script:
     - "ifconfig eth0 mtu 1450 up"
     - "apk add make"
-    - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin docker.io
-    - echo "$AZURE_PASSWORD" | docker login --username "$AZURE_USERNAME" --password-stdin dbrepo.azurecr.io
+    - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin $CI_REGISTRY_URL
+    - echo "$CI_REGISTRY2_PASSWORD" | docker login --username "$CI_REGISTRY2_USER" --password-stdin $CI_REGISTRY2_URL
     - TAG=latest make release
 
 release-version:
@@ -458,8 +458,8 @@ release-version:
   script:
     - "ifconfig eth0 mtu 1450 up"
     - "apk add make"
-    - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin docker.io
-    - echo "$AZURE_PASSWORD" | docker login --username "$AZURE_USERNAME" --password-stdin dbrepo.azurecr.io
+    - echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin $CI_REGISTRY_URL
+    - echo "$CI_REGISTRY2_PASSWORD" | docker login --username "$CI_REGISTRY2_USER" --password-stdin $CI_REGISTRY2_URL
     - "TAG=1.3 make release"
 
 build-api-latest:
diff --git a/Makefile b/Makefile
index 5bc096951bef0feccda2bcad8a95d7f120769393..0a9d7b3c934935a7ab23e2679b48f8c2806816c5 100644
--- a/Makefile
+++ b/Makefile
@@ -2,7 +2,8 @@
 
 TAG ?= latest
 TRIVY_VERSION ?= v0.41.0
-AZURE_REPO ?= dbrepo.azurecr.io
+REPOSITORY_URL ?= docker.io/dbrepo
+REPOSITORY2_URL ?= s210.dl.hpc.tuwien.ac.at/dbrepo
 
 all: build
 
@@ -46,102 +47,102 @@ build-clients:
 tag: tag-analyse-service tag-authentication-service tag-metadata-db tag-ui tag-metadata-service tag-data-service tag-mirror-service tag-log-service tag-search-db tag-search-db-init tag-search-service tag-data-db-sidecar
 
 tag-analyse-service:
-	docker tag dbrepo-analyse-service:latest "dbrepo/analyse-service:${TAG}"
-	docker tag dbrepo-analyse-service:latest "${AZURE_REPO}/dbrepo/analyse-service:${TAG}"
+	docker tag dbrepo-analyse-service:latest "${REPOSITORY_URL}/analyse-service:${TAG}"
+	docker tag dbrepo-analyse-service:latest "${REPOSITORY2_URL}/analyse-service:${TAG}"
 
 tag-authentication-service:
-	docker tag dbrepo-authentication-service:latest "dbrepo/authentication-service:${TAG}"
-	docker tag dbrepo-authentication-service:latest "${AZURE_REPO}/dbrepo/authentication-service:${TAG}"
+	docker tag dbrepo-authentication-service:latest "${REPOSITORY_URL}/authentication-service:${TAG}"
+	docker tag dbrepo-authentication-service:latest "${REPOSITORY2_URL}/authentication-service:${TAG}"
 
 tag-metadata-db:
-	docker tag dbrepo-metadata-db:latest "dbrepo/metadata-db:${TAG}"
-	docker tag dbrepo-metadata-db:latest "${AZURE_REPO}/dbrepo/metadata-db:${TAG}"
+	docker tag dbrepo-metadata-db:latest "${REPOSITORY_URL}/metadata-db:${TAG}"
+	docker tag dbrepo-metadata-db:latest "${REPOSITORY2_URL}/metadata-db:${TAG}"
 
 tag-ui:
-	docker tag dbrepo-ui:latest "dbrepo/ui:${TAG}"
-	docker tag dbrepo-ui:latest "${AZURE_REPO}/dbrepo/ui:${TAG}"
+	docker tag dbrepo-ui:latest "${REPOSITORY_URL}/ui:${TAG}"
+	docker tag dbrepo-ui:latest "${REPOSITORY2_URL}/ui:${TAG}"
 
 tag-data-service:
-	docker tag dbrepo-data-service:latest "dbrepo/data-service:${TAG}"
-	docker tag dbrepo-data-service:latest "${AZURE_REPO}/dbrepo/data-service:${TAG}"
+	docker tag dbrepo-data-service:latest "${REPOSITORY_URL}/data-service:${TAG}"
+	docker tag dbrepo-data-service:latest "${REPOSITORY2_URL}/data-service:${TAG}"
 
 tag-mirror-service:
-	docker tag dbrepo-mirror-service:latest "dbrepo/mirror-service:${TAG}"
-	docker tag dbrepo-mirror-service:latest "${AZURE_REPO}/dbrepo/mirror-service:${TAG}"
+	docker tag dbrepo-mirror-service:latest "${REPOSITORY_URL}/mirror-service:${TAG}"
+	docker tag dbrepo-mirror-service:latest "${REPOSITORY2_URL}/mirror-service:${TAG}"
 
 tag-metadata-service:
-	docker tag dbrepo-metadata-service:latest "dbrepo/metadata-service:${TAG}"
-	docker tag dbrepo-metadata-service:latest "${AZURE_REPO}/dbrepo/metadata-service:${TAG}"
+	docker tag dbrepo-metadata-service:latest "${REPOSITORY_URL}/metadata-service:${TAG}"
+	docker tag dbrepo-metadata-service:latest "${REPOSITORY2_URL}/metadata-service:${TAG}"
 
 tag-search-db:
-	docker tag dbrepo-search-db:latest "dbrepo/search-db:${TAG}"
-	docker tag dbrepo-search-db:latest "${AZURE_REPO}/dbrepo/search-db:${TAG}"
+	docker tag dbrepo-search-db:latest "${REPOSITORY_URL}/search-db:${TAG}"
+	docker tag dbrepo-search-db:latest "${REPOSITORY2_URL}/search-db:${TAG}"
 
 tag-data-db-sidecar:
-	docker tag dbrepo-data-db-sidecar:latest "dbrepo/data-db-sidecar:${TAG}"
-	docker tag dbrepo-data-db-sidecar:latest "${AZURE_REPO}/dbrepo/data-db-sidecar:${TAG}"
+	docker tag dbrepo-data-db-sidecar:latest "${REPOSITORY_URL}/data-db-sidecar:${TAG}"
+	docker tag dbrepo-data-db-sidecar:latest "${REPOSITORY2_URL}/data-db-sidecar:${TAG}"
 
 tag-search-db-init:
-	docker tag dbrepo-search-db-init:latest "dbrepo/search-db-init:${TAG}"
-	docker tag dbrepo-search-db-init:latest "${AZURE_REPO}/dbrepo/search-db-init:${TAG}"
+	docker tag dbrepo-search-db-init:latest "${REPOSITORY_URL}/search-db-init:${TAG}"
+	docker tag dbrepo-search-db-init:latest "${REPOSITORY2_URL}/search-db-init:${TAG}"
 
 tag-log-service:
-	docker tag dbrepo-log-service:latest "dbrepo/log-service:${TAG}"
-	docker tag dbrepo-log-service:latest "${AZURE_REPO}/dbrepo/log-service:${TAG}"
+	docker tag dbrepo-log-service:latest "${REPOSITORY_URL}/log-service:${TAG}"
+	docker tag dbrepo-log-service:latest "${REPOSITORY2_URL}/log-service:${TAG}"
 
 tag-search-service:
-	docker tag dbrepo-search-service:latest "dbrepo/search-service:${TAG}"
-	docker tag dbrepo-search-service:latest "${AZURE_REPO}/dbrepo/search-service:${TAG}"
+	docker tag dbrepo-search-service:latest "${REPOSITORY_URL}/search-service:${TAG}"
+	docker tag dbrepo-search-service:latest "${REPOSITORY2_URL}/search-service:${TAG}"
 
 release: build-docker tag release-analyse-service release-authentication-service release-metadata-db release-ui release-metadata-service release-data-service release-log-service release-search-db release-mirror-service release-search-db-init release-search-service release-data-db-sidecar
 
 release-analyse-service: tag-analyse-service
-	docker push "dbrepo/analyse-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/analyse-service:${TAG}"
+	docker push "${REPOSITORY_URL}/analyse-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/analyse-service:${TAG}"
 
 release-authentication-service: tag-authentication-service
-	docker push "dbrepo/authentication-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/authentication-service:${TAG}"
+	docker push "${REPOSITORY_URL}/authentication-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/authentication-service:${TAG}"
 
 release-metadata-db: tag-metadata-db
-	docker push "dbrepo/metadata-db:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/metadata-db:${TAG}"
+	docker push "${REPOSITORY_URL}/metadata-db:${TAG}"
+	docker push "${REPOSITORY2_URL}/metadata-db:${TAG}"
 
 release-ui: tag-ui
-	docker push "dbrepo/ui:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/ui:${TAG}"
+	docker push "${REPOSITORY_URL}/ui:${TAG}"
+	docker push "${REPOSITORY2_URL}/ui:${TAG}"
 
 release-data-service: tag-data-service
-	docker push "dbrepo/data-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/data-service:${TAG}"
+	docker push "${REPOSITORY_URL}/data-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/data-service:${TAG}"
 
 release-mirror-service: tag-mirror-service
-	docker push "dbrepo/mirror-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/mirror-service:${TAG}"
+	docker push "${REPOSITORY_URL}/mirror-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/mirror-service:${TAG}"
 
 release-search-db: tag-search-db
-	docker push "dbrepo/search-db:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/search-db:${TAG}"
+	docker push "${REPOSITORY_URL}/search-db:${TAG}"
+	docker push "${REPOSITORY2_URL}/search-db:${TAG}"
 
 release-search-db-init: tag-search-db-init
-	docker push "dbrepo/search-db-init:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/search-db-init:${TAG}"
+	docker push "${REPOSITORY_URL}/search-db-init:${TAG}"
+	docker push "${REPOSITORY2_URL}/search-db-init:${TAG}"
 
 release-data-db-sidecar: tag-data-db-sidecar
-	docker push "dbrepo/data-db-sidecar:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/data-db-sidecar:${TAG}"
+	docker push "${REPOSITORY_URL}/data-db-sidecar:${TAG}"
+	docker push "${REPOSITORY2_URL}/data-db-sidecar:${TAG}"
 
 release-metadata-service: tag-metadata-service
-	docker push "dbrepo/metadata-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/metadata-service:${TAG}"
+	docker push "${REPOSITORY_URL}/metadata-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/metadata-service:${TAG}"
 
 release-log-service: tag-log-service
-	docker push "dbrepo/log-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/log-service:${TAG}"
+	docker push "${REPOSITORY_URL}/log-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/log-service:${TAG}"
 
 release-search-service: tag-search-service
-	docker push "dbrepo/search-service:${TAG}"
-	docker push "${AZURE_REPO}/dbrepo/search-service:${TAG}"
+	docker push "${REPOSITORY_URL}/search-service:${TAG}"
+	docker push "${REPOSITORY2_URL}/search-service:${TAG}"
 
 test-backend: test-metadata-service test-analyse-service test-data-service test-mirror-service
 
diff --git a/dbrepo-analyse-service/Dockerfile b/dbrepo-analyse-service/Dockerfile
index 0b5718ea3faa29aafcc840f3964824f4a3d081f3..52912bdcebd58f7d51e7f8f8a59f1195a42e76ff 100644
--- a/dbrepo-analyse-service/Dockerfile
+++ b/dbrepo-analyse-service/Dockerfile
@@ -13,6 +13,7 @@ ENV FLASK_RUN_HOST=0.0.0.0
 ENV PORT_APP=5000
 ENV FLASK_ENV=production
 ENV HOSTNAME=analyse-service
+ENV LOG_LEVEL=INFO
 ENV S3_STORAGE_ENDPOINT="http://storage-service:9000"
 ENV S3_ACCESS_KEY_ID="minioadmin"
 ENV S3_SECRET_ACCESS_KEY="minioadmin"
diff --git a/dbrepo-analyse-service/app.py b/dbrepo-analyse-service/app.py
index d3b9dc36169030dbf837c0a458c8657707bcb830..e917b10cfe0d292d7a98240406cddaec05581b6b 100644
--- a/dbrepo-analyse-service/app.py
+++ b/dbrepo-analyse-service/app.py
@@ -1,3 +1,4 @@
+import os
 from _csv import Error
 
 from flask import Flask, request, Response
@@ -11,7 +12,9 @@ from flasgger import LazyJSONEncoder
 from gevent.pywsgi import WSGIServer
 from prometheus_flask_exporter import PrometheusMetrics
 
-logging.basicConfig(level=logging.DEBUG)
+log_level = os.getenv('LOG_LEVEL', 'INFO')
+
+logging.basicConfig(level=logging.getLevelName(log_level))
 
 from logging.config import dictConfig
 
@@ -26,7 +29,7 @@ dictConfig({
         'formatter': 'default'
     }},
     'root': {
-        'level': 'INFO',
+        'level': log_level,
         'handlers': ['wsgi']
     }
 })
diff --git a/dbrepo-authentication-service/dbrepo-realm.json b/dbrepo-authentication-service/dbrepo-realm.json
index 88b4fbec0040e351caef5e6dc211c07cbc60af1a..235c16a720fdfa47ffdf2233460c4a7f4b494314 100644
--- a/dbrepo-authentication-service/dbrepo-realm.json
+++ b/dbrepo-authentication-service/dbrepo-realm.json
@@ -126,7 +126,7 @@
       "description" : "${default-table-handling}",
       "composite" : true,
       "composites" : {
-        "realm" : [ "modify-table-column-semantics", "list-tables", "find-table", "create-table" ]
+        "realm" : [ "modify-table-column-semantics", "list-tables", "find-table", "create-table", "delete-table" ]
       },
       "clientRole" : false,
       "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0",
@@ -577,6 +577,14 @@
       "clientRole" : false,
       "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0",
       "attributes" : { }
+    }, {
+      "id" : "6a0bb740-4448-49be-aee8-6dd183325be5",
+      "name" : "delete-foreign-table",
+      "description" : "${delete-foreign-table}",
+      "composite" : false,
+      "clientRole" : false,
+      "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0",
+      "attributes" : { }
     }, {
       "id" : "7f3652c7-3073-4566-ab63-25385495ebc3",
       "name" : "modify-database-visibility",
@@ -714,7 +722,7 @@
       "description" : "${escalated-table-handling}",
       "composite" : true,
       "composites" : {
-        "realm" : [ "delete-table" ]
+        "realm" : [ "delete-foreign-table" ]
       },
       "clientRole" : false,
       "containerId" : "82c39861-d877-4667-a0f3-4daa2ee230e0",
@@ -1054,7 +1062,7 @@
   "otpPolicyLookAheadWindow" : 1,
   "otpPolicyPeriod" : 30,
   "otpPolicyCodeReusable" : false,
-  "otpSupportedApplications" : [ "totpAppFreeOTPName", "totpAppMicrosoftAuthenticatorName", "totpAppGoogleName" ],
+  "otpSupportedApplications" : [ "totpAppGoogleName", "totpAppFreeOTPName", "totpAppMicrosoftAuthenticatorName" ],
   "webAuthnPolicyRpEntityName" : "keycloak",
   "webAuthnPolicySignatureAlgorithms" : [ "ES256" ],
   "webAuthnPolicyRpId" : "",
@@ -2027,7 +2035,7 @@
       "subType" : "anonymous",
       "subComponents" : { },
       "config" : {
-        "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ]
+        "allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "oidc-usermodel-property-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-full-name-mapper" ]
       }
     }, {
       "id" : "1849e52a-b8c9-44a8-af3d-ee19376a1ed1",
@@ -2053,7 +2061,7 @@
       "subType" : "authenticated",
       "subComponents" : { },
       "config" : {
-        "allowed-protocol-mapper-types" : [ "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-address-mapper", "saml-user-property-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "oidc-full-name-mapper" ]
+        "allowed-protocol-mapper-types" : [ "oidc-address-mapper", "oidc-usermodel-property-mapper", "oidc-usermodel-attribute-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-full-name-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-attribute-mapper" ]
       }
     } ],
     "org.keycloak.userprofile.UserProfileProvider" : [ {
@@ -2111,7 +2119,7 @@
   "internationalizationEnabled" : false,
   "supportedLocales" : [ ],
   "authenticationFlows" : [ {
-    "id" : "136de2cd-39b0-451f-9b5b-0596a6e703ba",
+    "id" : "88c24c27-94a1-4473-b545-bda821e22216",
     "alias" : "Account verification options",
     "description" : "Method with which to verity the existing account",
     "providerId" : "basic-flow",
@@ -2133,7 +2141,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "6d041452-0e17-4c2f-8dc1-077ae4bc0d15",
+    "id" : "796e78cb-67d5-4411-a163-bcc93afde7d3",
     "alias" : "Authentication Options",
     "description" : "Authentication options.",
     "providerId" : "basic-flow",
@@ -2162,7 +2170,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "a124f3c8-73e9-4ba9-bebc-8282aa0fad62",
+    "id" : "42e0b37c-1cd9-4472-8a3c-aff3f5903283",
     "alias" : "Browser - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -2184,7 +2192,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "b989868b-effd-494b-90ce-72311208cd07",
+    "id" : "6493953f-c9d1-44e6-8adb-0355eaecb65b",
     "alias" : "Direct Grant - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -2206,7 +2214,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "3a1986ff-77b5-43ee-8ea5-e83ce4c0b052",
+    "id" : "c247859c-1171-4ac1-b93f-e4f17a758226",
     "alias" : "First broker login - Conditional OTP",
     "description" : "Flow to determine if the OTP is required for the authentication",
     "providerId" : "basic-flow",
@@ -2228,7 +2236,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "8ef32e4a-384c-4a0f-9ef5-6b6c6dd67e23",
+    "id" : "5a61e41a-80fb-4848-a457-90cc1c93b625",
     "alias" : "Handle Existing Account",
     "description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
     "providerId" : "basic-flow",
@@ -2250,7 +2258,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "b800468d-d6f4-4e01-a143-f3b9a24767dd",
+    "id" : "bf35008a-4f40-44c0-bc26-994cf2b8fc5f",
     "alias" : "Reset - Conditional OTP",
     "description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
     "providerId" : "basic-flow",
@@ -2272,7 +2280,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "1a25e553-ebc5-407a-b432-ffdeea36907d",
+    "id" : "a001a5ff-780d-4d61-a60d-f0cce53b2726",
     "alias" : "User creation or linking",
     "description" : "Flow for the existing/non-existing user alternatives",
     "providerId" : "basic-flow",
@@ -2295,7 +2303,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "b799d43f-e8f4-4d6a-93e9-39f73e42e22a",
+    "id" : "49f8350a-151f-409d-8a8d-642cfffa2a96",
     "alias" : "Verify Existing Account by Re-authentication",
     "description" : "Reauthentication of existing account",
     "providerId" : "basic-flow",
@@ -2317,7 +2325,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "6f1b9b5f-3d09-4b2e-ba42-237ca174f393",
+    "id" : "ae555215-51f3-426d-ada9-36963bff44ef",
     "alias" : "browser",
     "description" : "browser based authentication",
     "providerId" : "basic-flow",
@@ -2353,7 +2361,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "2eddb88f-3c4c-4d2f-99ba-f2cba595b94a",
+    "id" : "74aae05a-433a-468e-959e-a8e23fcc89c0",
     "alias" : "clients",
     "description" : "Base authentication for clients",
     "providerId" : "client-flow",
@@ -2389,7 +2397,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "509dd5c3-ef75-45be-8cad-6328eef3c94e",
+    "id" : "f2aaa4a5-ee8b-42c4-b633-2e7d63bb1fc2",
     "alias" : "direct grant",
     "description" : "OpenID Connect Resource Owner Grant",
     "providerId" : "basic-flow",
@@ -2418,7 +2426,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "995b881c-71d0-4ac1-be36-ce7cf7a9779d",
+    "id" : "677394eb-81d5-401f-a288-43935b39989a",
     "alias" : "docker auth",
     "description" : "Used by Docker clients to authenticate against the IDP",
     "providerId" : "basic-flow",
@@ -2433,7 +2441,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "2c18ddd1-e304-4e7e-98f5-25f8abba945d",
+    "id" : "8d2809c9-9b3e-4c96-9768-5c89e752676a",
     "alias" : "first broker login",
     "description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
     "providerId" : "basic-flow",
@@ -2456,7 +2464,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "7abb16cf-310d-465f-93ef-34349ffc42fb",
+    "id" : "7024c35c-81a5-4f73-ad1e-366f4a488175",
     "alias" : "forms",
     "description" : "Username, password, otp and other auth forms.",
     "providerId" : "basic-flow",
@@ -2478,7 +2486,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "c01ab73b-0bc7-4047-98a3-502f98e6cc74",
+    "id" : "a6764496-699f-42d8-aa04-1e996c5f014e",
     "alias" : "http challenge",
     "description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
     "providerId" : "basic-flow",
@@ -2500,7 +2508,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "9270fad3-c2da-4d20-9d8d-c55c3bace9fc",
+    "id" : "fe150e10-946a-441f-aef5-417e171ca49f",
     "alias" : "registration",
     "description" : "registration flow",
     "providerId" : "basic-flow",
@@ -2516,7 +2524,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "1c8a94b0-eb77-4201-aa7f-4411da843ba1",
+    "id" : "5de2ec52-872f-462c-b994-47f59a31b5ab",
     "alias" : "registration form",
     "description" : "registration form",
     "providerId" : "form-flow",
@@ -2552,7 +2560,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "28b08679-329a-4555-a0a0-7396e398e5bb",
+    "id" : "5833fbd7-31d4-4cf3-95db-6e6beb40a2a5",
     "alias" : "reset credentials",
     "description" : "Reset credentials for a user if they forgot their password or something",
     "providerId" : "basic-flow",
@@ -2588,7 +2596,7 @@
       "userSetupAllowed" : false
     } ]
   }, {
-    "id" : "3e3a0a50-0850-4f48-b315-dcd9340b1c2a",
+    "id" : "64902f62-0b27-4174-8d66-88910be9fcfd",
     "alias" : "saml ecp",
     "description" : "SAML ECP Profile Authentication Flow",
     "providerId" : "basic-flow",
@@ -2604,13 +2612,13 @@
     } ]
   } ],
   "authenticatorConfig" : [ {
-    "id" : "2e6a1507-6515-4d2a-8462-a2517df9d1da",
+    "id" : "e1ad59c1-8db5-4381-b7b9-cc97d5653236",
     "alias" : "create unique user config",
     "config" : {
       "require.password.update.after.registration" : "false"
     }
   }, {
-    "id" : "4b0c01c1-aa00-432c-a9c6-640881e71fb6",
+    "id" : "81d8abc7-c14e-4b41-9432-8ea38cd8a0e3",
     "alias" : "review profile config",
     "config" : {
       "update.profile.on.first.login" : "missing"
diff --git a/dbrepo-metadata-db/setup-schema.sql b/dbrepo-metadata-db/setup-schema.sql
index 972216ac0c6c2d28f654e3ea41febbbe7adf1c13..1f46bf29cfefc395882f69e9f9cc460f1b77aec1 100644
--- a/dbrepo-metadata-db/setup-schema.sql
+++ b/dbrepo-metadata-db/setup-schema.sql
@@ -167,7 +167,7 @@ CREATE TABLE IF NOT EXISTS `mdb_columns`
     std_dev          Numeric      NULL,
     created          timestamp    NOT NULL DEFAULT NOW(),
     last_modified    timestamp,
-    FOREIGN KEY (tID) REFERENCES mdb_tables (ID),
+    FOREIGN KEY (tID) REFERENCES mdb_tables (ID) ON DELETE CASCADE,
     PRIMARY KEY (ID)
 ) WITH SYSTEM VERSIONING;
 
diff --git a/dbrepo-metadata-db/setup-schema_local.sql b/dbrepo-metadata-db/setup-schema_local.sql
index 1c144e31e35cb22bd09702ae683050454b66337d..4b35338875b707d7a951461bd54314017e573d16 100644
--- a/dbrepo-metadata-db/setup-schema_local.sql
+++ b/dbrepo-metadata-db/setup-schema_local.sql
@@ -2,6 +2,6 @@ BEGIN;
 
 INSERT INTO `mdb_containers` (name, internal_name, image_id, host, port, sidecar_host, sidecar_port,
                               privileged_username, privileged_password)
-VALUES ('MariaDB 11.1.3', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db-sidecar', 3305, 'root', 'dbrepo');
+VALUES ('MariaDB Galera 11.1.3', 'mariadb_11_1_3', 1, 'data-db', 3306, 'data-db-sidecar', 3305, 'root', 'dbrepo');
 
 COMMIT;
diff --git a/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/TableMapper.java b/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/TableMapper.java
index 18549b1cefd36e574b23f02d80bdab88f61ff1fc..c42c8f162598d6b24463c67cb7d9cbb6df323da9 100644
--- a/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/TableMapper.java
+++ b/dbrepo-metadata-service/repositories/src/main/java/at/tuwien/mapper/TableMapper.java
@@ -307,26 +307,40 @@ public interface TableMapper {
 
     /**
      * Map the table to a drop table query
-     * TODO for e.g. postgres image
      *
-     * @param data The table
-     * @return The drop table query
+     * @param connection The connection
+     * @param data       The table that should be dropped.
      */
-    default PreparedStatement tableToDropTableRawQuery(Connection connection, Table data) throws ImageNotSupportedException, QueryMalformedException {
+    default void tableToDropTableRawQuery(Connection connection, Table data) throws ImageNotSupportedException, QueryMalformedException {
         if (!data.getDatabase().getContainer().getImage().getName().equals("mariadb")) {
             log.error("Currently only MariaDB is supported");
             throw new ImageNotSupportedException("Currently only MariaDB is supported");
         }
-        final StringBuilder statement = new StringBuilder("DROP TABLE `")
+        final StringBuilder sequence = new StringBuilder();
+        if (data.getColumns().stream().anyMatch(TableColumn::getAutoGenerated)) {
+            log.debug("table with id {} has sequence generated which needs to be dropped too", data.getId());
+            sequence.append("DROP SEQUENCE `")
+                    .append(tableToSequenceName(data))
+                    .append("`;");
+        }
+        final StringBuilder table = new StringBuilder("DROP TABLE `")
+                .append(data.getInternalName())
+                .append("`;");
+        final StringBuilder view = new StringBuilder("DROP VIEW `hs_")
                 .append(data.getInternalName())
                 .append("`;");
         try {
-            final PreparedStatement pstmt = connection.prepareStatement(statement.toString());
-            log.trace("prepared statement {}", statement);
-            return pstmt;
+            final Statement statement = connection.createStatement();
+            if (!sequence.isEmpty()) {
+                statement.execute(sequence.toString());
+            }
+            statement.execute(table.toString());
+            log.trace("mapped drop table statement {}", table);
+            statement.execute(view.toString());
+            log.trace("mapped drop view statement {}", table);
         } catch (SQLException e) {
-            log.error("Failed to prepare statement {}, reason: {}", statement, e.getMessage());
-            throw new QueryMalformedException("Failed to prepare statement", e);
+            log.error("Failed to drop table or sequence: {}", e.getMessage());
+            throw new QueryMalformedException("Failed to drop table or sequence", e);
         }
     }
 
@@ -439,7 +453,7 @@ public interface TableMapper {
         log.trace("create table query built with {} columns and system versioning", data.getColumns().size());
         try {
             final Statement statement = connection.createStatement();
-            if (sequence.length() > 0) {
+            if (!sequence.isEmpty()) {
                 log.trace("mapped create sequence statement: {}", sequence);
                 statement.execute(sequence.toString());
             }
@@ -454,7 +468,13 @@ public interface TableMapper {
 
     default String tableCreateDtoToSequenceName(TableCreateDto data) {
         final String name = "seq_" + nameToInternalName(data.getName()) + "_id";
-        log.trace("mapped name {} to internal name {}", data.getName(), name);
+        log.trace("mapped table name {} to sequence name {}", data.getName(), name);
+        return name;
+    }
+
+    default String tableToSequenceName(Table data) {
+        final String name = "seq_" + data.getInternalName() + "_id";
+        log.trace("mapped table to sequence name {}", name);
         return name;
     }
 
diff --git a/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java b/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
index 10d0ae805ff6e6fbe1303571d469e0e5d35c2fb9..0b0b27c16f6be592debe9f1d5e2d3de77776e8d2 100644
--- a/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
+++ b/dbrepo-metadata-service/rest-service/src/main/java/at/tuwien/endpoints/TableEndpoint.java
@@ -12,6 +12,7 @@ import at.tuwien.mapper.TableMapper;
 import at.tuwien.service.MessageQueueService;
 import at.tuwien.service.TableService;
 import at.tuwien.utils.PrincipalUtil;
+import at.tuwien.utils.UserUtil;
 import at.tuwien.validation.EndpointValidator;
 import io.micrometer.core.annotation.Timed;
 import io.micrometer.observation.annotation.Observed;
@@ -201,7 +202,7 @@ public class TableEndpoint {
 
     @DeleteMapping("/{tableId}")
     @Transactional
-    @PreAuthorize("hasAuthority('delete-table')")
+    @PreAuthorize("hasAuthority('delete-table') or hasAuthority('delete-foreign-table')")
     @Observed(name = "dbr_table_delete")
     @Operation(summary = "Delete a table", security = @SecurityRequirement(name = "bearerAuth"))
     @ApiResponses(value = {
@@ -245,8 +246,14 @@ public class TableEndpoint {
                                        @NotNull @PathVariable("tableId") Long tableId,
                                        @NotNull Principal principal)
             throws TableNotFoundException, DatabaseNotFoundException, ImageNotSupportedException,
-            DataProcessingException, ContainerNotFoundException, TableMalformedException, QueryMalformedException {
+            DataProcessingException, ContainerNotFoundException, TableMalformedException, QueryMalformedException,
+            NotAllowedException {
         log.debug("endpoint delete table, databaseId={}, tableId={}, {}", databaseId, tableId, PrincipalUtil.formatForDebug(principal));
+        final Table table = tableService.find(databaseId, tableId);
+        if (!table.getOwner().getUsername().equals(principal.getName()) && !UserUtil.hasRole(principal, "delete-foreign-table")) {
+            log.error("Failed to delete table: not owned by you");
+            throw new NotAllowedException("Failed to delete table: not owned by you");
+        }
         tableService.deleteTable(databaseId, tableId);
         return ResponseEntity.accepted()
                 .build();
diff --git a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
index 4cf3bf7459721e47a0d4c852a19bf8f772f5b96e..ae0a2643a224894e5df24aace286937c1f9a4c8e 100644
--- a/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
+++ b/dbrepo-metadata-service/services/src/main/java/at/tuwien/service/impl/TableServiceImpl.java
@@ -128,11 +128,10 @@ public class TableServiceImpl extends HibernateConnector implements TableService
         final ComboPooledDataSource dataSource = getPrivilegedDataSource(database.getContainer().getImage(), database.getContainer(), database);
         try {
             final Connection connection = dataSource.getConnection();
-            final PreparedStatement preparedStatement = tableMapper.tableToDropTableRawQuery(connection, table);
-            preparedStatement.executeUpdate();
+            tableMapper.tableToDropTableRawQuery(connection, table);
         } catch (SQLException e) {
-            log.error("Failed to delete table {}, reason: {}", table, e.getMessage());
-            throw new TableMalformedException("Failed to delete table", e);
+            log.error("Failed to drop table: {}", e.getMessage());
+            throw new TableMalformedException("Failed to drop table", e);
         } finally {
             dataSource.close();
         }
diff --git a/dbrepo-search-service/Dockerfile b/dbrepo-search-service/Dockerfile
index 4371d22bd0464d7a680935abd0638e5e2dfb1c2d..1faeae58d0b12caecb23d92a0ae160cf52080157 100644
--- a/dbrepo-search-service/Dockerfile
+++ b/dbrepo-search-service/Dockerfile
@@ -17,6 +17,12 @@ COPY ./us-yml ./us-yml
 COPY config.py wsgi.py ./
 
 ENV FLASK_APP=wsgi.py
+ENV COLLECTION="['database','table','column','identifier','unit','concept','user','view']"
+ENV OPENSEARCH_HOST=localhost
+ENV OPENSEARCH_PORT=9200
+ENV OPENSEARCH_USERNAME=admin
+ENV OPENSEARCH_PASSWORD=admin
+ENV LOG_LEVEL=INFO
 
 RUN chown -R alpine:alpine ./
 USER alpine
diff --git a/dbrepo-search-service/app/__init__.py b/dbrepo-search-service/app/__init__.py
index 014b475338d3348e16121bcc75b487129cef5df7..f7554e25da9ab5b36a5157b611980adadbee41fe 100644
--- a/dbrepo-search-service/app/__init__.py
+++ b/dbrepo-search-service/app/__init__.py
@@ -8,7 +8,9 @@ from opensearchpy import OpenSearch
 from config import Config
 from prometheus_flask_exporter import PrometheusMetrics
 
-logging.basicConfig(level=logging.DEBUG)
+log_level = os.getenv('LOG_LEVEL', 'INFO')
+
+logging.basicConfig(level=logging.getLevelName(log_level))
 
 from logging.config import dictConfig
 
@@ -31,7 +33,7 @@ def create_app(config_class=Config):
             'formatter': 'simple'  # default
         }},
         'root': {
-            'level': 'DEBUG',
+            'level': log_level,
             'handlers': ['wsgi']
         }
     })
diff --git a/dbrepo-search-service/app/api/routes.py b/dbrepo-search-service/app/api/routes.py
index f666637037a2cf5289387de1801194d733d7bffd..8850f57939262043eb8bad141ac78ea6a9f330fe 100644
--- a/dbrepo-search-service/app/api/routes.py
+++ b/dbrepo-search-service/app/api/routes.py
@@ -2,7 +2,8 @@
 """
 This file defines the endpoints for the dbrepo-search-service.
 """
-import logging
+import logging, os
+from ast import literal_eval
 
 from flask import request
 
@@ -11,16 +12,11 @@ from app.api import api_bp
 from flasgger.utils import swag_from
 from app.opensearch_client import *
 import math
-from opensearchpy import OpenSearch
 
-host = "localhost"
-port = 9200
-auth = ("admin", "admin")
-client = OpenSearch(
-    hosts=[{"host": host, "port": port}],
-    http_compress=True,  # enables gzip compression for request bodies
-    http_auth=auth,
-)
+available_indices = literal_eval(
+    os.getenv("COLLECTION", "['database','table','column','identifier','unit','concept','user','view']"))
+
+logging.info(f"Available collection loaded as: {available_indices}")
 
 
 def general_filter(index, results):
@@ -70,21 +66,10 @@ def get_index(index):
     :param index: desired index
     :return: list of the results
     """
-    logging.info('Searching for index: %s', index)
-    available_indices = [
-        "table",
-        "user",
-        "database",
-        "column",
-        "identifier",
-        "concept",
-        "unit",
-        "view",
-    ]
+    logging.info(f'Searching for index: {index}')
     if index not in available_indices:
         return {
             "results": {},
-            "status": 404,
         }, 404  # ToDo: replace with better error handling
     results = query_index_by_term_opensearch(index, "*", "contains")
     results = general_filter(index, results)
@@ -104,54 +89,54 @@ def get_fields(index):
     :param index:
     :return:
     """
-    logging.info('Getting fields for index: %s', index)
-    available_indices = [
-        "table",
-        "user",
-        "database",
-        "column",
-        "identifier",
-        "concept",
-        "unit",
-        "view",
-    ]
+    logging.info(f'Getting fields for index: {index}')
     if index not in available_indices:
         return {
             "results": {},
-            "status": 404,
-        }, 404  # ToDo: replace with better error handling
-    fields = []
+        }, 404
     fields = get_fields_for_index(index)
-    logging.debug('get fields for index %s resulted in fields: %s', index, fields)
+    logging.debug(f'get fields for index {index} resulted in {len(fields)} field(s)')
     return {"fields": fields, "status": 200}
 
 
 @api_bp.route("/api/search", methods=["POST"], endpoint="search_fuzzy_search")
-def search():
+def post_fuzzy_search():
     """
-    Main endpoint for general searching.
-
-    There are three ways of  searching:
-    *  if you specify 'search_term' in the request json, all entries that have relevant fields matching the 'search_term' are returned.
-         No wildcards are allowed, although fuzzy search is enabled (meaning, there are also matches when 1 or two characters differ)
-    * if you specify 't1' and/or 't2' entries that are newer than timestamp 't1' and entries that are younger than timestamp 't2' are returned.
-        the timestamp has to have the format YYYY-MM-DD
-    * if 'field' and 'value' are specified, only entries where the 'field' matches the 'value' are returned.
-        For example, if  the 'field' is 'creator.orcid' and the 'value' is '0000-0002-6778-0887',
-        only entries created by the person with this specific orcid id are returned.
-    If there are multiple parameters specified, they are combined via an AND-conjunction, so you can e.g. search for entries that match a certain keyword,
-    were created in a certain time period, by a specific person.
+    Main endpoint for fuzzy searching.
     :return:
     """
     if request.content_type != "application/json":
         return {
-            "status": 415,
             "message": "Unsupported Media Type",
             "suggested_content_types": ["application/json"],
         }, 415
     req_body = request.json
-    logging.debug('search request body: %s', req_body)
+    logging.debug(f"search request body: {req_body}")
     search_term = req_body.get("search_term")
+    response = general_search(None, available_indices, search_term, None, None, None)
+    return response, 200
+
+
+@api_bp.route("/api/search/<string:index>", methods=["POST"], endpoint="search_general_search")
+def post_general_search(index):
+    """
+    Main endpoint for general searching on a specific index.
+    :return:
+    """
+    if request.content_type != "application/json":
+        return {
+            "message": "Unsupported Media Type",
+            "suggested_content_types": ["application/json"],
+        }, 415
+    req_body = request.json
+    logging.info(f'Searching for index: {index}')
+    logging.debug(f"search request body: {req_body}")
+    search_term = req_body.get("search_term")
+    if index is not None and index not in available_indices:
+        logging.error(f"Index {index} is not in list of searchable indices: {available_indices}")
+        return {
+            "results": {},
+        }, 404
     t1 = req_body.get("t1")
     if not str(t1).isdigit():
         t1 = None
@@ -162,5 +147,5 @@ def search():
     if t1 is not None and t2 is not None and "unit.uri" in field_value_pairs and "concept.uri" in field_value_pairs:
         response = unit_independent_search(t1, t2, field_value_pairs)
     else:
-        response = general_search(search_term, t1, t2, field_value_pairs)
+        response = general_search(index, available_indices, search_term, t1, t2, field_value_pairs)
     return response, 200
diff --git a/dbrepo-search-service/app/opensearch_client.py b/dbrepo-search-service/app/opensearch_client.py
index 01717c6a98f957c2052a22223a2b186429c10f98..d5d5e8f877caafd90c8de12effd7c0fc98ea2f4d 100644
--- a/dbrepo-search-service/app/opensearch_client.py
+++ b/dbrepo-search-service/app/opensearch_client.py
@@ -7,7 +7,6 @@ import re
 from flask import current_app
 from collections.abc import MutableMapping
 
-from omlib.dimension import Dimension
 from omlib.measure import om
 from omlib.constants import SI, OM_IDS
 from omlib.omconstants import OM
@@ -117,25 +116,25 @@ def get_fields_for_index(index):
     return fields_list
 
 
-def general_search(search_term=None, t1=None, t2=None, fieldValuePairs=None):
+def general_search(index=None, indices=(), search_term=None, t1=None, t2=None, field_value_pairs=None):
     """
     Main method for seaching stuff in the opensearch db
 
     all parameters are optional
 
-    :param search_term: the term you want to search for (no wildcards are allowed)
-    :param t1: beginn time period
-    :param t2:  end time period
-    :param field: name of the field you want to look at
-    :param value: the value the specified field should match
-    :return:
+    :param index: The index to be searched. Optional.
+    :param indices: The available indices to be searched.
+    :param search_term: The search term. Optional.
+    :param t1: The start range value. Optional.
+    :param t2: The end range value. Optional.
+    :param field_value_pairs: The key-value pair of properties that need to match. Optional.
+    :return: The object of results and HTTP status code. e.g. { "hits": { "hits": [] } }, 200
     """
-    logging.info(f"Performing general search")
-    searchable_indices = ["database", "user", "table", "column", "identifier", "view", "concept", "unit"]
-    index = searchable_indices
     queries = []
-    if search_term is not None:
-        logging.debug('query has search_term present')
+    if search_term is None:
+        logging.info(f"Performing general search")
+    else:
+        logging.info(f"Performing fuzzy search")
         fuzzy_body = {
             "query": {
                 "multi_match": {
@@ -146,14 +145,16 @@ def general_search(search_term=None, t1=None, t2=None, fieldValuePairs=None):
                 }
             }
         }
-        logging.debug('search body: %s', fuzzy_body)
+        logging.debug(f'search body: {fuzzy_body}')
+        index = ','.join(indices)
+        logging.debug(f'search index: {index}')
         response = current_app.opensearch_client.search(
             index=index,
             body=fuzzy_body
         )
-        response["status"] = 200
+        logging.info(f"Found {len(response['hits']['hits'])} result(s)")
         return response
-    if fieldValuePairs is not None and len(fieldValuePairs) > 0:
+    if field_value_pairs is not None and len(field_value_pairs) > 0:
         logging.debug('query has field_value_pairs present')
         musts = []
         is_range_open_end = False
@@ -168,12 +169,8 @@ def general_search(search_term=None, t1=None, t2=None, fieldValuePairs=None):
         if t1 is not None and t2 is not None:
             is_range_query = True
             logging.debug(f"query has start value {t1} and end value {t2} present")
-        for key, value in fieldValuePairs.items():
+        for key, value in field_value_pairs.items():
             logging.debug(f"current key={key}, value={value}")
-            if key == "type" and value in searchable_indices:
-                logging.debug("search for specific index: %s", value)
-                index = value
-                continue
             # if key in field_list:
             if re.match(f"{index}\.", key):
                 new_field = key[key.index(".") + 1:len(key)]
@@ -242,10 +239,8 @@ def general_search(search_term=None, t1=None, t2=None, fieldValuePairs=None):
     logging.debug('search body: %s', body)
     response = current_app.opensearch_client.search(
         index=index,
-        body=body
+        body=json.dumps(body)
     )
-    response["status"] = 200
-    # response = [hit["_source"] for hit in response["hits"]["hits"]]
     return response
 
 
@@ -271,16 +266,17 @@ def unit_independent_search(t1=None, t2=None, field_value_pairs=None):
     """
     logging.info(f"Performing unit-independent search")
     searches = []
-    response = current_app.opensearch_client.search(
-        index="column",
-        body={
-            "size": 0,
-            "aggs": {
-                "units": {
-                    "terms": {"field": "unit.uri", "size": 500}
-                }
+    body = {
+        "size": 0,
+        "aggs": {
+            "units": {
+                "terms": {"field": "unit.uri", "size": 500}
             }
         }
+    }
+    response = current_app.opensearch_client.search(
+        index="column",
+        body=json.dumps(body)
     )
     unit_uris = [hit["key"] for hit in response["aggregations"]["units"]["buckets"]]
     logging.debug(f"found {len(unit_uris)} unit(s) in column index")
@@ -334,24 +330,17 @@ def unit_independent_search(t1=None, t2=None, field_value_pairs=None):
                 }
             }
         })
-    # searches.append({'index': 'column'})
-    # searches.append({
-    #     "query": {
-    #         "match_all": {}
-    #     }
-    # })
     logging.debug('searches: %s', searches)
     body = ''
     for search in searches:
         body += '%s \n' % json.dumps(search)
     responses = current_app.opensearch_client.msearch(
-        body=body
+        body=body
     )
     response = {
         "hits": {
             "hits": flatten([hits["hits"]["hits"] for hits in responses["responses"]])
         },
-        "took": responses["took"],
-        "status": 200
+        "took": responses["took"]
     }
     return response
diff --git a/dbrepo-ui/api/search.service.js b/dbrepo-ui/api/search.service.js
index c531256ee6d678beb258ae944350182ce2cce6aa..c037199f366e90cc4702d8cda73d909c90097a36 100644
--- a/dbrepo-ui/api/search.service.js
+++ b/dbrepo-ui/api/search.service.js
@@ -19,7 +19,7 @@ class SearchService {
     })
   }
 
-  search (searchData) {
+  search (index, searchData) {
     // transform values to what the search API expects
     let localSearchData = Object.assign({}, searchData)
     const searchTerm = localSearchData.search_term
@@ -36,7 +36,7 @@ class SearchService {
       field_value_pairs: { ...localSearchData }
     }
     return new Promise((resolve, reject) => {
-      axios.post('/api/search', payload, { headers: { Accept: 'application/json' } })
+      axios.post(`/api/search${index ? `/${index}` : ''}`, payload, { headers: { Accept: 'application/json' } })
         .then((response) => {
           const { hits } = response.data
           console.debug('advanced search response', hits.hits)
diff --git a/dbrepo-ui/api/table.service.js b/dbrepo-ui/api/table.service.js
index a438b46aaedf589bb1bc877302157b6b42e4e02b..d2ddf42b6343ff4092d5b314d91290217b8cb5d3 100644
--- a/dbrepo-ui/api/table.service.js
+++ b/dbrepo-ui/api/table.service.js
@@ -160,6 +160,21 @@ class TableService {
     })
   }
 
+  delete (databaseId, tableId) {
+    return new Promise((resolve, reject) => {
+      api.delete(`/api/database/${databaseId}/table/${tableId}`, { headers: { Accept: 'application/json' } })
+        .then(() => {
+          resolve()
+        })
+        .catch((error) => {
+          const { code, message } = error.response.data
+          console.error('Failed to delete table', error)
+          Vue.$toast.error(`[${code}] Failed to delete table: ${message}`)
+          reject(error)
+        })
+    })
+  }
+
   deleteTuple (databaseId, tableId, data) {
     return new Promise((resolve, reject) => {
       api.delete(`/api/database/${databaseId}/table/${tableId}/data`, { headers: { Accept: 'application/json' }, data })
diff --git a/dbrepo-ui/api/table.utils.js b/dbrepo-ui/api/table.utils.js
new file mode 100644
index 0000000000000000000000000000000000000000..5744289e7e94839c2cbf1077a8110c00555f5ec1
--- /dev/null
+++ b/dbrepo-ui/api/table.utils.js
@@ -0,0 +1,10 @@
+class TableUtils {
+  isOwner (table, user) {
+    if (!table || !user) {
+      return false
+    }
+    return table.owner.id === user.id
+  }
+}
+
+export default new TableUtils()
diff --git a/dbrepo-ui/components/TableToolbar.vue b/dbrepo-ui/components/TableToolbar.vue
index 2cfe2f80ece49ed407c2d4f6cd185cb0bc010869..c4ebefe8e1efb5827d52890cb4216a846df43bfa 100644
--- a/dbrepo-ui/components/TableToolbar.vue
+++ b/dbrepo-ui/components/TableToolbar.vue
@@ -29,6 +29,9 @@
         <v-btn v-if="canImportCsv" class="mb-1" :to="`/database/${$route.params.database_id}/table/${$route.params.table_id}/import`">
           <v-icon left>mdi-cloud-upload</v-icon> Import .csv
         </v-btn>
+        <v-btn v-if="canDropTable" class="mb-1" color="error" @click="dropTableDialog = true">
+          <v-icon left>mdi-delete</v-icon> Drop Table
+        </v-btn>
       </v-toolbar-title>
     </v-toolbar>
     <v-tabs v-model="tab" color="primary">
@@ -48,6 +51,12 @@
       max-width="640">
       <EditTuple :columns="table.columns" :tuple="tuple" :edit="edit" @close="close" />
     </v-dialog>
+    <v-dialog
+      v-model="dropTableDialog"
+      persistent
+      max-width="640">
+      <DropTable @close="closed" />
+    </v-dialog>
   </div>
 </template>
 
@@ -56,10 +65,13 @@ import EditTuple from '@/components/dialogs/EditTuple'
 import TableService from '@/api/table.service'
 import UserUtils from '@/api/user.utils'
 import DatabaseUtils from '@/api/database.utils'
+import TableUtils from '@/api/table.utils'
+import DropTable from '@/components/dialogs/DropTable'
 
 export default {
   components: {
-    EditTuple
+    EditTuple,
+    DropTable
   },
   props: {
     selection: {
@@ -76,7 +88,8 @@ export default {
       loadingDelete: false,
       error: false,
       edit: false,
-      editTupleDialog: false
+      editTupleDialog: false,
+      dropTableDialog: false
     }
   },
   computed: {
@@ -131,6 +144,12 @@ export default {
       }
       return UserUtils.hasReadAccess(this.access) && this.roles.includes('execute-query')
     },
+    canDropTable () {
+      if (!this.roles || !this.table) {
+        return false
+      }
+      return TableUtils.isOwner(this.table, this.user) || this.roles.includes('delete-foreign-table')
+    },
     canCreateView () {
       if (!this.user) {
         return false
@@ -219,6 +238,12 @@ export default {
       } else {
         this.$emit('modified', { success: false, action: 'close' })
       }
+    },
+    async closed (event) {
+      console.debug('closed drop table dialog', event)
+      this.dropTableDialog = false
+      await this.$store.dispatch('reloadDatabase')
+      await this.$router.push(`/database/${this.$route.params.database_id}/table`)
     }
   }
 }
diff --git a/dbrepo-ui/components/dialogs/DropTable.vue b/dbrepo-ui/components/dialogs/DropTable.vue
new file mode 100644
index 0000000000000000000000000000000000000000..0a60a4fb3200f1e72cbd2c89b336b493fb4cfd4e
--- /dev/null
+++ b/dbrepo-ui/components/dialogs/DropTable.vue
@@ -0,0 +1,85 @@
+<template>
+  <div>
+    <v-form ref="form" v-model="valid" autocomplete="off" @submit.prevent="submit">
+      <v-card>
+        <v-card-title>Drop table {{ table.internal_name }}</v-card-title>
+        <v-card-text>
+          <v-row dense>
+            <v-col>
+              This action cannot be undone! Type the table name <code>{{ table.internal_name }}</code> below if you really want to drop it with all stored data.
+            </v-col>
+          </v-row>
+          <v-row dense>
+            <v-col>
+              <v-text-field
+                id="confirm"
+                v-model="confirm"
+                name="confirm"
+                label="Table Name *"
+                autofocus
+                required />
+            </v-col>
+          </v-row>
+        </v-card-text>
+        <v-card-actions>
+          <v-spacer />
+          <v-btn
+            class="mb-2"
+            @click="cancel">
+            Cancel
+          </v-btn>
+          <v-btn
+            class="mb-2 mr-1"
+            color="error"
+            :loading="loadingDelete"
+            :disabled="confirm !== table.internal_name"
+            @click="dropTable">
+            Delete
+          </v-btn>
+        </v-card-actions>
+      </v-card>
+    </v-form>
+  </div>
+</template>
+
+<script>
+import TableService from '@/api/table.service'
+
+export default {
+  data () {
+    return {
+      confirm: null,
+      loadingDelete: false,
+      valid: false
+    }
+  },
+  computed: {
+    table () {
+      return this.$store.state.table
+    }
+  },
+  methods: {
+    submit () {
+      this.$refs.form.validate()
+    },
+    cancel () {
+      this.$emit('close', { action: 'closed' })
+    },
+    dropTable () {
+      if (!this.table.id) {
+        return
+      }
+      this.loadingDelete = true
+      TableService.delete(this.table.database.id, this.table.id)
+        .then(() => {
+          console.info('Deleted table with id ', this.table.id)
+          this.$toast.success('Successfully deleted table with id ' + this.table.id)
+          this.$emit('close', { action: 'deleted' })
+        })
+        .finally(() => {
+          this.loadingDelete = false
+        })
+    }
+  }
+}
+</script>
diff --git a/dbrepo-ui/components/search/AdvancedSearch.vue b/dbrepo-ui/components/search/AdvancedSearch.vue
index f1cb9faf59d693353a3ec742fb4a0e1b4f05047d..6113cc14ff723ae1720f3f4bec67649537f24c31 100644
--- a/dbrepo-ui/components/search/AdvancedSearch.vue
+++ b/dbrepo-ui/components/search/AdvancedSearch.vue
@@ -1,12 +1,12 @@
 <template>
   <div>
-    <v-card flat tile>
+    <v-card v-if="isAdvancedSearch" flat tile>
       <v-card-text class="pt-0 pl-4 pb-6 pr-4">
         <v-form ref="form" v-model="valid" autocomplete="off" @submit.prevent="submit">
           <v-row dense>
             <v-col cols="3">
               <v-select
-                v-model="advancedSearchData.type"
+                v-model="index"
                 :items="fieldItems"
                 item-text="name"
                 item-value="value"
@@ -141,6 +141,7 @@ import SemanticMapper from '@/api/semantic.mapper'
 export default {
   data () {
     return {
+      index: 'database',
       valid: false,
       loading: false,
       loadingFields: false,
@@ -170,25 +171,27 @@ export default {
       advancedSearchData: {
         name: null,
         internal_name: null,
-        id: null,
-        type: 'database'
+        id: null
       }
     }
   },
   computed: {
     hideFields () {
-      const selectedOption = this.advancedSearchData.type
+      const selectedOption = this.index
       return {
         hideNameField: selectedOption === 'identifier',
         hideInternalNameField: ['identifier', 'user', 'concept', 'unit'].includes(selectedOption)
       }
     },
     isEligibleConceptOrUnitSearch () {
-      return ['column'].includes(this.advancedSearchData.type)
+      return ['column'].includes(this.index)
+    },
+    isAdvancedSearch () {
+      return !this.$route.query.q
     }
   },
   watch: {
-    'advancedSearchData.type': {
+    index: {
       handler (newType, oldType) {
         if (!newType) {
           return
@@ -244,7 +247,7 @@ export default {
         this.advancedSearchData.t2 = Number(this.advancedSearchData.t2)
       }
       this.loading = true
-      SearchService.search(this.advancedSearchData)
+      SearchService.search(this.index, this.advancedSearchData)
         .then((response) => {
           this.$emit('search-result', response.map(h => h._source))
         })
@@ -254,7 +257,6 @@ export default {
     },
     isAdvancedSearchEmpty () {
       return !(
-        this.advancedSearchData.type ||
         this.advancedSearchData.id ||
         this.advancedSearchData.name ||
         this.advancedSearchData.internal_name
@@ -304,12 +306,12 @@ export default {
       // Generates a dynamic v-model; It will be attached to the advancedSearchData object
       if (!item) { return '' }
 
-      return `${this.advancedSearchData.type}.${item.attribute_name}`
+      return `${this.index}.${item.attribute_name}`
     },
     shouldRenderItem (item) {
       // Checks if item's attribute_name matches any wanted field
       // The expected response is of a flattened format, so this method must be modified accordingly if the response is changed
-      return this.dynamicFieldsMap()[this.advancedSearchData.type].includes(item.attribute_name)
+      return this.dynamicFieldsMap()[this.index].includes(item.attribute_name)
     },
     fetchLicenses () {
       // Licenses is a nested object in the backend, but without any values.
diff --git a/dbrepo-ui/layouts/default.vue b/dbrepo-ui/layouts/default.vue
index 496f427136f5434d73c9b472d2c021c6bf004f0c..946b88a2deafc10163f939afb2840c25905f7a4f 100644
--- a/dbrepo-ui/layouts/default.vue
+++ b/dbrepo-ui/layouts/default.vue
@@ -95,6 +95,7 @@
           flat
           single-line
           hide-details
+          clearable
           append-icon="mdi-magnify"
           :placeholder="$t('search.fuzzy.placeholder', { name: 'vue-i18n' })"
           @click:append="retrieve" />
diff --git a/dbrepo-ui/pages/search/index.vue b/dbrepo-ui/pages/search/index.vue
index e08e4b63b846351c4f5d8ccbbfe484c73350c81e..4f97484d1491271fbdbf119aec8de2617c684ee6 100644
--- a/dbrepo-ui/pages/search/index.vue
+++ b/dbrepo-ui/pages/search/index.vue
@@ -96,22 +96,22 @@ export default {
   watch: {
     '$route.query.q': {
       handler () {
-        this.retrieve()
+        this.generalSearch()
       }
     }
   },
   mounted () {
     if (this.query) {
-      this.retrieve()
+      this.generalSearch()
     }
   },
   methods: {
-    retrieve () {
+    generalSearch () {
       if (this.loading) {
         return
       }
       this.loading = true
-      SearchService.search({ search_term: this.query })
+      SearchService.search(null, { search_term: this.query })
         .then((hits) => {
           this.results = hits.map(h => h._source)
         })
diff --git a/docker-compose.yml b/docker-compose.yml
index 4074c172ab10e56b7d89d75535f807aad4260ea4..6431cb7a414b62fffdac3abe41222b1ce13cf800 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -40,7 +40,7 @@ services:
     restart: "no"
     container_name: dbrepo-data-db
     hostname: data-db
-    image: docker.io/bitnami/mariadb:11.1.3
+    image: docker.io/bitnami/mariadb-galera:11.1.3-debian-11-r0
     volumes:
       - data-db-data:/bitnami/mariadb
       - "${SHARED_FILESYSTEM:-/tmp}:/tmp"
@@ -48,6 +48,7 @@ services:
       - "3307:3306"
     environment:
       MARIADB_ROOT_PASSWORD: "${USER_DB_PASSWORD:-dbrepo}"
+      MARIADB_GALERA_MARIABACKUP_PASSWORD: "${USER_DB_BACKUP_PASSWORD:-dbrepo}"
     healthcheck:
       test: mysqladmin ping --user="${USER_DB_USERNAME:-root}" --password="${USER_DB_PASSWORD:-dbrepo}" --silent
       interval: 10s