diff --git a/dbrepo-analyse-service/determine_dt.py b/dbrepo-analyse-service/determine_dt.py
index 462f8b652b1e5c40c3a7cf0717c554c864052059..fe3e6a0279042b4fbdd8fa2b508ff64efea17056 100644
--- a/dbrepo-analyse-service/determine_dt.py
+++ b/dbrepo-analyse-service/determine_dt.py
@@ -73,8 +73,8 @@ def determine_datatypes(filename, enum=False, enum_tol=0.0001, separator=',') ->
                 if pandas.to_numeric(df[name], errors='coerce').notnull().all():
                     logging.debug(f"mapped column {name} from float64 to decimal")
                     col.type = DataTypeDto.DECIMAL
-                    col.size = 10
-                    col.d = 4
+                    col.size = 40
+                    col.d = 10
                 else:
                     logging.debug(f"mapped column {name} from float64 to text")
                     col.type = DataTypeDto.TEXT
diff --git a/dbrepo-analyse-service/test/test_determine_dt.py b/dbrepo-analyse-service/test/test_determine_dt.py
index 73c443b7280e45295bb66a5ee4b4519daf50627c..0ac9dc4b8af211ec0beec82918d1873b687b3ca7 100644
--- a/dbrepo-analyse-service/test/test_determine_dt.py
+++ b/dbrepo-analyse-service/test/test_determine_dt.py
@@ -36,8 +36,8 @@ class DetermineDatatypesTest(unittest.TestCase):
             },
             "Wert": {
                 "type": "decimal",
-                "size": 10,
-                "d": 4,
+                "size": 40,
+                "d": 10,
                 "null_allowed": False,
             },
             "Status": {
@@ -84,8 +84,8 @@ class DetermineDatatypesTest(unittest.TestCase):
             },
             "Wert": {
                 "type": "decimal",
-                "size": 10,
-                "d": 4,
+                "size": 40,
+                "d": 10,
                 "null_allowed": False,
             },
             "Status": {
@@ -132,8 +132,8 @@ class DetermineDatatypesTest(unittest.TestCase):
             },
             "Wert": {
                 "type": "decimal",
-                "size": 10,
-                "d": 4,
+                "size": 40,
+                "d": 10,
                 "null_allowed": False,
             },
             "Status": {
@@ -160,8 +160,8 @@ class DetermineDatatypesTest(unittest.TestCase):
             },
             "float": {
                 "type": "decimal",
-                "size": 10,
-                "d": 4,
+                "size": 40,
+                "d": 10,
                 "null_allowed": False,
             },
             "string": {
diff --git a/dbrepo-data-service/rest-service/src/main/resources/init/querystore.sql b/dbrepo-data-service/rest-service/src/main/resources/init/querystore.sql
index 212e262742b7517b3b6e22d319609a0492e8e243..c1df44d1b0766fb04d081f3b5b3679039d8ba72f 100644
--- a/dbrepo-data-service/rest-service/src/main/resources/init/querystore.sql
+++ b/dbrepo-data-service/rest-service/src/main/resources/init/querystore.sql
@@ -1,5 +1,5 @@
 CREATE SEQUENCE `qs_queries_seq` NOCACHE;
-CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint );
+CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint ) WITH SYSTEM VERSIONING;
 CREATE PROCEDURE hash_table(IN name VARCHAR(255), OUT hash VARCHAR(255), OUT count BIGINT) BEGIN DECLARE _sql TEXT; SELECT CONCAT('SELECT SHA2(GROUP_CONCAT(CONCAT_WS(\'\',', GROUP_CONCAT(CONCAT('`', column_name, '`') ORDER BY column_name), ') SEPARATOR \',\'), 256) AS hash, COUNT(*) AS count FROM `', name, '` INTO @hash, @count;') FROM `information_schema`.`columns` WHERE `table_schema` = DATABASE() AND `table_name` = name INTO _sql; PREPARE stmt FROM _sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; SET hash = @hash; SET count = @count; END;
 CREATE PROCEDURE store_query(IN query TEXT, IN executed DATETIME, OUT queryId BIGINT) BEGIN DECLARE _queryhash varchar(255) DEFAULT SHA2(query, 256); DECLARE _username varchar(255) DEFAULT REGEXP_REPLACE(current_user(), '@.*', ''); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;
 CREATE DEFINER = 'root' PROCEDURE _store_query(IN _username VARCHAR(255), IN query TEXT, IN executed DATETIME, OUT queryId BIGINT) BEGIN DECLARE _queryhash varchar(255) DEFAULT SHA2(query, 256); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;
\ No newline at end of file
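The only change to the query store DDL above is the trailing WITH SYSTEM VERSIONING, which turns `qs_queries` into a MariaDB system-versioned table: superseded row versions are retained automatically and the table can be read as of an earlier point in time. A hedged usage sketch of that temporal read, using MariaDB Connector/Python and the standard FOR SYSTEM_TIME clause (host, credentials and database name are placeholders, not DBRepo configuration):

```python
# Hedged sketch: reading the system-versioned qs_queries table.
# Connection parameters are placeholders, not DBRepo configuration.
import mariadb

conn = mariadb.connect(host="localhost", port=3306, user="root",
                       password="example", database="mydb")
cur = conn.cursor()

# Current contents -- behaves exactly as before this change.
cur.execute("SELECT `id`, `query_hash`, `executed` FROM `qs_queries`")
print(cur.fetchall())

# Historical contents: MariaDB keeps superseded row versions automatically.
cur.execute("SELECT `id`, `query_hash`, `executed` FROM `qs_queries` "
            "FOR SYSTEM_TIME AS OF TIMESTAMP '2024-01-01 00:00:00'")
print(cur.fetchall())

conn.close()
```

The test copy of this script and the DDL string in MariaDbMapper#queryStoreCreateTableRawQuery further down in this patch receive the same clause, so all three table definitions stay in sync.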
diff --git a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java
index b8267e959e32f00545ec6670af98b420efb17c13..e12b5bdc75962076ab52466b6c12493d109ccf64 100644
--- a/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java
+++ b/dbrepo-data-service/rest-service/src/test/java/at/tuwien/service/TableServiceIntegrationTest.java
@@ -477,6 +477,7 @@ public class TableServiceIntegrationTest extends AbstractUnitTest {
         /* test */
         final TableStatisticDto response = tableService.getStatistics(TABLE_1_PRIVILEGED_DTO);
         assertEquals(TABLE_1_COLUMNS.size(), response.getColumns().size());
+        log.trace("response rows: {}", response.getRows());
         assertEquals(3L, response.getRows());
         assertEquals(Set.of("id", "date", "location", "mintemp", "rainfall"), response.getColumns().keySet());
         final ColumnStatisticDto column0 = response.getColumns().get("id");
diff --git a/dbrepo-data-service/rest-service/src/test/resources/init/querystore.sql b/dbrepo-data-service/rest-service/src/test/resources/init/querystore.sql
index 212e262742b7517b3b6e22d319609a0492e8e243..c1df44d1b0766fb04d081f3b5b3679039d8ba72f 100644
--- a/dbrepo-data-service/rest-service/src/test/resources/init/querystore.sql
+++ b/dbrepo-data-service/rest-service/src/test/resources/init/querystore.sql
@@ -1,5 +1,5 @@
 CREATE SEQUENCE `qs_queries_seq` NOCACHE;
-CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint );
+CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint ) WITH SYSTEM VERSIONING;
 CREATE PROCEDURE hash_table(IN name VARCHAR(255), OUT hash VARCHAR(255), OUT count BIGINT) BEGIN DECLARE _sql TEXT; SELECT CONCAT('SELECT SHA2(GROUP_CONCAT(CONCAT_WS(\'\',', GROUP_CONCAT(CONCAT('`', column_name, '`') ORDER BY column_name), ') SEPARATOR \',\'), 256) AS hash, COUNT(*) AS count FROM `', name, '` INTO @hash, @count;') FROM `information_schema`.`columns` WHERE `table_schema` = DATABASE() AND `table_name` = name INTO _sql; PREPARE stmt FROM _sql; EXECUTE stmt; DEALLOCATE PREPARE stmt; SET hash = @hash; SET count = @count; END;
 CREATE PROCEDURE store_query(IN query TEXT, IN executed DATETIME, OUT queryId BIGINT) BEGIN DECLARE _queryhash varchar(255) DEFAULT SHA2(query, 256); DECLARE _username varchar(255) DEFAULT REGEXP_REPLACE(current_user(), '@.*', ''); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;
 CREATE DEFINER = 'root' PROCEDURE _store_query(IN _username VARCHAR(255), IN query TEXT, IN executed DATETIME, OUT queryId BIGINT) BEGIN DECLARE _queryhash varchar(255) DEFAULT SHA2(query, 256); DECLARE _query TEXT DEFAULT CONCAT('CREATE OR REPLACE TABLE _tmp AS (', query, ')'); PREPARE stmt FROM _query; EXECUTE stmt; DEALLOCATE PREPARE stmt; CALL hash_table('_tmp', @hash, @count); DROP TABLE IF EXISTS `_tmp`; IF @hash IS NULL THEN INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` IS NULL); ELSE INSERT INTO `qs_queries` (`created_by`, `query`, `query_normalized`, `is_persisted`, `query_hash`, `result_hash`, `result_number`, `executed`) SELECT _username, query, query, false, _queryhash, @hash, @count, executed WHERE NOT EXISTS (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); SET queryId = (SELECT `id` FROM `qs_queries` WHERE `query_hash` = _queryhash AND `result_hash` = @hash); END IF; END;
\ No newline at end of file
diff --git a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
index a88cdb5078b9a6fe348b082ff089de01a444bc61..3e0bb2f01838560a5c49d05e8137b565914fdf85 100644
--- a/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
+++ b/dbrepo-data-service/services/src/main/java/at/tuwien/mapper/MariaDbMapper.java
@@ -113,7 +113,7 @@ public interface MariaDbMapper {
     }
 
     default String queryStoreCreateTableRawQuery() {
-        final String statement = "CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint);";
+        final String statement = "CREATE TABLE `qs_queries` ( `id` bigint not null primary key default nextval(`qs_queries_seq`), `created` datetime not null default now(), `executed` datetime not null default now(), `created_by` varchar(36) not null, `query` text not null, `query_normalized` text not null, `is_persisted` boolean not null, `query_hash` varchar(255) not null, `result_hash` varchar(255), `result_number` bigint) WITH SYSTEM VERSIONING;";
         log.trace("mapped create query store table statement: {}", statement);
         return statement;
     }
diff --git a/dbrepo-ui/components/table/TableImport.vue b/dbrepo-ui/components/table/TableImport.vue
index e89c920d32e83eae82ccc346593bbd6650456aea..2451a7c3acb18722e8e4b0ebdb9fe10effcd554d 100644
--- a/dbrepo-ui/components/table/TableImport.vue
+++ b/dbrepo-ui/components/table/TableImport.vue
@@ -204,7 +204,13 @@
                   persistent-hint
                   :variant="inputVariant"
                   :hint="$t('pages.table.subpages.import.file.hint')"
-                  :label="$t('pages.table.subpages.import.file.label')" />
+                  :label="$t('pages.table.subpages.import.file.label')">
+                  <template
+                    v-if="uploadProgress"
+                    v-slot:append>
+                    <span>{{ uploadProgress }}%</span>
+                  </template>
+                </v-file-input>
               </v-col>
             </v-row>
             <v-row
@@ -348,6 +354,7 @@ export default {
     }
   },
   mounted() {
+    this.cacheStore.setUploadProgress(null)
     this.setQueryParamSafely('location')
     this.setQueryParamSafely('quote')
     this.setQueryParamSafely('false_element')
@@ -365,6 +372,9 @@ export default {
     table() {
       return this.cacheStore.getTable
     },
+    uploadProgress () {
+      return this.cacheStore.getUploadProgress
+    },
     isAnalyseAllowed () {
       if (!this.file || this.file.length === 0) {
         return false
@@ -526,6 +536,7 @@ export default {
           this.suggestedAnalyseLineTerminator = line_termination
           this.tableImport.location = filename
           this.step = 3
+          this.cacheStore.setUploadProgress(null)
           const toast = useToastInstance()
           toast.success(this.$t('success.analyse.dataset'))
           this.$emit('analyse', {
diff --git a/dbrepo-ui/composables/upload-service.ts b/dbrepo-ui/composables/upload-service.ts
index bddae2692cf80ad2f116d058010b157740a39360..26971316107dd834efdc6cfe21addfbb8c15c7db 100644
--- a/dbrepo-ui/composables/upload-service.ts
+++ b/dbrepo-ui/composables/upload-service.ts
@@ -1,6 +1,8 @@
 import * as tus from 'tus-js-client'
+import {useCacheStore} from '@/stores/cache'
 
 export const useUploadService = (): any => {
+
   function create (data: File) {
     const config = useRuntimeConfig()
     const endpoint = config.public.upload.client
@@ -19,6 +21,8 @@ export const useUploadService = (): any => {
         onProgress (bytesUploaded, bytesTotal) {
           const percentage = ((bytesUploaded / bytesTotal) * 100).toFixed(2)
           console.debug(bytesUploaded, bytesTotal, percentage + '%')
+          const cacheStore = useCacheStore()
+          cacheStore.setUploadProgress(percentage)
         },
         onSuccess () {
           if (uploadClient.file) {
diff --git a/dbrepo-ui/pages/database/[database_id]/settings.vue b/dbrepo-ui/pages/database/[database_id]/settings.vue
index a79ad48c4193c9b4f0fbc83f84b3a2c0db3a6193..09c3d8263bc46bc74eb0fe34b8651e41c6214ccc 100644
--- a/dbrepo-ui/pages/database/[database_id]/settings.vue
+++ b/dbrepo-ui/pages/database/[database_id]/settings.vue
@@ -42,7 +42,13 @@
                     :show-size="1000"
                     counter
                     :label="$t('pages.database.subpages.settings.image.label')"
-                    @update:modelValue="uploadFile" />
+                    @update:modelValue="uploadFile">
+                    <template
+                      v-if="uploadProgress"
+                      v-slot:append>
+                      <span>{{ uploadProgress }}%</span>
+                    </template>
+                  </v-file-input>
                 </v-col>
               </v-row>
               <v-row
@@ -308,6 +314,9 @@ export default {
     user () {
       return this.userStore.getUser
     },
+    uploadProgress () {
+      return this.cacheStore.getUploadProgress
+    },
     isOwner () {
       if (!this.database || !this.user) {
         return false
@@ -439,6 +448,8 @@ export default {
       uploadService.create(this.file)
         .then((s3key) => {
           console.debug('uploaded image', s3key)
+          const cacheStore = useCacheStore()
+          cacheStore.setUploadProgress(null)
           const toast = useToastInstance()
           toast.success(this.$t('success.database.upload'))
           this.modifyImage.key = s3key
diff --git a/dbrepo-ui/stores/cache.js b/dbrepo-ui/stores/cache.js
index 087ae4e9b936330b43d3002b076ea431c95c7420..c733e8d48a4a3d77f4fce9eb9b1558db59d0306e 100644
--- a/dbrepo-ui/stores/cache.js
+++ b/dbrepo-ui/stores/cache.js
@@ -8,6 +8,7 @@ export const useCacheStore = defineStore('cache', {
       table: null,
       ontologies: [],
       messages: [],
+      uploadProgress: null
     }
   },
   getters: {
@@ -15,6 +16,7 @@ export const useCacheStore = defineStore('cache', {
     getTable: (state) => state.table,
     getOntologies: (state) => state.ontologies,
     getMessages: (state) => state.messages,
+    getUploadProgress: (state) => state.uploadProgress,
   },
   actions: {
     setDatabase (database) {
@@ -26,6 +28,9 @@ export const useCacheStore = defineStore('cache', {
     setOntologies (ontologies) {
       this.ontologies = ontologies
     },
+    setUploadProgress (uploadProgress) {
+      this.uploadProgress = uploadProgress
+    },
     reloadMessages () {
       const messageService = useMessageService()
       messageService.findAll('active')
diff --git a/lib/python/data.csv b/lib/python/data.csv
new file mode 100644
index 0000000000000000000000000000000000000000..f776784b3750ee8d183efec700935bb0cbea570e
Binary files /dev/null and b/lib/python/data.csv differ
diff --git a/lib/python/smaller.csv b/lib/python/smaller.csv
new file mode 100644
index 0000000000000000000000000000000000000000..a8a157f85e03cb5d92981d0cd8806ca51dc4e3ee
--- /dev/null
+++ b/lib/python/smaller.csv
@@ -0,0 +1,9 @@
+"00000157-ca59-4cd9-9c20-b6e07a461720","Factory/Power/Active/Sum/value","8910.427734375","1695370294.260122"
+"000001cc-6640-45cd-bca3-d4edd907993f","Factory/Power/Active/Sum/value","5193.7080078125","1686120654.770266"
+"00000819-d0d3-4e63-8bad-316fd4605072","Factory/Power/Active/Sum/value","2744.0185546875","1686161362.024192"
+"00000b14-0a50-46cc-a289-f0c2d559d348","Factory/Power/Active/Sum/value","3047.9697265625","1695324339.994483"
+"000012b9-1e54-4079-a42f-769761e4a446","Factory/Power/Active/Sum/value","3604.2626953125","1695146853.947941"
+"0000131f-bc1e-4d15-a79f-3feb7024c7bb","Factory/Power/Active/Sum/value","2871.40625","1686159611.775427"
+"00001420-94ac-4fb9-a3ca-f40105247f20","Factory/Power/Active/Sum/value","2843.608642578125","1686009614.272813"
+"00001443-769a-4bd1-b5d7-ad702a76ab93","Factory/Power/Active/Sum/value","2843.680908203125","1686002382.359535"
+"0000144f-f325-43f6-9954-7b633a935c48","Factory/Power/Active/Sum/value","2856.92724609375","1685938173.948616"