From 8faa1f3d9dd583608a747e5513ea8b6a239513c4 Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 16:11:07 +0100 Subject: [PATCH 1/9] pre-compute the preview --- askomics/api/file.py | 4 +++- askomics/libaskomics/BedFile.py | 17 +++++++++++++++ askomics/libaskomics/CsvFile.py | 6 ++++++ askomics/libaskomics/Database.py | 22 ++++++++++++++++++++ askomics/libaskomics/File.py | 19 +++++++++++++++++ askomics/libaskomics/FilesHandler.py | 31 ++++++++++++++++++---------- askomics/libaskomics/GffFile.py | 17 +++++++++++++++ askomics/libaskomics/RdfFile.py | 17 ++++++++++++++- askomics/tasks.py | 22 ++++++++++++++++++++ 9 files changed, 142 insertions(+), 13 deletions(-) diff --git a/askomics/api/file.py b/askomics/api/file.py index 69cd82e4..63220ea3 100644 --- a/askomics/api/file.py +++ b/askomics/api/file.py @@ -130,6 +130,8 @@ def upload_chunk(): }), 400 data = request.get_json() + + skip_preview = data.get('skip_preview', False) if not (data and all([key in data for key in ["first", "last", "size", "name", "type", "size", "chunk"]])): return jsonify({ "path": '', @@ -146,7 +148,7 @@ def upload_chunk(): try: files = FilesHandler(current_app, session) - path = files.persist_chunk(data) + path = files.persist_chunk(data, skip_preview) except Exception as e: traceback.print_exc(file=sys.stdout) return jsonify({ diff --git a/askomics/libaskomics/BedFile.py b/askomics/libaskomics/BedFile.py index 0c4a9ee5..97a55f92 100644 --- a/askomics/libaskomics/BedFile.py +++ b/askomics/libaskomics/BedFile.py @@ -41,6 +41,11 @@ def __init__(self, app, session, file_info, host_url=None, external_endpoint=Non def set_preview(self): """Set entity name preview""" + + if self.preview: + self.entity_name = self.preview['entity_name'] + return + try: BedTool(self.path).count() self.entity_name = self.human_name @@ -49,6 +54,18 @@ def set_preview(self): self.error_message = "Malformated BED ({})".format(str(e)) traceback.print_exc(file=sys.stdout) + def save_preview(self): + """Save location and endpoint in preview""" + data = None + error = None + self.set_preview() + + if self.error: + error = self.error_message + else: + data = {'entity_name': self.entity_name} + self.save_preview_in_db(data, error) + def get_preview(self): """Get file preview diff --git a/askomics/libaskomics/CsvFile.py b/askomics/libaskomics/CsvFile.py index 064c814a..5dca6443 100644 --- a/askomics/libaskomics/CsvFile.py +++ b/askomics/libaskomics/CsvFile.py @@ -59,6 +59,12 @@ def set_preview(self): self.set_preview_and_header() self.set_columns_type() + def save_preview(self): + """Save location and endpoint in preview""" + data = None + error = None + self.save_preview_in_db(data, error) + def get_preview(self): """Get a preview of the file diff --git a/askomics/libaskomics/Database.py b/askomics/libaskomics/Database.py index 6b18a746..a168d075 100644 --- a/askomics/libaskomics/Database.py +++ b/askomics/libaskomics/Database.py @@ -419,6 +419,28 @@ def create_files_table(self): except Exception: pass + query = ''' + ALTER TABLE files + ADD preview text NULL + DEFAULT(NULL) + ''' + + try: + self.execute_sql_query(query) + except Exception: + pass + + query = ''' + ALTER TABLE files + ADD error text NULL + DEFAULT(NULL) + ''' + + try: + self.execute_sql_query(query) + except Exception: + pass + def create_abstraction_table(self): """Create abstraction table""" query = """ diff --git a/askomics/libaskomics/File.py b/askomics/libaskomics/File.py index 83276fb0..55be8824 100644 --- a/askomics/libaskomics/File.py +++ 
b/askomics/libaskomics/File.py @@ -1,5 +1,6 @@ import datetime import os +import json import time from dateutil import parser from urllib.parse import quote @@ -92,6 +93,7 @@ def __init__(self, app, session, file_info, host_url=None, external_endpoint=Non self.path = file_info['path'] self.type = file_info['type'] self.size = file_info['size'] + self.preview = json.loads(file_info['preview']) self.id = file_info['id'] self.public = False self.ntriples = 0 @@ -536,3 +538,20 @@ def convert_type(self, value, try_date=False): return value return value + + def save_preview_in_db(self, preview, error): + database = Database(self.app, self.session) + + status = "available" if not error else "error" + data = json.dumps(preview) if preview else None + + query = ''' + UPDATE files SET + preview = ?, + error = ?, + status = ? + WHERE id= ? + ''' + + variables = [data, error, status, self.id] + database.execute_sql_query(query, tuple(variables)) diff --git a/askomics/libaskomics/FilesHandler.py b/askomics/libaskomics/FilesHandler.py index 10344e9b..2f68b567 100644 --- a/askomics/libaskomics/FilesHandler.py +++ b/askomics/libaskomics/FilesHandler.py @@ -90,7 +90,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): subquery_str = '(' + ' OR '.join(['id = ?'] * len(files_id)) + ')' query = ''' - SELECT id, name, type, size, path, date, status + SELECT id, name, type, size, path, date, status, preview FROM files WHERE user_id = ? AND {} @@ -102,7 +102,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): subquery_str = '(' + ' OR '.join(['path = ?'] * len(files_path)) + ')' query = ''' - SELECT id, name, type, size, path, date, status + SELECT id, name, type, size, path, date, status, preview FROM files WHERE user_id = ? AND {} @@ -113,7 +113,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): else: query = ''' - SELECT id, name, type, size, path, date, status + SELECT id, name, type, size, path, date, status, preview FROM files WHERE user_id = ? 
''' @@ -128,7 +128,8 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): 'type': row[2], 'size': row[3], 'date': row[5], - 'status': row[6] + 'status': row[6], + 'preview': row[7] } if return_path: file['path'] = row[4] @@ -144,7 +145,7 @@ def get_all_files_infos(self): database = Database(self.app, self.session) query = ''' - SELECT files.id, files.name, files.type, files.size, files.date, files.status, users.username + SELECT files.id, files.name, files.type, files.size, files.date, files.status, users.username, files.preview FROM files INNER JOIN users ON files.user_id=users.user_id ''' @@ -160,7 +161,8 @@ def get_all_files_infos(self): 'size': row[3], 'date': row[4], 'status': row[5], - 'user': row[6] + 'user': row[6], + 'preview': row[7] } files.append(file) @@ -206,7 +208,7 @@ def write_data_into_file(self, data, file_name, mode, should_exist=False): with open(file_path, mode) as file: file.write(data) - def store_file_info_in_db(self, name, filetype, file_name, size, status="available", task_id=None): + def store_file_info_in_db(self, name, filetype, file_name, size, status="available", task_id=None, skip_preview=False): """Store the file info in the database Parameters @@ -243,6 +245,9 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab ) ''' + if not skip_preview: + status = 'processing' + # Type if filetype in ('text/tab-separated-values', 'tabular'): filetype = 'csv/tsv' @@ -259,7 +264,11 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab self.date = int(time.time()) - return database.execute_sql_query(query, (self.session['user']['id'], name, filetype, file_path, size, self.date, status, task_id), get_id=True) + id = database.execute_sql_query(query, (self.session['user']['id'], name, filetype, file_path, size, self.date, status, task_id), get_id=True) + + if not skip_preview: + self.app.celery.send_task('save_preview', (self.session, id)) + return id def update_file_info(self, file_id, size=None, status="", task_id=""): """Update file size and status @@ -311,7 +320,7 @@ def update_file_info(self, file_id, size=None, status="", task_id=""): database.execute_sql_query(query, tuple(query_vars)) - def persist_chunk(self, chunk_info): + def persist_chunk(self, chunk_info, skip_preview=False): """Persist a file by chunk. 
Store info in db if the chunk is the last Parameters @@ -331,7 +340,7 @@ def persist_chunk(self, chunk_info): file_name = self.get_file_name() self.write_data_into_file(chunk_info["chunk"], file_name, "w") # store file info in db - self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"]) + self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview) # first chunk of large file elif chunk_info["first"]: file_name = self.get_file_name() @@ -340,7 +349,7 @@ def persist_chunk(self, chunk_info): elif chunk_info["last"]: file_name = chunk_info["path"] self.write_data_into_file(chunk_info["chunk"], file_name, "a") - self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"]) + self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview) # chunk of large file else: file_name = chunk_info["path"] diff --git a/askomics/libaskomics/GffFile.py b/askomics/libaskomics/GffFile.py index d87d7d9c..d537328a 100644 --- a/askomics/libaskomics/GffFile.py +++ b/askomics/libaskomics/GffFile.py @@ -54,6 +54,11 @@ def __init__(self, app, session, file_info, host_url=None, external_endpoint=Non def set_preview(self): """Summary""" + + if self.preview: + self.entities = self.preview['entities'] + return + try: exam = GFFExaminer() handle = open(self.path, encoding="utf-8", errors="ignore") @@ -67,6 +72,18 @@ def set_preview(self): self.error_message = "Malformated GFF ({})".format(str(e)) traceback.print_exc(file=sys.stdout) + def save_preview(self): + """Save location and endpoint in preview""" + data = None + error = None + self.set_preview() + + if self.error: + error = self.error_message + else: + data = {'entities': self.entity_name} + self.save_preview_in_db(data, error) + def get_preview(self): """Get gff file preview (list of entities) diff --git a/askomics/libaskomics/RdfFile.py b/askomics/libaskomics/RdfFile.py index db067986..a4e0360a 100644 --- a/askomics/libaskomics/RdfFile.py +++ b/askomics/libaskomics/RdfFile.py @@ -40,6 +40,17 @@ def set_preview(self): """Summary""" pass + def save_preview(self): + """Save location and endpoint in preview""" + data = None + error = None + try: + location, remote_graph = self.get_location_and_remote_graph() + data = {"location": location, "remote_graph": remote_graph} + except Exception as e: + error = str(e) + self.save_preview_in_db(data, error) + def get_location_and_remote_graph(self): """Get location of data if specified @@ -81,7 +92,11 @@ def get_preview(self): location = None remote_graph = None try: - location, remote_graph = self.get_location_and_remote_graph() + if self.preview: + location = self.preview['location'] + remote_graph = self.preview['remote_graph'] + else: + location, remote_graph = self.get_location_and_remote_graph() except Exception as e: self.error_message = str(e) # Todo: Better error management diff --git a/askomics/tasks.py b/askomics/tasks.py index afaaefe0..5ca2dabc 100644 --- a/askomics/tasks.py +++ b/askomics/tasks.py @@ -26,6 +26,28 @@ celery = create_celery(app) +@celery.task(bind=True, name="set_preview") +def save_preview(self, session, fileId): + """Compute the file preview in backend and store it in DB + + Parameters + ---------- + session : dict + AskOmics session + fileId : string + file to integrate + """ + files_handler = FilesHandler(app, session) + files_handler.handle_files([fileId, ]) + for file in files_handler.files: + file.save_preview() + 
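# Illustration, not part of this patch: the payload that each save_preview() hands to
# save_preview_in_db() (JSON-encoded into the new files.preview column) depends on the
# file type. Keys follow the save_preview() implementations added above; the values
# below are invented examples.
bed_preview = {"entity_name": "gene"}
gff_preview = {"entities": ["gene", "mRNA", "exon"]}
rdf_preview = {"location": "https://example.org/data.ttl", "remote_graph": "urn:example:graph"}
csv_preview = None  # CsvFile.save_preview() passes no data at this point, so the column stays NULL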
+ return { + 'error': False, + 'errorMessage': '' + } + + @celery.task(bind=True, name="integrate") def integrate(self, session, data, host_url): """Integrate a file into the triplestore From d94337728e5b48d2944735e63b9653ad1d118672 Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 16:09:15 +0000 Subject: [PATCH 2/9] fix test --- askomics/libaskomics/Database.py | 2 +- askomics/libaskomics/File.py | 2 +- askomics/libaskomics/FilesHandler.py | 16 ++++---- tests/conftest.py | 2 +- tests/test_api_file.py | 55 +++++++++++++++++++--------- 5 files changed, 49 insertions(+), 28 deletions(-) diff --git a/askomics/libaskomics/Database.py b/askomics/libaskomics/Database.py index a168d075..d444cf32 100644 --- a/askomics/libaskomics/Database.py +++ b/askomics/libaskomics/Database.py @@ -432,7 +432,7 @@ def create_files_table(self): query = ''' ALTER TABLE files - ADD error text NULL + ADD preview_error text NULL DEFAULT(NULL) ''' diff --git a/askomics/libaskomics/File.py b/askomics/libaskomics/File.py index 55be8824..1ebdeffa 100644 --- a/askomics/libaskomics/File.py +++ b/askomics/libaskomics/File.py @@ -93,7 +93,7 @@ def __init__(self, app, session, file_info, host_url=None, external_endpoint=Non self.path = file_info['path'] self.type = file_info['type'] self.size = file_info['size'] - self.preview = json.loads(file_info['preview']) + self.preview = json.loads(file_info['preview']) if file_info['preview'] else None self.id = file_info['id'] self.public = False self.ntriples = 0 diff --git a/askomics/libaskomics/FilesHandler.py b/askomics/libaskomics/FilesHandler.py index 2f68b567..ce273dc1 100644 --- a/askomics/libaskomics/FilesHandler.py +++ b/askomics/libaskomics/FilesHandler.py @@ -145,7 +145,7 @@ def get_all_files_infos(self): database = Database(self.app, self.session) query = ''' - SELECT files.id, files.name, files.type, files.size, files.date, files.status, users.username, files.preview + SELECT files.id, files.name, files.type, files.size, files.date, files.status, users.username FROM files INNER JOIN users ON files.user_id=users.user_id ''' @@ -161,8 +161,7 @@ def get_all_files_infos(self): 'size': row[3], 'date': row[4], 'status': row[5], - 'user': row[6], - 'preview': row[7] + 'user': row[6] } files.append(file) @@ -241,6 +240,8 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab ?, ?, ?, + ?, + ?, ? 
) ''' @@ -264,10 +265,10 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab self.date = int(time.time()) - id = database.execute_sql_query(query, (self.session['user']['id'], name, filetype, file_path, size, self.date, status, task_id), get_id=True) + id = database.execute_sql_query(query, (self.session['user']['id'], name, filetype, file_path, size, self.date, status, task_id, None, None), get_id=True) if not skip_preview: - self.app.celery.send_task('save_preview', (self.session, id)) + self.app.celery.send_task('save_preview', ({"user": self.session["user"]}, id)) return id def update_file_info(self, file_id, size=None, status="", task_id=""): @@ -333,6 +334,7 @@ def persist_chunk(self, chunk_info, skip_preview=False): str local filename """ + try: # 1 chunk file if chunk_info["first"] and chunk_info["last"]: @@ -340,7 +342,7 @@ def persist_chunk(self, chunk_info, skip_preview=False): file_name = self.get_file_name() self.write_data_into_file(chunk_info["chunk"], file_name, "w") # store file info in db - self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview) + self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview=skip_preview) # first chunk of large file elif chunk_info["first"]: file_name = self.get_file_name() @@ -349,7 +351,7 @@ def persist_chunk(self, chunk_info, skip_preview=False): elif chunk_info["last"]: file_name = chunk_info["path"] self.write_data_into_file(chunk_info["chunk"], file_name, "a") - self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview) + self.store_file_info_in_db(chunk_info["name"], chunk_info["type"], file_name, chunk_info["size"], skip_preview=skip_preview) # chunk of large file else: file_name = chunk_info["path"] diff --git a/tests/conftest.py b/tests/conftest.py index 69882de1..f54c08d4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -291,7 +291,7 @@ def upload_file(self, file_path): } files = FilesHandler(self.app, self.session) - filepath = files.persist_chunk(file_data) + filepath = files.persist_chunk(file_data, skip_preview=True) filedate = files.date return { diff --git a/tests/test_api_file.py b/tests/test_api_file.py index a3a0f708..46c3f640 100644 --- a/tests/test_api_file.py +++ b/tests/test_api_file.py @@ -27,6 +27,7 @@ def test_get_files(self, client): 'name': 'transcripts.tsv', 'size': 2264, 'type': 'csv/tsv', + 'preview': None, 'status': 'available' }, { 'date': info["de"]["upload"]["file_date"], @@ -34,28 +35,32 @@ def test_get_files(self, client): 'name': 'de.tsv', 'size': 819, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, 'name': 'qtl.tsv', 'size': 99, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, 'name': 'gene.gff3', 'size': 2555, 'type': 'gff/gff3', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, 'name': 'gene.bed', 'size': 689, 'type': 'bed', - 'status': 'available' + 'status': 'available', + 'preview': None }] } @@ -80,7 +85,8 @@ def test_get_files(self, client): 'name': 'transcripts.tsv', 'size': 2264, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }] } @@ -112,7 +118,8 @@ def test_get_files_upload(self, client): 
'name': 'gene.tsv', 'size': 369, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }] } @@ -136,35 +143,40 @@ def test_edit_file(self, client): 'name': 'new name.tsv', 'size': 2264, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["de"]["upload"]["file_date"], 'id': 2, 'name': 'de.tsv', 'size': 819, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, 'name': 'qtl.tsv', 'size': 99, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, 'name': 'gene.gff3', 'size': 2555, 'type': 'gff/gff3', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, 'name': 'gene.bed', 'size': 689, 'type': 'bed', - 'status': 'available' + 'status': 'available', + 'preview': None }] } @@ -461,28 +473,32 @@ def test_delete_files(self, client): 'name': 'de.tsv', 'size': 819, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, 'name': 'qtl.tsv', 'size': 99, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, 'name': 'gene.gff3', 'size': 2555, 'type': 'gff/gff3', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, 'name': 'gene.bed', 'size': 689, 'type': 'bed', - 'status': 'available' + 'status': 'available', + 'preview': None }] } @@ -497,21 +513,24 @@ def test_delete_files(self, client): 'name': 'qtl.tsv', 'size': 99, 'type': 'csv/tsv', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, 'name': 'gene.gff3', 'size': 2555, 'type': 'gff/gff3', - 'status': 'available' + 'status': 'available', + 'preview': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, 'name': 'gene.bed', 'size': 689, 'type': 'bed', - 'status': 'available' + 'status': 'available', + 'preview': None }] } From f3292bdeb99f43965de3fea3f6da1fcb6092a31f Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 17:16:30 +0100 Subject: [PATCH 3/9] Error management --- askomics/libaskomics/File.py | 2 +- askomics/libaskomics/FilesHandler.py | 11 ++-- .../react/src/routes/upload/filestable.jsx | 3 + tests/test_api_file.py | 57 ++++++++++++------- 4 files changed, 48 insertions(+), 25 deletions(-) diff --git a/askomics/libaskomics/File.py b/askomics/libaskomics/File.py index 1ebdeffa..fd1a6856 100644 --- a/askomics/libaskomics/File.py +++ b/askomics/libaskomics/File.py @@ -548,7 +548,7 @@ def save_preview_in_db(self, preview, error): query = ''' UPDATE files SET preview = ?, - error = ?, + preview_error = ?, status = ? WHERE id= ? ''' diff --git a/askomics/libaskomics/FilesHandler.py b/askomics/libaskomics/FilesHandler.py index ce273dc1..ed900772 100644 --- a/askomics/libaskomics/FilesHandler.py +++ b/askomics/libaskomics/FilesHandler.py @@ -90,7 +90,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): subquery_str = '(' + ' OR '.join(['id = ?'] * len(files_id)) + ')' query = ''' - SELECT id, name, type, size, path, date, status, preview + SELECT id, name, type, size, path, date, status, preview, preview_error FROM files WHERE user_id = ? 
AND {} @@ -102,7 +102,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): subquery_str = '(' + ' OR '.join(['path = ?'] * len(files_path)) + ')' query = ''' - SELECT id, name, type, size, path, date, status, preview + SELECT id, name, type, size, path, date, status, preview, preview_error FROM files WHERE user_id = ? AND {} @@ -113,7 +113,7 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): else: query = ''' - SELECT id, name, type, size, path, date, status, preview + SELECT id, name, type, size, path, date, status, preview, preview_error FROM files WHERE user_id = ? ''' @@ -129,7 +129,8 @@ def get_files_infos(self, files_id=None, files_path=None, return_path=False): 'size': row[3], 'date': row[5], 'status': row[6], - 'preview': row[7] + 'preview': row[7], + 'preview_error': row[8] } if return_path: file['path'] = row[4] @@ -241,7 +242,7 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab ?, ?, ?, - ?, + ?, ? ) ''' diff --git a/askomics/react/src/routes/upload/filestable.jsx b/askomics/react/src/routes/upload/filestable.jsx index 2864fcb9..4a17b94f 100644 --- a/askomics/react/src/routes/upload/filestable.jsx +++ b/askomics/react/src/routes/upload/filestable.jsx @@ -99,6 +99,9 @@ export default class FilesTable extends Component { if (cell == 'available') { return Available } + if (cell == 'processing') { + return Processing + } return Error }, sort: true, diff --git a/tests/test_api_file.py b/tests/test_api_file.py index 46c3f640..17b2eb7c 100644 --- a/tests/test_api_file.py +++ b/tests/test_api_file.py @@ -27,8 +27,9 @@ def test_get_files(self, client): 'name': 'transcripts.tsv', 'size': 2264, 'type': 'csv/tsv', + 'status': 'available', 'preview': None, - 'status': 'available' + 'preview_error': None }, { 'date': info["de"]["upload"]["file_date"], 'id': 2, @@ -36,7 +37,8 @@ def test_get_files(self, client): 'size': 819, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None, }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, @@ -44,7 +46,8 @@ def test_get_files(self, client): 'size': 99, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, @@ -52,7 +55,8 @@ def test_get_files(self, client): 'size': 2555, 'type': 'gff/gff3', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, @@ -60,7 +64,8 @@ def test_get_files(self, client): 'size': 689, 'type': 'bed', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } @@ -86,7 +91,8 @@ def test_get_files(self, client): 'size': 2264, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } @@ -119,7 +125,8 @@ def test_get_files_upload(self, client): 'size': 369, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } @@ -144,7 +151,8 @@ def test_edit_file(self, client): 'size': 2264, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["de"]["upload"]["file_date"], 'id': 2, @@ -152,7 +160,8 @@ def test_edit_file(self, client): 'size': 819, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, @@ -160,7 
+169,8 @@ def test_edit_file(self, client): 'size': 99, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, @@ -168,7 +178,8 @@ def test_edit_file(self, client): 'size': 2555, 'type': 'gff/gff3', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, @@ -176,7 +187,8 @@ def test_edit_file(self, client): 'size': 689, 'type': 'bed', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } @@ -474,7 +486,8 @@ def test_delete_files(self, client): 'size': 819, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["qtl"]["upload"]["file_date"], 'id': 3, @@ -482,7 +495,8 @@ def test_delete_files(self, client): 'size': 99, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, @@ -490,7 +504,8 @@ def test_delete_files(self, client): 'size': 2555, 'type': 'gff/gff3', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, @@ -498,7 +513,8 @@ def test_delete_files(self, client): 'size': 689, 'type': 'bed', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } @@ -514,7 +530,8 @@ def test_delete_files(self, client): 'size': 99, 'type': 'csv/tsv', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["gene"]["upload"]["file_date"], 'id': 4, @@ -522,7 +539,8 @@ def test_delete_files(self, client): 'size': 2555, 'type': 'gff/gff3', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }, { 'date': info["bed"]["upload"]["file_date"], 'id': 5, @@ -530,7 +548,8 @@ def test_delete_files(self, client): 'size': 689, 'type': 'bed', 'status': 'available', - 'preview': None + 'preview': None, + 'preview_error': None }] } From e6937fe9ae49d0efb33d38b5c52d0c202e9a149e Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 17:31:18 +0100 Subject: [PATCH 4/9] More error management --- askomics/api/file.py | 16 ++++++++++++++-- askomics/libaskomics/FilesHandler.py | 20 +++++++++++++------- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/askomics/api/file.py b/askomics/api/file.py index 63220ea3..1c030fac 100644 --- a/askomics/api/file.py +++ b/askomics/api/file.py @@ -245,6 +245,9 @@ def get_preview(): results = [] for file in files_handler.files: + if file.status == "error": + continue + file.set_preview() res = file.get_preview() results.append(res) @@ -256,10 +259,16 @@ def get_preview(): 'errorMessage': str(e) }), 500 + errorMessage = '' + error = False + if not results: + errorMessage = "None of the selected files are in an integrable state" + error = True + return jsonify({ 'previewFiles': results, - 'error': False, - 'errorMessage': '' + 'error': error, + 'errorMessage': errorMessage }) @@ -334,6 +343,9 @@ def integrate(): for file in files_handler.files: + if file.status == "error": + continue + data["externalEndpoint"] = data["externalEndpoint"] if (data.get("externalEndpoint") and isinstance(file, RdfFile)) else None data["externalGraph"] = data["externalGraph"] if (data.get("externalGraph") and isinstance(file, RdfFile)) else None data["customUri"] = data["customUri"] if (data.get("customUri") 
and not isinstance(file, RdfFile)) else None diff --git a/askomics/libaskomics/FilesHandler.py b/askomics/libaskomics/FilesHandler.py index ed900772..3da78938 100644 --- a/askomics/libaskomics/FilesHandler.py +++ b/askomics/libaskomics/FilesHandler.py @@ -272,7 +272,7 @@ def store_file_info_in_db(self, name, filetype, file_name, size, status="availab self.app.celery.send_task('save_preview', ({"user": self.session["user"]}, id)) return id - def update_file_info(self, file_id, size=None, status="", task_id=""): + def update_file_info(self, file_id, size=None, status="", task_id="", error=None): """Update file size and status Parameters @@ -296,20 +296,25 @@ def update_file_info(self, file_id, size=None, status="", task_id=""): size_query = "" status_query = "" task_query = "" + error_query = "" # Should be a cleaner way of doing this... if size is not None: - size_query = "size=?," if (status or task_id) else "size=?" + size_query = "size=?," if (status or task_id or error) else "size=?" query_vars.append(size) if status: - status_query = "status=?," if task_id else "status=?" + status_query = "status=?," if (task_id or error) else "status=?" query_vars.append(status) if task_id: - task_query = "task_id=?" + task_query = "task_id=?," if error else "task_id=?" query_vars.append(task_id) + if error: + error_query = "preview_error=?" + query_vars.append(error) + query_vars.append(file_id) query = ''' @@ -317,8 +322,9 @@ def update_file_info(self, file_id, size=None, status="", task_id=""): {} {} {} + {} WHERE id=? - '''.format(size_query, status_query, task_query) + '''.format(size_query, status_query, task_query, error_query) database.execute_sql_query(query, tuple(query_vars)) @@ -407,8 +413,8 @@ def download_url(self, url, task_id): # Update final value self.update_file_info(file_id, size=os.path.getsize(path), status="available") - except Exception: - self.update_file_info(file_id, size=os.path.getsize(path), status="error") + except Exception as e: + self.update_file_info(file_id, size=os.path.getsize(path), status="error", error=str(e)) def get_type(self, file_ext): """Get files type, based on extension From 47264ebc90ee386130a712275d2e6f59e3209a73 Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 18:33:29 +0100 Subject: [PATCH 5/9] tests --- tests/test_api_file.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_api_file.py b/tests/test_api_file.py index 17b2eb7c..8afbcad5 100644 --- a/tests/test_api_file.py +++ b/tests/test_api_file.py @@ -415,8 +415,8 @@ def test_get_preview(self, client): response = client.client.post('/api/files/preview', json=fake_data) assert response.status_code == 200 assert response.json == { - 'error': False, - 'errorMessage': '', + 'error': True, + 'errorMessage': 'None of the selected files are in an integrable state', 'previewFiles': [] } From 3c7a37212be4ae82056b75dc66cf7cad2e4ef5fc Mon Sep 17 00:00:00 2001 From: mboudet Date: Tue, 31 Oct 2023 18:51:09 +0100 Subject: [PATCH 6/9] Status --- askomics/libaskomics/File.py | 1 + 1 file changed, 1 insertion(+) diff --git a/askomics/libaskomics/File.py b/askomics/libaskomics/File.py index fd1a6856..47715b78 100644 --- a/askomics/libaskomics/File.py +++ b/askomics/libaskomics/File.py @@ -93,6 +93,7 @@ def __init__(self, app, session, file_info, host_url=None, external_endpoint=Non self.path = file_info['path'] self.type = file_info['type'] self.size = file_info['size'] + self.status = file_info['status'] self.preview = json.loads(file_info['preview']) if 
file_info['preview'] else None self.id = file_info['id'] self.public = False From 448b36c77802cf4d57c8fa69c1bed14e88aaa580 Mon Sep 17 00:00:00 2001 From: mboudet Date: Thu, 2 Nov 2023 14:49:05 +0000 Subject: [PATCH 7/9] Fix ui and some bugs --- CHANGELOG.md | 2 + askomics/api/file.py | 4 +- askomics/libaskomics/GffFile.py | 2 +- askomics/libaskomics/RdfFile.py | 4 ++ askomics/react/src/routes/error/error.jsx | 10 +++- .../react/src/routes/upload/filestable.jsx | 44 ++++++++++++++- askomics/react/src/routes/upload/upload.jsx | 55 ++++++++++++------- askomics/tasks.py | 2 +- config/askomics.ini.template | 3 + 9 files changed, 98 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ee747a5..4c0ed8c0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,9 +14,11 @@ This changelog was started for release 4.2.0. - "askomics:instancesLabel" predicate can be defined at the entity-level, to set a specific attribute URI as the 'label' (ie, visible by default) - Play the same role as 'askomics:instancesHaveNoLabels', except a specific attribute is visible instead of the URI attribute. - Added the *TIMEOUT* env variable, which will set the web workers tiemout value. Default 300s +- Added the 'skip_rdf_preview' config option. This will skip loading RDF files in memory to get the location and remote graph. Warning: This means you must enter the values at the integration step yourself. ### Changed +- Now pre-process files to store integration parameters (column, entities, etc...) in DB, to avoid re-processing every time. - Rewrote the ontology part. Instead of specifying 'children of' and other values, users can tick the 'recursive' button to customize the query. While this is less intuitive, this change is more flexible for the various types of ontological relations ## [4.5.0] - 2023-10-20 diff --git a/askomics/api/file.py b/askomics/api/file.py index 1c030fac..5ae7567c 100644 --- a/askomics/api/file.py +++ b/askomics/api/file.py @@ -261,15 +261,17 @@ def get_preview(): errorMessage = '' error = False + errorCode = 200 if not results: errorMessage = "None of the selected files are in an integrable state" error = True + errorCode = 400 return jsonify({ 'previewFiles': results, 'error': error, 'errorMessage': errorMessage - }) + }), errorCode @file_bp.route('/api/files/delete', methods=['POST']) diff --git a/askomics/libaskomics/GffFile.py b/askomics/libaskomics/GffFile.py index d537328a..2ce64dfe 100644 --- a/askomics/libaskomics/GffFile.py +++ b/askomics/libaskomics/GffFile.py @@ -81,7 +81,7 @@ def save_preview(self): if self.error: error = self.error_message else: - data = {'entities': self.entity_name} + data = {'entities': self.entities} self.save_preview_in_db(data, error) def get_preview(self): diff --git a/askomics/libaskomics/RdfFile.py b/askomics/libaskomics/RdfFile.py index a4e0360a..da0efd35 100644 --- a/askomics/libaskomics/RdfFile.py +++ b/askomics/libaskomics/RdfFile.py @@ -59,6 +59,10 @@ def get_location_and_remote_graph(self): str Location """ + + if self.settings.get('skip_rdf_preview', False): + return "", "" + graph = RdfGraph(self.app, self.session) graph.parse(self.path, format=self.type_dict[self.type]) triple_loc = (None, self.prov.atLocation, None) diff --git a/askomics/react/src/routes/error/error.jsx b/askomics/react/src/routes/error/error.jsx index 16af818e..68e0044c 100644 --- a/askomics/react/src/routes/error/error.jsx +++ b/askomics/react/src/routes/error/error.jsx @@ -39,7 +39,7 @@ export default class ErrorDiv extends Component { } let error - + if 
(Array.isArray(this.props.errorMessage)) { error = ( @@ -48,12 +48,18 @@ export default class ErrorDiv extends Component { ))} ) - } else { + } else if (! this.props.errorMessage){ error = (
{messages[this.props.status.toString()]}
) + } else { + error = ( + +
{this.props.errorMessage}
+
+ ) } return ( diff --git a/askomics/react/src/routes/upload/filestable.jsx b/askomics/react/src/routes/upload/filestable.jsx index 4a17b94f..869c8f9c 100644 --- a/askomics/react/src/routes/upload/filestable.jsx +++ b/askomics/react/src/routes/upload/filestable.jsx @@ -3,17 +3,26 @@ import axios from 'axios' import BootstrapTable from 'react-bootstrap-table-next' import paginationFactory from 'react-bootstrap-table2-paginator' import cellEditFactory from 'react-bootstrap-table2-editor' -import {Badge} from 'reactstrap' +import {Badge, Modal, ModalHeader, ModalBody, ModalFooter, Button} from 'reactstrap' import WaitingDiv from '../../components/waiting' import Utils from '../../classes/utils' +import SyntaxHighlighter from 'react-syntax-highlighter' +import { monokai } from 'react-syntax-highlighter/dist/esm/styles/hljs' import PropTypes from 'prop-types' export default class FilesTable extends Component { constructor (props) { super(props) + this.state = { + modalTracebackTitle: "", + modalTracebackContent: "", + modalTraceback: false + } this.utils = new Utils() this.handleSelection = this.handleSelection.bind(this) this.handleSelectionAll = this.handleSelectionAll.bind(this) + this.handleClickError = this.handleClickError.bind(this) + this.toggleModalTraceback = this.toggleModalTraceback.bind(this) } handleSelection (row, isSelect) { @@ -41,6 +50,24 @@ export default class FilesTable extends Component { } } + handleClickError(event) { + this.props.files.forEach(file => { + if (file.id == event.target.id) { + this.setState({ + modalTracebackTitle: "File processing error", + modalTracebackContent: file.preview_error ? file.preview_error : "Internal server error", + modalTraceback: true + }) + } + }) + } + + toggleModalTraceback () { + this.setState({ + modalTraceback: !this.state.modalTraceback + }) + } + editFileName (oldValue, newValue, row) { if (newValue === oldValue) {return} @@ -102,7 +129,7 @@ export default class FilesTable extends Component { if (cell == 'processing') { return Processing } - return Error + return Error }, sort: true, editable: false @@ -147,6 +174,19 @@ export default class FilesTable extends Component { })} /> + + {this.state.modalTracebackTitle.substring(0, 100)} + +
+ + {this.state.modalTracebackContent} + +
+
+ + + +
) } diff --git a/askomics/react/src/routes/upload/upload.jsx b/askomics/react/src/routes/upload/upload.jsx index 907d4d5f..a8c0cf60 100644 --- a/askomics/react/src/routes/upload/upload.jsx +++ b/askomics/react/src/routes/upload/upload.jsx @@ -28,26 +28,10 @@ export default class Upload extends Component { componentDidMount () { if (!this.props.waitForStart) { - let requestUrl = '/api/files' - axios.get(requestUrl, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) }) - .then(response => { - console.log(requestUrl, response.data) - this.setState({ - diskSpace: response.data.diskSpace, - exceededQuota: this.props.config.user.quota > 0 && response.data.diskSpace >= this.props.config.user.quota, - files: response.data.files, - waiting: false - }) - }) - .catch(error => { - console.log(error, error.response.data.errorMessage) - this.setState({ - error: true, - errorMessage: error.response.data.errorMessage, - status: error.response.status, - waiting: false - }) - }) + this.getFiles() + this.interval = setInterval(() => { + this.getFiles() + }, 5000) } } @@ -57,6 +41,31 @@ export default class Upload extends Component { } } + + getFiles() { + let requestUrl = '/api/files' + axios.get(requestUrl, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) }) + .then(response => { + console.log(requestUrl, response.data) + this.setState({ + diskSpace: response.data.diskSpace, + exceededQuota: this.props.config.user.quota > 0 && response.data.diskSpace >= this.props.config.user.quota, + files: response.data.files, + waiting: false + }) + }) + .catch(error => { + console.log(error, error.response.data.errorMessage) + this.setState({ + error: true, + errorMessage: error.response.data.errorMessage, + status: error.response.status, + waiting: false + }) + }) + } + + deleteSelectedFiles () { let requestUrl = '/api/files/delete' let data = { @@ -94,6 +103,10 @@ export default class Upload extends Component { return this.state.selected.length == 0 } + isDisabledIntegrate () { + return this.state.selected.length == 0 || this.state.files.some(file => this.state.selected.includes(file.id) && file.status == "error") + } + render () { let redirectLogin if (this.state.status == 401) { @@ -138,7 +151,7 @@ export default class Upload extends Component {
- + diff --git a/askomics/tasks.py b/askomics/tasks.py index 5ca2dabc..53d65426 100644 --- a/askomics/tasks.py +++ b/askomics/tasks.py @@ -26,7 +26,7 @@ celery = create_celery(app) -@celery.task(bind=True, name="set_preview") +@celery.task(bind=True, name="save_preview") def save_preview(self, session, fileId): """Compute the file preview in backend and store it in DB diff --git a/config/askomics.ini.template b/config/askomics.ini.template index b65f068b..a065e85c 100644 --- a/config/askomics.ini.template +++ b/config/askomics.ini.template @@ -91,6 +91,9 @@ autocomplete_max_results = 10 anonymous_query = false anonymous_query_cleanup = 60 +skip_rdf_preview = false + + [triplestore] # name of the triplestore, can be virtuoso or fuseki triplestore = virtuoso From 700b0bfa3513e843e2d0dc438293886996e46046 Mon Sep 17 00:00:00 2001 From: mboudet Date: Thu, 2 Nov 2023 15:29:43 +0000 Subject: [PATCH 8/9] fix tests --- askomics/libaskomics/RdfFile.py | 2 +- askomics/react/src/routes/upload/upload.jsx | 16 ++++++++++++++-- .../react/src/routes/upload/uploadform.jsx | 18 +----------------- .../react/src/routes/upload/uploadmodal.jsx | 7 ++++--- .../react/src/routes/upload/uploadurlform.jsx | 15 ++------------- tests/test_api_file.py | 2 +- 6 files changed, 23 insertions(+), 37 deletions(-) diff --git a/askomics/libaskomics/RdfFile.py b/askomics/libaskomics/RdfFile.py index da0efd35..f96ba19a 100644 --- a/askomics/libaskomics/RdfFile.py +++ b/askomics/libaskomics/RdfFile.py @@ -60,7 +60,7 @@ def get_location_and_remote_graph(self): Location """ - if self.settings.get('skip_rdf_preview', False): + if self.settings.get('askomics', 'skip_rdf_preview', fallback=False): return "", "" graph = RdfGraph(self.app, self.session) diff --git a/askomics/react/src/routes/upload/upload.jsx b/askomics/react/src/routes/upload/upload.jsx index a8c0cf60..b56bcce5 100644 --- a/askomics/react/src/routes/upload/upload.jsx +++ b/askomics/react/src/routes/upload/upload.jsx @@ -23,15 +23,16 @@ export default class Upload extends Component { } this.deleteSelectedFiles = this.deleteSelectedFiles.bind(this) this.integrateSelectedFiles = this.integrateSelectedFiles.bind(this) + this.getFiles = this.getFiles.bind(this) this.cancelRequest } componentDidMount () { if (!this.props.waitForStart) { - this.getFiles() this.interval = setInterval(() => { this.getFiles() }, 5000) + this.getFiles() } } @@ -53,6 +54,17 @@ export default class Upload extends Component { files: response.data.files, waiting: false }) + let isProcessing = response.data.files.some(file => file.status == "processing") + console.log(isProcessing) + if (this.interval && !isProcessing){ + clearInterval(this.interval) + this.interval = "" + } + if (!this.interval && isProcessing){ + this.interval = setInterval(() => { + this.getFiles() + }, 5000) + } }) .catch(error => { console.log(error, error.response.data.errorMessage) @@ -145,7 +157,7 @@ export default class Upload extends Component {

Upload


{warningDiskSpace} - this.setState(p)} config={this.props.config} /> + this.setState(p)} config={this.props.config} getFiles={this.getFiles} />
this.setState(p)} selected={this.state.selected} waiting={this.state.waiting} config={this.props.config} />
diff --git a/askomics/react/src/routes/upload/uploadform.jsx b/askomics/react/src/routes/upload/uploadform.jsx index 5c3f9878..fceff0ea 100644 --- a/askomics/react/src/routes/upload/uploadform.jsx +++ b/askomics/react/src/routes/upload/uploadform.jsx @@ -101,23 +101,7 @@ export default class UploadForm extends Component { new_files: update(this.state.new_files, { [i]: { uploadPercentage: { $set: 100 } } }) }) // load file component - let requestUrlFiles = '/api/files' - axios.get(requestUrlFiles, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) }) - .then(response => { - console.log(requestUrlFiles, response.data) - this.props.setStateUpload({ - files: response.data.files - }) - }) - .catch(error => { - console.log(error, error.response.data.errorMessage) - this.setState({ - new_files: update(this.state.new_files, { [i]: { error: { $set: true } } }, { [i]: { errorMessage: { $set: error.response.data.errorMessage } } }), - error: true, - errorMessage: error.response.data.errorMessage, - status: error.response.status - }) - }) + this.props.getFiles() } }).catch(error => { console.log(error, error.response.data.errorMessage) diff --git a/askomics/react/src/routes/upload/uploadmodal.jsx b/askomics/react/src/routes/upload/uploadmodal.jsx index 1e63b458..7d4b3bbd 100644 --- a/askomics/react/src/routes/upload/uploadmodal.jsx +++ b/askomics/react/src/routes/upload/uploadmodal.jsx @@ -67,7 +67,7 @@ export default class UploadModal extends Component { Upload files - + @@ -77,7 +77,7 @@ export default class UploadModal extends Component { Upload files by URL - + @@ -91,6 +91,7 @@ export default class UploadModal extends Component { UploadModal.propTypes = { setStateUpload: PropTypes.func, + getFiles: PropTypes.func, config: PropTypes.object, disabled: PropTypes.bool -} \ No newline at end of file +} diff --git a/askomics/react/src/routes/upload/uploadurlform.jsx b/askomics/react/src/routes/upload/uploadurlform.jsx index ce631744..df738b55 100644 --- a/askomics/react/src/routes/upload/uploadurlform.jsx +++ b/askomics/react/src/routes/upload/uploadurlform.jsx @@ -60,19 +60,7 @@ export default class UploadUrlForm extends Component { progressDisplay: "100 %", progressColor: "success" }) - - // load file component - let requestUrlFiles = '/api/files' - axios.get(requestUrlFiles, { baseURL: this.props.config.proxyPath, cancelToken: new axios.CancelToken((c) => { this.cancelRequest = c }) }) - .then(response => { - console.log(requestUrlFiles, response.data) - this.props.setStateUpload({ - files: response.data.files - }) - }) - .catch(error => { - console.log(error, error.response.data.errorMessage) - }) + this.props.getFiles() }) .catch(error => { console.log(error, error.response.data.errorMessage) @@ -106,5 +94,6 @@ export default class UploadUrlForm extends Component { UploadUrlForm.propTypes = { setStateUpload: PropTypes.func, + getFiles: PropTypes.func, config: PropTypes.object } diff --git a/tests/test_api_file.py b/tests/test_api_file.py index 8afbcad5..79e86c00 100644 --- a/tests/test_api_file.py +++ b/tests/test_api_file.py @@ -413,7 +413,7 @@ def test_get_preview(self, client): csv_malformed = json.loads(file.read()) response = client.client.post('/api/files/preview', json=fake_data) - assert response.status_code == 200 + assert response.status_code == 400 assert response.json == { 'error': True, 'errorMessage': 'None of the selected files are in an integrable state', From c985968342a659a5b3617b4280c554967723b9d0 Mon Sep 17 00:00:00 
2001
From: mboudet
Date: Thu, 2 Nov 2023 19:08:07 +0100
Subject: [PATCH 9/9] Fix test

---
 askomics/react/src/routes/upload/uploadform.jsx | 1 +
 1 file changed, 1 insertion(+)

diff --git a/askomics/react/src/routes/upload/uploadform.jsx b/askomics/react/src/routes/upload/uploadform.jsx
index fceff0ea..70216e4b 100644
--- a/askomics/react/src/routes/upload/uploadform.jsx
+++ b/askomics/react/src/routes/upload/uploadform.jsx
@@ -152,5 +152,6 @@ export default class UploadForm extends Component {
 UploadForm.propTypes = {
   setStateUpload: PropTypes.func,
+  getFiles: PropTypes.func,
   config: PropTypes.object
 }
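A loose end worth flagging for a follow-up: FilesHandler.update_file_info, extended in patch 4 with the preview_error column, still builds its SET clause from hand-maintained flag strings, and its own comment notes "Should be a cleaner way of doing this...". Below is a minimal sketch of that cleaner form, assuming the same call sites and the Database helper already used in the module; it is not code from this series, only an equivalent rewrite of the patched method.

    def update_file_info(self, file_id, size=None, status="", task_id="", error=None):
        """Update file size, status, task id and preview error (sketch, same semantics as the patched method)"""
        database = Database(self.app, self.session)

        # Collect only the columns that were actually provided
        assignments = []
        values = []
        if size is not None:
            assignments.append("size=?")
            values.append(size)
        for column, value in (("status", status), ("task_id", task_id), ("preview_error", error)):
            if value:
                assignments.append("{}=?".format(column))
                values.append(value)

        if not assignments:
            # Nothing to update: avoid emitting a malformed "UPDATE files SET WHERE id=?"
            return

        values.append(file_id)
        query = "UPDATE files SET {} WHERE id=?".format(", ".join(assignments))
        database.execute_sql_query(query, tuple(values))

Building the clause from a list keeps the placeholder order and the argument semantics of the current code (size when not None, the other columns when truthy) while removing the comma bookkeeping that each new column currently adds.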