From 214ced3dee564e30c1d3eefa77d9ddc12caa6d63 Mon Sep 17 00:00:00 2001 From: ZhuoweiWen Date: Wed, 11 Oct 2023 20:20:58 -0400 Subject: [PATCH] Bug fixes and makes export filing and editing tiles into async function, defined a naming scheme for exported report Signed-off-by: ZhuoweiWen --- .../celery_controller/celery_tasks.py | 61 +++++++++-- .../database_controller/kml_ops.py | 20 ++-- .../controllers/database_controller/vt_ops.py | 3 +- back-end/routes.py | 10 +- back-end/utils/namingschemes.py | 2 + front-end/components/MyEdit.js | 102 +++++++++--------- front-end/components/MyFile.js | 1 - 7 files changed, 123 insertions(+), 76 deletions(-) create mode 100644 back-end/utils/namingschemes.py diff --git a/back-end/controllers/celery_controller/celery_tasks.py b/back-end/controllers/celery_controller/celery_tasks.py index 6543132..88130e2 100755 --- a/back-end/controllers/celery_controller/celery_tasks.py +++ b/back-end/controllers/celery_controller/celery_tasks.py @@ -1,13 +1,14 @@ import logging, subprocess, os, json, uuid, cProfile from controllers.celery_controller.celery_config import celery -from controllers.database_controller import fabric_ops, kml_ops, mbtiles_ops, file_ops, folder_ops, vt_ops +from controllers.database_controller import user_ops, fabric_ops, kml_ops, mbtiles_ops, file_ops, folder_ops, vt_ops from database.models import file, kml_data from database.sessions import Session from flask import jsonify +from datetime import datetime +from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE @celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True) def process_data(self, file_names, file_data_list, userid, folderid): - from controllers.database_controller import vt_ops print(file_names) try: geojson_array = [] @@ -134,7 +135,6 @@ def process_data(self, file_names, file_data_list, userid, folderid): @celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True) def run_tippecanoe(self, command, folderid, 
mbtilepath): - from controllers.database_controller import vt_ops result = subprocess.run(command, shell=True, check=True, stderr=subprocess.PIPE) if result.stderr: @@ -145,7 +145,6 @@ def run_tippecanoe(self, command, folderid, mbtilepath): @celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True) def run_tippecanoe_tiles_join(self, command1, command2, folderid, mbtilepaths): - from controllers.database_controller import vt_ops # run first command result1 = subprocess.run(command1, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -201,16 +200,17 @@ def deleteFiles(self, fileid, userid): session.close() @celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True) -def toggle_tiles(self, markers, userid): - - +def toggle_tiles(self, markers, userid, mbtid): message = '' status_code = 0 session = Session() try: # Get the last folder of the user - user_last_folder = folder_ops.get_folder(userid=userid, folderid=None, session=session) - geojson_data = [] + if mbtid != -1: + mbtiles_entry = mbtiles_ops.get_mbtiles_with_id(mbtid=mbtid, session=session) + user_last_folder = folder_ops.get_export_folder(userid=userid, folderid=mbtiles_entry.folder_id, session=session) + else: + user_last_folder = folder_ops.get_upload_folder(userid=userid, folderid=None, session=session) if user_last_folder: kml_set = set() @@ -239,18 +239,41 @@ def toggle_tiles(self, markers, userid): else: raise Exception('No last folder for the user') - all_kmls = file_ops.get_files_with_postfix(folderid=user_last_folder.id, postfix='.kml', session=session) + geojson_data = [] + all_kmls = file_ops.get_files_with_postfix(user_last_folder.id, '.kml', session) for kml_f in all_kmls: geojson_data.append(vt_ops.read_kml(kml_f.id, session)) + all_geojsons = file_ops.get_files_with_postfix(folderid=user_last_folder.id, postfix='.geojson', session=session) for geojson_f in all_geojsons: geojson_data.append(vt_ops.read_geojson(geojson_f.id, session)) + 
mbtiles_ops.delete_mbtiles(user_last_folder.id, session) vt_ops.create_tiles(geojson_data, userid, user_last_folder.id, session) + if mbtid != -1: + existing_csvs = file_ops.get_files_by_type(folderid=user_last_folder.id, filetype='export', session=session) + for csv_file in existing_csvs: + session.delete(csv_file) + + userVal = user_ops.get_user_with_id(userid=userid, session=session) + # Generate and save a new CSV + all_file_ids = [file.id for file in file_ops.get_files_with_postfix(user_last_folder.id, '.kml', session) + file_ops.get_files_with_postfix(user_last_folder.id, '.geojson', session)] + + results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all() + availability_csv = file_ops.generate_csv_data(results, userVal.provider_id, userVal.brand_name) + + csv_name = f"availability-{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}.csv" + csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8') + new_csv_file = file_ops.create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=user_last_folder.id, filetype='export', session=session) + session.add(new_csv_file) + + + message = 'Markers toggled successfully' status_code = 200 + except Exception as e: session.rollback() # rollback transaction on error @@ -262,3 +285,21 @@ def toggle_tiles(self, markers, userid): session.close() return (message, status_code) + + +@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True) +def async_folder_copy_for_export(self, userid, folderid, serialized_csv): + session = Session() + userVal = user_ops.get_user_with_id(userid=userid, session=session) + current_datetime = datetime.now().strftime(DATETIME_FORMAT) + newfolder_name = f"export-{current_datetime}" + + + csv_name = EXPORT_CSV_NAME_TEMPLATE.format(brand_name=userVal.brand_name, current_datetime=current_datetime) + + original_folder = folder_ops.get_upload_folder(userid=userid, folderid=folderid, session=session) + new_folder =
original_folder.copy(name=newfolder_name,type='export', session=session) + csv_file = file_ops.create_file(filename=csv_name, content=serialized_csv.encode('utf-8'), folderid=new_folder.id, filetype='export', session=session) + session.add(csv_file) + session.commit() + session.close() \ No newline at end of file diff --git a/back-end/controllers/database_controller/kml_ops.py b/back-end/controllers/database_controller/kml_ops.py index 2c98f94..4967a24 100755 --- a/back-end/controllers/database_controller/kml_ops.py +++ b/back-end/controllers/database_controller/kml_ops.py @@ -225,6 +225,8 @@ def generate_csv_data(results, provider_id, brand_name): return availability_csv def export(userid, folderid, providerid, brandname, session): + from controllers.celery_controller.celery_tasks import async_folder_copy_for_export + all_files = get_files_with_postfix(folderid, '.kml', session) + get_files_with_postfix(folderid, '.geojson', session) all_file_ids = [file.id for file in all_files] results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all() @@ -233,19 +235,17 @@ def export(userid, folderid, providerid, brandname, session): output = io.BytesIO() availability_csv.to_csv(output, index=False, encoding='utf-8') + csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8') - current_datetime = datetime.now().strftime('%Y-%m-%d_%H:%M:%S') - folder_name = f"export-{current_datetime}" - csv_name = f"availability-{current_datetime}.csv" - - original_folder = get_upload_folder(userid=userid, folderid=folderid, session=session) - new_folder = original_folder.copy(name=folder_name,type='export', session=session) + # original_folder = get_upload_folder(userid=userid, folderid=folderid, session=session) + # new_folder = original_folder.copy(name=folder_name,type='export', session=session) - csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8') - csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), 
folderid=new_folder.id, filetype='export', session=session) + # csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=new_folder.id, filetype='export', session=session) - session.add(csv_file) - session.commit() + # session.add(csv_file) + # session.commit() + + async_folder_copy_for_export.apply_async(args=[userid, folderid, csv_data_str]) return output diff --git a/back-end/controllers/database_controller/vt_ops.py b/back-end/controllers/database_controller/vt_ops.py index e3e4648..d56f1e0 100755 --- a/back-end/controllers/database_controller/vt_ops.py +++ b/back-end/controllers/database_controller/vt_ops.py @@ -24,6 +24,7 @@ from .folder_ops import get_upload_folder, get_export_folder from .mbtiles_ops import get_latest_mbtiles, delete_mbtiles, get_mbtiles_with_id from .user_ops import get_user_with_id, get_user_with_username +from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE db_lock = Lock() @@ -325,7 +326,7 @@ def toggle_tiles(markers, userid, mbtid): results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all() availability_csv = generate_csv_data(results, userVal.provider_id, userVal.brand_name) - csv_name = f"availability-{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}.csv" + csv_name = EXPORT_CSV_NAME_TEMPLATE.format(brand_name=userVal.brand_name, current_datetime=datetime.now().strftime(DATETIME_FORMAT)) csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8') new_csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=user_last_folder.id, filetype='export', session=session) session.add(new_csv_file) diff --git a/back-end/routes.py b/back-end/routes.py index 07a462a..1924d94 100755 --- a/back-end/routes.py +++ b/back-end/routes.py @@ -20,6 +20,7 @@ from controllers.database_controller import fabric_ops, kml_ops, user_ops, vt_ops, file_ops, folder_ops, mbtiles_ops, challenge_ops, kmz_ops from controllers.celery_controller.celery_config
import app, celery from controllers.celery_controller.celery_tasks import process_data, deleteFiles, toggle_tiles +from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE logging.basicConfig(level=logging.DEBUG) console_handler = logging.StreamHandler() @@ -278,10 +279,9 @@ def exportFiling(): csv_output = kml_ops.export(userVal.id, folderVal.id, userVal.provider_id, userVal.brand_name, session) if csv_output: - current_time = datetime.now() - formatted_time = current_time.strftime('%Y_%B') + current_time = datetime.now().strftime(DATETIME_FORMAT) - download_name = "BDC_Report_" + formatted_time + "_" + shortuuid.uuid()[:4] + '.csv' + download_name = EXPORT_CSV_NAME_TEMPLATE.format(brand_name=userVal.brand_name, current_datetime=current_time) csv_output.seek(0) return send_file(csv_output, as_attachment=True, download_name=download_name, mimetype="text/csv") @@ -355,9 +355,9 @@ def toggle_markers(): markers = request_data['marker'] mbtid = request_data['mbtid'] identity = get_jwt_identity() - response = vt_ops.toggle_tiles(markers=markers, userid=identity['id'], mbtid=mbtid) + task = toggle_tiles.apply_async(args=[markers, identity['id'], mbtid]) - return jsonify(message=response[0]), response[1] + return jsonify({'Status': "OK", 'task_id': task.id}), 200 except NoAuthorizationError: return jsonify({'error': 'Token is invalid or expired'}), 401 diff --git a/back-end/utils/namingschemes.py b/back-end/utils/namingschemes.py new file mode 100644 index 0000000..7d06002 --- /dev/null +++ b/back-end/utils/namingschemes.py @@ -0,0 +1,2 @@ +DATETIME_FORMAT = '%Y-%m-%d_%H:%M:%S' +EXPORT_CSV_NAME_TEMPLATE = "{brand_name}-BDC_Report-{current_datetime}.csv" diff --git a/front-end/components/MyEdit.js b/front-end/components/MyEdit.js index b5ff1e7..f00f322 100644 --- a/front-end/components/MyEdit.js +++ b/front-end/components/MyEdit.js @@ -85,61 +85,65 @@ const MyEdit = () => { setSelectedPoints(updatedPoints); }; - const toggleMarkers = (markers) => { - const 
requestBody = { - marker: markers, - mbtid: mbtid ? mbtid : -1 - }; - console.log(requestBody); - return fetch(`${backend_url}/toggle-markers`, { - method: "POST", - credentials: "include", // Include cookies in the request - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify(requestBody), - }) - .then((response) => { - if (response.status === 401) { - // Redirect the user to the login page or other unauthorized handling page - Swal.fire({ - icon: "error", - title: "Oops...", - text: "Session expired, please log in again!", - }); - router.push("/login"); - } else { - return response.json(); - } - }) - .then((data) => { - if (data) { // to make sure data is not undefined when status is 401 - console.log(data.message); - } - }) - .catch((error) => { - console.log(error); - }); - }; - - const doneWithChanges = () => { + const toggleMarkers = async () => { setIsLoading(true); - console.log(selectedPoints); - // Send request to server to change the selected markers to served - toggleMarkers(selectedPoints).finally(() => { + try { + const requestBody = { + marker: selectedPoints, + mbtid: mbtid ? mbtid : -1 + }; + console.log(requestBody); + const response = await fetch(`${backend_url}/toggle-markers`, { + method: "POST", + credentials: "include", // Include cookies in the request + headers: { + "Content-Type": "application/json", + }, + body: JSON.stringify(requestBody), + }) - setIsDataReady(true); + if (response.status === 401) { + setIsLoading(false); + // Redirect the user to the login page or other unauthorized handling page + Swal.fire({ + icon: "error", + title: "Oops...", + text: "Session expired, please log in again!", + }); + router.push("/login"); + } + if (!response.ok) { + setIsLoading(false); + // If the response status is not ok (not 200) + throw new Error(`HTTP error! 
status: ${response.status}, ${response.statusText}`); + } + const data = await response.json(); + if (data) { + const intervalId = setInterval(() => { + console.log(data.task_id); + fetch(`${backend_url}/status/${data.task_id}`) + .then((response) => response.json()) + .then((status) => { + if (status.state !== "PENDING") { + clearInterval(intervalId); + setIsDataReady(true); + setIsLoading(false); + setTimeout(() => { + setIsDataReady(false); + router.reload(); + }, 5000); + } + }); + }, 5000); + } + } catch (error) { + console.error("Error:", error); setIsLoading(false); - - setTimeout(() => { - setIsDataReady(false); // This will be executed 15 seconds after setIsLoading(false) - }, 5000); - setSelectedPoints([]); - router.reload(); - }); + } }; + return (
diff --git a/front-end/components/MyFile.js b/front-end/components/MyFile.js index 697dbf4..021c580 100755 --- a/front-end/components/MyFile.js +++ b/front-end/components/MyFile.js @@ -224,7 +224,6 @@ const MyFile = () => { .then((status) => { if (status.state !== "PENDING") { clearInterval(intervalId); - setExportSuccess(true); setIsDataReady(true); setIsLoading(false); setTimeout(() => {