Skip to content

Commit

Permalink
Bug fixes and makes export filing and editing tiles into async functi…
Browse files Browse the repository at this point in the history
…on, and defined a naming scheme for exported reports

Signed-off-by: ZhuoweiWen <[email protected]>
  • Loading branch information
ZhuoweiWen committed Oct 12, 2023
1 parent ed3cc20 commit 214ced3
Show file tree
Hide file tree
Showing 7 changed files with 123 additions and 76 deletions.
61 changes: 51 additions & 10 deletions back-end/controllers/celery_controller/celery_tasks.py
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
import logging, subprocess, os, json, uuid, cProfile
from controllers.celery_controller.celery_config import celery
from controllers.database_controller import fabric_ops, kml_ops, mbtiles_ops, file_ops, folder_ops, vt_ops
from controllers.database_controller import user_ops, fabric_ops, kml_ops, mbtiles_ops, file_ops, folder_ops, vt_ops
from database.models import file, kml_data
from database.sessions import Session
from flask import jsonify
from datetime import datetime
from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE

@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True)
def process_data(self, file_names, file_data_list, userid, folderid):
from controllers.database_controller import vt_ops
print(file_names)
try:
geojson_array = []
Expand Down Expand Up @@ -134,7 +135,6 @@ def process_data(self, file_names, file_data_list, userid, folderid):

@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True)
def run_tippecanoe(self, command, folderid, mbtilepath):
from controllers.database_controller import vt_ops
result = subprocess.run(command, shell=True, check=True, stderr=subprocess.PIPE)

if result.stderr:
Expand All @@ -145,7 +145,6 @@ def run_tippecanoe(self, command, folderid, mbtilepath):

@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True)
def run_tippecanoe_tiles_join(self, command1, command2, folderid, mbtilepaths):
from controllers.database_controller import vt_ops

# run first command
result1 = subprocess.run(command1, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
Expand Down Expand Up @@ -201,16 +200,17 @@ def deleteFiles(self, fileid, userid):
session.close()

@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True)
def toggle_tiles(self, markers, userid):


def toggle_tiles(self, markers, userid, mbtid):
message = ''
status_code = 0
session = Session()
try:
# Get the last folder of the user
user_last_folder = folder_ops.get_folder(userid=userid, folderid=None, session=session)
geojson_data = []
if mbtid != -1:
mbtiles_entry = mbtiles_ops.get_mbtiles_with_id(mbtid=mbtid, session=session)
user_last_folder = folder_ops.get_export_folder(userid=userid, folderid=mbtiles_entry.folder_id, session=session)
else:
user_last_folder = folder_ops.get_upload_folder(userid=userid, folderid=None, session=session)
if user_last_folder:

kml_set = set()
Expand Down Expand Up @@ -239,18 +239,41 @@ def toggle_tiles(self, markers, userid):
else:
raise Exception('No last folder for the user')

all_kmls = file_ops.get_files_with_postfix(folderid=user_last_folder.id, postfix='.kml', session=session)
geojson_data = []
all_kmls = file_ops.get_files_with_postfix(user_last_folder.id, '.kml', session)
for kml_f in all_kmls:
geojson_data.append(vt_ops.read_kml(kml_f.id, session))

all_geojsons = file_ops.get_files_with_postfix(folderid=user_last_folder.id, postfix='.geojson', session=session)
for geojson_f in all_geojsons:
geojson_data.append(vt_ops.read_geojson(geojson_f.id, session))

mbtiles_ops.delete_mbtiles(user_last_folder.id, session)
vt_ops.create_tiles(geojson_data, userid, user_last_folder.id, session)
if mbtid != -1:
existing_csvs = file_ops.get_files_by_type(folderid=user_last_folder.id, filetype='export', session=session)
for csv_file in existing_csvs:
session.delete(csv_file)

userVal = user_ops.get_user_with_id(userid=userid, session=session)
# Generate and save a new CSV
all_file_ids = [file.id for file in file_ops.get_files_with_postfix(user_last_folder.id, '.kml', session) + file_ops.get_files_with_postfix(user_last_folder.id, '.geojson', session)]

results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all()
availability_csv = file_ops.generate_csv_data(results, userVal.provider_id, userVal.brand_name)

csv_name = f"availability-{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}.csv"
csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8')
new_csv_file = file_ops.create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=user_last_folder.id, filetype='export', session=session)
session.add(new_csv_file)



message = 'Markers toggled successfully'
status_code = 200




except Exception as e:
session.rollback() # rollback transaction on error
Expand All @@ -262,3 +285,21 @@ def toggle_tiles(self, markers, userid):
session.close()

return (message, status_code)


@celery.task(bind=True, autoretry_for=(Exception,), retry_backoff=True)
def async_folder_copy_for_export(self, userid, folderid, serialized_csv):
    """Copy a user's upload folder into a new 'export' folder and attach the export CSV.

    Runs as a Celery task so the HTTP export endpoint can return immediately
    (see kml_ops.export, which enqueues this with apply_async).

    :param userid: id of the user performing the export
    :param folderid: id of the upload folder to copy
    :param serialized_csv: the availability CSV content as a str
    """
    session = Session()
    try:
        # Pass the session explicitly — consistent with every other call site
        # (e.g. user_ops.get_user_with_id(userid=userid, session=session) in
        # toggle_tiles); the original omitted it here.
        userVal = user_ops.get_user_with_id(userid=userid, session=session)
        current_datetime = datetime.now().strftime(DATETIME_FORMAT)
        newfolder_name = f"export-{current_datetime}"

        csv_name = EXPORT_CSV_NAME_TEMPLATE.format(
            brand_name=userVal.brand_name, current_datetime=current_datetime
        )

        original_folder = folder_ops.get_upload_folder(userid=userid, folderid=folderid, session=session)
        new_folder = original_folder.copy(name=newfolder_name, type='export', session=session)
        csv_file = file_ops.create_file(filename=csv_name, content=serialized_csv.encode('utf-8'), folderid=new_folder.id, filetype='export', session=session)
        session.add(csv_file)
        session.commit()
    except Exception:
        # Undo the partial copy, then re-raise so Celery's autoretry_for fires.
        session.rollback()
        raise
    finally:
        # Original leaked the session on any failure; always close it.
        session.close()
20 changes: 10 additions & 10 deletions back-end/controllers/database_controller/kml_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,8 @@ def generate_csv_data(results, provider_id, brand_name):
return availability_csv

def export(userid, folderid, providerid, brandname, session):
from controllers.celery_controller.celery_tasks import async_folder_copy_for_export

all_files = get_files_with_postfix(folderid, '.kml', session) + get_files_with_postfix(folderid, '.geojson', session)
all_file_ids = [file.id for file in all_files]
results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all()
Expand All @@ -233,19 +235,17 @@ def export(userid, folderid, providerid, brandname, session):

output = io.BytesIO()
availability_csv.to_csv(output, index=False, encoding='utf-8')
csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8')

current_datetime = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
folder_name = f"export-{current_datetime}"
csv_name = f"availability-{current_datetime}.csv"

original_folder = get_upload_folder(userid=userid, folderid=folderid, session=session)
new_folder = original_folder.copy(name=folder_name,type='export', session=session)
# original_folder = get_upload_folder(userid=userid, folderid=folderid, session=session)
# new_folder = original_folder.copy(name=folder_name,type='export', session=session)

csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8')
csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=new_folder.id, filetype='export', session=session)
# csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=new_folder.id, filetype='export', session=session)

session.add(csv_file)
session.commit()
# session.add(csv_file)
# session.commit()

async_folder_copy_for_export.apply_async(args=[userid, folderid, csv_data_str])

return output

Expand Down
3 changes: 2 additions & 1 deletion back-end/controllers/database_controller/vt_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from .folder_ops import get_upload_folder, get_export_folder
from .mbtiles_ops import get_latest_mbtiles, delete_mbtiles, get_mbtiles_with_id
from .user_ops import get_user_with_id, get_user_with_username
from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE

db_lock = Lock()

Expand Down Expand Up @@ -325,7 +326,7 @@ def toggle_tiles(markers, userid, mbtid):
results = session.query(kml_data).filter(kml_data.file_id.in_(all_file_ids)).all()
availability_csv = generate_csv_data(results, userVal.provider_id, userVal.brand_name)

csv_name = f"availability-{datetime.now().strftime('%Y-%m-%d_%H:%M:%S')}.csv"
csv_name = EXPORT_CSV_NAME_TEMPLATE.format(brand_name=userVal.brand_name, current_datime=datetime.now().strftime(DATETIME_FORMAT))
csv_data_str = availability_csv.to_csv(index=False, encoding='utf-8')
new_csv_file = create_file(filename=csv_name, content=csv_data_str.encode('utf-8'), folderid=user_last_folder.id, filetype='export', session=session)
session.add(new_csv_file)
Expand Down
10 changes: 5 additions & 5 deletions back-end/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
from controllers.database_controller import fabric_ops, kml_ops, user_ops, vt_ops, file_ops, folder_ops, mbtiles_ops, challenge_ops, kmz_ops
from controllers.celery_controller.celery_config import app, celery
from controllers.celery_controller.celery_tasks import process_data, deleteFiles, toggle_tiles
from utils.namingschemes import DATETIME_FORMAT, EXPORT_CSV_NAME_TEMPLATE

logging.basicConfig(level=logging.DEBUG)
console_handler = logging.StreamHandler()
Expand Down Expand Up @@ -278,10 +279,9 @@ def exportFiling():
csv_output = kml_ops.export(userVal.id, folderVal.id, userVal.provider_id, userVal.brand_name, session)

if csv_output:
current_time = datetime.now()
formatted_time = current_time.strftime('%Y_%B')
current_time = datetime.now().strftime(DATETIME_FORMAT)

download_name = "BDC_Report_" + formatted_time + "_" + shortuuid.uuid()[:4] + '.csv'
download_name = EXPORT_CSV_NAME_TEMPLATE.format(brand_name=userVal.brand_name, current_datetime=current_time)

csv_output.seek(0)
return send_file(csv_output, as_attachment=True, download_name=download_name, mimetype="text/csv")
Expand Down Expand Up @@ -355,9 +355,9 @@ def toggle_markers():
markers = request_data['marker']
mbtid = request_data['mbtid']
identity = get_jwt_identity()
response = vt_ops.toggle_tiles(markers=markers, userid=identity['id'], mbtid=mbtid)
task = toggle_tiles.apply_async(args=[markers, identity['id'], mbtid])

return jsonify(message=response[0]), response[1]
return jsonify({'Status': "OK", 'task_id': task.id}), 200
except NoAuthorizationError:
return jsonify({'error': 'Token is invalid or expired'}), 401

Expand Down
2 changes: 2 additions & 0 deletions back-end/utils/namingschemes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
# Shared timestamp format for export folder and report names
# (e.g. "2023-10-12_14:30:05").
# NOTE(review): ':' is illegal in Windows filenames — presumably exports only
# run on POSIX hosts; confirm.
DATETIME_FORMAT = '%Y-%m-%d_%H:%M:%S'
# Naming scheme for exported BDC report CSVs; fill with
# .format(brand_name=..., current_datetime=...) using DATETIME_FORMAT above.
EXPORT_CSV_NAME_TEMPLATE = "{brand_name}-BDC_Report-{current_datetime}.csv"
102 changes: 53 additions & 49 deletions front-end/components/MyEdit.js
Original file line number Diff line number Diff line change
Expand Up @@ -85,61 +85,65 @@ const MyEdit = () => {
setSelectedPoints(updatedPoints);
};

const toggleMarkers = (markers) => {
const requestBody = {
marker: markers,
mbtid: mbtid ? mbtid : -1
};
console.log(requestBody);
return fetch(`${backend_url}/toggle-markers`, {
method: "POST",
credentials: "include", // Include cookies in the request
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(requestBody),
})
.then((response) => {
if (response.status === 401) {
// Redirect the user to the login page or other unauthorized handling page
Swal.fire({
icon: "error",
title: "Oops...",
text: "Session expired, please log in again!",
});
router.push("/login");
} else {
return response.json();
}
})
.then((data) => {
if (data) { // to make sure data is not undefined when status is 401
console.log(data.message);
}
})
.catch((error) => {
console.log(error);
});
};

const doneWithChanges = () => {
const toggleMarkers = async () => {
setIsLoading(true);
console.log(selectedPoints);
// Send request to server to change the selected markers to served
toggleMarkers(selectedPoints).finally(() => {
try {
const requestBody = {
marker: selectedPoints,
mbtid: mbtid ? mbtid : -1
};
console.log(requestBody);
const response = await fetch(`${backend_url}/toggle-markers`, {
method: "POST",
credentials: "include", // Include cookies in the request
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(requestBody),
})

setIsDataReady(true);
if (response.status === 401) {
setIsLoading(false);
// Redirect the user to the login page or other unauthorized handling page
Swal.fire({
icon: "error",
title: "Oops...",
text: "Session expired, please log in again!",
});
router.push("/login");
}
if (!response.ok) {
setIsLoading(false);
// If the response status is not ok (not 200)
throw new Error(`HTTP error! status: ${response.status}, ${response.statusText}`);
}
const data = await response.json();
if (data) {
const intervalId = setInterval(() => {
console.log(data.task_id);
fetch(`${backend_url}/status/${data.task_id}`)
.then((response) => response.json())
.then((status) => {
if (status.state !== "PENDING") {
clearInterval(intervalId);
setIsDataReady(true);
setIsLoading(false);
setTimeout(() => {
setIsDataReady(false);
router.reload();
}, 5000);
}
});
}, 5000);
}
} catch (error) {
console.error("Error:", error);
setIsLoading(false);

setTimeout(() => {
setIsDataReady(false); // This will be executed 15 seconds after setIsLoading(false)
}, 5000);
setSelectedPoints([]);
router.reload();
});
}
};



return (
<div>
<div style={{ position: 'fixed', zIndex: 10000 }}>
Expand Down
1 change: 0 additions & 1 deletion front-end/components/MyFile.js
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,6 @@ const MyFile = () => {
.then((status) => {
if (status.state !== "PENDING") {
clearInterval(intervalId);
setExportSuccess(true);
setIsDataReady(true);
setIsLoading(false);
setTimeout(() => {
Expand Down

0 comments on commit 214ced3

Please sign in to comment.