From 24a661f98eec896058a96c6f6799897096ab1f7d Mon Sep 17 00:00:00 2001
From: apdn7 <106378158+apdn7@users.noreply.github.com>
Date: Wed, 18 Sep 2024 18:02:14 +0900
Subject: [PATCH 1/3] 473 base files

---
 AnalysisPlatform.bat | 1214 +--
 VERSION | 6 +-
 _original_path_list.log | 31 +-
 ap/__init__.py | 14 +-
 ap/api/aggregate_plot/controllers.py | 3 +
 ap/api/aggregate_plot/services.py | 9 +-
 ap/api/analyze/services/pca.py | 2 +
 ap/api/calendar_heatmap/services.py | 3 +
 ap/api/categorical_plot/services.py | 3 +-
 ap/api/common/services/show_graph_database.py | 37 +-
 ap/api/common/services/show_graph_services.py | 248 +-
 ap/api/common/services/sql_generator.py | 411 +-
 ap/api/common/services/utils.py | 200 +-
 ap/api/external_api/controllers.py | 89 +-
 ap/api/external_api/services.py | 41 +-
 ap/api/graphical_lasso/services.py | 4 +
 ap/api/heatmap/services.py | 12 +-
 ap/api/parallel_plot/controllers.py | 86 +-
 ap/api/parallel_plot/services.py | 82 +-
 ap/api/ridgeline_plot/services.py | 6 +
 ap/api/sankey_plot/sankey_glasso/grplasso.py | 4 +
 .../sankey_glasso/sankey_services.py | 11 +-
 ap/api/scatter_plot/services.py | 114 +-
 ap/api/setting_module/controllers.py | 247 +-
 ap/api/setting_module/services/autolink.py | 44 +-
 ap/api/setting_module/services/csv_import.py | 246 +-
 ap/api/setting_module/services/data_import.py | 219 +-
 ap/api/setting_module/services/equations.py | 18 +-
 .../setting_module/services/factory_import.py | 89 +-
 .../services/master_data_transform_pattern.py | 104 +
 .../services/polling_frequency.py | 9 +-
 .../setting_module/services/process_delete.py | 4 +-
 .../services/show_latest_record.py | 260 +-
 .../software_workshop_etl_services.py | 332 +
 .../services/v2_etl_services.py | 37 +-
 ap/api/table_viewer/controllers.py | 13 +-
 ap/api/trace_data/controllers.py | 9 +-
 ap/api/trace_data/services/csv_export.py | 50 +-
 ap/api/trace_data/services/data_count.py | 4 +-
 ap/common/assets/assets.json | 477 +-
 ap/common/common_utils.py | 246 +-
 ap/common/constants.py | 113 +-
 ap/common/datetime_format_utils.py | 71 +
 ap/common/memoize.py | 12 +-
 ap/common/pydn/dblib/db_common.py | 1 +
 ap/common/pydn/dblib/db_proxy.py | 28 +-
 ap/common/pydn/dblib/mssqlserver.py | 34 +-
 ap/common/pydn/dblib/mysql.py | 4 +-
 ap/common/pydn/dblib/oracle.py | 5 +-
 ap/common/pydn/dblib/postgresql.py | 17 +-
 ap/common/pydn/dblib/sqlite.py | 18 +
 ap/common/scheduler.py | 30 +-
 ap/common/services/csv_content.py | 31 +-
 ap/common/services/csv_header_wrapr.py | 4 +-
 ap/common/services/data_type.py | 9 +-
 ap/common/services/error_message_handler.py | 32 +-
 ap/common/services/form_env.py | 8 +
 ap/common/services/jp_to_romaji_utils.py | 41 +
 ap/common/services/sse.py | 16 +-
 ap/config/basic_config.yml | 18 +-
 ap/equations/core.py | 120 +-
 ap/equations/error.py | 7 +
 ap/script/migrate_cfg_data_source_csv.py | 33 +-
 ap/script/migrate_cfg_process.py | 19 +-
 ap/script/migrate_cfg_process_column.py | 29 +
 ap/script/migrate_delta_time.py | 41 +
 ap/script/migrate_m_function.py | 23 +-
 ap/script/migrate_process_file_name_column.py | 9 +
 ap/script/setup_for_e2e.py | 93 +
 ap/setting_module/controllers.py | 4 +-
 ap/setting_module/models.py | 184 +-
 ap/setting_module/schemas.py | 5 +
 .../services/background_process.py | 2 +-
 .../services/backup_and_restore/__init__.py | 0
 .../services/backup_and_restore/backup.py | 86 +
 .../backup_and_restore/backup_file_manager.py | 101 +
 .../backup_and_restore/duplicated_check.py | 58 +
 .../services/backup_and_restore/jobs.py | 80 +
 .../services/backup_and_restore/restore.py | 100 +
 ap/setting_module/services/process_config.py | 197 +-
 .../services/register_from_file.py | 637 +-
 ap/setting_module/services/trace_config.py | 8 +-
 .../aggregate_plot/css/aggregate_plot.css | 3 +-
 ap/static/aggregate_plot/js/aggregate_plot.js | 418 +-
 .../aggregate_plot/js/aggregation_chart.js | 201 +-
 ap/static/analyze/css/anomaly_detection.css | 15 +-
 ap/static/analyze/css/graphical_lasso.css | 14 +-
 ap/static/analyze/css/toastr.css | 248 +-
 ap/static/analyze/js/generateJson.js | 1270 +--
 ap/static/analyze/js/graphical_lasso.js | 108 +-
 ap/static/analyze/js/graphical_lasso_sigma.js | 10 +-
 ap/static/analyze/js/hotelling_biplot.js | 7 +-
 ap/static/analyze/js/hotelling_common.js | 152 +-
 .../analyze/js/hotelling_q_contribution.js | 25 +-
 ap/static/analyze/js/hotelling_scatters.js | 77 +-
 .../analyze/js/hotelling_t2_contribution.js | 31 +-
 ap/static/analyze/js/hotelling_timeseries.js | 210 +-
 ap/static/analyze/js/pca.js | 355 +-
 ap/static/analyze/js/pca_toastr.js | 23 +-
 .../calendar_heatmap/css/calendar_heatmap.css | 9 +-
 .../calendar_heatmap/js/calendar_heatmap.js | 190 +-
 .../js/calendar_heatmap_plotly.js | 52 +-
 .../categorical_plot/css/categorical_plot.css | 50 +-
 ap/static/categorical_plot/css/toastr.css | 248 +-
 ...ategorical_histogram_with_density_curve.js | 55 +-
 .../categorical_plot/js/categorical_plot.js | 411 +-
 .../js/categorical_plot_utils.js | 94 +-
 ap/static/co_occurrence/css/co_occurrence.css | 8 +-
 .../co_occurrence/css/co_occurrence_csv.css | 20 +-
 .../co_occurrence/js/co_occurrence_csv.js | 73 +-
 ap/static/co_occurrence/js/pareto_plot.js | 12 +-
 ap/static/common/css/components.css | 40 +-
 ap/static/common/css/data-finder.css | 42 +-
 ap/static/common/css/dragndrop.css | 23 +-
 ap/static/common/css/fSelect.css | 4 +-
 ap/static/common/css/graph_nav.css | 26 +-
 ap/static/common/css/jsuites.css | 1380 +--
 ap/static/common/css/jump_function.css | 12 +-
 ap/static/common/css/main.css | 266 +-
 ap/static/common/css/multi_level_dropdown.css | 63 +
 ap/static/common/css/pagination.css | 240 +-
 ap/static/common/css/shepherd.css | 93 +-
 ap/static/common/css/toastr.css | 248 +-
 ap/static/common/css/user-setting-table.css | 60 +-
 ap/static/common/js/ap_tour.js | 212 +-
 ap/static/common/js/auto-update-common.js | 5 +-
 ap/static/common/js/base.js | 1302 ++-
 .../common/js/cat_facet_label_filter_modal.js | 246 +-
 ap/static/common/js/clipboard_utils.js | 153 +-
 ap/static/common/js/column_ordering.js | 238 +-
 ap/static/common/js/components.js | 1108 +-
 ap/static/common/js/config_data_interface.js | 197 +-
 ap/static/common/js/data-finder.js | 665 +-
 ap/static/common/js/data_point_info_table.js | 173 +-
 ap/static/common/js/database-config.js | 48 +-
 ap/static/common/js/datetime_label_format.js | 5 +-
 ap/static/common/js/divide_by_calendar.js | 352 +-
 ap/static/common/js/dn-custom-select.js | 85 +-
 ap/static/common/js/graph_nav.js | 49 +-
 ap/static/common/js/jump_function.js | 633 +-
 ap/static/common/js/save_load_user_input.js | 878 +-
 ap/static/common/js/summary_table.js | 64 +-
 ap/static/common/js/take_screenshot.js | 39 +-
 ap/static/common/js/terms_of_use.js | 33 +-
 ap/static/common/js/utils.js | 1980 ++--
 ap/static/common/js/validation.js | 104 +-
 ap/static/heatmap/css/heatmap.css | 10 +-
 ap/static/heatmap/js/heatmap_main.js | 846 +-
 ap/static/heatmap/js/heatmap_plot.js | 237 +-
 .../daterangepicker-utils.js | 12 +-
 .../css/multiple_scatter_plot.css | 9 +-
 .../multiple_scatter_plot/js/heatmap_plot.js | 74 +-
 .../js/multiple_scatter_contour.js | 114 +-
 .../js/multiple_scatter_histogram.js | 39 +-
 .../js/multiple_scatter_plot.js | 317 +-
 ap/static/parallel_plot/css/parallel_plot.css | 27 +-
 ap/static/parallel_plot/js/parallel_plot.js | 834 +-
 .../parallel_plot/js/parallel_properties.js | 37 +-
 ap/static/parallel_plot/js/parallel_utils.js | 549 +-
 ap/static/plot_view/css/plot_view.css | 11 +-
 ap/static/plot_view/js/plot_view.js | 1 -
 ap/static/ridgeline_plot/css/ridgeline.css | 15 +-
 ap/static/ridgeline_plot/js/ridgeline_plot.js | 662 +-
 .../ridgeline_plot/js/ridgeline_plot_utils.js | 30 +-
 ap/static/ridgeline_plot/js/rlp_template.js | 506 +-
 ap/static/sankey_plot/css/sankey_plot.css | 8 +-
 ap/static/sankey_plot/js/sankey_plot.js | 318 +-
 ap/static/sankey_plot/js/sankey_scp.js | 114 +-
 ap/static/scatter_plot/css/scatter_plot.css | 10 +-
 ap/static/scatter_plot/js/scatter_chart.js | 127 +-
 ap/static/scatter_plot/js/scatter_plot.js | 1242 ++-
 ap/static/scatter_plot/js/scp_heatmap_plot.js | 90 +-
 ap/static/scatter_plot/js/scp_violin_plot.js | 115 +-
 .../setting_module/css/background_process.css | 20 +-
 .../css/backup_restore_modal.css | 95 +
 ap/static/setting_module/css/config_view.css | 60 +-
 .../setting_module/css/data_type_dropdown.css | 76 +
 .../css/filter_visualization.css | 32 +-
 ap/static/setting_module/css/index.css | 4 +-
 ap/static/setting_module/css/master_cfg.css | 57 +-
 .../setting_module/css/master_config.css | 39 +-
 .../setting_module/css/proc_config_modal.css | 348 +-
 .../setting_module/css/register_from_file.css | 16 +-
 ap/static/setting_module/css/setting_view.css | 2 +-
 ap/static/setting_module/css/top_view.css | 16 +-
 ap/static/setting_module/css/trace_config.css | 31 +-
 ap/static/setting_module/js/auto_link.js | 177 +-
 .../setting_module/js/background_process.js | 121 +-
 .../setting_module/js/cfg_filter_funcs.js | 797 +-
 .../setting_module/js/cfg_filter_main.js | 75 +-
 ap/static/setting_module/js/config_view.js | 117 +-
 .../js/data_type_dropdown/constant.js | 155 +
 .../js/data_type_dropdown/controller.js | 29 +
 .../js/data_type_dropdown/core.js | 267 +
 .../js/data_type_dropdown/event.js | 196 +
 .../js/data_type_dropdown/helper.js | 763 ++
 .../js/data_type_dropdown/type_definition.js | 65 +
 ap/static/setting_module/js/db_config.js | 1573 +--
 .../js/failed_cast_data_modal.js | 42 +-
 .../js/function_config_modals.js | 2665 +++--
 .../js/function_data_type_dropdown.js | 97 +-
 .../setting_module/js/graph_visualization.js | 726 +-
 ap/static/setting_module/js/master_config.js | 21 +-
 ap/static/setting_module/js/npm.js | 24 +-
 .../setting_module/js/parse_data_utils.js | 353 +
 .../setting_module/js/predict_data_type.js | 11 +-
 ap/static/setting_module/js/proc_config.js | 378 +-
 .../js/proc_config_date_time_format.js | 437 +
 .../setting_module/js/proc_config_modals.js | 3312 +++---
 .../js/process_config_section.js | 1475 +++
 .../setting_module/js/register_from_file.js | 1492 ++-
 ap/static/setting_module/js/system.js | 188 +
 ap/static/setting_module/js/trace_config.js | 819 +-
 ap/static/table_viewer/css/table_viewer.css | 66 +-
 ap/static/table_viewer/js/table_viewer.js | 73 +-
 ap/static/table_viewer/lang/English.json | 46 +-
 ap/static/table_viewer/lang/Japanese.json | 46 +-
 .../tile_interface/css/tile_interface.css | 24 +-
 ap/static/tile_interface/js/tile_interface.js | 7 +-
 ap/static/trace_data/css/trace_data.css | 63 +-
 ap/static/trace_data/js/trace_data.js | 646 +-
 .../js/trace_data_categorical_table.js | 521 +-
 .../trace_data/js/trace_data_cross_hair.js | 286 +-
 .../trace_data/js/trace_data_histogram.js | 43 +-
 .../js/trace_data_histogram_with_kde.js | 34 +-
 .../trace_data/js/trace_data_scatter_plot.js | 276 +-
 .../js/trace_data_step_bar_chart.js | 48 +-
 .../trace_data/js/trace_data_summary_table.js | 36 +-
 .../trace_data/js/trace_data_time_series.js | 1112 +-
 .../trace_data/js/trace_data_whisker_plot.js | 61 +-
 ap/table_viewer/controllers.py | 2 +-
 ap/templates/500.html | 21 +-
 .../aggregate_plot/aggregate_plot.html | 84 +-
 ap/templates/analyze/graphical_lasso.html | 212 +-
 ap/templates/analyze/hotelling_tsquare.html | 400 +-
 ap/templates/base.html | 432 +-
 .../calendar_heatmap/calendar_heatmap.html | 111 +-
 .../categorical_plot/categorical_plot.html | 184 +-
 .../co_occurrence/co_occurrence_csv.html | 275 +-
 ap/templates/footer.html | 4 +-
 ap/templates/graph_nav.html | 97 +-
 ap/templates/header.html | 386 +-
 ap/templates/heatmap/heatmap.html | 348 +-
 ap/templates/i18n.html | 610 +-
 ap/templates/jump.html | 252 +-
 ap/templates/macros.html | 3470 ++++--
 ap/templates/messages.html | 26 +-
 ap/templates/modal.html | 1215 ++-
 .../multiple_scatter_plot.html | 92 +-
 ap/templates/none.html | 35 +-
 ap/templates/parallel_plot/parallel_plot.html | 192 +-
 ap/templates/plot_view/plot_view.html | 133 +-
 .../ridgeline_plot/ridgeline_plot.html | 104 +-
 ap/templates/sankey_plot/sankey_plot.html | 305 +-
 ap/templates/scatter_plot/scatter_plot.html | 376 +-
 ap/templates/setting_module/_filter_line.html | 216 +-
 .../setting_module/_filter_machine.html | 229 +-
 .../setting_module/_filter_others.html | 255 +-
 .../setting_module/_filter_partno.html | 217 +-
 ap/templates/setting_module/about.html | 20 +-
 .../setting_module/background_job.html | 373 +-
 ap/templates/setting_module/config.html | 161 +-
 ap/templates/setting_module/db_config.html | 332 +-
 .../setting_module/db_config_modals.html | 958 +-
 ap/templates/setting_module/failed_jobs.html | 303 +-
 .../setting_module/filter_config.html | 97 +-
 .../setting_module/filter_config_modals.html | 66 +-
 ap/templates/setting_module/master_cfg.html | 822 +-
 ap/templates/setting_module/proc_config.html | 249 +-
 .../setting_module/proc_config_modals.html | 1857 +++-
 .../setting_module/register_by_file.html | 472 +-
 ap/templates/setting_module/system.html | 170 +
 ap/templates/setting_module/terms_of_use.html | 17 +-
 ap/templates/setting_module/trace_config.html | 422 +-
 ap/templates/sidebar.html | 436 +-
 ap/templates/table_viewer/index.html | 211 +-
 .../tile_interface/tile_dashboard.html | 42 +-
 .../tile_interface/tile_search_by_use.html | 85 +-
 ap/templates/trace_data/trace_data.html | 214 +-
 ap/trace_data/schemas.py | 47 +
 ap/trace_data/transaction_model.py | 49 +-
 ap/translations/ar/LC_MESSAGES/messages.mo | Bin 107760 -> 113526 bytes
 ap/translations/ar/LC_MESSAGES/messages.po | 258 +-
 ap/translations/bg/LC_MESSAGES/messages.mo | Bin 119482 -> 125292 bytes
 ap/translations/bg/LC_MESSAGES/messages.po | 258 +-
 ap/translations/ca/LC_MESSAGES/messages.mo | Bin 98291 -> 104043 bytes
 ap/translations/ca/LC_MESSAGES/messages.po | 258 +-
 ap/translations/cs/LC_MESSAGES/messages.mo | Bin 95842 -> 101584 bytes
 ap/translations/cs/LC_MESSAGES/messages.po | 258 +-
 ap/translations/cy/LC_MESSAGES/messages.mo | Bin 94508 -> 100247 bytes
 ap/translations/cy/LC_MESSAGES/messages.po | 258 +-
 ap/translations/da/LC_MESSAGES/messages.mo | Bin 94233 -> 99966 bytes
 ap/translations/da/LC_MESSAGES/messages.po | 258 +-
 ap/translations/de/LC_MESSAGES/messages.mo | Bin 98666 -> 104401 bytes
 ap/translations/de/LC_MESSAGES/messages.po | 258 +-
 ap/translations/el/LC_MESSAGES/messages.mo | Bin 123604 -> 129400 bytes
 ap/translations/el/LC_MESSAGES/messages.po | 259 +-
 ap/translations/en/LC_MESSAGES/messages.mo | Bin 97072 -> 102853 bytes
 ap/translations/en/LC_MESSAGES/messages.po | 273 +-
 ap/translations/es/LC_MESSAGES/messages.mo | Bin 98591 -> 104347 bytes
 ap/translations/es/LC_MESSAGES/messages.po | 259 +-
 ap/translations/fa/LC_MESSAGES/messages.mo | Bin 110251 -> 116005 bytes
 ap/translations/fa/LC_MESSAGES/messages.po | 259 +-
 ap/translations/fi/LC_MESSAGES/messages.mo | Bin 94873 -> 100607 bytes
 ap/translations/fi/LC_MESSAGES/messages.po | 259 +-
 ap/translations/fr/LC_MESSAGES/messages.mo | Bin 100375 -> 106134 bytes
 ap/translations/fr/LC_MESSAGES/messages.po | 261 +-
 ap/translations/gd/LC_MESSAGES/messages.mo | Bin 101160 -> 106907 bytes
 ap/translations/gd/LC_MESSAGES/messages.po | 258 +-
 ap/translations/he/LC_MESSAGES/messages.mo | Bin 104063 -> 109823 bytes
 ap/translations/he/LC_MESSAGES/messages.po | 259 +-
 ap/translations/hi/LC_MESSAGES/messages.mo | Bin 132890 -> 138706 bytes
 ap/translations/hi/LC_MESSAGES/messages.po | 258 +-
 ap/translations/hr/LC_MESSAGES/messages.mo | Bin 94802 -> 100551 bytes
 ap/translations/hr/LC_MESSAGES/messages.po | 258 +-
 ap/translations/hu/LC_MESSAGES/messages.mo | Bin 98534 -> 104274 bytes
 ap/translations/hu/LC_MESSAGES/messages.po | 259 +-
 ap/translations/id/LC_MESSAGES/messages.mo | Bin 94438 -> 100173 bytes
 ap/translations/id/LC_MESSAGES/messages.po | 259 +-
 ap/translations/is/LC_MESSAGES/messages.mo | Bin 95119 -> 100844 bytes
 ap/translations/is/LC_MESSAGES/messages.po | 259 +-
 ap/translations/it/LC_MESSAGES/messages.mo | Bin 97961 -> 103713 bytes
 ap/translations/it/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ja/LC_MESSAGES/messages.mo | Bin 108009 -> 114546 bytes
 ap/translations/ja/LC_MESSAGES/messages.po | 225 +-
 ap/translations/jv/LC_MESSAGES/messages.mo | Bin 93515 -> 99254 bytes
 ap/translations/jv/LC_MESSAGES/messages.po | 259 +-
 ap/translations/km/LC_MESSAGES/messages.mo | Bin 136588 -> 142408 bytes
 ap/translations/km/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ko/LC_MESSAGES/messages.mo | Bin 97980 -> 103732 bytes
 ap/translations/ko/LC_MESSAGES/messages.po | 259 +-
 ap/translations/lb/LC_MESSAGES/messages.mo | Bin 96649 -> 102379 bytes
 ap/translations/lb/LC_MESSAGES/messages.po | 258 +-
 ap/translations/mi/LC_MESSAGES/messages.mo | Bin 96888 -> 102632 bytes
 ap/translations/mi/LC_MESSAGES/messages.po | 259 +-
 ap/translations/mk/LC_MESSAGES/messages.mo | Bin 121248 -> 127056 bytes
 ap/translations/mk/LC_MESSAGES/messages.po | 259 +-
 ap/translations/mn/LC_MESSAGES/messages.mo | Bin 116148 -> 121936 bytes
 ap/translations/mn/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ms/LC_MESSAGES/messages.mo | Bin 94894 -> 100629 bytes
 ap/translations/ms/LC_MESSAGES/messages.po | 259 +-
 ap/translations/my/LC_MESSAGES/messages.mo | Bin 141433 -> 147244 bytes
 ap/translations/my/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ne/LC_MESSAGES/messages.mo | Bin 133961 -> 139751 bytes
 ap/translations/ne/LC_MESSAGES/messages.po | 259 +-
 ap/translations/nl/LC_MESSAGES/messages.mo | Bin 96744 -> 102480 bytes
 ap/translations/nl/LC_MESSAGES/messages.po | 259 +-
 ap/translations/no/LC_MESSAGES/messages.mo | Bin 93834 -> 99565 bytes
 ap/translations/no/LC_MESSAGES/messages.po | 259 +-
 ap/translations/pa/LC_MESSAGES/messages.mo | Bin 129494 -> 135305 bytes
 ap/translations/pa/LC_MESSAGES/messages.po | 259 +-
 ap/translations/pl/LC_MESSAGES/messages.mo | Bin 96849 -> 102597 bytes
 ap/translations/pl/LC_MESSAGES/messages.po | 259 +-
 ap/translations/pt/LC_MESSAGES/messages.mo | Bin 97979 -> 103729 bytes
 ap/translations/pt/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ro/LC_MESSAGES/messages.mo | Bin 98077 -> 103828 bytes
 ap/translations/ro/LC_MESSAGES/messages.po | 259 +-
 ap/translations/ru/LC_MESSAGES/messages.mo | Bin 119416 -> 125206 bytes
 ap/translations/ru/LC_MESSAGES/messages.po | 259 +-
 ap/translations/sd/LC_MESSAGES/messages.mo | Bin 107517 -> 113286 bytes
 ap/translations/sd/LC_MESSAGES/messages.po | 257 +-
 ap/translations/si/LC_MESSAGES/messages.mo | Bin 131644 -> 137465 bytes
 ap/translations/si/LC_MESSAGES/messages.po | 259 +-
 ap/translations/sk/LC_MESSAGES/messages.mo | Bin 96293 -> 102040 bytes
 ap/translations/sk/LC_MESSAGES/messages.po | 259 +-
 ap/translations/sq/LC_MESSAGES/messages.mo | Bin 99577 -> 105338 bytes
 ap/translations/sq/LC_MESSAGES/messages.po | 259 +-
 ap/translations/sv/LC_MESSAGES/messages.mo | Bin 94595 -> 100328 bytes
 ap/translations/sv/LC_MESSAGES/messages.po | 259 +-
 ap/translations/te/LC_MESSAGES/messages.mo | Bin 138004 -> 143810 bytes
 ap/translations/te/LC_MESSAGES/messages.po | 259 +-
 ap/translations/th/LC_MESSAGES/messages.mo | Bin 130877 -> 136685 bytes
 ap/translations/th/LC_MESSAGES/messages.po | 259 +-
 ap/translations/tl/LC_MESSAGES/messages.mo | Bin 99246 -> 105010 bytes
 ap/translations/tl/LC_MESSAGES/messages.po | 259 +-
 ap/translations/tr/LC_MESSAGES/messages.mo | Bin 95923 -> 101664 bytes
 ap/translations/tr/LC_MESSAGES/messages.po | 259 +-
 ap/translations/vi/LC_MESSAGES/messages.mo | Bin 104955 -> 110751 bytes
 ap/translations/vi/LC_MESSAGES/messages.po | 266 +-
 .../zh_Hans_CN/LC_MESSAGES/messages.mo | Bin 89958 -> 95691 bytes
 .../zh_Hans_CN/LC_MESSAGES/messages.po | 259 +-
 .../zh_Hant_TW/LC_MESSAGES/messages.mo | Bin 89940 -> 95673 bytes
 .../zh_Hant_TW/LC_MESSAGES/messages.po | 259 +-
 .../services/sql/transaction_query_builder.py | 0
 config.py | 15 -
 data_files/19.m_function.tsv | 98 +-
 error.html | 167 +-
 format.bat | 12 +-
 init/app.sqlite3 | Bin 471040 -> 471040 bytes
 lang/message.pot | 9439 +++++++++--------
 main.py | 2 +
 ...a6_add_datetime_format_column_into_cfg_.py | 27 +
 ..._add_is_file_path_column_into_cfg_data_.py | 27 +
 package-lock.json | 3133 ++++++
 package.json | 39 +
 pyproject.toml | 242 +-
 requirements/common.txt | 99 +-
 requirements/dev.txt | 13 +
 requirements/oss_dev.txt | 28 +-
 requirements/oss_prod.txt | 2 +-
 start/templates/error.html | 265 +-
 start/templates/start.html | 154 +-
 start_ap.bat | 366 +-
 startup.ini | 180 +-
 404 files changed, 68519 insertions(+), 26394 deletions(-)
 create mode 100644 ap/api/setting_module/services/master_data_transform_pattern.py
 create mode 100644 ap/api/setting_module/services/software_workshop_etl_services.py
 create mode 100644 ap/common/datetime_format_utils.py
 create mode 100644 ap/script/migrate_cfg_process_column.py
 create mode 100644 ap/script/setup_for_e2e.py
 rename bridge/services/sql/sql_generator.py => ap/setting_module/services/backup_and_restore/__init__.py (100%)
 create mode 100644 ap/setting_module/services/backup_and_restore/backup.py
 create mode 100644 ap/setting_module/services/backup_and_restore/backup_file_manager.py
 create mode 100644 ap/setting_module/services/backup_and_restore/duplicated_check.py
 create mode 100644 ap/setting_module/services/backup_and_restore/jobs.py
 create mode 100644 ap/setting_module/services/backup_and_restore/restore.py
 create mode 100644 ap/static/common/css/multi_level_dropdown.css
 create mode 100644 ap/static/setting_module/css/backup_restore_modal.css
 create mode 100644 ap/static/setting_module/css/data_type_dropdown.css
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/constant.js
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/controller.js
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/core.js
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/event.js
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/helper.js
 create mode 100644 ap/static/setting_module/js/data_type_dropdown/type_definition.js
 create mode 100644 ap/static/setting_module/js/parse_data_utils.js
 create mode 100644 ap/static/setting_module/js/proc_config_date_time_format.js
 create mode 100644 ap/static/setting_module/js/process_config_section.js
 create mode 100644 ap/static/setting_module/js/system.js
 create mode 100644 ap/templates/setting_module/system.html
 delete mode 100644 bridge/services/sql/transaction_query_builder.py
 create mode 100644 migrations/versions/8600fce518a6_add_datetime_format_column_into_cfg_.py
 create mode 100644 migrations/versions/fd863d0b8735_add_is_file_path_column_into_cfg_data_.py
 create mode 100644 package-lock.json
 create mode 100644 requirements/dev.txt

diff --git a/AnalysisPlatform.bat b/AnalysisPlatform.bat
index 44d2cc1..e57ee9b 100644
--- a/AnalysisPlatform.bat
+++ b/AnalysisPlatform.bat
@@ -1,607 +1,607 @@
-@echo off
-: _____________________________________________________________________________
-:
-: Analysis Platform StartUp
-: _____________________________________________________________________________
-: Important notice:
-: Running the batch file is regarded as you agreed to the Terms of Use.
-: Terms of Use:
-: https://github.com/apdn7/AnalysisPlatform/about/terms_of_use_en.md
-: _____________________________________________________________________________
-
-echo Start Main AP Sequence...
-echo:
-
-: Wait Setting for Network Check in [sec]
-set wait_netcheck_inst=10
-set wait_netcheck_appl=1
-set wait_netcheck=%wait_netcheck_appl%
-
-call :defSetting
-set app_title=Analysis Platform Port: %port% Lang: %lang% %subtitle% Path: %CD%
-title %app_title%
-rem prompt AP:%port%$g
-
-
-: Check Product Type
-if exist %file_prod% (
- set prod=%product_dn%
- echo Detected Product Type: dn7
-) else (
- set prod=%product_oss%
- echo Detected Product Type: oss
-)
-echo:
-
-: Run start program
-: Close old start.exe before run
-call :stopLoadingApp
-start "" start.exe %port%
-echo.
-echo.> %stage_status%
-
-: Direct Startup Mode
-if %startup_mode% == 8 (
- echo Direct Startup Mode === Force to bypass Installation ===
- call :saveStartUpSetting
- goto START_APP
-) else (
- echo Normal Startup Mode === Check Network and Proxy ===
-)
-
-: _____________________________________________________________________________
-: Check Network & Proxy
-call :checkPort %port%
-if %valid_port% == 0 (
- echo 101> %stage_status%
- exit /b
-)
-
-call :getDefaultProxy
-
-call :checkProxy %prxy% http
-if %valid_proxy% == 0 (
- echo 102> %stage_status%
- exit /b
-) else if %valid_proxy% == 2 (
- echo HTTP_PROXY : %HTTP_PROXY%
-)
-set /a proxy=%valid_proxy%
-
-call :checkProxy %prxs% https
-if %valid_proxy% == 0 (
- echo 102> %stage_status%
- exit /b
-) else if %valid_proxy% == 2 (
- echo HTTPS_PROXY : %HTTPS_PROXY%
-)
-echo:
-
-:: Check Status by bit-OR of http | https
-set /a "proxy = %proxy% | %valid_proxy%"
-if %valid_proxy% == 1 (
- set proxy=No Proxy
-) else if %valid_proxy% == 2 (
- set proxy=Use Proxy
-) else (
- set proxy=Not Active
-)
-echo Proxy: %proxy%
-echo:
-
-: Check Network Connection at No Proxy
-rem if %valid_proxy% == 1 Ping www.python.org -n 1 -w 1000 > nul
-powershell -ExecutionPolicy Bypass -Command ^
- try { ^
- $response = Invoke-WebRequest -Uri https://bootstrap.pypa.io/get-pip.py -Method Head; ^
- Write-Host 'Network is available.'; ^
- Write-Host 'StatusCode:' $response.StatusCode ^
- } catch { exit 1 }"
-if errorlevel 1 (
- set network_nck=True
- set /a error=%error%+%ErrorLevel%
- echo %esc%[41m Warning: Check Network Connection %esc%[0m
- timeout %wait_netcheck%
-) else (
- set network_nck=False
-)
-
-: _____________________________________________________________________________
-: Check changed (folder + version updated)
-:CHECK_STATUS
-if not exist %file_status% echo: > %file_status%
-call :getAppStatus %file_temp%
-fc %file_temp% %file_status% > nul
-if errorlevel 1 (
- set status=%status_install%
- set wait_netcheck=%wait_netcheck_inst%
- if %network_nck% == False (
- del %path_getpip%
- )
- mode con: cols=120 lines=60
- powershell -ExecutionPolicy Bypass -Command "&{$h=Get-Host;$w=$h.UI.RawUI;$s=$w.BufferSize;$s.height=5000;$w.BufferSize=$s;}"
- echo Start Installation [Need Network or Proxy Connection]
-) else (
- set status=%status_run_app%
- set wait_netcheck=%wait_netcheck_appl%
- echo Start Analysis Platform
-)
-echo:
-
-call :saveStartUpSetting
-title %app_title%
-
-: _____________________________________________________________________________
-: Check Installation Status
-rem for Debug
-:exit /b
-:set error=0
-:goto FINISH
-if %status% == %status_run_app% ^
-if exist %path_python% if exist %path_getpip% if exist %path_oracle% (
- echo Installation seems to be completed.
- echo If you have some trouble launching AP, delete auto downloaded folders and try again.
- echo:
- goto REMOVE_ZIPPED_FILES
- echo:
- goto START_APP
-)
-
-: _____________________________________________________________________________
-: Download Components & Libraries
-echo Download Components...
-echo %esc%[44m First boot may take 5-30 minutes depending on network ^& CPU speed. %esc%[0m
-timeout 5
-echo:
-:CHECK_EXIST
-if not exist %path_R% if %prod% == %product_dn% (
- echo %esc%[41m Make sure you have R-Portable folder before running this application. %esc%[0m
- echo Or prepare R-Portable folder and reboot AP before using R function
- echo 999> %stage_status%
- timeout 10
- exit /b
-)
-
-if exist %path_python% (echo Detect python) else goto :PYTHON_EMBEDDED
-if exist %path_getpip% (echo Detect getpip) else goto :PIP_DOWNLOAD
-if exist %path_oracle% (echo Detect oracle) else goto :ORACLE_INSTANCE
-
-: install packages
-:: Get pip
-%path_python%\python.exe %path_getpip% --no-cache-dir --no-warn-script-location "pip < 22.3"
-
-: -----------------------------------------------------------------------------
-:ACTIVE_PYTHON_ENVIRONMENT
-:: Active virtual environment
-IF NOT EXIST %env_path% (
- ECHO Installing Virtualenv
- %path_python%\python -m pip install --no-cache-dir --no-warn-script-location virtualenv
-)
-ECHO Initializing Virtual Environment
-%path_python%\python -m virtualenv %env_path%
-ECHO Activate virtual environment
-
-:: copy complied sqlite3.dll to env Scripts folder
-powershell if (Test-Path -Path $env:sqlite_dll) { Copy-Item $env:sqlite_dll -Destination $env:env_path\Scripts }
-
-:: copy complied sqlite3.dll to python embedded Scripts folder
-powershell if (Test-Path -Path $env:sqlite_dll) { Copy-Item $env:sqlite_dll -Destination $env:path_python }
-
-CALL %env_path%\Scripts\activate & GOTO INSTALL_PYTHON_AND_R_PACKAGES
-: -----------------------------------------------------------------------------
-
-:INSTALL_PYTHON_AND_R_PACKAGES
-:: Set default python & components path to PATH environment
-set is_venv_activated=1
-set PATH=%lib_path%
-echo PATH=%PATH%
-echo:
-echo:
-
-:: Upgrade pip
-: %path_python%\python.exe -m pip install --upgrade pip
-if %prod% == %product_dn% (
- pip install --no-cache-dir --no-warn-script-location -r %file_prod%
-) else (
- pip install --no-cache-dir --no-warn-script-location -r %file_oss_prod%
-)
-IF exist %path_R% (
- %path_R%\bin\R CMD BATCH "r_install_packages.r"
-)
-
-if exist %ca_cert% call :REMOVE_CA_CERT
-
-echo:
-echo Download components, libraries and installation is completed.
-echo:
-
-: -----------------------------------------------------------------------------
-: Remove Oracle and python_embedded after install application
-:REMOVE_ZIPPED_FILES
-IF exist %path_oracle_zip% del %path_oracle_zip%
-IF exist %path_python_zip% del %path_python_zip%
-echo:
-echo Removed Oracle and Python embedded zipped files.
-echo:
-
-: _____________________________________________________________________________
-: Run App: Analysis Platform
-:START_APP
-: log application status : Installation Completed
-IF exist %file_temp% del %file_temp%
-call :getAppStatus %file_status%
-
-if %only_install% == 1 (
- echo Skip AP startup and end sequences. "only_install" option is enabled.
- :: Keep CMD stay when install check (control by %error%)
- set /a error=%error%+1
- GOTO FINISH
-)
-if [%1]==[SKIP_RUN_MAIN] GOTO FINISH
-
-IF %is_venv_activated% == 0 (
- GOTO ACTIVE_PYTHON_ENVIRONMENT
- ECHO Activate virtual environment
- CALL %env_path%\Scripts\activate & GOTO SET_PATH
-)
-: -----------------------------------------------------------------------------
-:SET_PATH
-:: Set default python & components path to PATH environment
-set is_venv_activated=1
-set PATH=%lib_path%
-echo PATH=%PATH%
-echo:
-echo:
-
-:: stop start.exe before run APDN7
-if errorlevel 0 (
- : Close start.exe if there is no error
- call :stopLoadingApp
-) else (
- : stop program if error
- exit /b
-)
-
-REM run application
-ECHO Starting Up Analysis Platform ...
-python.exe main.py
-set /a error=%error%+%ErrorLevel%
-echo:
-echo:
-
-rem start
-: Close CMD.exe if no error
-@REM if %error% neq 0 (
-@REM echo Some Error Detected %error%
-@REM echo Auto restart
-@REM GOTO START_APP
-@REM )
-@REM :FINISH
-rem echo %error%
-if %error% equ 0 if not %only_install% == 1 (
- timeout 5
- :: find current running cmd and stop it through title
- for /f "usebackq tokens=2" %%a in (`tasklist /FO list /FI "WINDOWTITLE eq %app_title%*" ^| find /i "PID:"`) do (
- taskkill /pid %%a
- )
-)
-exit /b 0
-:goto FINISH
-
-
-
-: _____________________________________________________________________________
-: Sub Program
-: _____________________________________________________________________________
-
-:CA_CERT
-powershell -ExecutionPolicy Bypass -Command wget %ca_cert_url% -O %ca_cert%
-echo %ca_cert% file is downloaded.
-exit /b
-:end
-
-:REMOVE_CA_CERT
-del %ca_cert%
-echo %ca_cert% file is removed.
-exit /b
-:end
-
-:PYTHON_EMBEDDED
-echo Download python
-powershell -ExecutionPolicy Bypass -Command wget %python39_url% -O %path_python_zip%
-if %errorlevel% == 35 (
- REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
- if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
- powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %python39_url% -O %path_python_zip%
-)
-if errorlevel 1 (
- echo %esc%[41m Error on Wget Check network connection or use latest Win10 ^>1803 %esc%[0m
- echo 200 > %stage_status%
- exit /b
-)
-
-echo Unzip python_embedded
-powershell -ExecutionPolicy Bypass -Command "Expand-Archive -Path %path_python_zip% -DestinationPath %path_python%"
-rename %path_python%\python39._pth python39._pth.renamed
-echo:
-GOTO CHECK_EXIST
-
-:PIP_DOWNLOAD
-echo Download pip
-powershell -ExecutionPolicy Bypass -Command wget %get_pip_url% -O %path_getpip%
-if %errorlevel% == 35 (
- REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
- if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
- powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %get_pip_url% -O %path_getpip%
-)
-if errorlevel 1 (
- echo %esc%[41m Error on Curl Check network connection or use latest Win10 ^>1803 %esc%[0m
- echo 201 > %stage_status%
- exit /b
-)
-
-echo: > %file_status%
-echo:
-GOTO CHECK_EXIST
-
-:ORACLE_INSTANCE
-echo Download oracle instance
-powershell -ExecutionPolicy Bypass -Command wget %oracle_instance_url% -O %path_oracle_zip%
-if %errorlevel% == 35 (
- REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
- if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
- powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %oracle_instance_url% -O %path_oracle_zip%
-)
-if errorlevel 1 (
- echo %esc%[41m Error on Wget Check network connection or use latest Win10 ^>1803 %esc%[0m
- echo 210 > %stage_status%
- exit /b
-)
-
-echo unzip oracle instance
-powershell -ExecutionPolicy Bypass -Command "Expand-Archive -Path %path_oracle_zip% -DestinationPath %path_oracle%"
-echo:
-GOTO CHECK_EXIST
-
-
-: _____________________________________________________________________________
-: Subroutine: function removeChar
-: In/Out var|In %*: Array of Replacement Char
-:removeChar
- for %%i in (%*) do call set var=%%var:%%i=%%
- exit /b
-:end
-
-: _____________________________________________________________________________
-: Subroutine: function convertCaseLower
-: In/Out var
-:convertCaseLower
- for %%i in (%chr_alphab%) do call set var=%%var:%%i=%%i%%
- exit /b
-:end
-
-: _____________________________________________________________________________
-: Subroutine: function getAppStatus
-: In/Out - |In %1: Target File
-:getAppStatus
- set filename=%1
- : Path Info
- cd> %filename%
- : AP Version Info (Version File Timestamp)
- for %%a in (%file_ver%) do echo %%~ta>> %filename%
- exit /b
-:end
-: _____________________________________________________________________________
-: Subroutine: Definition, Setting and Cleaning
-: In/Out - |In -
-:defSetting
- : Setting
- set file_status=__STATUS__
- set file_temp=__TEMP__
- set file_ver=VERSION
- :: Product Judge File dn or oss
- set file_prod=requirements\prod.txt
- set file_oss_prod=requirements\oss_prod.txt
- set path_R=%cd%\..\R-Portable
- set ca_cert=%cd%\..\cacert.pem
- set path_python=%cd%\..\python_embedded_39
- set env_path=%cd%\..\env
- set sqlite_dll=%cd%\init\sqlite3.dll
- set path_getpip=%cd%\..\get-pip.py
- set path_oracle=%cd%\..\Oracle-Portable
- set path_python_zip=%cd%\..\python_embedded_39.zip
- set path_oracle_zip=%cd%\..\Oracle-Portable.zip
- set lib_path=%path_oracle%\instantclient_21_3;%env_path%\Scripts;%env_path%\Lib;%SystemRoot%\System32;
- :: Startup Error log file
- set stage_status=stage_status.log
- set is_venv_activated=0
-
- : links
- set ca_cert_url="https://curl.se/ca/cacert-2023-08-22.pem"
- set python39_url="https://www.python.org/ftp/python/3.9.0/python-3.9.0-embed-amd64.zip"
- set get_pip_url="https://bootstrap.pypa.io/get-pip.py"
- set oracle_instance_url="https://download.oracle.com/otn_software/nt/instantclient/213000/instantclient-basic-windows.x64-21.3.0.0.0.zip"
-
- : Definition
- set error=0
- set status_install=0
- set status_run_app=1
- set product_dn=prod
- set product_oss=oss
- set chr_number=0 1 2 3 4 5 6 7 8 9
- set chr_alphab=a b c d e f g h i j k l m n o p q r s t u v w x y z
- :: Get Escape $e 0x1b 27
- for /f %%i in ('cmd /k prompt $e^ %stage_status%
- exit /b
- ) else (
- set valid_port = 1
- )
-
- : check Range
- if %var% geq 6000 if %var% lss 8000 if %var% neq 7070 set valid_port=2
- if %valid_port% neq 2 (
- echo %esc%[41m Warning Bad Port Number %esc%[0m
- echo Change Port Number from 6000 to 7999
- echo Current Value: %1
- echo 101 > %stage_status%
- exit /b
- )
-exit /b
-:end
-
-: _____________________________________________________________________________
-: Subroutine: get Default Proxy Setting
-: In/Out - |In -
-:getDefaultProxy
- : get Default Proxy Setting
- set basic_ver=null
- set basic_port=null
- set basic_proxy=null
- set basic_config=ap\config\basic_config.yml
- if not exist %basic_config% set basic_config=ap\config\basic_config.yml
- for /F "tokens=2,3,4 delims=: " %%i in (%basic_config%) do (
- if %%i==version set basic_ver=%%j
- if %%i==port-no set basic_port=%%j
- if %%i==proxy set basic_proxy=%%j:%%k
- :: It gets more complicated dealing with nesting arrays, so leave the for loop at the next level or after reading the variable.
- if %%i==proxy goto :break_for_ini
- )
- :break_for_ini
-
- echo Basic Config
- echo Ver : %basic_ver%
- echo Port : %basic_port%
- echo Proxy: %basic_proxy%
-
- if not %basic_proxy%==null if not %basic_proxy%==: if %prxy%==null (
- echo Use Default Proxy of %basic_proxy%
- set prxy=%basic_proxy%
- set prxs=%basic_proxy%
- )
-
- echo Target Proxy
- echo Proxy http : %prxy%
- echo Proxy https: %prxs%
- echo:
-exit /b
-:end
-: _____________________________________________________________________________
-: Subroutine: Check Proxy Setting
-: In/Out valid_proxy|In %1: Proxy Info %2: Type http(s)
-:checkProxy
- set var=%1
- set valid_proxy=0
- if %2 == http (
- set target_type=http
- set target_proxy=HTTP_PROXY
- ) else (
- set target_type=https
- set target_proxy=HTTPS_PROXY
- )
- rem echo var
- : Check Format
- :: no or null: Do nothing
- if %var% == no set /a valid_proxy=0x01
- if %var% == null set /a valid_proxy=0x01
- if %valid_proxy% == 1 exit /b
- :: check format ip
- call :removeChar %chr_number%
- if %var% == ...: set /a valid_proxy=0x04
- :: check format domain name
- call :convertCaseLower
- call :removeChar %chr_alphab%
- call :removeChar . -
- if %var% == : set /a valid_proxy=0x08
- :: judge
- if %valid_proxy% == 0 (
- echo %esc%[41m Warning Proxy Address/Domain Name %esc%[0m
- echo Modify %target_type% Proxy Address/Domain Name
- echo Current Value: %1 valid_proxy=%valid_proxy%
- echo 102 > %stage_status%
- exit /b
- )
-
- : Check Proxy Existence
- for /f "tokens=1 delims=:" %%a in ("%1") do set ip=%%a
- Ping %ip% -n 1 -w 1000 > nul
- if errorlevel 1 (
- echo %esc%[41m Warning: %target_proxy% is not active %esc%[0m
- echo Current Value: %1 valid_proxy=%valid_proxy%
- timeout %wait_netcheck%
- ) else (
- rem Proxy Passed
- set %target_proxy%=%1
- set /a valid_proxy=0x02
- )
-exit /b
-:end
-
-
-: _____________________________________________________________________________
-: Subroutine: Save Settings
-: In/Out - |In -
-:saveStartUpSetting
-setlocal
- set var=%network_nck%
- call :convertCaseLower
-
- : Make yaml file
- set filename=startup.yaml
- echo !!omap> %filename%
- echo # Analysis Platform StartUp Batch File Setting>> %filename%
- echo - version_yaml: 1.0 >> %filename%
- echo - setting_startup: !!omap>> %filename%
- echo - port: %port% >> %filename%
- echo - language: %lang% >> %filename%
- echo - subtitle: %subt% >> %filename%
- echo - proxy_http: %prxy% >> %filename%
- echo - proxy_https: %prxs% >> %filename%
- echo - network_nck: %var% >> %filename%
- echo - setting_app: !!omap>> %filename%
- echo - env_ap: %prod%>> %filename%
- echo - flask_debug: %flask_debug% >> %filename%
- echo - update_R: %update_R% >> %filename%
-
-endlocal
-exit /b
-:end
-
-:stopLoadingApp
-for /f "usebackq tokens=2" %%a in (`tasklist /FO list /FI "imagename eq start.exe" ^| find /i "PID:"`) do (
- taskkill /F /pid %%a >nul 2>&1
-)
-exit /b
-:end
+@echo off
+: _____________________________________________________________________________
+:
+: Analysis Platform StartUp
+: _____________________________________________________________________________
+: Important notice:
+: Running the batch file is regarded as you agreed to the Terms of Use.
+: Terms of Use:
+: https://github.com/apdn7/AnalysisPlatform/about/terms_of_use_en.md
+: _____________________________________________________________________________
+
+echo Start Main AP Sequence...
+echo:
+
+: Wait Setting for Network Check in [sec]
+set wait_netcheck_inst=10
+set wait_netcheck_appl=1
+set wait_netcheck=%wait_netcheck_appl%
+
+call :defSetting
+set app_title=Analysis Platform Port: %port% Lang: %lang% %subtitle% Path: %CD%
+title %app_title%
+rem prompt AP:%port%$g
+
+
+: Check Product Type
+if exist %file_prod% (
+ set prod=%product_dn%
+ echo Detected Product Type: dn7
+) else (
+ set prod=%product_oss%
+ echo Detected Product Type: oss
+)
+echo:
+
+: Run start program
+: Close old start.exe before run
+call :stopLoadingApp
+start "" start.exe %port%
+echo.
+echo.> %stage_status%
+
+: Direct Startup Mode
+if %startup_mode% == 8 (
+ echo Direct Startup Mode === Force to bypass Installation ===
+ call :saveStartUpSetting
+ goto START_APP
+) else (
+ echo Normal Startup Mode === Check Network and Proxy ===
+)
+
+: _____________________________________________________________________________
+: Check Network & Proxy
+call :checkPort %port%
+if %valid_port% == 0 (
+ echo 101> %stage_status%
+ exit /b
+)
+
+call :getDefaultProxy
+
+call :checkProxy %prxy% http
+if %valid_proxy% == 0 (
+ echo 102> %stage_status%
+ exit /b
+) else if %valid_proxy% == 2 (
+ echo HTTP_PROXY : %HTTP_PROXY%
+)
+set /a proxy=%valid_proxy%
+
+call :checkProxy %prxs% https
+if %valid_proxy% == 0 (
+ echo 102> %stage_status%
+ exit /b
+) else if %valid_proxy% == 2 (
+ echo HTTPS_PROXY : %HTTPS_PROXY%
+)
+echo:
+
+:: Check Status by bit-OR of http | https
+set /a "proxy = %proxy% | %valid_proxy%"
+if %valid_proxy% == 1 (
+ set proxy=No Proxy
+) else if %valid_proxy% == 2 (
+ set proxy=Use Proxy
+) else (
+ set proxy=Not Active
+)
+echo Proxy: %proxy%
+echo:
+
+: Check Network Connection at No Proxy
+rem if %valid_proxy% == 1 Ping www.python.org -n 1 -w 1000 > nul
+powershell -ExecutionPolicy Bypass -Command ^
+ try { ^
+ $response = Invoke-WebRequest -Uri https://bootstrap.pypa.io/get-pip.py -Method Head; ^
+ Write-Host 'Network is available.'; ^
+ Write-Host 'StatusCode:' $response.StatusCode ^
+ } catch { exit 1 }"
+if errorlevel 1 (
+ set network_nck=True
+ set /a error=%error%+%ErrorLevel%
+ echo %esc%[41m Warning: Check Network Connection %esc%[0m
+ timeout %wait_netcheck%
+) else (
+ set network_nck=False
+)
+
+: _____________________________________________________________________________
+: Check changed (folder + version updated)
+:CHECK_STATUS
+if not exist %file_status% echo: > %file_status%
+call :getAppStatus %file_temp%
+fc %file_temp% %file_status% > nul
+if errorlevel 1 (
+ set status=%status_install%
+ set wait_netcheck=%wait_netcheck_inst%
+ if %network_nck% == False (
+ del %path_getpip%
+ )
+ mode con: cols=120 lines=60
+ powershell -ExecutionPolicy Bypass -Command "&{$h=Get-Host;$w=$h.UI.RawUI;$s=$w.BufferSize;$s.height=5000;$w.BufferSize=$s;}"
+ echo Start Installation [Need Network or Proxy Connection]
+) else (
+ set status=%status_run_app%
+ set wait_netcheck=%wait_netcheck_appl%
+ echo Start Analysis Platform
+)
+echo:
+
+call :saveStartUpSetting
+title %app_title%
+
+: _____________________________________________________________________________
+: Check Installation Status
+rem for Debug
+:exit /b
+:set error=0
+:goto FINISH
+if %status% == %status_run_app% ^
+if exist %path_python% if exist %path_getpip% if exist %path_oracle% (
+ echo Installation seems to be completed.
+ echo If you have some trouble launching AP, delete auto downloaded folders and try again.
+ echo:
+ goto REMOVE_ZIPPED_FILES
+ echo:
+ goto START_APP
+)
+
+: _____________________________________________________________________________
+: Download Components & Libraries
+echo Download Components...
+echo %esc%[44m First boot may take 5-30 minutes depending on network ^& CPU speed. %esc%[0m
+timeout 5
+echo:
+:CHECK_EXIST
+if not exist %path_R% if %prod% == %product_dn% (
+ echo %esc%[41m Make sure you have R-Portable folder before running this application. %esc%[0m
+ echo Or prepare R-Portable folder and reboot AP before using R function
+ echo 999> %stage_status%
+ timeout 10
+ exit /b
+)
+
+if exist %path_python% (echo Detect python) else goto :PYTHON_EMBEDDED
+if exist %path_getpip% (echo Detect getpip) else goto :PIP_DOWNLOAD
+if exist %path_oracle% (echo Detect oracle) else goto :ORACLE_INSTANCE
+
+: install packages
+:: Get pip
+%path_python%\python.exe %path_getpip% --no-cache-dir --no-warn-script-location "pip < 22.3"
+
+: -----------------------------------------------------------------------------
+:ACTIVE_PYTHON_ENVIRONMENT
+:: Active virtual environment
+IF NOT EXIST %env_path% (
+ ECHO Installing Virtualenv
+ %path_python%\python -m pip install --no-cache-dir --no-warn-script-location virtualenv
+)
+ECHO Initializing Virtual Environment
+%path_python%\python -m virtualenv %env_path%
+ECHO Activate virtual environment
+
+:: copy complied sqlite3.dll to env Scripts folder
+powershell if (Test-Path -Path $env:sqlite_dll) { Copy-Item $env:sqlite_dll -Destination $env:env_path\Scripts }
+
+:: copy complied sqlite3.dll to python embedded Scripts folder
+powershell if (Test-Path -Path $env:sqlite_dll) { Copy-Item $env:sqlite_dll -Destination $env:path_python }
+
+CALL %env_path%\Scripts\activate & GOTO INSTALL_PYTHON_AND_R_PACKAGES
+: -----------------------------------------------------------------------------
+
+:INSTALL_PYTHON_AND_R_PACKAGES
+:: Set default python & components path to PATH environment
+set is_venv_activated=1
+set PATH=%lib_path%
+echo PATH=%PATH%
+echo:
+echo:
+
+:: Upgrade pip
+: %path_python%\python.exe -m pip install --upgrade pip
+if %prod% == %product_dn% (
+ pip install --no-cache-dir --no-warn-script-location -r %file_prod%
+) else (
+ pip install --no-cache-dir --no-warn-script-location -r %file_oss_prod%
+)
+IF exist %path_R% (
+ %path_R%\bin\R CMD BATCH "r_install_packages.r"
+)
+
+if exist %ca_cert% call :REMOVE_CA_CERT
+
+echo:
+echo Download components, libraries and installation is completed.
+echo:
+
+: -----------------------------------------------------------------------------
+: Remove Oracle and python_embedded after install application
+:REMOVE_ZIPPED_FILES
+IF exist %path_oracle_zip% del %path_oracle_zip%
+IF exist %path_python_zip% del %path_python_zip%
+echo:
+echo Removed Oracle and Python embedded zipped files.
+echo:
+
+: _____________________________________________________________________________
+: Run App: Analysis Platform
+:START_APP
+: log application status : Installation Completed
+IF exist %file_temp% del %file_temp%
+call :getAppStatus %file_status%
+
+if %only_install% == 1 (
+ echo Skip AP startup and end sequences. "only_install" option is enabled.
+ :: Keep CMD stay when install check (control by %error%)
+ set /a error=%error%+1
+ GOTO FINISH
+)
+if [%1]==[SKIP_RUN_MAIN] GOTO FINISH
+
+IF %is_venv_activated% == 0 (
+ GOTO ACTIVE_PYTHON_ENVIRONMENT
+ ECHO Activate virtual environment
+ CALL %env_path%\Scripts\activate & GOTO SET_PATH
+)
+: -----------------------------------------------------------------------------
+:SET_PATH
+:: Set default python & components path to PATH environment
+set is_venv_activated=1
+set PATH=%lib_path%
+echo PATH=%PATH%
+echo:
+echo:
+
+:: stop start.exe before run APDN7
+if errorlevel 0 (
+ : Close start.exe if there is no error
+ call :stopLoadingApp
+) else (
+ : stop program if error
+ exit /b
+)
+
+REM run application
+ECHO Starting Up Analysis Platform ...
+python.exe main.py
+set /a error=%error%+%ErrorLevel%
+echo:
+echo:
+
+rem start
+: Close CMD.exe if no error
+@REM if %error% neq 0 (
+@REM echo Some Error Detected %error%
+@REM echo Auto restart
+@REM GOTO START_APP
+@REM )
+@REM :FINISH
+rem echo %error%
+if %error% equ 0 if not %only_install% == 1 (
+ timeout 5
+ :: find current running cmd and stop it through title
+ for /f "usebackq tokens=2" %%a in (`tasklist /FO list /FI "WINDOWTITLE eq %app_title%*" ^| find /i "PID:"`) do (
+ taskkill /pid %%a
+ )
+)
+exit /b 0
+:goto FINISH
+
+
+
+: _____________________________________________________________________________
+: Sub Program
+: _____________________________________________________________________________
+
+:CA_CERT
+powershell -ExecutionPolicy Bypass -Command wget %ca_cert_url% -O %ca_cert%
+echo %ca_cert% file is downloaded.
+exit /b
+:end
+
+:REMOVE_CA_CERT
+del %ca_cert%
+echo %ca_cert% file is removed.
+exit /b
+:end
+
+:PYTHON_EMBEDDED
+echo Download python
+powershell -ExecutionPolicy Bypass -Command wget %python39_url% -O %path_python_zip%
+if %errorlevel% == 35 (
+ REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
+ if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
+ powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %python39_url% -O %path_python_zip%
+)
+if errorlevel 1 (
+ echo %esc%[41m Error on Wget Check network connection or use latest Win10 ^>1803 %esc%[0m
+ echo 200 > %stage_status%
+ exit /b
+)
+
+echo Unzip python_embedded
+powershell -ExecutionPolicy Bypass -Command "Expand-Archive -Path %path_python_zip% -DestinationPath %path_python%"
+rename %path_python%\python39._pth python39._pth.renamed
+echo:
+GOTO CHECK_EXIST
+
+:PIP_DOWNLOAD
+echo Download pip
+powershell -ExecutionPolicy Bypass -Command wget %get_pip_url% -O %path_getpip%
+if %errorlevel% == 35 (
+ REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
+ if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
+ powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %get_pip_url% -O %path_getpip%
+)
+if errorlevel 1 (
+ echo %esc%[41m Error on Curl Check network connection or use latest Win10 ^>1803 %esc%[0m
+ echo 201 > %stage_status%
+ exit /b
+)
+
+echo: > %file_status%
+echo:
+GOTO CHECK_EXIST
+
+:ORACLE_INSTANCE
+echo Download oracle instance
+powershell -ExecutionPolicy Bypass -Command wget %oracle_instance_url% -O %path_oracle_zip%
+if %errorlevel% == 35 (
+ REM In case CA cert in local machine is expired or disabled --- download CA cert to authenticate
+ if exist %ca_cert% (echo Detect ca_cert) else call :CA_CERT
+ powershell -ExecutionPolicy Bypass -Command wget -Certificate %ca_cert% -Uri %oracle_instance_url% -O %path_oracle_zip%
+)
+if errorlevel 1 (
+ echo %esc%[41m Error on Wget Check network connection or use latest Win10 ^>1803 %esc%[0m
+ echo 210 > %stage_status%
+ exit /b
+)
+
+echo unzip oracle instance
+powershell -ExecutionPolicy Bypass -Command "Expand-Archive -Path %path_oracle_zip% -DestinationPath %path_oracle%"
+echo:
+GOTO CHECK_EXIST
+
+
+: _____________________________________________________________________________
+: Subroutine: function removeChar
+: In/Out var|In %*: Array of Replacement Char
+:removeChar
+ for %%i in (%*) do call set var=%%var:%%i=%%
+ exit /b
+:end
+
+: _____________________________________________________________________________
+: Subroutine: function convertCaseLower
+: In/Out var
+:convertCaseLower
+ for %%i in (%chr_alphab%) do call set var=%%var:%%i=%%i%%
+ exit /b
+:end
+
+: _____________________________________________________________________________
+: Subroutine: function getAppStatus
+: In/Out - |In %1: Target File
+:getAppStatus
+ set filename=%1
+ : Path Info
+ cd> %filename%
+ : AP Version Info (Version File Timestamp)
+ for %%a in (%file_ver%) do echo %%~ta>> %filename%
+ exit /b
+:end
+: _____________________________________________________________________________
+: Subroutine: Definition, Setting and Cleaning
+: In/Out - |In -
+:defSetting
+ : Setting
+ set file_status=__STATUS__
+ set file_temp=__TEMP__
+ set file_ver=VERSION
+ :: Product Judge File dn or oss
+ set file_prod=requirements\prod.txt
+ set file_oss_prod=requirements\oss_prod.txt
+ set path_R=%cd%\..\R-Portable
+ set ca_cert=%cd%\..\cacert.pem
+ set path_python=%cd%\..\python_embedded_39
+ set env_path=%cd%\..\env
+ set sqlite_dll=%cd%\init\sqlite3.dll
+ set path_getpip=%cd%\..\get-pip.py
+ set path_oracle=%cd%\..\Oracle-Portable
+ set path_python_zip=%cd%\..\python_embedded_39.zip
+ set path_oracle_zip=%cd%\..\Oracle-Portable.zip
+ set lib_path=%path_oracle%\instantclient_21_3;%env_path%\Scripts;%env_path%\Lib;%SystemRoot%\System32;
+ :: Startup Error log file
+ set stage_status=stage_status.log
+ set is_venv_activated=0
+
+ : links
+ set ca_cert_url="https://curl.se/ca/cacert-2023-08-22.pem"
+ set python39_url="https://www.python.org/ftp/python/3.9.0/python-3.9.0-embed-amd64.zip"
+ set get_pip_url="https://bootstrap.pypa.io/get-pip.py"
+ set oracle_instance_url="https://download.oracle.com/otn_software/nt/instantclient/213000/instantclient-basic-windows.x64-21.3.0.0.0.zip"
+
+ : Definition
+ set error=0
+ set status_install=0
+ set status_run_app=1
+ set product_dn=prod
+ set product_oss=oss
+ set chr_number=0 1 2 3 4 5 6 7 8 9
+ set chr_alphab=a b c d e f g h i j k l m n o p q r s t u v w x y z
+ :: Get Escape $e 0x1b 27
+ for /f %%i in ('cmd /k prompt $e^ %stage_status%
+ exit /b
+ ) else (
+ set valid_port = 1
+ )
+
+ : check Range
+ if %var% geq 6000 if %var% lss 8000 if %var% neq 7070 set valid_port=2
+ if %valid_port% neq 2 (
+ echo %esc%[41m Warning Bad Port Number %esc%[0m
+ echo Change Port Number from 6000 to 7999
+ echo Current Value: %1
+ echo 101 > %stage_status%
+ exit /b
+ )
+exit /b
+:end
+
+: _____________________________________________________________________________
+: Subroutine: get Default Proxy Setting
+: In/Out - |In -
+:getDefaultProxy
+ : get Default Proxy Setting
+ set basic_ver=null
+ set basic_port=null
+ set basic_proxy=null
+ set basic_config=ap\config\basic_config.yml
+ if not exist %basic_config% set basic_config=ap\config\basic_config.yml
+ for /F "tokens=2,3,4 delims=: " %%i in (%basic_config%) do (
+ if %%i==version set basic_ver=%%j
+ if %%i==port-no set basic_port=%%j
+ if %%i==proxy set basic_proxy=%%j:%%k
+ :: It gets more complicated dealing with nesting arrays, so leave the for loop at the next level or after reading the variable.
+ if %%i==proxy goto :break_for_ini
+ )
+ :break_for_ini
+
+ echo Basic Config
+ echo Ver : %basic_ver%
+ echo Port : %basic_port%
+ echo Proxy: %basic_proxy%
+
+ if not %basic_proxy%==null if not %basic_proxy%==: if %prxy%==null (
+ echo Use Default Proxy of %basic_proxy%
+ set prxy=%basic_proxy%
+ set prxs=%basic_proxy%
+ )
+
+ echo Target Proxy
+ echo Proxy http : %prxy%
+ echo Proxy https: %prxs%
+ echo:
+exit /b
+:end
+: _____________________________________________________________________________
+: Subroutine: Check Proxy Setting
+: In/Out valid_proxy|In %1: Proxy Info %2: Type http(s)
+:checkProxy
+ set var=%1
+ set valid_proxy=0
+ if %2 == http (
+ set target_type=http
+ set target_proxy=HTTP_PROXY
+ ) else (
+ set target_type=https
+ set target_proxy=HTTPS_PROXY
+ )
+ rem echo var
+ : Check Format
+ :: no or null: Do nothing
+ if %var% == no set /a valid_proxy=0x01
+ if %var% == null set /a valid_proxy=0x01
+ if %valid_proxy% == 1 exit /b
+ :: check format ip
+ call :removeChar %chr_number%
+ if %var% == ...: set /a valid_proxy=0x04
+ :: check format domain name
+ call :convertCaseLower
+ call :removeChar %chr_alphab%
+ call :removeChar . -
+ if %var% == : set /a valid_proxy=0x08
+ :: judge
+ if %valid_proxy% == 0 (
+ echo %esc%[41m Warning Proxy Address/Domain Name %esc%[0m
+ echo Modify %target_type% Proxy Address/Domain Name
+ echo Current Value: %1 valid_proxy=%valid_proxy%
+ echo 102 > %stage_status%
+ exit /b
+ )
+
+ : Check Proxy Existence
+ for /f "tokens=1 delims=:" %%a in ("%1") do set ip=%%a
+ Ping %ip% -n 1 -w 1000 > nul
+ if errorlevel 1 (
+ echo %esc%[41m Warning: %target_proxy% is not active %esc%[0m
+ echo Current Value: %1 valid_proxy=%valid_proxy%
+ timeout %wait_netcheck%
+ ) else (
+ rem Proxy Passed
+ set %target_proxy%=%1
+ set /a valid_proxy=0x02
+ )
+exit /b
+:end
+
+
+: _____________________________________________________________________________
+: Subroutine: Save Settings
+: In/Out - |In -
+:saveStartUpSetting
+setlocal
+ set var=%network_nck%
+ call :convertCaseLower
+
+ : Make yaml file
+ set filename=startup.yaml
+ echo !!omap> %filename%
+ echo # Analysis Platform StartUp Batch File Setting>> %filename%
+ echo - version_yaml: 1.0 >> %filename%
+ echo - setting_startup: !!omap>> %filename%
+ echo - port: %port% >> %filename%
+ echo - language: %lang% >> %filename%
+ echo - subtitle: %subt% >> %filename%
+ echo - proxy_http: %prxy% >> %filename%
+ echo - proxy_https: %prxs% >> %filename%
+ echo - network_nck: %var% >> %filename%
+ echo - setting_app: !!omap>> %filename%
+ echo - env_ap: %prod%>> %filename%
+ echo - flask_debug: %flask_debug% >> %filename%
+ echo - update_R: %update_R% >> %filename%
+
+endlocal
+exit /b
+:end
+
+:stopLoadingApp
+for /f "usebackq tokens=2" %%a in (`tasklist /FO list /FI "imagename eq start.exe" ^| find /i "PID:"`) do (
+ taskkill /F /pid %%a >nul 2>&1
+)
+exit /b
+:end
diff --git a/VERSION b/VERSION
index d4e6d22..3b2545e 100644
--- a/VERSION
+++ b/VERSION
@@ -1,5 +1,5 @@
-v4.7.1.234.801d8447
-1
-OSS
+v4.7.3.239.3fb04467
+1
+OSS
diff --git a/_original_path_list.log b/_original_path_list.log
index d6a24ef..3aa0848 100644
--- a/_original_path_list.log
+++ b/_original_path_list.log
@@ -6,6 +6,7 @@
 .\format.bat
 .\LICENSE.md
 .\main.py
+.\package-lock.json
 .\package.json
 .\pyper.py
.\pyproject.toml @@ -82,11 +83,13 @@ .\ap\api\setting_module\services\equations.py .\ap\api\setting_module\services\factory_import.py .\ap\api\setting_module\services\filter_settings.py +.\ap\api\setting_module\services\master_data_transform_pattern.py .\ap\api\setting_module\services\polling_frequency.py .\ap\api\setting_module\services\process_delete.py .\ap\api\setting_module\services\save_load_user_setting.py .\ap\api\setting_module\services\show_latest_record.py .\ap\api\setting_module\services\shutdown_app.py +.\ap\api\setting_module\services\software_workshop_etl_services.py .\ap\api\setting_module\services\v2_etl_services.py .\ap\api\setting_module\services\__init__.py .\ap\api\table_viewer\controllers.py @@ -113,6 +116,7 @@ .\ap\common\common_utils.py .\ap\common\constants.py .\ap\common\cryptography_utils.py +.\ap\common\datetime_format_utils.py .\ap\common\disk_usage.py .\ap\common\logger.py .\ap\common\memoize.py @@ -211,12 +215,14 @@ .\ap\script\hide_exe_root_folder.py .\ap\script\migrate_cfg_data_source_csv.py .\ap\script\migrate_cfg_process.py +.\ap\script\migrate_cfg_process_column.py .\ap\script\migrate_csv_datatype.py .\ap\script\migrate_csv_dummy_datetime.py .\ap\script\migrate_csv_save_graph_settings.py .\ap\script\migrate_delta_time.py .\ap\script\migrate_m_function.py .\ap\script\migrate_process_file_name_column.py +.\ap\script\setup_for_e2e.py .\ap\script\dummy_data\dummy_sql.py .\ap\script\hot_fix\fix_db_issues.py .\ap\script\r_scripts\analytics.r @@ -237,6 +243,12 @@ .\ap\setting_module\services\process_config.py .\ap\setting_module\services\register_from_file.py .\ap\setting_module\services\trace_config.py +.\ap\setting_module\services\backup_and_restore\backup.py +.\ap\setting_module\services\backup_and_restore\backup_file_manager.py +.\ap\setting_module\services\backup_and_restore\duplicated_check.py +.\ap\setting_module\services\backup_and_restore\jobs.py +.\ap\setting_module\services\backup_and_restore\restore.py +.\ap\setting_module\services\backup_and_restore\__init__.py .\ap\static\aggregate_plot\css\aggregate_plot.css .\ap\static\aggregate_plot\js\aggregate_plot.js .\ap\static\aggregate_plot\js\aggregation_chart.js @@ -286,6 +298,7 @@ .\ap\static\common\css\jsuites.css .\ap\static\common\css\jump_function.css .\ap\static\common\css\main.css +.\ap\static\common\css\multi_level_dropdown.css .\ap\static\common\css\pagination.css .\ap\static\common\css\select2.min.css .\ap\static\common\css\shepherd.css @@ -560,7 +573,9 @@ .\ap\static\scatter_plot\js\scp_heatmap_plot.js .\ap\static\scatter_plot\js\scp_violin_plot.js .\ap\static\setting_module\css\background_process.css +.\ap\static\setting_module\css\backup_restore_modal.css .\ap\static\setting_module\css\config_view.css +.\ap\static\setting_module\css\data_type_dropdown.css .\ap\static\setting_module\css\filter_visualization.css .\ap\static\setting_module\css\index.css .\ap\static\setting_module\css\master_cfg.css @@ -597,11 +612,21 @@ .\ap\static\setting_module\js\graph_visualization.js .\ap\static\setting_module\js\master_config.js .\ap\static\setting_module\js\npm.js +.\ap\static\setting_module\js\parse_data_utils.js .\ap\static\setting_module\js\predict_data_type.js +.\ap\static\setting_module\js\process_config_section.js .\ap\static\setting_module\js\proc_config.js +.\ap\static\setting_module\js\proc_config_date_time_format.js .\ap\static\setting_module\js\proc_config_modals.js .\ap\static\setting_module\js\register_from_file.js +.\ap\static\setting_module\js\system.js 
.\ap\static\setting_module\js\trace_config.js +.\ap\static\setting_module\js\data_type_dropdown\constant.js +.\ap\static\setting_module\js\data_type_dropdown\controller.js +.\ap\static\setting_module\js\data_type_dropdown\core.js +.\ap\static\setting_module\js\data_type_dropdown\event.js +.\ap\static\setting_module\js\data_type_dropdown\helper.js +.\ap\static\setting_module\js\data_type_dropdown\type_definition.js .\ap\static\table_viewer\css\table_viewer.css .\ap\static\table_viewer\js\table_viewer.js .\ap\static\table_viewer\lang\English.json @@ -660,6 +685,7 @@ .\ap\templates\setting_module\proc_config.html .\ap\templates\setting_module\proc_config_modals.html .\ap\templates\setting_module\register_by_file.html +.\ap\templates\setting_module\system.html .\ap\templates\setting_module\terms_of_use.html .\ap\templates\setting_module\trace_config.html .\ap\templates\setting_module\_filter_line.html @@ -782,13 +808,14 @@ .\ap\translations\zh_Hans_CN\LC_MESSAGES\messages.po .\ap\translations\zh_Hant_TW\LC_MESSAGES\messages.mo .\ap\translations\zh_Hant_TW\LC_MESSAGES\messages.po -.\bridge\services\sql\sql_generator.py -.\bridge\services\sql\transaction_query_builder.py .\data_files\19.m_function.tsv .\init\app.sqlite3 .\init\sqlite3.dll .\lang\message.pot +.\migrations\versions\8600fce518a6_add_datetime_format_column_into_cfg_.py +.\migrations\versions\fd863d0b8735_add_is_file_path_column_into_cfg_data_.py .\requirements\common.txt +.\requirements\dev.txt .\requirements\oss_dev.txt .\requirements\oss_prod.txt .\sample_data\AgP_sample_data\AgP_sample.tsv diff --git a/ap/__init__.py b/ap/__init__.py index f27e819..859d70d 100644 --- a/ap/__init__.py +++ b/ap/__init__.py @@ -188,6 +188,14 @@ def init_engine(app, uri, **kwargs): def do_begin(dbapi_conn): dbapi_conn.execute('BEGIN IMMEDIATE') + @event.listens_for(db_engine, 'commit') + def do_expire(dbapi_conn): + """ + Expire all objects in `db.session` every time the meta session performs a commit. + This makes `db.session` drop all cached objects and query the database again to get the newest data + """ + db.session.expire_all() + return db_engine
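For reference, a minimal runnable sketch of the expire-on-commit pattern this hunk adds; `engine` and `Session` below are stand-ins for the app's `db_engine` and `db.session`, not the app's actual wiring:

from sqlalchemy import create_engine, event
from sqlalchemy.orm import scoped_session, sessionmaker

engine = create_engine('sqlite://')
Session = scoped_session(sessionmaker(bind=engine))

@event.listens_for(engine, 'commit')
def expire_on_commit(dbapi_conn):
    # Expire every cached ORM object so the next attribute access re-queries the database.
    Session.expire_all()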
@@ -239,7 +247,8 @@ def create_app(object_name=None, is_main=False): from .sankey_plot import create_module as sankey_create_module from .scatter_plot import create_module as scatter_plot_create_module from .script.migrate_cfg_data_source_csv import migrate_cfg_data_source_csv - from .script.migrate_cfg_process import migrate_cfg_process_add_is_show_file_name + from .script.migrate_cfg_process import migrate_cfg_process + from .script.migrate_cfg_process_column import migrate_cfg_process_column from .script.migrate_csv_datatype import migrate_csv_datatype from .script.migrate_csv_dummy_datetime import migrate_csv_dummy_datetime from .script.migrate_csv_save_graph_settings import migrate_csv_save_graph_settings @@ -386,7 +395,8 @@ def get_locale(): migrate_cfg_process_column_add_column_raw_name(app.config[APP_DB_FILE]) migrate_cfg_process_column_add_column_type(app.config[APP_DB_FILE]) migrate_cfg_process_column_add_parent_id(app.config[APP_DB_FILE]) - migrate_cfg_process_add_is_show_file_name(app.config[APP_DB_FILE]) + migrate_cfg_process_column(app.config[APP_DB_FILE]) + migrate_cfg_process(app.config[APP_DB_FILE]) # migrate function data migrate_m_function_data(app.config[APP_DB_FILE]) diff --git a/ap/api/aggregate_plot/controllers.py b/ap/api/aggregate_plot/controllers.py index db2f0ad..158cad5 100644 --- a/ap/api/aggregate_plot/controllers.py +++ b/ap/api/aggregate_plot/controllers.py @@ -10,6 +10,7 @@ from ap.api.categorical_plot.services import customize_dict_param from ap.api.common.services.show_graph_database import get_config_data from ap.api.common.services.show_graph_jump_function import get_jump_emd_data +from ap.api.common.services.show_graph_services import judge_data_conversion from ap.api.trace_data.services.csv_export import to_csv from ap.common.constants import ( ARRAY_FORMVAL, @@ -122,6 +123,8 @@ def data_export(export_type): None, max_graph_config[MaxGraphNumber.AGP_MAX_GRAPH.name], ) + # export the original values of judge variables + agp_df = judge_data_conversion(agp_df, graph_param, revert=True) end_proc_id = int(agp_dat[ARRAY_FORMVAL][0][END_PROC]) proc_name = graph_param.dic_proc_cfgs[end_proc_id].shown_name csv_list_name.append('{}.{}'.format(proc_name, export_type)) diff --git a/ap/api/aggregate_plot/services.py b/ap/api/aggregate_plot/services.py index ad73e76..67428ab 100644 --- a/ap/api/aggregate_plot/services.py +++ b/ap/api/aggregate_plot/services.py @@ -21,6 +21,7 @@ filter_cat_dict_common, get_data_from_db, get_filter_on_demand_data, + judge_data_conversion, ) from ap.api.scatter_plot.services import gen_df from ap.common.constants import ( @@ -149,7 +150,11 @@ def gen_agp_data(root_graph_param: DicParam, dic_param, df=None, max_graph=None) dic_param[ARRAY_PLOTDATA] = dic_data dic_param[IS_GRAPH_LIMITED] = is_graph_limited # calc y scale - min_max_list, all_graph_min, all_graph_max = calc_raw_common_scale_y(dic_param[ARRAY_PLOTDATA], str_cols) + min_max_list, all_graph_min, all_graph_max, max_common_y_scale_count = calc_raw_common_scale_y( + dic_param[ARRAY_PLOTDATA], + str_cols, + is_get_common_y_scale_count=True, + ) calc_scale_info( graph_param.dic_proc_cfgs, dic_param[ARRAY_PLOTDATA], @@ -157,6 +162,7 @@ def gen_agp_data(root_graph_param: DicParam, dic_param, df=None, max_graph=None) all_graph_min, all_graph_max, str_cols, + max_common_y_scale_count=max_common_y_scale_count, ) dic_param = get_filter_on_demand_data(dic_param)
@@ -199,6 +205,7 @@ def gen_df_direct_term(root_graph_param, dic_param, dic_cat_filters, use_expired dic_cat_filters, _use_expired_cache=use_expired_cache, ) + df_term = judge_data_conversion(df_term, root_graph_param) df_term[DIVIDE_FMT_COL] = f'{term[START_DT]} | {term[END_DT]}' diff --git a/ap/api/analyze/services/pca.py b/ap/api/analyze/services/pca.py index f9decc0..eed363a 100644 --- a/ap/api/analyze/services/pca.py +++ b/ap/api/analyze/services/pca.py @@ -4,6 +4,7 @@ from sklearn.preprocessing import StandardScaler from ap.api.common.services.show_graph_services import ( + convert_datetime_to_ct, customize_dic_param_for_reuse_cache, filter_cat_dict_common, get_data_from_db, @@ -218,6 +219,7 @@ def gen_trace_data(graph_param, orig_graph_param, dic_cat_filters, use_expired_c # get data from database df, actual_record_number, unique_serial = get_trace_data(graph_param, dic_cat_filters, use_expired_cache) + convert_datetime_to_ct(df, graph_param) dic_var_name = {} for col_alias, id in ids.items(): diff --git a/ap/api/calendar_heatmap/services.py b/ap/api/calendar_heatmap/services.py index 687a9ae..0bdf514 100644 --- a/ap/api/calendar_heatmap/services.py +++ b/ap/api/calendar_heatmap/services.py @@ -43,6 +43,7 @@ END_PROC_NAME, HM_WEEK_MODE, HM_WEEK_MODE_DAYS, + IS_SERIAL_NO, MATCHED_FILTER_IDS, MAX_TICKS, NA_STR, @@ -350,6 +351,7 @@ def gen_plotly_data(graph_param, dic_param, dic_df_proc, hm_mode, hm_step, dic_c X_TICKVAL: x_tickvals, Y_TICKTEXT: y_ticktext, Y_TICKVAL: y_tickvals, + IS_SERIAL_NO: col_cfg.is_serial_no, }, ) dic_param[ARRAY_PLOTDATA][proc_id].append(plotdata) @@ -368,6 +370,7 @@ def gen_plotly_data(graph_param, dic_param, dic_df_proc, hm_mode, hm_step, dic_c X_TICKVAL: x_tickvals, Y_TICKTEXT: y_ticktext, Y_TICKVAL: y_tickvals, + IS_SERIAL_NO: col_cfg.is_serial_no, }, ) dic_param[ARRAY_PLOTDATA][proc_id].append(plotdata) diff --git a/ap/api/categorical_plot/services.py b/ap/api/categorical_plot/services.py index 7d6ef53..683b15e 100644 --- a/ap/api/categorical_plot/services.py +++ b/ap/api/categorical_plot/services.py @@ -883,7 +883,8 @@ def gen_graph_cyclic(graph_param, dic_param, terms, max_graph=None, df=None): plot['term_id'] = term_id set_chart_infos_to_plotdata(plot[END_COL], chart_infos, original_graph_configs, plot) - all_plots += plots + # all_plots += plots + all_plots.extend(plots) dic_param[ARRAY_PLOTDATA], dic_param[IS_GRAPH_LIMITED] = limit_graph_per_tab(all_plots, max_graph)
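As an aside, `judge_data_conversion` (defined later in this patch) maps judge-column values 1/0 to OK/NG and everything else to missing, with `revert=True` mapping back for CSV export. A simplified, standalone pandas sketch of that contract, using hypothetical data:

import pandas as pd

raw = pd.Series(['1', '0', 'NG', 'A', None, '2'])
# Only 1|0 are meaningful judge values; map them to booleans, everything else becomes <NA>.
judge = raw.map({'1': True, '0': False}).astype('boolean')
shown = judge.map({True: 'OK', False: 'NG'})   # displayed values
reverted = shown.map({'OK': 1, 'NG': 0})       # the revert=True path used on export
print(shown.tolist())  # ['OK', 'NG', nan, nan, nan, nan]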
diff --git a/ap/api/common/services/show_graph_database.py b/ap/api/common/services/show_graph_database.py index 83da771..d3e163f 100644 --- a/ap/api/common/services/show_graph_database.py +++ b/ap/api/common/services/show_graph_database.py @@ -13,7 +13,7 @@ ) from ap.common.logger import log_execution_time from ap.common.memoize import memoize -from ap.setting_module.models import CfgConstant, CfgProcess, CfgProcessColumn, CfgTrace +from ap.setting_module.models import CfgConstant, CfgProcess, CfgProcessColumn, CfgTrace, make_session from ap.setting_module.schemas import ShowGraphSchema, TraceSchema if TYPE_CHECKING: @@ -220,14 +220,16 @@ def preprocess_column(column: CfgProcessColumn) -> CfgProcessColumn: # column.data_type = DataType.TEXT.value # else: # column.format = EMPTY_STRING + if column.data_type == DataType.BOOLEAN.name and column.column_type != DataColumnType.JUDGE.value: + column.column_type = DataColumnType.BOOLEAN.value # need to change again, make sure date, time, boolean are converted to text - if column.data_type in [DataType.DATE.name, DataType.TIME.name]: + if column.data_type in [DataType.DATE.name, DataType.TIME.name, DataType.BOOLEAN.name]: column.data_type = DataType.TEXT.name # column.format = EMPTY_STRING # change data type column from `boolean` or `category` to Int(Cat) (PO requirements) - if column.data_type in [DataType.BOOLEAN.name, RawDataTypeDB.CATEGORY.name]: + if column.data_type == RawDataTypeDB.CATEGORY.name: column.data_type = DataType.INTEGER.name column.column_type = DataColumnType.INT_CATE.value @@ -253,18 +255,21 @@ def get_config_data(): show_graph_schema = ShowGraphSchema() trace_schema = TraceSchema() - # if dic_param: - # proc_ids = get_proc_ids_in_dic_param(dic_param) - # processes = CfgProcess.get_procs(proc_ids) - # else: - # processes = CfgProcess.get_all() - - processes = CfgProcess.get_all() - for process in processes: - preprocess_process(process) - - # modify processes data for showing graph - procs = show_graph_schema.dump(processes, many=True) + + # create a new session to query the database, because we change data in `preprocess_process` + # and we don't want those changes to be committed to the database + with make_session() as meta_session: + processes = meta_session.query(CfgProcess).all() + for process in processes: + preprocess_process(process) + cfg_traces = meta_session.query(CfgTrace).all() + + procs = show_graph_schema.dump(processes, many=True) + traces = trace_schema.dump(cfg_traces, many=True) + + # rollback to avoid the changed data being committed to the database + meta_session.rollback() + dic_procs = {} proc_ids = [] for dic_proc in procs: @@ -273,8 +278,6 @@ def get_config_data(): show_graph_proc_data: CfgProcess = ShowGraphConfigData(**dic_proc) dic_procs[proc_id] = show_graph_proc_data - cfg_traces = CfgTrace.get_all() - traces = trace_schema.dump(cfg_traces, many=True) traces = [DictToClass(**trace) for trace in traces] trace_graph = TraceGraph(traces) dic_card_orders = {} diff --git a/ap/api/common/services/show_graph_services.py b/ap/api/common/services/show_graph_services.py index f7301ed..42f24d0 100644 --- a/ap/api/common/services/show_graph_services.py +++ b/ap/api/common/services/show_graph_services.py @@ -19,9 +19,10 @@ SqlProcLinkKey, gen_show_stmt, gen_tracing_cte, + gen_tracing_cte_with_delta_time_cut_off, ) from ap.api.common.services.utils import TraceGraph, gen_proc_time_label, gen_sql_and_params -from ap.api.external_api.services import save_odf_data_of_request +from ap.api.external_api.services import save_params_and_odf_data_of_request from ap.api.trace_data.services.regex_infinity import ( check_validate_target_column, get_changed_value_after_validate, @@ -65,6 +66,7 @@ CHART_INFOS, CHART_INFOS_ORG, COL_DATA_TYPE, + COL_ID, COLOR_ORDER, COMMON, COMMON_INFO, @@ -112,6 +114,7 @@ ORG_NONE_IDXS, PRC_MAX, PRC_MIN, + PROC_NAME, RANK_COL, RL_HIST_COUNTS, RL_HIST_LABELS, @@ -148,6 +151,7 @@ UNIQUE_COLOR, UNIQUE_DIV, UNIQUE_SERIAL, + UNIT, UNLINKED_IDXS, UNMATCHED_FILTER_IDS, UPPER_OUTLIER_IDXS, @@ -161,8 +165,8 @@ DuplicateSerialCount, DuplicateSerialShow, FilterFunc, + JudgeDefinition, N, - Operator, RemoveOutlierType, YType, ) @@ -421,16 +425,31 @@ def gen_group_filter_list(df, graph_param, dic_param, others=[]): sorted_filter_cols = [col.id for col in filter_sensors if col.id in actual_filter_cols] + # if group_list has nan, an int64 column will be converted to float64, 1 -> 1.0 + # we should keep the original values of these columns, so convert nan to pd.NA first group_df = pd.DataFrame(columns=sorted_filter_cols, data=group_list) + for col in group_df: +
group_df[col] = group_df[col].astype('Int64', errors='ignore') + # convert np.nan to NA_STR for filter + group_df.replace({np.nan: NA_STR}, inplace=True) + group_df = group_df.astype(str) dic_filter = {} for col in sorted_filter_cols: other_cols = list(sorted_filter_cols) other_cols.remove(col) - dic_filter[col] = { - vals[0]: dict(zip(other_cols, vals[1:])) for vals in group_df.groupby(col).agg(set).to_records().tolist() - } + dic_filter[col] = {} + unique_vals = group_df[col].unique() + + for val in unique_vals: + is_na = pd.isna(val) or val == NA_STR + filtered_df = ( + group_df[(group_df[col].isna()) | (group_df[col] == NA_STR)] + if is_na + else group_df[group_df[col] == val] + ) + dic_filter[col][val] = {other_col: filtered_df[other_col].unique().tolist() for other_col in other_cols} dic_param['dic_filter'] = dic_filter @@ -1820,6 +1839,7 @@ def gen_plotdata( END_COL_ID: col_id, END_COL_NAME: col_name, END_COL_SHOW_NAME: col_show_name, + UNIT: col_cfg.unit, CAT_EXP_BOX_NAME: cat_exp_box_proc_name, COL_DATA_TYPE: col_cfg.data_type, END_PROC_NAME: proc_cfg.shown_name, @@ -1884,6 +1904,7 @@ def gen_plotdata_fpp( END_PROC_NAME: dic_proc_name[proc_id].shown_name, END_COL_ID: col_id, END_COL_NAME: col_name, + UNIT: col_cfg.unit, END_COL_SHOW_NAME: col_show_name, CAT_EXP_BOX_NAME: cat_exp_box_proc_name, COL_DATA_TYPE: col_cfg.data_type, @@ -1913,7 +1934,12 @@ def gen_plotdata_fpp( return array_formval, plotdatas -def set_chart_infos_to_plotdata(col_id, chart_infos, original_graph_configs, plotdata): +def set_chart_infos_to_plotdata( + col_id, + chart_infos, + original_graph_configs, + plotdata, +): """ set chart config :param col_id: @@ -2081,37 +2107,6 @@ def calc_upper_lower_range(array_y: Series): return float(lower_range), float(upper_range) -@log_execution_time() -def apply_coef(df: DataFrame, graph_param: DicParam): - dic_proc_cfgs = graph_param.dic_proc_cfgs - for end_proc_info in graph_param.array_formval: - proc_cfg = dic_proc_cfgs.get(end_proc_info.proc_id) - if proc_cfg is None: - continue - - end_cols = proc_cfg.get_cols(end_proc_info.col_ids) or [] - for end_col in end_cols: - if not end_col.coef or not end_col.operator: - continue - - label = gen_sql_label(end_col.id, end_col.column_name) - if label not in df.columns: - continue - - if end_col.operator == Operator.REGEX.value: - df[label] = np.where(df[label].str.contains(end_col.coef), df[label], pd.NA) - elif end_col.operator == Operator.PLUS.value: - df[label] = df[label] + float(end_col.coef) - elif end_col.operator == Operator.MINUS.value: - df[label] = df[label] - float(end_col.coef) - elif end_col.operator == Operator.PRODUCT.value: - df[label] = df[label] * float(end_col.coef) - elif end_col.operator == Operator.DEVIDE.value: - df[label] = df[label] / float(end_col.coef) - - return df - - def get_filter_detail_ids(dic_proc_cfgs: Dict[int, CfgProcess], proc_ids, column_ids): """ get filter detail ids to check if this filter matching dataset of graph @@ -2350,7 +2345,7 @@ def calc_data_per_group(min_val, max_val, box=THIN_DATA_CHUNK): @log_execution_time() -def calc_raw_common_scale_y(plots, string_col_ids=None, y_col=ARRAY_Y): +def calc_raw_common_scale_y(plots, string_col_ids=None, y_col=ARRAY_Y, is_get_common_y_scale_count=False): """ calculate y min max in common scale :param plots: @@ -2359,20 +2354,40 @@ def calc_raw_common_scale_y(plots, string_col_ids=None, y_col=ARRAY_Y): """ y_commons = [] min_max_list = [] + # common_y_scale_count using for the chart that the calculated function is count + 
max_common_y_scale_count = 0 for plot in plots: s = pd.Series(plot[y_col]) - s = s[s.notnull()] + if not len(s): min_max_list.append((None, None)) + if is_get_common_y_scale_count: + # calculate and find min_max for none type number - start + plot_data = plot['data'] + df_list = [] + + # append data to df + for entry in plot_data: + df = pd.DataFrame(entry) + df_list.append(df) + + # merge df + non_empty_dfs = [df for df in df_list if not df.empty] + if non_empty_dfs: + combined_df = pd.concat(df_list) + # sum y by pair value + sum_y = combined_df.groupby('x')['y'].sum() + max_value = sum_y.max() + if max_value > max_common_y_scale_count: + max_common_y_scale_count = max_value + # calculate and find min_max for none type number - end continue - s = convert_series_to_number(s) # if s.dtypes == 'string': # min_max_list.append((None, None)) # continue s_without_inf = s[np.isfinite(s)] - min_val = s_without_inf.min() max_val = s_without_inf.max() if pd.isna(min_val): @@ -2397,7 +2412,10 @@ def calc_raw_common_scale_y(plots, string_col_ids=None, y_col=ARRAY_Y): all_graph_min = min(y_commons) all_graph_max = max(y_commons) - return min_max_list, all_graph_min, all_graph_max + if is_get_common_y_scale_count: + return min_max_list, all_graph_min, all_graph_max, max_common_y_scale_count + else: + return min_max_list, all_graph_min, all_graph_max def detect_abnormal_data(series_x, series_y, none_idxs=None): @@ -2549,6 +2567,7 @@ def calc_scale_info( end_col_id=END_COL_ID, y_col=ARRAY_Y, force_outlier=False, + max_common_y_scale_count=None, ): dic_datetime_cols = {} for idx, plotdata in enumerate(array_plotdata): @@ -2573,19 +2592,6 @@ def calc_scale_info( array_y = plotdata.get(ARRAY_Y) array_x = plotdata.get(ARRAY_X) - if (not len(array_y)) or (not len(array_x)) or (string_col_ids and plotdata[END_COL_ID] in string_col_ids): - dic_base_scale = { - Y_MIN: y_min, - Y_MAX: y_max, - LOWER_OUTLIER_IDXS: [], - UPPER_OUTLIER_IDXS: [], - } - plotdata[SCALE_AUTO] = dic_base_scale - plotdata[SCALE_SETTING] = dic_base_scale - plotdata[SCALE_THRESHOLD] = dic_base_scale - plotdata[SCALE_COMMON] = dic_base_scale - plotdata[SCALE_FULL] = dic_base_scale - continue series_x = pd.Series(array_x) series_y = pd.Series(array_y) @@ -2598,6 +2604,25 @@ def calc_scale_info( none_idxs = plotdata.get(NONE_IDXS) dic_abnormal_data = detect_abnormal_data(series_x, series_y, none_idxs) plotdata.update(dic_abnormal_data) + if ( + (not len(array_y)) + or (array_x is not None and not len(array_x)) + or (string_col_ids and plotdata[END_COL_ID] in string_col_ids) + ): + dic_base_scale = { + Y_MIN: y_min, + Y_MAX: y_max, + LOWER_OUTLIER_IDXS: [], + UPPER_OUTLIER_IDXS: [], + } + + plotdata[SCALE_AUTO] = dic_base_scale + plotdata[SCALE_SETTING] = dic_base_scale + plotdata[SCALE_THRESHOLD] = dic_base_scale + plotdata[SCALE_COMMON] = {**dic_base_scale, Y_MIN: 0, Y_MAX: max_common_y_scale_count} + plotdata[SCALE_FULL] = dic_base_scale + continue + for _idxs in dic_abnormal_data.values(): if _idxs: # array_y[_idxs] = None @@ -2609,24 +2634,38 @@ def calc_scale_info( series_y = series_y.loc[has_val_idxs] series_y = convert_series_to_number(series_y) - plotdata[SCALE_AUTO] = calc_auto_scale_y(plotdata, series_y, force_outlier=force_outlier) + plotdata[SCALE_FULL] = { + Y_MIN: y_min, + Y_MAX: y_max, + LOWER_OUTLIER_IDXS: [], + UPPER_OUTLIER_IDXS: [], + } + plotdata[SCALE_AUTO] = calc_auto_scale_y( + plotdata, + series_y, + force_outlier=force_outlier, + ) + if is_datetime_col: plotdata[SCALE_AUTO][Y_MIN] = y_min - plotdata[SCALE_SETTING] = 
calc_setting_scale_y(plotdata, series_y) - plotdata[SCALE_THRESHOLD] = calc_threshold_scale_y(plotdata, series_y) + plotdata[SCALE_SETTING] = calc_setting_scale_y( + plotdata, + series_y, + ) + + plotdata[SCALE_THRESHOLD] = calc_threshold_scale_y( + plotdata, + series_y, + ) + plotdata[SCALE_COMMON] = { Y_MIN: all_graph_min, Y_MAX: all_graph_max, LOWER_OUTLIER_IDXS: [], UPPER_OUTLIER_IDXS: [], } - plotdata[SCALE_FULL] = { - Y_MIN: y_min, - Y_MAX: y_max, - LOWER_OUTLIER_IDXS: [], - UPPER_OUTLIER_IDXS: [], - } + if is_datetime_col: plotdata[SCALE_FULL][Y_MIN] = 0 @@ -3084,7 +3123,7 @@ def get_selected_cate_column_ids(dic_param: dict): cate_column_ids = [] for cate_proc in dic_param[COMMON][CATE_PROCS]: for column_ids in [cate_proc[GET02_CATE_SELECT]]: - cate_column_ids.extend(column_ids) + cate_column_ids.extend(column_ids if isinstance(column_ids, list) else [column_ids]) return [int(column_id) for column_id in set(cate_column_ids)] @@ -3100,7 +3139,23 @@ def retrieve_order_setting(dic_proc_cfgs: Dict[int, CfgProcess], dic_param): for order_id in order_cols if order_id in dic_param[COMMON][DF_ALL_COLUMNS] or order_id in selected_cate_column_ids ] + # if having a facet but column is not selected, add it to dic_orders_column + cat_ex_boxes = dic_param[CAT_EXP_BOX] + if len(cat_ex_boxes) > 0: + for cat_ex_box in cat_ex_boxes: + proc_name = cat_ex_box[PROC_NAME] + column_id = cat_ex_box[COL_ID] + + # add column_id of cat_exp_box to dict_orders_column + if proc_name in dic_orders_columns: + if column_id not in dic_orders_columns[proc_name]: + dic_orders_columns[proc_name].append(column_id) + else: + # add new proc_name (in case that no column is selected) + dic_orders_columns[proc_name] = [column_id] + dic_param[COMMON][AVAILABLE_ORDERS] = dic_orders_columns + return dic_param @@ -3333,7 +3388,7 @@ def get_filter_on_demand_data(dic_param, remove_filter_data=False): if key in dic_param: dic_param.pop(key) - save_odf_data_of_request(dic_param) + save_params_and_odf_data_of_request(dic_param) return dic_param @@ -3402,6 +3457,9 @@ def get_data_from_db( df[col_name] = calc_cycle_time_of_list(df[col_name]) df[categorized_col_name] = df[col_name].astype(float) + df = judge_data_conversion(df, graph_param) + df = boolean_data_conversion(df, graph_param) + # on-demand filter if dic_filter: df = filter_df(graph_param.dic_proc_cfgs, df, dic_filter) @@ -3423,8 +3481,6 @@ def get_data_from_db( df = remove_outlier(df, sensor_labels, graph_param) df = cast_df_number(df, graph_param) - # apply coef for text - df = apply_coef(df, graph_param) return df, actual_total_record, unique_serial_number @@ -3766,6 +3822,8 @@ def gen_sql_proc_link_key_from_trace_keys(edge: CfgTrace) -> Tuple[List[SqlProcL self_sensor_keys: List[SqlProcLinkKey] = [] target_sensor_keys: List[SqlProcLinkKey] = [] for key in edge.trace_keys: + is_delta_time_cut_off_linked = key.delta_time is not None + self_key = SqlProcLinkKey( id=key.self_column.id, name=gen_bridge_column_name(key.self_column.id, key.self_column.column_name), @@ -3773,6 +3831,7 @@ def gen_sql_proc_link_key_from_trace_keys(edge: CfgTrace) -> Tuple[List[SqlProcL substr_to=key.self_column_substr_to, delta_time=key.delta_time, cut_off=key.cut_off, + is_delta_time_cut_off_linked=is_delta_time_cut_off_linked, ) self_sensor_keys.append(self_key) @@ -3784,6 +3843,7 @@ def gen_sql_proc_link_key_from_trace_keys(edge: CfgTrace) -> Tuple[List[SqlProcL # delta time and cut_off apply only self process link key, self_link_key + delta_time = target_link_key delta_time=None, cut_off=None, 
+ is_delta_time_cut_off_linked=is_delta_time_cut_off_linked, ) target_sensor_keys.append(target_key) @@ -3831,15 +3891,9 @@ def gen_proc_link_from_sql( dict_cond_procs=dict_cond_procs, ) - stmt = gen_show_stmt(cte_tracing=cte_tracing, sql_objs=sql_objs) - # if for_count: - # stmt = gen_id_stmt(cte_tracing=cte_tracing) - # else: - # stmt = gen_show_stmt( - # cte_tracing=cte_tracing, - # sql_objs=sql_objs, - # # types_should_be_casted=[RawDataTypeDB.BOOLEAN], - # ) + cte_tracing_delta_time_cut_off = gen_tracing_cte_with_delta_time_cut_off(cte_tracing=cte_tracing, sql_objs=sql_objs) + + stmt = gen_show_stmt(cte_tracing=cte_tracing_delta_time_cut_off, sql_objs=sql_objs) sql, params = gen_sql_and_params(stmt) return sql, params @@ -3917,7 +3971,10 @@ def add_equation_column_to_df(df, function_detail, graph_config_data: ShowGraphC x_dtype = cfg_col_x.predict_type if cfg_col_x else None y_dtype = cfg_col_y.predict_type if cfg_col_y else None - return equation.evaluate(df, out_col=column_out, x_col=column_x, y_col=column_y, x_dtype=x_dtype, y_dtype=y_dtype) + df = equation.evaluate(df, out_col=column_out, x_col=column_x, y_col=column_y, x_dtype=x_dtype, y_dtype=y_dtype) + # update data type + cfg_col.predict_type = function_detail.return_type + return df def sorted_function_details(cfg_process_columns: list[CfgProcessColumn]) -> list[CfgProcessFunctionColumn]: @@ -3948,3 +4005,38 @@ def get_equation_data(df, end_proc: EndProc): df[label] = df[label].str.lower() return df + + +@log_execution_time() +def judge_data_conversion(df, graph_param, revert=False) -> DataFrame: + """ + All data-type can be set as judge column + But, only 1|0 will be converted into OK|NG, anything else will be converted into null (pd.NA) + eg: input[1,0,NG,A,None,2] -> output[OK,NG,NA,NA,NA,NA] + """ + judge_columns = graph_param.get_judge_variables() + if judge_columns: + if revert: + df[judge_columns] = df[judge_columns].replace( + {JudgeDefinition.OK.name: JudgeDefinition.OK.value, JudgeDefinition.NG.name: JudgeDefinition.NG.value}, + ) + else: + df[judge_columns] = ( + df[judge_columns] + .astype(pd.BooleanDtype()) + .replace({True: JudgeDefinition.OK.name, False: JudgeDefinition.NG.name}) + ) + return df + + +@log_execution_time() +def boolean_data_conversion(df, graph_param) -> DataFrame: + """ + All data-type can be set as judge column + But, only 1|0 will be converted into OK|NG, anything else will be converted into null (pd.NA) + eg: input[1,0,NG,A,None,2] -> output[true,false,NA,NA,NA,NA] + """ + boolean_columns = graph_param.get_boolean_variables() + for col in boolean_columns: + df[col] = df[col].astype(pd.BooleanDtype()).astype(pd.StringDtype()).str.lower() + return df diff --git a/ap/api/common/services/sql_generator.py b/ap/api/common/services/sql_generator.py index 360240b..9ff0a27 100644 --- a/ap/api/common/services/sql_generator.py +++ b/ap/api/common/services/sql_generator.py @@ -18,6 +18,7 @@ from ap.common.constants import ( SQL_REGEXP_FUNC, TIME_COL, + UNIXEPOCH, DuplicateSerialShow, FilterFunc, RawDataTypeDB, @@ -50,6 +51,12 @@ FILTER_PREFIX = 'filtered' +TIMEDIFF_PREFIX = 'timediff' +RANK_BY_TIMEDIFF = 'rank_by_timediff' +CTE_TRACING_TIMEDIFF = 'cte_tracing_timediff' +CTE_TRACING_RANK_BY_TIMEDIFF = 'cte_tracing_rank_by_timediff' +CTE_TRACING_MIN_TIMEDIFF = 'cte_tracing_min_timediff' + def gen_alias_col_name(trans_data: TransactionData, column_name: str) -> Optional[str]: cfg_column = trans_data.get_cfg_column_by_name(column_name) @@ -82,21 +89,29 @@ class SqlProcLinkKey: name: str substr_from: 
Optional[int] substr_to: Optional[int] - delta_time: Optional[int] - cut_off: Optional[int] + delta_time: Optional[float] + cut_off: Optional[float] + is_delta_time_cut_off_linked: bool @property def good(self) -> bool: - return (self.substr_from and self.substr_to) or bool(self.delta_time) + return (self.substr_from and self.substr_to) or self.is_delta_time_cut_off_linked @property def bad(self) -> bool: return not self.good + @cached_property + def cfg_col(self) -> CfgProcessColumn | None: + return CfgProcessColumn.get_by_id(self.id) + @property def sql_label(self) -> str: - cfg_cols = CfgProcessColumn.get_by_ids([self.id])[0] - return cfg_cols.gen_sql_label() + return self.cfg_col.gen_sql_label() + + @property + def unixepoch_sql_label(self) -> str: + return f'{self.sql_label}_{UNIXEPOCH}' class SqlProcLink: @@ -200,9 +215,6 @@ def gen_cte( is_start_proc: bool = False, for_count: bool = False, ): - if self.is_delta_time_using: - return self.gen_cte_by_delta_time(idx, duplicated_serial_show, is_start_proc, for_count) - query_builder = TransactionDataQueryBuilder(self.trans_data) if is_start_proc: query_builder.add_column(column=self.trans_data.id_col_name) @@ -230,50 +242,33 @@ def gen_cte( cte = query_builder.build().cte(f'{CTE_PROCESS_PREFIX}{idx}') cte: CTE = self.apply_filter(cte) + cte = self.gen_cached_unixepoch_cte(cte) return cte - @log_execution_time(SQL_GENERATOR_PREFIX) - def gen_cte_by_delta_time( - self, - idx: int, - duplicated_serial_show: DuplicateSerialShow, - is_start_proc: bool = False, - for_count: bool = False, - ): - query_builder = TransactionDataQueryBuilder(self.trans_data) - if is_start_proc: - query_builder.add_column(column=self.trans_data.id_col_name) - - time_col = self.trans_data.getdate_column - query_builder.add_column(column=time_col.bridge_column_name, label=self.gen_proc_time_label(is_start_proc)) - query_builder.between(start_tm=self.start_tm, end_tm=self.end_tm) - query_builder_time_col = query_builder.column(self.gen_proc_time_label(is_start_proc)) - - for cfg_col in self.all_cfg_columns: - query_builder.add_column(column=cfg_col.bridge_column_name, label=cfg_col.gen_sql_label()) - - link_cols = [] - for col_label in self.all_link_keys_labels: - link_cols.append(query_builder.column(col_label)) - - if not for_count and duplicated_serial_show != DuplicateSerialShow.SHOW_BOTH: - distinct_cols = [col for col in link_cols if col.name != self.time_col] - if distinct_cols: - query_builder.distinct(columns=distinct_cols) - if duplicated_serial_show == DuplicateSerialShow.SHOW_FIRST: - query_builder.having(columns=[func.min(query_builder_time_col)]) - else: - query_builder.having(columns=[func.max(query_builder_time_col)]) - - cte = query_builder.build().cte(f'{CTE_PROCESS_PREFIX}{idx}') - cte: CTE = self.apply_filter(cte) - - return cte + def gen_cached_unixepoch_cte(self, cte: CTE) -> CTE: + unixepoch_cols = [] + for key in self.link_keys: + if key.is_delta_time_cut_off_linked: + column = cte.c.get(key.sql_label) + if column is None: + raise ValueError(f'{key.name} must existed in CTE') + unixepoch_col = convert_to_unixepoch(column) + unixepoch_cols.append(unixepoch_col.label(key.unixepoch_sql_label)) + + if not unixepoch_cols: + return cte - @property - def is_delta_time_using(self): - return all(link_key.delta_time for link_key in self.link_keys) + return ( + sa.select( + [ + *unixepoch_cols, + *cte.c, + ], + ) + .cte(f'{cte.description}_{UNIXEPOCH}') + .prefix_with('MATERIALIZED') + ) def cast_col_to_text(col: ColumnClause, raw_data_type: str) -> 
Union[ColumnClause, ColumnOperators]: @@ -321,36 +316,75 @@ def make_comparison_column_with_cast_and_substr( return modified_col1 == modified_col2 -def unixepoch_delta_time_col(col, delta_time=0, cut_off=0): - if delta_time: - seconds = delta_time + cut_off / 1000 - return func.unixepoch(col, 'subsec') + seconds +def convert_to_unixepoch(col: ColumnClause) -> ColumnClause: + return func.unixepoch(col) + + +def make_comparisons_column_delta_time_and_cut_off( + from_col: ColumnClause, + from_key: SqlProcLinkKey, + to_col: ColumnClause, + to_key: SqlProcLinkKey, +): + def _make_comparison(col: ColumnClause, key: SqlProcLinkKey, other_col: ColumnClause): + cut_off = abs(key.cut_off if key.cut_off is not None else 0) + return [ + other_col > col, + other_col < col + key.delta_time * 60 + cut_off * 60, + other_col > col + key.delta_time * 60 - cut_off * 60, + ] + + if from_key.delta_time is not None: + return _make_comparison(from_col, from_key, to_col) + + if to_key.delta_time is not None: + return _make_comparison(to_col, to_key, from_col) + + return [] + + +def make_comparisons_column( + from_col: ColumnClause, + from_key: SqlProcLinkKey, + from_data_type: str, + to_col: ColumnClause, + to_key: SqlProcLinkKey, + to_data_type: str, +): + if from_key.is_delta_time_cut_off_linked or to_key.is_delta_time_cut_off_linked: + # TODO: check data type + comparisons = make_comparisons_column_delta_time_and_cut_off( + from_col, + from_key, + to_col, + to_key, + ) + return comparisons else: - return func.unixepoch(col, 'subsec') + comparison = make_comparison_column_with_cast_and_substr( + from_col, + from_key, + from_data_type, + to_col, + to_key, + to_data_type, + ) + return [comparison] -def make_comparison_column_delta_time_and_cut_off(comparisons, from_col, from_key, to_col, to_key): - if from_key.delta_time: - to_col_val = unixepoch_delta_time_col(to_col) - if from_key.cut_off: - min_cfg_col = unixepoch_delta_time_col(from_col, from_key.delta_time) - max_cfg_col = unixepoch_delta_time_col(from_col, from_key.delta_time, from_key.cut_off) - comparisons.append(min_cfg_col <= to_col_val) - comparisons.append(max_cfg_col >= to_col_val) - else: - from_col_val = unixepoch_delta_time_col(from_col, from_key.delta_time) - comparisons.append(from_col_val == to_col_val) - - elif to_key.delta_time: - from_col_val = unixepoch_delta_time_col(from_col) - if to_key.cut_off: - min_other_col = unixepoch_delta_time_col(to_col, to_key.delta_time) - max_other_col = unixepoch_delta_time_col(to_col, to_key.delta_time, to_key.cut_off) - comparisons.append(min_other_col <= from_col_val) - comparisons.append(max_other_col >= from_col_val) - else: - to_col_val = unixepoch_delta_time_col(to_col, to_key.delta_time) - comparisons.append(from_col_val == to_col_val) +def make_delta_time_diff( + from_col: ColumnClause, + from_key: SqlProcLinkKey, + to_col: ColumnClause, + to_key: SqlProcLinkKey, +): + if from_key.delta_time is not None: + return func.abs(to_col - (from_col + from_key.delta_time * 60)) + + if to_key.delta_time is not None: + return func.abs(from_col - (to_col + to_key.delta_time * 60)) + + return None @log_execution_time(SQL_GENERATOR_PREFIX) @@ -378,29 +412,23 @@ def gen_tracing_cte( comparisons = [] for from_key, to_key in zip(link_keys, prev_link_keys): - from_cfg_col = sql_obj.trans_data.get_cfg_column_by_name(from_key.name) - to_cfg_col = prev_sql_obj.trans_data.get_cfg_column_by_name(to_key.name) - from_col = cte_proc.c.get(from_cfg_col.gen_sql_label()) - to_col = prev_cte_proc.c.get(to_cfg_col.gen_sql_label()) 
- if from_key.delta_time or to_key.delta_time: - # in case of data link by delta_time, use datetime function - if from_key.delta_time: - to_col_val = func.datetime(to_col) - from_col_val = func.datetime(from_col, f'+{int(from_key.delta_time)} seconds') - else: - to_col_val = func.datetime(to_col, f'+{int(to_key.delta_time)} seconds') - from_col_val = func.datetime(from_col) - comparisons.append(to_col_val == from_col_val) + if from_key.is_delta_time_cut_off_linked: + from_col = cte_proc.c.get(from_key.unixepoch_sql_label) + to_col = prev_cte_proc.c.get(to_key.unixepoch_sql_label) else: - comp = make_comparison_column_with_cast_and_substr( + from_col = cte_proc.c.get(from_key.sql_label) + to_col = prev_cte_proc.c.get(to_key.sql_label) + + comparisons.extend( + make_comparisons_column( from_col, from_key, - from_cfg_col.data_type, + from_key.cfg_col.raw_data_type, to_col, to_key, - to_cfg_col.data_type, - ) - comparisons.append(comp) + to_key.cfg_col.raw_data_type, + ), + ) if duplicated_serial_show == duplicated_serial_show.SHOW_BOTH: from_col = cte_proc.c.get(gen_row_number_col_name(sql_obj.process_id)) @@ -424,73 +452,6 @@ def gen_tracing_cte( return stmt.cte(tracing_table_alias) -def gen_trace_by_delta_time_query( - sql_objs: list[SqlProcLink], -): - _ctes = [] - _target_selected = [] - _markers = [] - _tables = [] - _condition = [] - for idx, sql_obj in enumerate(sql_objs): - _cte = f'''cte_marker_{idx} AS (SELECT ''' - # add target columns - _select = [f'{selected_cols}' for selected_cols in sql_obj.select_col_names] - for column in sql_obj.all_cfg_columns: - if column.id in sql_obj.select_col_ids: - _target_selected.append(f'{column.bridge_column_name} AS "{column.gen_sql_label()}"') - - # add link keys - for linked_key in sql_obj.link_cfg_columns: - if linked_key.bridge_column_name not in sql_obj.select_col_names: - _select.append(linked_key.bridge_column_name) - - _cte += ', '.join(_select) - link_col = sql_obj.link_cfg_columns[0].bridge_column_name - # add marker - _cte += f''', CASE - WHEN strftime('%s', {link_col}) - - strftime('%s', LAG({link_col}, 1) OVER ( - ORDER BY {link_col})) = {sql_obj.link_keys[0].delta_time} THEN 1 - ELSE 0 - END AS marker_{idx} - FROM {sql_obj.trans_data.table_name} - WHERE {sql_obj.time_col} BETWEEN '{sql_obj.start_tm}' AND '{sql_obj.end_tm}'), - cte_delta_t_{idx} AS (SELECT ''' - _cte += ', '.join(_select) - _cte += f''', CASE - WHEN marker_{idx} > 0 THEN - SUM(marker_{idx}) - OVER (ORDER BY {link_col} ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) - END AS marker_{idx} - FROM cte_marker_{idx})''' - _markers.append(f't{idx}.marker_{idx}') - _tables.append(f'cte_delta_t_{idx} t{idx}') - _condition.append(f't{idx}.{sql_obj.time_col}') - _ctes.append(_cte) - - _cte_trace = 'SELECT ' - _cte_trace += ', '.join(_target_selected) + ', ' - _cte_trace += ', '.join(_markers) + ', ' - _cte_trace += ', '.join(_condition) + ', ' - - for idx, time in enumerate(_condition): - if not idx: - _cte_trace += time + ' AS time' - _cte_trace += ', ' + time + f' AS time_{sql_objs[idx].process_id}' - _cte_trace += f''' FROM {_tables[0]}''' - for idx, _cte_table in enumerate(_tables): - if not idx: - continue - _cte_trace += f''' LEFT OUTER JOIN {_cte_table} - ON {_markers[idx - 1]} = {_markers[idx]}''' - # WHERE {_condition[idx - 1]} BETWEEN '{sql_objs[0].start_tm}' AND '{sql_objs[0].end_tm}' - query = ', '.join(_ctes) - query = f'WITH {query}' - query += _cte_trace - return query, None - - def gen_conditions_per_column(filters): ands = [] for cfg_filter in filters: @@ 
-540,6 +501,44 @@ gen_sql_condition_per_col( return and_(*ands) + +def gen_tracing_cte_with_delta_time_cut_off(cte_tracing: CTE, sql_objs: list[SqlProcLink]) -> CTE: + timediff_cols = [] + for sql_obj, prev_sql_obj in zip(sql_objs[1:], sql_objs): + link_keys = sql_obj.link_keys + prev_link_keys = prev_sql_obj.next_link_keys or prev_sql_obj.link_keys + for from_key, to_key in zip(link_keys, prev_link_keys): + if from_key.is_delta_time_cut_off_linked or to_key.is_delta_time_cut_off_linked: + from_col = cte_tracing.c.get(from_key.unixepoch_sql_label) + to_col = cte_tracing.c.get(to_key.unixepoch_sql_label) + timediff_cols.append(make_delta_time_diff(from_col, from_key, to_col, to_key)) + + if not timediff_cols: + return cte_tracing + + timediff_labels = [f'{TIMEDIFF_PREFIX}_{i}' for i in range(len(timediff_cols))] + timediff_cols = [col.label(label) for col, label in zip(timediff_cols, timediff_labels)] + + cte_tracing_timediff = select([*cte_tracing.columns, *timediff_cols]).cte(CTE_TRACING_TIMEDIFF) + + rank_by_timediff = ( + sa.func.rank() + .over( + partition_by=cte_tracing_timediff.c.get(TransactionData.id_col_name), + order_by=[cte_tracing_timediff.c.get(col) for col in timediff_labels], + ) + .label(RANK_BY_TIMEDIFF) + ) + cte_tracing_rank = select([*cte_tracing_timediff.columns, rank_by_timediff]).cte(CTE_TRACING_RANK_BY_TIMEDIFF) + + cte_tracing_filtered_rank = ( + select([*cte_tracing_rank.columns]) + .where(cte_tracing_rank.c.get(RANK_BY_TIMEDIFF) == 1) + .cte(CTE_TRACING_MIN_TIMEDIFF) + ) + + return cte_tracing_filtered_rank + +
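In effect, the CTE above ranks candidate links per record and keeps only the one closest in time. A hypothetical pandas miniature of the same keep-smallest-timediff rule (names and data invented for illustration):

import pandas as pd

links = pd.DataFrame({
    'id': [1, 1, 2],                   # one row per candidate link
    'timediff': [120.0, 30.0, 45.0],   # |actual - expected| gap in seconds
})
# rank() over (partition by id order by timediff): keep only rank 1 per id.
links['rank'] = links.groupby('id')['timediff'].rank(method='min')
best = links[links['rank'] == 1]
print(best[['id', 'timediff']])  # id=1 keeps timediff 30.0, id=2 keeps 45.0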
@log_execution_time(SQL_GENERATOR_PREFIX) def gen_show_stmt( cte_tracing: CTE, @@ -597,6 +596,8 @@ def gen_sql_proc_link_count(trace: CfgTrace, limit: Optional[int] = None) -> Sel target_trans_data = TransactionData(trace.target_process_id) for trace_key in trace.trace_keys: + is_delta_time_cut_off_linked = trace_key.delta_time is not None + self_proc_link_keys.append( SqlProcLinkKey( id=trace_key.self_column_id, @@ -605,6 +606,7 @@ def gen_sql_proc_link_count(trace: CfgTrace, limit: Optional[int] = None) -> Sel substr_to=trace_key.self_column_substr_to, delta_time=trace_key.delta_time, cut_off=trace_key.cut_off, + is_delta_time_cut_off_linked=is_delta_time_cut_off_linked, ), ) target_proc_link_keys.append( @@ -616,6 +618,7 @@ def gen_sql_proc_link_count(trace: CfgTrace, limit: Optional[int] = None) -> Sel # delta time and cut_off apply only self process link key, self_link_key + delta_time = target_link_key delta_time=None, cut_off=None, + is_delta_time_cut_off_linked=is_delta_time_cut_off_linked, ), ) self_query_builder = TransactionDataProcLinkQueryBuilder( @@ -680,25 +683,6 @@ def add_column( column = column.label(label) self.selected_columns.append(column) - def add_column_by_case(self, datetime_column, delta_time=None, label: Optional[str] = None): - # func.lag does not work - link_columns = self.table_column(datetime_column) - filter_column = sa.case( - ( - ( - func.strftime('%s', link_columns) - - func.strftime('%s', func.lag(link_columns, 1).over(order_by=link_columns)) - ) - == delta_time, - 1, - ), - else_=0, - ).label(label) - self.selected_columns.extend(filter_column) - - def add_columns(self, columns: list[Label]) -> None: - self.selected_columns.extend(columns) - def distinct(self, *, columns: list[sa.Column | Label]) -> None: self.distinct_columns = columns @@ -760,52 +744,59 @@ def __init__( def build_proc_link_cte(self) -> Self: query_builder = TransactionDataQueryBuilder(self.trans_model) for key in self.proc_link_keys: - cfg_col = self.trans_model.get_cfg_column_by_id(int(key.id)) - query_builder.add_column(column=cfg_col.bridge_column_name, label=cfg_col.gen_sql_label()) + query_builder.add_column(column=key.cfg_col.bridge_column_name, label=key.sql_label) stmt = query_builder.build(self.limit) self.cte = stmt.cte(self.table_alias) + self.cte = self.gen_cached_unixepoch_cte() return self + def gen_cached_unixepoch_cte(self) -> CTE: + unixepoch_cols = [] + for key in self.proc_link_keys: + if key.is_delta_time_cut_off_linked: + column = self.cte.c.get(key.sql_label) + if column is None: + raise ValueError(f'{key.name} must exist in the CTE') + unixepoch_col = convert_to_unixepoch(column) + unixepoch_cols.append(unixepoch_col.label(key.unixepoch_sql_label)) + + if not unixepoch_cols: + return self.cte + + return ( + sa.select( + [ + *unixepoch_cols, + *self.cte.c, + ], + ) + .cte(f'{self.table_alias}_{UNIXEPOCH}') + .prefix_with('MATERIALIZED') + ) + def get_column_by_label(self, label: str) -> Optional[ColumnClause]: return self.cte.c.get(label) - def get_column_by_cfg_column(self, cfg_column: CfgProcessColumn) -> ColumnClause: - column = self.get_column_by_label(cfg_column.gen_sql_label()) - - if column is None: - raise AssertionError(f"column : {cfg_column.column_name} doesn't exist") - - return column - def make_link_comparison(self, other: Self) -> list[ColumnOperators]: comparisons = [] for key, other_key in zip(self.proc_link_keys, other.proc_link_keys): - cfg_column: CfgProcessColumn = self.trans_model.get_cfg_column_by_id(int(key.id)) - other_cfg_column: CfgProcessColumn = other.trans_model.get_cfg_column_by_id(int(other_key.id)) - - column = self.get_column_by_cfg_column(cfg_column) - other_column = other.get_column_by_cfg_column(other_cfg_column) - - if key.delta_time or other_key.delta_time: - # in case of data link by delta_time, use datetime function - if key.delta_time: - from_col_val = func.datetime(column, f'+{int(key.delta_time)} seconds') - to_col_val = func.datetime(other_column) - else: - from_col_val = func.datetime(column) - to_col_val = func.datetime(other_column, f'+{int(other_key.delta_time)} seconds') - comparisons.append(from_col_val == to_col_val) + if key.is_delta_time_cut_off_linked: + column = self.get_column_by_label(key.unixepoch_sql_label) + other_column = other.get_column_by_label(other_key.unixepoch_sql_label) else: - # non-master column must be cast before linking - comparison = make_comparison_column_with_cast_and_substr( + column = self.get_column_by_label(key.sql_label) + other_column = other.get_column_by_label(other_key.sql_label) + + comparisons.extend( + make_comparisons_column( column, key, - cfg_column.data_type, + key.cfg_col.raw_data_type, other_column, other_key, - other_cfg_column.data_type, - ) - comparisons.append(comparison) + other_key.cfg_col.raw_data_type, + ), + ) return comparisons diff --git a/ap/api/common/services/utils.py b/ap/api/common/services/utils.py index d2e4d63..cf8979c 100644 --- a/ap/api/common/services/utils.py +++ b/ap/api/common/services/utils.py @@ -1,12 +1,17 @@ from __future__ import annotations +import copy +import re +import uuid from collections import Counter, defaultdict from typing import TYPE_CHECKING, Dict, Iterator, TypeVar from flask_sqlalchemy import BaseQuery from sqlalchemy.dialects import sqlite -from ap.common.constants import TIME_COL +from ap.common.constants import TIME_COL, WELL_KNOWN_COLUMNS, DataGroupType, MasterDBType +from ap.common.logger import log_execution_time +from ap.common.memoize import memoize from ap.common.pydn.dblib.sqlite import SQLite3
from ap.setting_module.models import CfgProcess @@ -15,11 +20,30 @@ T = TypeVar('T') +MUST_EXISTED_COLUMNS_FOR_MASTER_TYPE = { + MasterDBType.V2.name: [ + {'計測日時', '計測項目名', '計測値'}, + ], + MasterDBType.V2_MULTI.name: [ + {'加工日時', '測定項目名', '測定値'}, + {'processed_date_time', 'measurement_item_name', 'measured_value'}, + ], + MasterDBType.V2_HISTORY.name: [ + {'計測日時', '子部品シリアルNo'}, + ], + MasterDBType.V2_MULTI_HISTORY.name: [ + {'加工日時', '子部品シリアルNo'}, + ], +} + def gen_sql_and_params(stmt: Select) -> tuple[str, dict[str, str]]: compiled_stmt = stmt.compile(dialect=sqlite.dialect()) # return compiled_stmt.string, compiled_stmt.params - return compiled_stmt.string, list(compiled_stmt.params.values()) + position_params = compiled_stmt.positiontup + dict_params = compiled_stmt.params + params = [dict_params[pos] for pos in position_params] # sort params based on position + return compiled_stmt.string, params def run_sql_from_query_with_casted(*, query: BaseQuery, db_instance: SQLite3, cls: type[T]) -> Iterator[T]: @@ -111,52 +135,60 @@ def get_all_paths_in_graph(self): return paths + def _unique_key_between_edges(self, left_proc, right_proc): + """Generate a key for matching trace processes between edges. + If those processes are linked by delta_time/cut_off, we must always make sure the keys are unique + so they are never removed, since delta_time/cut_off links are asymmetric. + :param left_proc: + :param right_proc: + :return: + """ + + def _delta_time_key(trace_key): + if trace_key.delta_time is not None: + return uuid.uuid4().hex + return None + + edge = self.dic_edges.get((left_proc, right_proc)) + if not edge: + edge = self.dic_edges.get((right_proc, left_proc)) + key = tuple( + ( + key.self_column_id, + key.self_column_substr_from, + key.self_column_substr_to, + _delta_time_key(key), + ) + for key in edge.trace_keys + ) + else: + key = tuple( + ( + key.target_column_id, + key.target_column_substr_from, + key.target_column_substr_to, + _delta_time_key(key), + ) + for key in edge.trace_keys + ) + + return key + def remove_middle_nodes(self, path): min_nodes_count = 2 if len(path) <= min_nodes_count: return path reduced_path = [path[0]] + for idx in range(len(path))[1:-1]: start_proc = path[idx - 1] middle_proc = path[idx] end_proc = path[idx + 1] - first_edge = self.dic_edges.get((start_proc, middle_proc)) - if not first_edge: - first_edge = self.dic_edges.get((middle_proc, start_proc)) - left_cols = tuple( - (key.self_column_id, key.self_column_substr_from, key.self_column_substr_to) - for key in first_edge.trace_keys - ) - else: - left_cols = tuple( - ( - key.target_column_id, - key.target_column_substr_from, - key.target_column_substr_to, - ) - for key in first_edge.trace_keys - ) - - next_edge = self.dic_edges.get((middle_proc, end_proc)) - if not next_edge: - next_edge = self.dic_edges.get((end_proc, middle_proc)) - right_cols = tuple( - ( - key.target_column_id, - key.target_column_substr_from, - key.target_column_substr_to, - ) - for key in next_edge.trace_keys - ) - else: - right_cols = tuple( - (key.self_column_id, key.self_column_substr_from, key.self_column_substr_to) - for key in next_edge.trace_keys - ) - - if left_cols != right_cols: + left_key = self._unique_key_between_edges(start_proc, middle_proc) + right_key = self._unique_key_between_edges(middle_proc, end_proc) + if left_key != right_key: reduced_path.append(middle_proc) reduced_path.append(path[-1]) @@ -203,3 +235,97 @@ def get_distinct_trace_columns(self): trace_cols.add((end_proc_id,
end_cols)) return trace_cols + + +@log_execution_time() +def get_well_known_columns_for_others_type( + well_known_columns: dict[str, str], + cols: list[str] | set[str], +) -> dict[str, int]: + results = {} + master_date_group_types = [] + for col in cols: + for data_group_type, pattern_regex in well_known_columns.items(): + if pattern_regex and re.search(pattern_regex, col, re.IGNORECASE): + if data_group_type not in master_date_group_types: + results[col] = data_group_type + master_date_group_types.append(data_group_type) + else: + results[col] = DataGroupType.HORIZONTAL_DATA.value + + break + + results[col] = DataGroupType.HORIZONTAL_DATA.value + + return results + + +@log_execution_time() +def get_well_known_columns_for_v2_type( + well_known_columns: dict[str, str], + cols: list[str] | set[str], +) -> dict[str, int]: + from ap.api.setting_module.services.v2_etl_services import normalize_column_name + + normalized_cols = normalize_column_name(cols) + + def get_group_type(col: str, normalized_col: str) -> str | None: + return well_known_columns.get(col, None) or well_known_columns.get(normalized_col, None) + + group_types = map(get_group_type, cols, normalized_cols) + return {col: group_type for col, group_type in zip(cols, group_types) if group_type is not None} + + +@memoize() +def get_well_known_columns(master_type: str, cols: list[str] | set[str] | None = None) -> dict[str, int]: + old_well_known_columns = WELL_KNOWN_COLUMNS.get(master_type, {}) + if not cols: + return copy.deepcopy(old_well_known_columns) + + if master_type == MasterDBType.OTHERS.name: + well_known_columns = get_well_known_columns_for_others_type(old_well_known_columns, cols) + elif MasterDBType.is_v2_group(master_type): + well_known_columns = get_well_known_columns_for_v2_type(old_well_known_columns, cols) + else: + well_known_columns = copy.deepcopy(old_well_known_columns) + return well_known_columns + + +def check_missing_column_by_data_group_type(master_type: str, file_columns: list[str]) -> bool: + existed_group_types = set(get_well_known_columns(master_type, file_columns).values()) + required_group_types = set(get_well_known_columns(master_type, cols=None).values()) + contains_all_required_group_types = required_group_types <= existed_group_types + has_missing = not contains_all_required_group_types + return has_missing + + +def get_specific_v2_type_based_on_column_names( + column_names: list[str] | set[str] | None = None, +) -> str | None: + """Currently only works for V2 master data + Checking if column names is referring to V2, V2 multi, V2 history or V2 multi history + Currently, we use hardcoded values through `MUST_EXISTED_COLUMNS_FOR_MASTER_TYPE` + Consider refactor this later + """ + from ap.api.setting_module.services.v2_etl_services import normalize_column_name + + normalized_columns = set(normalize_column_name(column_names)) + + for m_type in [ + MasterDBType.V2.name, + MasterDBType.V2_MULTI.name, + MasterDBType.V2_HISTORY.name, + MasterDBType.V2_MULTI_HISTORY.name, + ]: + if DataGroupType.FileName.name in column_names: + column_names = list(set(column_names) - set(DataGroupType.FileName.name)) + + has_missing_columns = check_missing_column_by_data_group_type(m_type, column_names) + if has_missing_columns: + continue + + for must_existed_columns in MUST_EXISTED_COLUMNS_FOR_MASTER_TYPE.get(m_type): + if normalized_columns >= set(normalize_column_name(must_existed_columns)): + return m_type + + return None diff --git a/ap/api/external_api/controllers.py b/ap/api/external_api/controllers.py index 
999d665..f74317f 100644 --- a/ap/api/external_api/controllers.py +++ b/ap/api/external_api/controllers.py @@ -8,6 +8,8 @@ Validation, cast_datetime_from_query_string, get_from_request_data_with_id, + get_selected_columns_from_trace_data_form, + get_values_by_parameter_name, save_request_option, ) from ap.api.setting_module.services.common import save_user_settings @@ -17,17 +19,25 @@ BOOKMARK_ID, BOOKMARK_TITLE, BOOKMARKS, + CAT_EXP_BOX, + CATE_PROCS, CHECKED, COLUMNS, CREATED_BY, DIC_CAT_FILTERS, DIV, + END_DATE, END_DATE_ID, + END_DATETIME, END_TIME_ID, + END_TM, FACET, FILTER, FIRST_END_PROC, FUNCTION, + GET02_CATE_SELECT, + LATEST, + OBJECTIVE, OD_FILTER, OPTION_ID, PRIORITY, @@ -35,11 +45,16 @@ PROCESSES, RADIO_DEFAULT_INTERVAL, RADIO_RECENT_INTERVAL, + RECENT_TIME_INTERVAL, REQ_ID, + REQUEST_PARAMS, SAVE_DATETIME, SAVE_GRAPH_SETTINGS, + START_DATE, START_DATE_ID, + START_DATETIME, START_TIME_ID, + START_TM, TRACE_DATA_FORM, VALUE, PagePath, @@ -139,7 +154,7 @@ def save_bookmark(): data = json.loads(request.data) user_request = CfgRequest.get_by_req_id(data.get('req_id')) - request_params = json.loads(user_request.params) + request_params = json.loads(json.loads(user_request.params).get(REQUEST_PARAMS)) settings = request_params.get('settings') setting_list = settings.get(TRACE_DATA_FORM) function = request_params.get(FUNCTION).upper() @@ -162,17 +177,17 @@ def save_bookmark(): bookmark_title = data.get(BOOKMARK_TITLE) if not bookmark_title: try: - endProcId = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, FIRST_END_PROC)][VALUE] - endProcName = CfgProcess.get_proc_by_id(endProcId).name - startDate = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, START_DATE_ID)][VALUE] - startTime = ''.join( + end_proc_id = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, FIRST_END_PROC)][VALUE] + end_proc_name = CfgProcess.get_proc_by_id(end_proc_id).name + start_date = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, START_DATE_ID)][VALUE] + start_time = ''.join( settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, START_TIME_ID)][VALUE].split(':'), ) - endDate = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, END_DATE_ID)][VALUE] - endTime = ''.join( + end_date = settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, END_DATE_ID)][VALUE] + end_time = ''.join( settings[TRACE_DATA_FORM][get_from_request_data_with_id(setting_list, END_TIME_ID)][VALUE].split(':'), ) - bookmark_title = f'{function}_{endProcName}_{startDate}-{startTime}_{endDate}-{endTime}' + bookmark_title = f'{function}_{end_proc_name}_{start_date}-{start_time}_{end_date}-{end_time}' except Exception: bookmark_title = f'{function}_{datetime.now().strftime(API_DATETIME_FORMAT)}' @@ -326,3 +341,61 @@ def register_datafile(): request_string = request.query_string.decode('utf-8') target_url = f'{host_url}{page}?{request_string}&load_gui_from_url=1' return redirect(target_url) + + +@external_api_v1_blueprint.route('/params', methods=['GET']) +def get_param_by_req_id(): + Validation(request).get_params().validate() + req_params = request.args + + req_id = req_params.get(REQ_ID, None) + + cfg_request = CfgRequest.get_by_req_id(req_id) + option_ids = CfgOption.get_option_ids(req_id) + cfg_request_params = json.loads(cfg_request.params) + + bookmark_id = cfg_request_params.get('bookmark_id', None) + start_datetime = '' + end_datetime = '' + if cfg_request_params.get(START_DATE) and 
+        start_datetime = f'{cfg_request_params.get(START_DATE)}T{cfg_request_params.get(START_TM)}'
+    if cfg_request_params.get(END_DATE) and cfg_request_params.get(END_TM):
+        end_datetime = f'{cfg_request_params.get(END_DATE)}T{cfg_request_params.get(END_TM)}'
+    # no bookmark id found in params, that means the request was sent to /dn7
+    # TODO: May want to change the way form data is sent from frontend so we don't need to parse into integer
+    if not bookmark_id:
+        setting_params = json.loads(cfg_request_params.get(REQUEST_PARAMS))
+        trace_data_form = setting_params['settings'].get(TRACE_DATA_FORM)
+        filters = []
+        if cfg_request_params.get(CATE_PROCS):
+            for proc in cfg_request_params[CATE_PROCS]:
+                if proc.get(GET02_CATE_SELECT) and isinstance(proc.get(GET02_CATE_SELECT), list):
+                    for col in proc.get(GET02_CATE_SELECT):
+                        filters.append(int(col))
+                elif proc.get(GET02_CATE_SELECT) and not isinstance(proc.get(GET02_CATE_SELECT), list):
+                    filters.append(int(proc.get(GET02_CATE_SELECT)))
+        res = {
+            REQ_ID: req_id,
+            FUNCTION: setting_params.get(FUNCTION),
+            # Different graphs return different keys for columns selected, get columns from UI checkboxes for now
+            COLUMNS: get_selected_columns_from_trace_data_form(trace_data_form),
+            FACET: get_values_by_parameter_name(cfg_request_params, CAT_EXP_BOX, convert_to_int=True),
+            FILTER: filters,
+            DIV: int(cfg_request_params.get(DIV)) if cfg_request_params.get(DIV) else None,
+            START_DATETIME: start_datetime,
+            END_DATETIME: end_datetime,
+            LATEST: cfg_request_params.get(RECENT_TIME_INTERVAL),
+            OBJECTIVE: cfg_request_params.get(OBJECTIVE),
+            OPTION_ID: option_ids,
+        }
+    else:
+        res = {
+            REQ_ID: req_id,
+            BOOKMARK_ID: bookmark_id,
+            OPTION_ID: option_ids,
+            START_DATETIME: start_datetime,
+            END_DATETIME: end_datetime,
+            LATEST: cfg_request_params.get(RECENT_TIME_INTERVAL),
+        }
+    res = {k: v for k, v in res.items() if v}
+    return res
diff --git a/ap/api/external_api/services.py b/ap/api/external_api/services.py
index 9bd795e..ef51883 100644
--- a/ap/api/external_api/services.py
+++ b/ap/api/external_api/services.py
@@ -24,6 +24,7 @@
     FILTER,
     FILTER_ON_DEMAND,
     FUNCTION,
+    GET02_VALS_SELECT,
     LATEST,
     OBJECTIVE,
     OD_FILTER,
@@ -31,7 +32,6 @@
     PRIORITY,
     PROCESS_ID,
     REQ_ID,
-    REQUEST_PARAMS,
     SAVE_DATETIME,
     SAVE_GRAPH_SETTINGS,
     SAVE_LATEST,
@@ -46,7 +46,7 @@


 @log_execution_time()
-def save_odf_data_of_request(dic_param):
+def save_params_and_odf_data_of_request(dic_param):
     req_id = dic_param[COMMON].get(REQ_ID, '')
     od_filters = []
     col_ids = []
@@ -64,8 +64,8 @@
             'values': od_filter[UNIQUE_CATEGORIES],
         }
         od_filters.append(filter_obj)
-    if REQUEST_PARAMS in dic_param[COMMON]:
-        params = dic_param[COMMON][REQUEST_PARAMS]
+
+    params = json_dumps(dic_param[COMMON])
     with make_session() as meta_session:
         CfgRequest.save_odf_and_params_by_req_id(meta_session, req_id, json_dumps(od_filters), params)
@@ -110,6 +110,21 @@ def get_from_request_data_with_id(req_data, ele_id):
     return req_data.index(next(x for x in req_data if x.get('id') == ele_id))


+def get_values_by_parameter_name(form_data, param_name, convert_to_int=False):
+    if convert_to_int:
+        return [int(form_data[x]) for x in form_data if param_name in x]
+    return [form_data[x] for x in form_data if param_name in x]
+
+
+# TODO: Remove this function when columns are no longer sent in different keys
+def get_selected_columns_from_trace_data_form(req_data, target_component_name=GET02_VALS_SELECT):
+    return [
+        int(component.get('value'))
+        for component in req_data
+        if target_component_name in component.get('name', '') and component.get('checked')
+    ]
+
+
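Both helpers added to services.py select form fields by substring match on the key, which is how the saved GUI form encodes repeated parameters (catExpBox1, catExpBox2, ...). A toy illustration with made-up form keys; the helper is repeated verbatim so the snippet runs on its own:

    def get_values_by_parameter_name(form_data, param_name, convert_to_int=False):
        if convert_to_int:
            return [int(form_data[x]) for x in form_data if param_name in x]
        return [form_data[x] for x in form_data if param_name in x]

    form_data = {'catExpBox1': '10', 'catExpBox2': '12', 'objectiveVar': '5'}
    print(get_values_by_parameter_name(form_data, 'catExpBox', convert_to_int=True))  # [10, 12]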
 @dataclasses.dataclass
 class ErrorTypeDetail:
     reason: str
@@ -856,6 +871,24 @@ def save_bookmark(self):
         return self

+    def get_params(self):
+        req_id_rules = ValidationRules(
+            {
+                self._REQUIRED: True,
+            },
+        )
+
+        self.params = {
+            REQ_ID: req_id_rules,
+        }
+
+        self.validate_func = [
+            self.validate_required(),
+            self.validate_req_id_not_found(),
+        ]
+
+        return self
+
     def _add_api_error(self, status, error_msg: ErrorMessage):
         if not self.api_error:
             self.api_error = APIError(status_code=status)
diff --git a/ap/api/graphical_lasso/services.py b/ap/api/graphical_lasso/services.py
index 5b320f0..02bb032 100644
--- a/ap/api/graphical_lasso/services.py
+++ b/ap/api/graphical_lasso/services.py
@@ -6,6 +6,7 @@
 from sklearn.preprocessing import StandardScaler

 from ap.api.common.services.show_graph_services import (
+    convert_datetime_to_ct,
     customize_dic_param_for_reuse_cache,
     filter_cat_dict_common,
     get_data_from_db,
@@ -26,6 +27,7 @@
     END_PROC_NAME,
     IS_CATEGORY,
     IS_INT_CATEGORY,
+    IS_JUDGE,
     IS_SERIAL_NO,
     MATCHED_FILTER_IDS,
     NOT_EXACT_MATCH_FILTER_IDS,
@@ -73,6 +75,7 @@ def gen_graphical_lasso(graph_param, dic_param, df=None):
     dic_param[ACTUAL_RECORD_NUMBER] = actual_record_number

     dic_param = filter_cat_dict_common(df, dic_param, cat_exp, cat_procs, graph_param)
+    convert_datetime_to_ct(df, graph_param)

     # get category sensors from df
     cat_sensors = []
@@ -107,6 +110,7 @@ def gen_graphical_lasso(graph_param, dic_param, df=None):
             'is_checked': is_nominal_check(col_id, graph_param),
             'is_serial_no': general_col_info[IS_SERIAL_NO],
             'is_int_category': general_col_info[IS_INT_CATEGORY],
+            'is_judge': general_col_info[IS_JUDGE],
         }
         cat_col_details.append(col_detail)
         cat_ids[col_id] = label
diff --git a/ap/api/heatmap/services.py b/ap/api/heatmap/services.py
index 03c16c2..aea28b1 100644
--- a/ap/api/heatmap/services.py
+++ b/ap/api/heatmap/services.py
@@ -29,7 +29,7 @@
     is_categorical_col,
     main_check_filter_detail_match_graph_data,
 )
-from ap.common.common_utils import gen_sql_label
+from ap.common.common_utils import gen_sql_label, get_x_y_info
 from ap.common.constants import (
     ACTUAL_RECORD_NUMBER,
     ARRAY_PLOTDATA,
@@ -165,13 +165,9 @@ def gen_heatmap_data(root_graph_param: DicParam, dic_param, df=None):
         root_graph_param.dic_card_orders,
         dic_param,
     )
-    scatter_xy_ids = []
-    scatter_xy_names = []
-    scatter_proc_ids = []
-    for proc in orig_graph_param.array_formval:
-        scatter_proc_ids.append(proc.proc_id)
-        scatter_xy_ids = scatter_xy_ids + proc.col_ids
-        scatter_xy_names = scatter_xy_names + proc.col_names
+
+    # get xy info
+    scatter_xy_ids, scatter_xy_names, scatter_proc_ids = get_x_y_info(orig_graph_param.array_formval, dic_param[COMMON])

     x_proc_id = scatter_proc_ids[0]
     y_proc_id = scatter_proc_ids[-1]
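This hunk and the scatter_plot one later in the patch replace the same hand-rolled accumulation loop with ap.common.common_utils.get_x_y_info. The helper's body is not shown in this patch; judging from the loop it replaces, its contract is roughly the sketch below (the dic_param[COMMON] argument presumably lets it honor the user's x/y ordering, which the old loop could not — that is an assumption):

    def get_x_y_info_sketch(array_formval, dic_common=None):
        """Assumed contract: flatten per-process column info into parallel lists."""
        scatter_xy_ids, scatter_xy_names, scatter_proc_ids = [], [], []
        for proc in array_formval:
            scatter_proc_ids.append(proc.proc_id)
            scatter_xy_ids += proc.col_ids
            scatter_xy_names += proc.col_names
        return scatter_xy_ids, scatter_xy_names, scatter_proc_ids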
diff --git a/ap/api/parallel_plot/controllers.py b/ap/api/parallel_plot/controllers.py
index 6c22270..b81e59f 100644
--- a/ap/api/parallel_plot/controllers.py
+++ b/ap/api/parallel_plot/controllers.py
@@ -7,13 +7,22 @@
 from ap.api.common.services.show_graph_database import get_config_data
 from ap.api.common.services.show_graph_jump_function import get_jump_emd_data
 from ap.api.multi_scatter_plot.services import calc_partial_corr
-from ap.api.parallel_plot.services import gen_graph_paracords
-from ap.api.trace_data.services.csv_export import gen_df_export, make_graph_param, to_csv
-from ap.common.common_utils import gen_sql_label
-from ap.common.constants import COMMON, CONSTRAINT_RANGE, EXPORT_FROM, SELECTED, CSVExtTypes, DataType
+from ap.api.parallel_plot.services import gen_graph_paracords, generate_mask_from_constraint
+from ap.api.trace_data.services.csv_export import export_preprocessing, gen_df_export, make_graph_param, to_csv
+from ap.common.constants import (
+    COMMON,
+    CONSTRAINT_RANGE,
+    EMPTY_STRING,
+    EXPORT_FROM,
+    ONLY_EXPORT_DATA_SELECTED,
+    SELECTED,
+    TIME_COL,
+    TRUE_MATCH,
+    CSVExtTypes,
+)
 from ap.common.services.csv_content import zip_file_to_response
 from ap.common.services.form_env import bind_dic_param_to_class, parse_multi_filter_into_one, parse_request_params
-from ap.common.services.http_content import orjson_dumps
+from ap.common.services.http_content import json_dumps, orjson_dumps
 from ap.common.services.import_export_config_n_data import (
     get_dic_form_from_debug_info,
     set_export_dataset_id_to_dic_param,
@@ -76,42 +85,20 @@ def data_export(export_type):
     graph_param = bind_dic_param_to_class(dic_proc_cfgs, trace_graph, dic_card_orders, dic_param)
     graph_param, client_timezone = make_graph_param(graph_param, dic_param)
     delimiter = ',' if export_type == CSVExtTypes.CSV.value else '\t'
+    export_only_selected = dic_form.get(ONLY_EXPORT_DATA_SELECTED, 'false') == TRUE_MATCH

     df = gen_df_export(graph_param, dic_param)
     if dic_param[COMMON][EXPORT_FROM] == 'plot':
-        # add selected column 0 -> gray, 1 -> color of user selected value in plot PCP
-        constraint_range = json.loads(dic_form[CONSTRAINT_RANGE])
-        df_condition = pd.DataFrame()
-        sql_labels = []
-        mask = [True] * len(df)
-        for col_id, range_value in constraint_range.items():
-            # get df label
-            # filter by range_value
-            col_cfg = graph_param.get_col_cfg(int(col_id))
-            sql_label = gen_sql_label(col_cfg.id, col_cfg.column_name)
-            sql_labels.append(sql_label)
-            for range_v in range_value:
-                if sql_label not in df_condition:
-                    df_condition[sql_label] = [False] * len(df)
-                if col_cfg.is_category:
-                    dtype_name = col_cfg.data_type
-                    if dtype_name == DataType.INTEGER.name:
-                        vals = [int(val) for val in range_v]
-                    else:
-                        vals = [str(val) for val in range_v]
-                        df[sql_label] = df[sql_label].astype(str)
-                    df_condition[sql_label] = df_condition[sql_label] | (df[sql_label].isin(vals))
-                else:
-                    df_condition[sql_label] = df_condition[sql_label] | (df[sql_label] >= range_v[0]) & (
-                        df[sql_label] <= range_v[1]
-                    )
-
-            mask = mask & df_condition[sql_label]
+        mask = generate_mask_from_constraint(json.loads(dic_form[CONSTRAINT_RANGE]), graph_param, df)

         selected_index = list(df[mask].index)
+        # add selected column 0 -> gray, 1 -> color of user selected value in plot PCP
         df[SELECTED] = 0
         df.loc[selected_index, SELECTED] = 1
+        if export_only_selected:
+            df = df[df[SELECTED] == 1]
+            del df[SELECTED]

     if delimiter:
         csv_data = to_csv(
@@ -126,3 +113,36 @@ def data_export(export_type):
     response = zip_file_to_response([csv_data], None, export_type=export_type)

     return response
+
+
+@api_paracords_blueprint.route('/select_data', methods=['GET'])
+def select_data():
+    dic_form = parse_request_params(request)
+    dic_param = parse_multi_filter_into_one(dic_form)
+    dic_proc_cfgs, trace_graph, dic_card_orders = get_config_data()
+    graph_param = bind_dic_param_to_class(dic_proc_cfgs, trace_graph, dic_card_orders, dic_param)
+    graph_param, client_timezone = make_graph_param(graph_param, dic_param)
+
+    df = gen_df_export(graph_param, dic_param)
+
+    if dic_param[COMMON][EXPORT_FROM] == 'plot':
+        mask = generate_mask_from_constraint(json.loads(dic_form[CONSTRAINT_RANGE]), graph_param, df)
+        selected_index = list(df[mask].index)
+        df = df.loc[selected_index]
+    # return top 20 values of df, sorted by time descending
+    df.sort_values(by=TIME_COL, ascending=False, inplace=True)
+    # fill values for displaying on frontend
+    df = df.astype(pd.StringDtype())
+    df.fillna(EMPTY_STRING, inplace=True)
+    df = df.head(20)
+
+    processed_df = export_preprocessing(
+        df,
+        graph_param,
+        client_timezone=client_timezone,
+        terms=None,
+    )
+    result = processed_df.to_dict(orient='records')
+    response = {'cols': processed_df.columns.values, 'rows': result, 'cols_name': list(processed_df.columns.values)}
+    return json_dumps(response), 200
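select_data reuses the export pipeline but trims the frame for display: filter by the constraint mask, sort newest-first, stringify so the frontend renders uniformly, then keep 20 rows. The core of that shaping, stripped of the app plumbing:

    import pandas as pd

    def latest_rows_for_display(df: pd.DataFrame, time_col: str, n: int = 20) -> pd.DataFrame:
        out = df.sort_values(by=time_col, ascending=False)  # newest records first
        out = out.astype(pd.StringDtype())                  # uniform text for the UI
        out = out.fillna('')                                # blanks instead of <NA>
        return out.head(n)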
diff --git a/ap/api/parallel_plot/services.py b/ap/api/parallel_plot/services.py
index 3e2190b..695bdfb 100644
--- a/ap/api/parallel_plot/services.py
+++ b/ap/api/parallel_plot/services.py
@@ -6,9 +6,12 @@
 from pandas import DataFrame

 from ap.api.common.services.show_graph_services import (
+    calc_raw_common_scale_y,
+    calc_scale_info,
     customize_dic_param_for_reuse_cache,
     filter_cat_dict_common,
     gen_unique_data,
+    get_chart_infos,
     get_data_from_db,
     get_filter_on_demand_data,
     get_fmt_str_from_dic_data,
@@ -16,6 +19,7 @@
     get_trace_configs,
     is_nominal_check,
     main_check_filter_detail_match_graph_data,
+    set_chart_infos_to_plotdata,
 )
 from ap.common.common_utils import gen_sql_label
 from ap.common.constants import (
@@ -44,6 +48,7 @@
     UNIQUE_SERIAL,
     UNMATCHED_FILTER_IDS,
     CacheType,
+    DataType,
 )
 from ap.common.logger import log_execution_time
 from ap.common.memoize import memoize
@@ -129,13 +134,34 @@ def gen_graph_paracords(graph_param, dic_param, df=None):
     fmt_dic = get_fmt_str_from_dic_data(dic_data)

     gen_dic_serial_data_from_df(df, dic_proc_cfgs, dic_param)
+    times = df[TIME_COL].tolist() or []
+
+    # get chart infos (to calculate the threshold)
+    chart_infos, chart_infos_org = get_chart_infos(graph_param, dic_data, times)
     (
         dic_param[ARRAY_FORMVAL],
         dic_param[ARRAY_PLOTDATA],
         category_cols,
         category_cols_details,
         cast_inf_vals,
-    ) = gen_plotdata(graph_param, dic_data, dic_proc_cfgs, df=df)
+    ) = gen_plotdata(
+        graph_param,
+        dic_data,
+        dic_proc_cfgs,
+        df=df,
+        chart_infos=chart_infos,
+        chart_infos_org=chart_infos_org,
+    )
+
+    # calc common scale y min max for plots_data for PCP
+    min_max_list, all_graph_min, all_graph_max = calc_raw_common_scale_y(dic_param[ARRAY_PLOTDATA])
+    calc_scale_info(
+        graph_param.dic_proc_cfgs,
+        dic_param[ARRAY_PLOTDATA],
+        min_max_list,
+        all_graph_min,
+        all_graph_max,
+    )

     dic_unique_cate = gen_unique_data(df, dic_proc_cfgs, category_cols, True)
@@ -304,7 +330,14 @@ def order_end_proc_sensor(orig_graph_param: DicParam):


 @log_execution_time()
 @MessageAnnouncer.notify_progress(50)
-def gen_plotdata(orig_graph_param: DicParam, dic_data, dic_proc_cfg, df=None):
+def gen_plotdata(
+    orig_graph_param: DicParam,
+    dic_data,
+    dic_proc_cfg,
+    df=None,
+    chart_infos=None,
+    chart_infos_org=None,
+):
     # re-order proc-sensors to show to UI
     lst_proc_end_col = order_end_proc_sensor(orig_graph_param)
@@ -358,6 +391,7 @@ def gen_plotdata(orig_graph_param: DicParam, dic_data, dic_proc_cfg, df=None):
             'is_checked': is_nominal_check(col_id, orig_graph_param),
             'data_group_type': col_cfg.column_type,
             'is_int_category': col_cfg.is_int_category,
+            'is_judge': col_cfg.is_judge,
         }

         if is_categorical_sensor:
@@ -377,8 +411,52 @@ def gen_plotdata(orig_graph_param: DicParam, dic_data, dic_proc_cfg, df=None):
             'm_inf_idx': m_inf_idx,
             'categorized_data': categorized_data,
         }
+
+        # add chart info to plot data
+        if chart_infos:
+            set_chart_infos_to_plotdata(
+                col_id,
+                chart_infos,
+                original_graph_configs=chart_infos_org,
+                plotdata=plotdata,
+            )
+
         plotdatas.append(plotdata)
         array_formval.append({END_PROC: proc_id, GET02_VALS_SELECT: col_id})

     return array_formval, plotdatas, category_cols, category_cols_details, cast_inf_vals
+
+
+def generate_mask_from_constraint(
+    constraint_range,
+    graph_param,
+    df,
+):
+    df_condition = pd.DataFrame()
+    sql_labels = []
+    mask = [True] * len(df)
+    for col_id, range_value in constraint_range.items():
+        # get df label
+        # filter by range_value
+        col_cfg = graph_param.get_col_cfg(int(col_id))
+        sql_label = gen_sql_label(col_cfg.id, col_cfg.column_name)
+        sql_labels.append(sql_label)
+        for range_v in range_value:
+            if sql_label not in df_condition:
+                df_condition[sql_label] = [False] * len(df)
+            if col_cfg.is_category:
+                dtype_name = col_cfg.data_type
+                if dtype_name == DataType.INTEGER.name:
+                    vals = [int(val) for val in range_v]
+                else:
+                    vals = [str(val) for val in range_v]
+                    df[sql_label] = df[sql_label].astype(str)
+                df_condition[sql_label] = df_condition[sql_label] | (df[sql_label].isin(vals))
+            else:
+                df_condition[sql_label] = df_condition[sql_label] | (df[sql_label] >= range_v[0]) & (
+                    df[sql_label] <= range_v[1]
+                )

+        mask = mask & df_condition[sql_label]
+    return mask
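The extracted generate_mask_from_constraint combines ranges with OR inside a column and AND across columns. The same boolean algebra on a bare DataFrame (column names and ranges are illustrative):

    import pandas as pd

    def mask_from_ranges(df: pd.DataFrame, constraints: dict) -> pd.Series:
        # constraints: column -> list of (low, high) numeric bands or lists of categories
        mask = pd.Series(True, index=df.index)
        for col, ranges in constraints.items():
            col_mask = pd.Series(False, index=df.index)
            for rng in ranges:
                if isinstance(rng, list):          # categorical selection
                    col_mask |= df[col].isin(rng)
                else:                              # numeric (low, high) band
                    low, high = rng
                    col_mask |= (df[col] >= low) & (df[col] <= high)
            mask &= col_mask                       # AND across columns
        return mask

    df = pd.DataFrame({'temp': [1.0, 5.0, 9.0], 'grade': ['A', 'B', 'C']})
    print(mask_from_ranges(df, {'temp': [(0.0, 6.0)], 'grade': [['A', 'B']]}).tolist())
    # [True, True, False]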
     def _sensor_id_to_group_id(self, sensor_id):
         group_id = self.dic_groups['idx_grps'][sensor_id]
diff --git a/ap/api/sankey_plot/sankey_glasso/sankey_services.py b/ap/api/sankey_plot/sankey_glasso/sankey_services.py
index 7bae4ab..547cacd 100644
--- a/ap/api/sankey_plot/sankey_glasso/sankey_services.py
+++ b/ap/api/sankey_plot/sankey_glasso/sankey_services.py
@@ -8,6 +8,7 @@
 from sklearn.metrics import confusion_matrix

 from ap.api.common.services.show_graph_services import (
+    convert_datetime_to_ct,
     customize_dic_param_for_reuse_cache,
     filter_cat_dict_common,
     get_data_from_db,
@@ -119,6 +120,8 @@ def gen_graph_sankey_group_lasso(graph_param, dic_param, df=None):
     dic_param[UNIQUE_SERIAL] = unique_serial
     dic_param[ACTUAL_RECORD_NUMBER] = actual_record_number

+    convert_datetime_to_ct(df, graph_param)
+
     dic_param = filter_cat_dict_common(
         df,
         dic_param,
@@ -158,7 +161,6 @@ def gen_graph_sankey_group_lasso(graph_param, dic_param, df=None):
     df_sensors: pd.DataFrame = df[dic_label_id]
     df_sensors = df_sensors.rename(columns=dic_label_id)
     df_sensors, data_clean, errors, err_cols, dic_null_percent, dic_var = clean_input_data(df_sensors)
-
     if data_clean and not errors:
         # prepare column names and process names
         y_id = graph_param.common.objective_var
@@ -313,8 +315,14 @@ def clean_input_data(df: pd.DataFrame):

     # drop > 50% NA column before drop NA to calculate variance
     df_drop = df.drop(remove_cols, axis=1)
+    original_dtypes = df_drop.dtypes
     df_drop = df_drop.replace(dict.fromkeys([np.inf, -np.inf, np.nan], np.nan)).dropna(how='any')

+    # After dropping NA, int columns will be converted to float;
+    # convert int columns back to their original data types
+    for column in df_drop.columns:
+        if original_dtypes.get(column).name == pd.Int64Dtype().name:
+            df_drop[column] = df_drop[column].astype(original_dtypes.get(column))
+
     is_zero_var, err_cols = zero_variance(df_drop)
     if is_zero_var:
@@ -383,6 +391,7 @@ def plot_sankey_grplasso(dic_skd: defaultdict):
             'target': dic_skd['target'],
             'value': dic_skd['edge_value'],
             'color': dic_skd['edge_color'],
+            'relationship': dic_skd['relationship'],
         },
     }
     return sankey_trace
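clean_input_data records dtypes before the inf/NaN scrub because, depending on the pandas version, the replace/dropna round-trip can promote nullable integer columns to float; casting back keeps the downstream variance checks on honest integer data. A standalone sketch of the same restore step (when no promotion happens, the cast is a no-op):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'x': pd.array([1, 2, None], dtype='Int64'), 'y': [0.5, np.inf, 1.5]})
    original_dtypes = df.dtypes

    clean = df.replace(dict.fromkeys([np.inf, -np.inf, np.nan], np.nan)).dropna(how='any')
    for column in clean.columns:
        if original_dtypes[column].name == pd.Int64Dtype().name:
            clean[column] = clean[column].astype(original_dtypes[column])  # back to Int64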
diff --git a/ap/api/scatter_plot/services.py b/ap/api/scatter_plot/services.py
index dcd7be0..58d8966 100644
--- a/ap/api/scatter_plot/services.py
+++ b/ap/api/scatter_plot/services.py
@@ -29,13 +29,12 @@
     is_categorical_col,
     main_check_filter_detail_match_graph_data,
 )
-from ap.common.common_utils import gen_sql_label
+from ap.common.common_utils import gen_sql_label, get_x_y_info
 from ap.common.constants import (
     ACTUAL_RECORD_NUMBER,
     ARRAY_PLOTDATA,
     ARRAY_X,
     ARRAY_Y,
-    ARRAY_Z,
     CHART_INFOS,
     CHART_TYPE,
     COLORS,
@@ -55,7 +54,6 @@
     MATCHED_FILTER_IDS,
     N_TOTAL,
     NOT_EXACT_MATCH_FILTER_IDS,
-    ORIG_ARRAY_Z,
     ROWID,
     SCALE_AUTO,
     SCALE_COLOR,
@@ -166,13 +164,9 @@ def gen_scatter_plot(root_graph_param: DicParam, dic_param, df=None):
         dic_param,
     )
     threshold_filter_detail_ids = orig_graph_param.common.threshold_boxes
-    scatter_xy_ids = []
-    scatter_xy_names = []
-    scatter_proc_ids = []
-    for proc in orig_graph_param.array_formval:
-        scatter_proc_ids.append(proc.proc_id)
-        scatter_xy_ids = scatter_xy_ids + proc.col_ids
-        scatter_xy_names = scatter_xy_names + proc.col_names
+
+    # get xy info
+    scatter_xy_ids, scatter_xy_names, scatter_proc_ids = get_x_y_info(orig_graph_param.array_formval, dic_param[COMMON])

     x_proc_id = scatter_proc_ids[0]
     y_proc_id = scatter_proc_ids[-1]
@@ -228,6 +222,10 @@ def gen_scatter_plot(root_graph_param: DicParam, dic_param, df=None):
             df = df_term.copy() if df is None else pd.concat([df, df_term])

             chart_type, x_category, y_category = get_chart_type(df, x_id, y_id, dic_cols)
+            # in SCP, accept int/int as scatter instead of heatmap
+            # todo: refactor chart type
+            if chart_type == ChartType.HEATMAP_BY_INT.value:
+                chart_type = ChartType.SCATTER.value

             if _is_res_limited is None:
                 unique_serial = None
@@ -285,6 +283,9 @@ def gen_scatter_plot(root_graph_param: DicParam, dic_param, df=None):
         convert_datetime_to_ct(df, graph_param)

         chart_type, x_category, y_category = get_chart_type(df, x_id, y_id, dic_cols)
+        # in SCP, accept int/int as scatter instead of heatmap
+        if chart_type == ChartType.HEATMAP_BY_INT.value:
+            chart_type = ChartType.SCATTER.value

         dic_param = filter_cat_dict_common(df, dic_param, cat_exp, [], graph_param)
@@ -350,99 +351,8 @@ def gen_scatter_plot(root_graph_param: DicParam, dic_param, df=None):
     # chart type
     series_keys = [ARRAY_X, ARRAY_Y, COLORS, TIMES]
     dic_param[CHART_TYPE] = chart_type
-    if chart_type == ChartType.HEATMAP.value:
-        other_cols = [int(col) for col in [x_id, y_id, cat_div_id] if col]
-
-        dic_param = gen_group_filter_list(df, graph_param, dic_param, other_cols)
-        # gen matrix
-        all_x, all_y = get_heatmap_distinct(output_graphs)
-        # all_x_with_step, all_y_with_step = get_heatmap_range_with_steps(output_graphs)
-        for graph in output_graphs:
-            # gen_map_xy_heatmap_matrix(x_name, y_name, all_x_with_step, all_y_with_step, graph)
-            # handle x, y, z data
-            array_x = graph[ARRAY_X]
-            array_y = graph[ARRAY_Y]
-            unique_x = set(array_x.drop_duplicates().tolist())
-            unique_y = set(array_y.drop_duplicates().tolist())
-
-            missing_x = all_x - unique_x
-            missing_y = all_y - unique_y
-            array_z = pd.crosstab(array_y, array_x)
-            # map_xy_array_z = pd.crosstab(array_y, array_x, values=graph[COLORS], aggfunc='first')
-            for key in missing_x:
-                array_z[key] = None
-                # map_xy_array_z[key] = None
-
-            sorted_cols = sorted(array_z.columns)
-            array_z = array_z[sorted_cols]
-
-            missing_data = [None] * len(missing_y)
-            df_missing = pd.DataFrame({col: missing_data for col in array_z.columns}, index=missing_y)
-            array_z = pd.concat([array_z, df_missing])
-            array_z.sort_index(inplace=True)
-
-            # limit 10K cells
-            if array_z.size > HEATMAP_COL_ROW * HEATMAP_COL_ROW:
-                array_z = array_z[:HEATMAP_COL_ROW][array_z.columns[:HEATMAP_COL_ROW]]
-
-            graph[ARRAY_X] = array_z.columns
-            graph[ARRAY_Y] = array_z.index
-            graph[ORIG_ARRAY_Z] = matrix(array_z)
-
-            # ratio
-            z_count = len(array_x)
-            array_z = array_z * 100 // z_count
-            graph[ARRAY_Z] = matrix(array_z)
-
-            # reduce sending data to browser
-            graph[COLORS] = []
-            graph[X_SERIAL] = []
-            graph[Y_SERIAL] = []
-            graph[TIMES] = []
-            graph[ELAPSED_TIME] = []
-    elif chart_type == ChartType.HEATMAP_BY_INT.value:
-        # todo
-        # draw heatmap for int columns
-        # gen matrix
-        all_x, all_y = get_heatmap_range_with_steps(output_graphs)
-        for graph in output_graphs:
-            # handle x, y, z data
-            array_x = graph[ARRAY_X]
-            array_y = graph[ARRAY_Y]
-            unique_x = set(array_x.drop_duplicates().tolist())
-            unique_y = set(array_y.drop_duplicates().tolist())
-
-            missing_x = all_x - unique_x
-            missing_y = all_y - unique_y
-            map_xy_array_z = pd.crosstab(array_y, array_x, values=graph[COLORS], aggfunc='first')
-            for key in missing_x:
-                map_xy_array_z[key] = None
-
-            sorted_cols = sorted(map_xy_array_z.columns)
-            map_xy_array_z = map_xy_array_z[sorted_cols]
-
-            missing_data = [None] * len(missing_y)
-            df_missing = pd.DataFrame({col: missing_data for col in map_xy_array_z.columns}, index=missing_y)
-            map_xy_array_z = pd.concat([map_xy_array_z, df_missing])
-            map_xy_array_z.sort_index(inplace=True)
-
-            # limit 10K cells
-            if map_xy_array_z.size > HEATMAP_COL_ROW * HEATMAP_COL_ROW:
-                map_xy_array_z = map_xy_array_z[:HEATMAP_COL_ROW][map_xy_array_z.columns[:HEATMAP_COL_ROW]]
-
-            # graph[ARRAY_Z] = matrix(map_xy_array_z).tolist()
-            # graph[ORIG_ARRAY_Z] = graph[ARRAY_Z]
-            graph[X_NAME] = x_name
-            graph[Y_NAME] = y_name
-            graph[HEATMAP_MATRIX] = {
-                'z': matrix(map_xy_array_z).tolist(),
-                'x': all_x,
-                'y': all_y,
-            }
-
-    elif chart_type == ChartType.SCATTER.value:
+    if chart_type == ChartType.SCATTER.value:
         other_cols = [int(col) for col in [color_id, cat_div_id] if col]

         dic_param = gen_group_filter_list(df, graph_param, dic_param, other_cols)
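In the setting_module controller changes that follow, show_latest_records becomes cache-aside: when a process is already configured it first looks for a saved preview file (get_preview_data_files) and only queries the data source on a miss, writing the JSON back via save_preview_data_file at the end. The shape of that pattern, with a hypothetical on-disk layout (the real path scheme lives inside those helpers):

    import json
    from pathlib import Path

    CACHE_DIR = Path('instance/preview_cache')  # hypothetical location

    def load_or_build_preview(data_source_id: int, table_name: str, build) -> dict:
        path = CACHE_DIR / f'{data_source_id}_{table_name}.json'
        if path.is_file():
            return json.loads(path.read_text())  # cache hit: skip the data source
        result = build()                         # cache miss: query DB/CSV
        CACHE_DIR.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(result))
        return result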
diff --git a/ap/api/setting_module/controllers.py b/ap/api/setting_module/controllers.py
index dce730f..f686bfd 100644
--- a/ap/api/setting_module/controllers.py
+++ b/ap/api/setting_module/controllers.py
@@ -46,9 +46,11 @@
     gen_v2_columns_with_types,
     get_last_distinct_sensor_values,
     get_latest_records,
+    get_preview_data_files,
     preview_csv_data,
     preview_v2_data,
     save_master_vis_config,
+    save_preview_data_file,
 )
 from ap.api.setting_module.services.shutdown_app import shut_down_app
 from ap.api.trace_data.services.proc_link import add_gen_proc_link_job, add_restructure_indexes_job, show_proc_link_info
@@ -64,6 +66,7 @@
 )
 from ap.common.constants import (
     ANALYSIS_INTERFACE_ENV,
+    FILE_NAME,
     FISCAL_YEAR_START_MONTH,
     OSERR,
     SHUTDOWN,
@@ -107,6 +110,7 @@
     ProcessSchema,
 )
 from ap.setting_module.services.background_process import get_background_jobs_service, get_job_detail_service
+from ap.setting_module.services.backup_and_restore.jobs import add_backup_data_job, add_restore_data_job
 from ap.setting_module.services.process_config import (
     create_or_update_process_cfg,
     gen_function_column,
@@ -117,7 +121,11 @@
     get_process_visualizations,
     query_database_tables,
 )
-from ap.setting_module.services.register_from_file import get_chm_url_to_redirect
+from ap.setting_module.services.register_from_file import (
+    get_latest_records_for_register_by_file,
+    get_url_to_redirect,
+    handle_importing_by_one_click,
+)
 from ap.setting_module.services.trace_config import (
     gen_cfg_trace,
     get_all_processes_traces_info,
@@ -308,7 +316,10 @@ def get_database_table(db_id):
     tables = query_database_tables(db_id)

     if tables is None:
-        return jsonify({'tables': [], 'msg': 'Invalid data source id'}), 400
+        return (
+            jsonify({'tables': [], 'process_factnames': [], 'process_factids': [], 'msg': 'Invalid data source id'}),
+            400,
+        )
     else:
         return jsonify(tables), 200
@@ -342,6 +353,21 @@ def check_db_connection():
     return jsonify(flask_message=message), 200


+@api_setting_module_blueprint.route('/show_latest_records_for_register_by_file', methods=['POST'])
+def show_latest_records_for_register_by_file():
+    """[summary]
+    Show 5 latest records
+    Returns:
+        [type] -- [description]
+    """
+    dic_form = request.form.to_dict()
+    file_name = dic_form.get('fileName') or None
+    limit = parse_int_value(dic_form.get('limit')) or 10
+    folder = dic_form.get('folder') or None
+    latest_rec = get_latest_records_for_register_by_file(file_name, folder, limit)
+    return json_dumps(latest_rec)
+
+
 @api_setting_module_blueprint.route('/show_latest_records', methods=['POST'])
 def show_latest_records():
     """[summary]
@@ -357,6 +383,15 @@ def show_latest_records():
     limit = parse_int_value(dic_form.get('limit')) or 10
     folder = dic_form.get('folder') or None
     current_process_id = dic_form.get('currentProcessId', None)
+    process_factid = dic_form.get('processFactId', None)
+    if current_process_id and current_process_id != 'null' and not file_name:
+        # get data from db or csv
+        file_name = get_preview_data_files(data_source_id, table_name, process_factid)
+        if file_name:
+            with open(file_name, 'r') as file:
+                data = json.load(file)
+            return data
+
     latest_rec = get_latest_records(
         data_source_id,
         table_name,
@@ -364,6 +399,8 @@ def show_latest_records():
         folder,
         limit,
         current_process_id,
+        process_factid=process_factid,
+        is_convert_datetime=False,
     )

     result = {
@@ -397,8 +434,9 @@ def show_latest_records():
         'data_group_type': data_group_type,
         'is_rdb': is_rdb,
     }
-
-    return json_dumps(result)
+    result = json_dumps(result)
+    save_preview_data_file(int(data_source_id), result, table_name=table_name, process_factid=process_factid)
+    return result


 @api_setting_module_blueprint.route('/get_csv_resources', methods=['POST'])
@@ -407,22 +445,25 @@ def get_csv_resources():
     etl_func = request.json.get('etl_func')
     csv_delimiter = request.json.get('delimiter')
     is_v2 = request.json.get('isV2')
-    line_skip = request.json.get('line_skip') or ''
+    skip_head = request.json.get('skip_head')
+    skip_head = None if skip_head is None else int(skip_head)
     n_rows = request.json.get('n_rows')
     n_rows = None if n_rows is None else int(n_rows)
     is_transpose = request.json.get('is_transpose')
+    is_file = request.json.get('is_file')
     if is_v2:
-        dic_output = preview_v2_data(folder_url, csv_delimiter, 5)
+        dic_output = preview_v2_data(folder_url, csv_delimiter, 5, file_name=folder_url if is_file else None)
     else:
         dic_output = preview_csv_data(
             folder_url,
             etl_func,
             csv_delimiter,
-            line_skip=line_skip,
+            skip_head=skip_head,
             n_rows=n_rows,
             is_transpose=is_transpose,
             limit=5,
+            file_name=folder_url if is_file else None,
         )
     rows = dic_output['content']
     previewed_files = dic_output['previewed_files']
@@ -484,7 +525,7 @@ def check_folder():
                 break
     else:
         is_valid_file = any(data.lower().endswith(ext) for ext in extension)
-        is_not_empty = True
+        is_not_empty = os.path.isfile(data)
         is_valid = is_existing and is_not_empty
         err_msg = _('File not found')
         # empty folder
@@ -506,6 +547,29 @@ def check_folder():
         return jsonify({'status': 500, 'err_msg': _(OSERR[e.errno]), 'is_valid': False})


+@api_setting_module_blueprint.route('/check_folder_or_file', methods=['POST'])
+def check_folder_or_file():
+    try:
+        data = request.json.get('path')
+        return jsonify(
+            {
+                'status': 200,
+                'isFile': os.path.isfile(data),
+                'isFolder': os.path.isdir(data),
+            },
+        )
+    except OSError as e:
+        # raise
+        return jsonify(
+            {
+                'status': 500,
+                'err_msg': _(OSERR[e.errno]),
+                'isFile': False,
+                'isFolder': False,
+            },
+        )
+
+
 @api_setting_module_blueprint.route('/job_detail/<job_id>', methods=['GET'])
 def get_job_detail(job_id):
     """[Summary] Get get job details
@@ -839,7 +903,7 @@ def get_proc_config(proc_id):
     parent_and_child_processes = CfgProcess.get_all_parents_and_children_processes(proc_id)
     col_id_in_funcs = CfgProcessFunctionColumn.get_all_cfg_col_ids()
     if process:
-        tables = query_database_tables(process['data_source_id'])
+        tables = query_database_tables(process['data_source_id'], process=process)
         return (
             jsonify(
                 {
@@ -889,6 +953,49 @@ def get_proc_config_filter_data(proc_id):
     )


+@api_setting_module_blueprint.route('/proc_table_viewer_columns/<proc_id>', methods=['GET'])
+def get_table_viewer_columns(proc_id):
+    process = get_process_cfg(proc_id)
+    columns = []
+    for column in process.get('columns'):
+        if len(column.get(CfgProcessColumn.function_details.key)) > 0:
+            continue
+
+        if column.get(CfgProcessColumn.column_raw_name.name) == FILE_NAME:
+            continue
+
+        if column.get(CfgProcessColumn.is_dummy_datetime.name):
+            continue
+
+        columns.append(column)
+
+    process['columns'] = columns
+    if process:
+        if not process['name_en']:
+            process['name_en'] = to_romaji(process['name'])
+        return (
+            json_dumps(
+                {
+                    'status': 200,
+                    'data': process,
+                    'filter_col_data': {},
+                },
+            ),
+            200,
+        )
+    else:
+        return (
+            json_dumps(
+                {
+                    'status': 404,
+                    'data': {},
+                    'filter_col_data': {},
+                },
+            ),
+            200,
+        )
+
+
 @api_setting_module_blueprint.route('/proc_config/<proc_id>/columns', methods=['GET'])
 def get_proc_column_config(proc_id):
     columns = get_process_columns(proc_id)
@@ -1306,81 +1413,29 @@ def check_duplicated_process_name():


 @api_setting_module_blueprint.route('/register_source_and_proc', methods=['POST'])
 def register_source_and_proc():
-    input_json = request.json
-    with make_session() as meta_session:
-        try:
-            data_src: CfgDataSource = DataSourceSchema().load(input_json.get('csv_info'))
-
-            # data source
-            data_src_rec = insert_or_update_config(meta_session, data_src, exclude_columns=[CfgDataSource.order.key])
-
-            # csv detail
-            csv_detail = data_src.csv_detail
-            if csv_detail:
-                # csv_detail.dummy_header = csv_detail.dummy_header == 'true' if csv_detail.dummy_header else None
-                csv_columns = data_src.csv_detail.csv_columns
-                csv_columns = [col for col in csv_columns if not is_empty(col.column_name)]
-                data_src.csv_detail.csv_columns = csv_columns
-                csv_detail_rec = insert_or_update_config(
-                    meta_session,
-                    csv_detail,
-                    parent_obj=data_src_rec,
-                    parent_relation_key=CfgDataSource.csv_detail.key,
-                    parent_relation_type=RelationShip.ONE,
-                )
-
-                # CRUD
-                csv_columns = csv_detail.csv_columns
-                crud_config(
-                    meta_session,
-                    csv_columns,
-                    CfgCsvColumn.data_source_id.key,
-                    CfgCsvColumn.column_name.key,
-                    parent_obj=csv_detail_rec,
-                    parent_relation_key=CfgDataSourceCSV.csv_columns.key,
-                    parent_relation_type=RelationShip.MANY,
-                )
-            process_schema = ProcessSchema()
-            proc_config = request.json.get('proc_config')
-            proc_config['data_source_id'] = data_src_rec.id
-            proc_data = process_schema.load(proc_config)
-            unused_columns = request.json.get('unused_columns', [])
-            should_import_data = request.json.get('import_data')
-
-            process = create_or_update_process_cfg(proc_data, unused_columns)
-
-            # create process json
-            process_schema = ProcessSchema()
-            process_json = process_schema.dump(process) or {}
-
-            # import data
-            if should_import_data:
-                add_import_job(process, run_now=True, is_user_request=True)
-        except Exception as e:
-            logger.exception(e)
-            meta_session.rollback()
-            message = {'message': _('Database Setting failed to save'), 'is_error': True}
-            return jsonify(flask_message=message), 500
-
-    message = {'message': _('Database Setting saved.'), 'is_error': False}
-    ds = None
-    if data_src_rec and data_src_rec.id and process:
-        ds_schema = DataSourceSchema()
-        ds = CfgDataSource.get_ds(data_src_rec.id)
-        ds = ds_schema.dumps(ds)
+    try:
+        new_process_ids = handle_importing_by_one_click(request.json)
         data_register_data = {
+            'RegisterByFileRequestID': request.json.get('RegisterByFileRequestID'),
             'status': JobStatus.PROCESSING.name,
-            'process_id': process.id,
             'is_first_imported': False,
         }
         background_announcer.announce(data_register_data, AnnounceEvent.DATA_REGISTER.name)
-    return jsonify(id=data_src_rec.id, data_source=ds, process_info=process_json, flask_message=message), 200
+    except Exception as e:
+        logger.exception(e)
+        data = {'message': _('Database Setting failed to save'), 'is_error': True, 'detail': str(e)}
+        return jsonify(data), 500
+
+    data = {'message': _('Database Setting saved.'), 'is_error': False, 'processIds': new_process_ids}
+    return jsonify(data), 200


-@api_setting_module_blueprint.route('/redirect_to_chm_page/<proc_id>', methods=['GET'])
-def redirect_to_chm_page(proc_id):
-    target_url = get_chm_url_to_redirect(request, proc_id)
+@api_setting_module_blueprint.route('/redirect_to_page', methods=['POST'])
+def redirect_to_page():
+    page = request.json.get('page')
+    proc_ids = request.json.get('processIds')
+    target_url = get_url_to_redirect(request, proc_ids, page)
     return jsonify(url=target_url), 200
@@ -1398,7 +1453,7 @@ def get_function_infos():
     dict_cfg_process_column = {cfg_process_column.id: cfg_process_column for cfg_process_column in cfg_process_columns}
     result = []
     for function_detail in sorted_function_details(cfg_process_columns):
-        process_col = dict_cfg_process_column[function_detail.process_column_id]
+        process_col: CfgProcessColumn = dict_cfg_process_column[function_detail.process_column_id]
         function_id = function_detail.function_id
         m_function: MFunction = MFunction.get_by_id(function_id)
         var_x = function_detail.var_x
@@ -1430,7 +1485,11 @@ def get_function_infos():
             y_data_type=y_data_type,
             **function_detail.as_dict(),
         )
-        sample_datas = equation_sample_data.sample_data().sample_data
+        sample_data = equation_sample_data.sample_data()
+        sample_datas = sample_data.sample_data
+        output_type = sample_data.output_type
+        # update data type
+        process_col.data_type = output_type
         dict_sample_data[str(process_col.id)] = sample_datas
         function_info = {
             'functionName': m_function.function_type,
@@ -1489,3 +1548,35 @@ def delete_function_column_config():
         return json_dumps({}), 500

     return json_dumps({}), 200
+
+
+@api_setting_module_blueprint.route('/backup_data', methods=['POST'])
+def backup_data():
+    """[Summary] backup data from DB
+    Returns: 200/500
+    """
+    data = json.loads(request.data)
+    process_id = data.get('process_id')
+    start_time = data.get('start_time')
+    end_time = data.get('end_time')
+    if process_id:
+        target_jobs = [JobType.CSV_IMPORT, JobType.FACTORY_IMPORT, JobType.FACTORY_PAST_IMPORT]
+        remove_jobs(target_jobs, proc_id=process_id)
+        add_backup_data_job(process_id, start_time, end_time)
+    return json_dumps({}), 200
+
+
+@api_setting_module_blueprint.route('/restore_data', methods=['POST'])
+def restore_data():
+    """[Summary] restore data from file
+    Returns: 200/500
+    """
+    data = json.loads(request.data)
+    process_id = data.get('process_id')
+    start_time = data.get('start_time')
+    end_time = data.get('end_time')
+    if process_id:
+        target_jobs = [JobType.CSV_IMPORT, JobType.FACTORY_IMPORT, JobType.FACTORY_PAST_IMPORT]
+        remove_jobs(target_jobs, proc_id=process_id)
+        add_restore_data_job(process_id, start_time, end_time)
+    return json_dumps({}), 200
diff --git a/ap/api/setting_module/services/autolink.py b/ap/api/setting_module/services/autolink.py
index 28c83e6..aa70eae 100644
--- a/ap/api/setting_module/services/autolink.py
+++ b/ap/api/setting_module/services/autolink.py
@@ -16,7 +16,7 @@
     get_reversed_column_value_from_v2,
     get_v2_datasource_type_from_file,
 )
-from ap.common.common_utils import detect_encoding, get_csv_delimiter, get_latest_files
+from ap.common.common_utils import DATE_FORMAT_SIMPLE, detect_encoding, get_csv_delimiter, get_latest_files
 from ap.common.constants import (
     DF_CHUNK_SIZE,
     DUMMY_V2_PROCESS_NAME,
@@ -25,7 +25,7 @@
     DBType,
 )
 from ap.common.logger import log_execution_time
-from ap.setting_module.models import CfgDataSource
+from ap.setting_module.models import CfgDataSource, CfgProcess

 AUTO_LINK_ID = 'id'
 PROCESS = 'process'
@@ -138,6 +138,10 @@ def __init__(
     def cfg_data_source(self) -> CfgDataSource:
         return CfgDataSource.query.get(self.data_source_id)

+    @property
+    def cfg_process(self) -> CfgProcess:
+        return CfgProcess.query.get(self.process_id)
+
     def get_processes_df(self, reader: 'AutoLinkReader') -> None:
         reader.read_db(self)

@@ -146,6 +150,29 @@ class AutoLinkReader:
     def __init__(self):
         self.df = pd.DataFrame(columns=[AUTO_LINK_ID, DATE, SERIAL])

+    @staticmethod
+    def convert_datetime(df: DataFrame) -> DataFrame:
+        """
+        Convert datetime column to format `%Y-%m-%d %H:%M:%S`
+        Args:
+            df: [DataFrame] - a dataframe containing datetime column
+
+        Returns: [DataFrame] - a dataframe containing converted datetime column
+        """
+        from ap.api.setting_module.services.csv_import import datetime_transform
+
+        converted_df = df.copy()
+
+        datetime_series: Series = converted_df[DATE]
+        converted_datetime_series: Series = pd.to_datetime(datetime_series, errors='coerce').dt.strftime(
+            DATE_FORMAT_SIMPLE,
+        )
+        non_datetime_series: Series = datetime_series[converted_datetime_series.isna()]
+        converted_datetime_series.update(datetime_transform(non_datetime_series.astype(str)))
+
+        converted_df[DATE] = converted_datetime_series
+        return converted_df
+
     @staticmethod
     def drop_duplicates(df: DataFrame) -> DataFrame:
         """
@@ -225,7 +252,7 @@ def __read_v2(self, file: Union[Path, str], processes: List[str], ids: List[int]):
                     replaced_id_df = self.drop_duplicates(replaced_id_df)
                     replaced_df = pd.concat([replaced_df, replaced_id_df])

-            self.df = pd.concat([self.df, replaced_df])
+            self.df = pd.concat([self.df, self.convert_datetime(replaced_df)])
             self.df = self.drop_duplicates(self.df)
         except ParserError:
             with pd.read_csv(
@@ -251,7 +278,7 @@ def __read_v2(self, file: Union[Path, str], processes: List[str], ids: List[int]):
                         replaced_id_df = self.drop_duplicates(replaced_id_df)
                         replaced_df = pd.concat([replaced_df, replaced_id_df])

-                self.df = pd.concat([self.df, replaced_df])
+                self.df = pd.concat([self.df, self.convert_datetime(replaced_df)])
                 self.df = self.drop_duplicates(self.df)

 @log_execution_time(LOG_PREFIX)
@@ -296,7 +323,7 @@ def __read_normal_file(
             df_chunk = _df_chunk.rename(columns=rename_params).dropna()
             df = pd.concat([df, df_chunk])
         df = self.drop_duplicates(df)
-        return df
+        return self.convert_datetime(df)

     def read_path(self, source: AutoLinkSourcePath) -> None:
         for file in source.files:
@@ -317,14 +344,15 @@ def read_path(self, source: AutoLinkSourcePath) -> None:
                     delimiter=source.get_delimiter(file),
                 )
             df[AUTO_LINK_ID] = source.process_id
-            self.df = pd.concat([self.df, df])
+            self.df = pd.concat([self.df, self.convert_datetime(df)])
             self.df = self.drop_duplicates(self.df)

     def read_db(self, source: AutoLinkSourceDB) -> None:
-        cols, df = get_info_from_db(
+        cols, df, _ = get_info_from_db(
             source.cfg_data_source,
             source.table_name,
             sql_limit=AUTOLINK_TOTAL_RECORDS_PER_SOURCE,
+            process_factid=source.cfg_process.process_factid,
         )
         assert source.date_col in cols
         assert source.serial_col in cols
@@ -334,7 +362,7 @@ def read_db(self, source: AutoLinkSourceDB) -> None:
         }
         df = df[[source.date_col, source.serial_col]].rename(columns=rename_params)
         df[AUTO_LINK_ID] = source.process_id
-        self.df = pd.concat([self.df, df])
+        self.df = pd.concat([self.df, self.convert_datetime(df)])
         self.df = self.drop_duplicates(self.df)
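convert_datetime in the autolink reader is a two-pass normalizer: pandas parses what it can, and only the unparsed leftovers go through the slower regex-based datetime_transform (defined in csv_import below). The same structure in miniature, with a trivial stand-in for the fallback:

    import pandas as pd

    def normalize_datetime(s: pd.Series) -> pd.Series:
        parsed = pd.to_datetime(s, errors='coerce').dt.strftime('%Y-%m-%d %H:%M:%S')
        # Fallback for rows pandas could not parse; this stands in for
        # datetime_transform, which handles formats such as 2023年01月02日 via regex.
        leftovers = (
            s[parsed.isna()]
            .astype(str)
            .str.replace('年', '-')
            .str.replace('月', '-')
            .str.replace('日', '')
        )
        parsed.update(leftovers)
        return parsed

    print(normalize_datetime(pd.Series(['2023-01-02 03:04:05', '2023年01月02日'])).tolist())
    # ['2023-01-02 03:04:05', '2023-01-02']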
diff --git a/ap/api/setting_module/services/csv_import.py b/ap/api/setting_module/services/csv_import.py
index 9975dea..66bbabf 100644
--- a/ap/api/setting_module/services/csv_import.py
+++ b/ap/api/setting_module/services/csv_import.py
@@ -5,7 +5,7 @@
 import re
 from datetime import datetime
 from io import BytesIO
-from typing import List
+from typing import List, Optional

 import numpy as np
 import pandas as pd
@@ -47,6 +47,7 @@
     DATE_FORMAT,
     DATE_FORMAT_STR_ONLY_DIGIT,
     TIME_FORMAT_WITH_SEC,
+    SQLiteFormatStrings,
     convert_time,
     detect_encoding,
     detect_file_encoding,
@@ -62,22 +63,26 @@
     DATA_TYPE_DUPLICATE_MSG,
     DATA_TYPE_ERROR_EMPTY_DATA,
     DATA_TYPE_ERROR_MSG,
+    DATE_TYPE_REGEX,
     DATETIME_DUMMY,
+    DEFAULT_NONE_VALUE,
     EMPTY_STRING,
     FILE_NAME,
     NUM_CHARS_THRESHOLD,
+    TIME_TYPE_REGEX,
     CSVExtTypes,
     DataType,
     DBType,
     JobStatus,
     JobType,
 )
+from ap.common.datetime_format_utils import DateTimeFormatUtils
 from ap.common.disk_usage import get_ip_address
 from ap.common.logger import log_execution_time, logger
 from ap.common.pydn.dblib.db_proxy import DbProxy, gen_data_source_of_universal_db
 from ap.common.scheduler import scheduler_app_context
 from ap.common.services.csv_content import (
-    get_number_of_reading_lines,
+    get_limit_records,
     is_normal_csv,
     read_csv_with_transpose,
     read_data,
@@ -116,6 +121,7 @@ def import_csv_job(
     _proc_name,
     proc_id,
     is_user_request: bool = False,
+    register_by_file_request_id: str = None,
     **kwargs,
 ):
     """scheduler job import csv
@@ -128,7 +134,7 @@ def import_csv_job(
     def _add_gen_proc_link_job(*_args, **_kwargs):
         add_gen_proc_link_job(process_id=proc_id, is_user_request=is_user_request, *_args, **_kwargs)

-    gen = import_csv(proc_id)
+    gen = import_csv(proc_id, register_by_file_request_id=register_by_file_request_id)
     send_processing_info(
         gen,
         JobType.CSV_IMPORT,
@@ -148,12 +154,13 @@ def get_config_sensor(proc_id):


 @log_execution_time()
-def import_csv(proc_id, record_per_commit=RECORD_PER_COMMIT):
+def import_csv(proc_id, record_per_commit=RECORD_PER_COMMIT, register_by_file_request_id: str = None):
     """csv files import

     Keyword Arguments:
         proc_id {[type]} -- [description] (default: {None})
         db_id {[type]} -- [description] (default: {None})
+        register_by_file_request_id {[type]} -- [description] (default: {None})

     Raises:
         e: [description]
@@ -223,16 +230,13 @@ def import_csv(proc_id, record_per_commit=RECORD_PER_COMMIT):
         use_col_names = headers
         if use_dummy_datetime and DATETIME_DUMMY in use_col_names:
             use_col_names.remove(DATETIME_DUMMY)
-        data_first_row = (data_src.skip_head if data_src.skip_head is not None else 0) + 1
+        # check for skip_head = None to prevent TypeError when adding 1
+        data_first_row = (skip_head if skip_head is not None else 0) + 1
         head_skips = list(range(data_first_row))
     else:
         is_abnormal = False
-        data_first_row = (data_src.skip_head if data_src.skip_head is not None else 0) + 1
-        head_skips = list(
-            range(
-                data_src.skip_head if data_src.skip_head is not None else 0,
-            ),
-        )
+        data_first_row = (skip_head if skip_head is not None else 0) + 1
+        head_skips = list(range(skip_head if skip_head is not None else 0))

     total_percent = 0
     percent_per_file = 100 / len(import_targets)
@@ -299,7 +303,7 @@

         if data_src.dummy_header:
             # generate column name if there is not header in file
-            org_csv_cols, csv_cols, _, _, _ = gen_dummy_header(org_csv_cols, line_skip=data_src.skip_head)
+            org_csv_cols, csv_cols, _, _, _ = gen_dummy_header(org_csv_cols, skip_head=data_src.skip_head)
             csv_cols, _ = gen_colsname_for_duplicated(csv_cols)
         else:
             # need to convert header in case of transposed
@@ -384,7 +388,14 @@
         default_csv_param['dtype'] = {
             col: 'string'
             for col, col_cfg in dic_use_cols.items()
-            if col in use_col_names and col_cfg.data_type == DataType.TEXT.name
+            if col in use_col_names
+            and col_cfg.data_type
+            in [
+                DataType.TEXT.name,
+                DataType.DATETIME.name,
+                DataType.DATE.name,
+                DataType.TIME.name,
+            ]
         }

         # add more dtype columns in usecols
@@ -486,12 +497,19 @@
             # do import
             save_res, df_error, df_duplicate = import_df(proc_id, df, dic_use_cols, get_date_col, job_info, trans_data)
             if is_first_chunk:
-                data_register_data = {
-                    'status': JobStatus.PROCESSING.name,
-                    'process_id': proc_id,
-                    'is_first_imported': True,
-                }
-                background_announcer.announce(data_register_data, AnnounceEvent.DATA_REGISTER.name)
+                if register_by_file_request_id:
+                    data_register_data = {
+                        'RegisterByFileRequestID': register_by_file_request_id,
+                        'status': JobStatus.PROCESSING.name,
+                        'process_id': proc_id,
+                        'is_first_imported': True,
+                        'use_dummy_datetime': use_dummy_datetime,
+                    }
+                    background_announcer.announce(
+                        data_register_data,
+                        AnnounceEvent.DATA_REGISTER.name,
+                        f'{AnnounceEvent.DATA_REGISTER.name}_{proc_id}',
+                    )
                 is_first_chunk = False

             df_error_cnt = len(df_error)
@@ -531,13 +549,19 @@
         job_info.import_type = JobType.CSV_IMPORT.name
         save_res, df_error, df_duplicate = import_df(proc_id, df, dic_use_cols, get_date_col, job_info, trans_data)
-        if is_first_chunk:
+        if is_first_chunk and register_by_file_request_id:
             data_register_data = {
+                'RegisterByFileRequestID': register_by_file_request_id,
                 'status': JobStatus.PROCESSING.name,
                 'process_id': proc_id,
                 'is_first_imported': True,
+                'use_dummy_datetime': use_dummy_datetime,
             }
-            background_announcer.announce(data_register_data, AnnounceEvent.DATA_REGISTER.name)
+            background_announcer.announce(
+                data_register_data,
+                AnnounceEvent.DATA_REGISTER.name,
+                f'{AnnounceEvent.DATA_REGISTER.name}_{proc_id}',
+            )

         df_error_cnt = len(df_error)
         if df_error_cnt:
@@ -677,9 +701,9 @@ def csv_to_df(
         },
     )

-    # assign n_rows
-    nrows = get_number_of_reading_lines(n_rows=data_src.n_rows)
-    read_csv_param.update({'nrows': nrows})
+    # assign n_rows with is_transpose
+    n_rows = get_limit_records(is_transpose=data_src.is_transpose, n_rows=data_src.n_rows)
+    read_csv_param.update({'nrows': n_rows})

     # get encoding
     if not encoding:
@@ -692,7 +716,6 @@ def csv_to_df(
     read_csv_param.update(
         {
             'sep': csv_delimiter,
-            'skipinitialspace': True,
             'na_values': NA_VALUES,
             'error_bad_lines': False,
             'encoding': encoding,
@@ -712,18 +735,6 @@ def csv_to_df(
     if col_names:
         df.columns = col_names

-    # convert data type
-    if dic_use_cols:
-        for col, cfg_col in dic_use_cols.items():
-            d_type = cfg_col.predict_type
-            if d_type and DataType[d_type] in [
-                DataType.REAL_SEP,
-                DataType.INTEGER_SEP,
-                DataType.EU_REAL_SEP,
-                DataType.EU_INTEGER_SEP,
-            ]:
-                convert_eu_decimal(df, col, d_type)
-
     col_names = {col: normalize_str(col) for col in df.columns}
     df = df.rename(columns=col_names)
@@ -743,7 +754,7 @@ def get_import_target_files(proc_id, data_src, trans_data, db_instance):
     dic_success_file, dic_error_file = get_last_csv_import_info(trans_data, db_instance)
     valid_extensions = [CSVExtTypes.CSV.value, CSVExtTypes.TSV.value, CSVExtTypes.SSV.value, CSVExtTypes.ZIP.value]
     csv_files = []
-    if os.path.isfile(data_src.directory):
+    if data_src.is_file_path:
         if any(data_src.directory.lower().endswith(ext) for ext in valid_extensions):
             csv_files.append(data_src.directory)
     else:
@@ -817,7 +828,8 @@ def remove_duplicates(
     # remove error index in df_origin
     df_origin_check = df_origin[~df_origin.index.isin(df_error.index)]
     # remove duplicate in csv files
-    df.drop_duplicates(subset=df_columns, keep='last', inplace=True)
+    if len(df_columns):
+        df.drop_duplicates(subset=df_columns, keep='last', inplace=True)

     # get data from database
     with DbProxy(gen_data_source_of_universal_db(proc_id), True) as db_instance:
@@ -943,11 +955,90 @@ def actual_datetime(m: re.match) -> str:


 @log_execution_time()
-def convert_datetime_format(df, dic_use_cols):
+def date_transform(date_series):
+    """
+    Convert date series to standard date format
+
+    Support input formats:
+
+    - YYYY/MM/DD
+    - YYYY-MM-DD
+    - YYYY年MM月DD日
+
+    Args:
+        date_series (Series): a series of dates
+
+    Returns:
+        A series of dates with standard format YYYY-MM-DD
+    """
+    separate_char = '-'
+    begin_part_of_year = datetime.now().year.__str__()[:2]
+    return date_series.str.replace(
+        DATE_TYPE_REGEX,
+        lambda m: (
+            f'{m.group("year") if len(m.group("year")) == 4 else begin_part_of_year + m.group("year")}'
+            f'{separate_char}'
+            f'{m.group("month").rjust(2, "0")}'
+            f'{separate_char}'
+            f'{m.group("day").rjust(2, "0")}'
+        ),
+        regex=True,
+    )
+
+
+@log_execution_time()
+def time_transform(time_series):
+    """
+    Convert time series to standard time format
+
+    Support input formats:
+
+    - HH:mm:ss
+    - HH-mm-ss
+    - HH.mm.ss
+    - HH mm ss
+    - HH時mm分ss秒
+
+    Args:
+        time_series (Series): a series of time
+
+    Returns:
+        A series of time with standard format HH:MM:SS
+    """
+    separate_char = ':'
+    return time_series.str.replace(
+        TIME_TYPE_REGEX,
+        lambda m: (
+            f'{m.group("hour").rjust(2, "0")}'
+            f'{separate_char}'
+            f'{m.group("minute").rjust(2, "0")}'
+            f'{separate_char}'
+            f'{m.group("second").rjust(2, "0")}'
+        ),
+        regex=True,
+    )
+
+
+@log_execution_time()
+def convert_datetime_format(df, dic_use_cols, datetime_format: Optional[str] = None):
+    datetime_format_obj = DateTimeFormatUtils.get_datetime_format(datetime_format)
     for col, cfg_col in dic_use_cols.items():
         if col not in df.columns:
             continue

-        if cfg_col.data_type in [DataType.DATETIME.name, DataType.DATE.name]:
+        if cfg_col.data_type == DataType.DATETIME.name:
+            # Convert datetime based on datetime format
+            if datetime_format_obj.datetime_format:
+                datetime_series = pd.to_datetime(
+                    df[col],
+                    errors='coerce',
+                    format=datetime_format_obj.datetime_format,
+                )
+                non_na_datetime_series = datetime_series[datetime_series.notnull()]
+                df[col] = non_na_datetime_series.dt.strftime(SQLiteFormatStrings.DATETIME.value).astype(
+                    pd.StringDtype(),
+                )
+                continue
+
             dtype_name = df[col].dtype.name
             if dtype_name == 'object':
                 df[col] = df[col].astype(str)
@@ -956,6 +1047,52 @@
             date_only = cfg_col.data_type == DataType.DATE.name
             df[col] = datetime_transform(df[col], date_only=date_only)

+        elif cfg_col.data_type == DataType.DATE.name:
+            # Convert date based on date format
+            if datetime_format_obj.date_format:
+                date_series = pd.to_datetime(
+                    df[col],
+                    errors='coerce',
+                    format=datetime_format_obj.date_format,
+                )
+                non_na_date_series = date_series[date_series.notnull()]
+                df[col] = non_na_date_series.dt.strftime(SQLiteFormatStrings.DATE.value).astype(pd.StringDtype())
+                continue
+
+            if pd.api.types.is_datetime64_dtype(df[col]):
+                df[col] = df[col].dt.strftime(SQLiteFormatStrings.DATE.value).astype(pd.StringDtype())
+                continue
+
+            date_series = pd.to_datetime(df[col], errors='coerce')
+            date_series.update(date_series[date_series.notnull()].dt.strftime(SQLiteFormatStrings.DATE.value))
+            unknown_series = df[date_series.isnull()][col].astype('string')
+            date_series.update(unknown_series)
+            date_series = date_series.astype('string')
+            df[col] = date_transform(date_series).replace({pd.NaT: DEFAULT_NONE_VALUE})
+
+        elif cfg_col.data_type == DataType.TIME.name:
+            # Convert time based on time format
+            if datetime_format_obj.time_format:
+                time_series = pd.to_datetime(
+                    df[col],
+                    errors='coerce',
+                    format=datetime_format_obj.time_format,
+                )
+                non_na_time_series = time_series[time_series.notnull()]
+                df[col] = non_na_time_series.dt.strftime(SQLiteFormatStrings.TIME.value).astype(pd.StringDtype())
+                continue
+
+            if pd.api.types.is_datetime64_dtype(df[col]):
+                df[col] = df[col].dt.strftime(SQLiteFormatStrings.TIME.value).astype(pd.StringDtype())
+                continue
+
+            time_series = pd.to_datetime(df[col], errors='coerce')
+            time_series.update(time_series[time_series.notnull()].dt.strftime(SQLiteFormatStrings.TIME.value))
+            unknown_series = df[time_series.isnull()][col].astype('string')
+            time_series.update(unknown_series)
+            time_series = time_transform(time_series).replace({pd.NaT: DEFAULT_NONE_VALUE})
+            df[col] = time_series
+
     return df
@@ -990,7 +1127,8 @@ def import_df(proc_id, df, dic_use_cols, get_date_col, job_info=None, trans_data):
     df = df.convert_dtypes()

     # convert datatime type 2023年01月02日 -> 2023-01-02 00:00:00
-    df = convert_datetime_format(df, dic_use_cols)
+    cfg_process = CfgProcess.get_proc_by_id(proc_id)
+    df = convert_datetime_format(df, dic_use_cols, datetime_format=cfg_process.datetime_format)

     # make datetime main from date:main and time:main
     if trans_data and trans_data.main_date_column and trans_data.main_time_column:
@@ -1098,18 +1236,16 @@ def convert_csv_timezone(df, get_date_col):

 @log_execution_time()
 def convert_eu_decimal(df: DataFrame, df_col, data_type):
     if data_type == DataType.REAL_SEP.name:
-        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '', regex=True).astype('float64')
+        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '', regex=True)

     if data_type == DataType.INTEGER_SEP.name:
-        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '', regex=True).astype('int32')
+        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '', regex=True)

     if data_type == DataType.EU_REAL_SEP.name:
-        df[df_col] = df[df_col].astype(str).str.replace(r'\.+', '', regex=True)
-        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '.', regex=True).astype('float64')
+        df[df_col] = df[df_col].astype(str).str.replace(r'\.+', '', regex=True).str.replace(r'\,+', '.', regex=True)

     if data_type == DataType.EU_INTEGER_SEP.name:
-        df[df_col] = df[df_col].astype(str).str.replace(r'\.+', '', regex=True)
-        df[df_col] = df[df_col].astype(str).str.replace(r'\,+', '.', regex=True).astype('int32')
+        df[df_col] = df[df_col].astype(str).str.replace(r'\.+', '', regex=True).str.replace(r'\,+', '.', regex=True)


 def write_invalid_records_to_file(
@@ -1183,28 +1319,28 @@ def is_header_contains_invalid_chars(header_names: list[str]) -> bool:
     return nchars > NUM_CHARS_THRESHOLD


-def gen_dummy_header(header_names, data_details=None, line_skip=''):
+def gen_dummy_header(header_names, data_details=None, skip_head=None):
     """Generate dummy header for current data source
-    - if line_skip is not provided (None or '') or line_skip > 0:
+    - if skip_head is not provided (None) or skip_head > 0:
         generate dummy header if and only if number of invalid chars > 90%
-    - if line_skip = 0:
+    - if skip_head = 0:
         always generate dummy header
     @param header_names:
     @param data_details:
-    @param line_skip:
+    @param skip_head:
     @return:
     """
     dummy_header = False
     partial_dummy_header = False
     org_header = header_names.copy()
-    is_blank = line_skip is None or line_skip == EMPTY_STRING
+    is_blank = skip_head is None
     is_auto_generate_dummy_header = is_header_contains_invalid_chars(header_names)

     # auto generate dummy header rules
     is_gen_from_blank_skip = is_blank and is_auto_generate_dummy_header
-    is_gen_from_zero_skip = not is_blank and int(line_skip) == 0
-    is_gen_from_number_skip = not is_blank and int(line_skip) > 0 and is_auto_generate_dummy_header
+    is_gen_from_zero_skip = not is_blank and skip_head == 0
+    is_gen_from_number_skip = not is_blank and skip_head > 0 and is_auto_generate_dummy_header
     if is_gen_from_blank_skip or is_gen_from_zero_skip or is_gen_from_number_skip:
         if data_details:
             data_details = [header_names] + data_details
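gen_dummy_header's three rules reduce to a small decision function: an explicit skip_head of 0 always forces a dummy header, while a blank or positive skip_head defers to the invalid-character heuristic. Condensed:

    def should_gen_dummy_header(skip_head, headers_mostly_invalid: bool) -> bool:
        if skip_head is None:             # blank: only when >90% of header chars look invalid
            return headers_mostly_invalid
        if skip_head == 0:                # explicit 0: always generate
            return True
        return headers_mostly_invalid     # positive skip: heuristic again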
diff --git a/ap/api/setting_module/services/data_import.py b/ap/api/setting_module/services/data_import.py
index 6cd8a41..bf3cb1b 100644
--- a/ap/api/setting_module/services/data_import.py
+++ b/ap/api/setting_module/services/data_import.py
@@ -1,7 +1,10 @@
+from __future__ import annotations
+
+import functools
 import os.path
 import traceback
 from datetime import datetime
-from typing import List
+from typing import List, Literal

 import numpy as np
 import pandas as pd
@@ -13,6 +16,7 @@
     DATE_FORMAT_STR,
     DATE_FORMAT_STR_ONLY_DIGIT,
     TXT_FILE_TYPE,
+    convert_numeric_by_type,
     convert_time,
     gen_transaction_table_name,
     get_basename,
@@ -97,6 +101,7 @@
     'n/a',
     'nan',
     'null',
+    'na',
 }
 NA_VALUES = {'na', '-', '--', '---', '#NULL!', '#REF!', '#VALUE!', '#NUM!', '#NAME?', '0/0'}
 PREVIEW_ALLOWED_EXCEPTIONS = {'-', '--', '---'}
@@ -105,8 +110,8 @@
 ALL_SYMBOLS = set(PANDAS_DEFAULT_NA | NA_VALUES | INF_VALUES | INF_NEG_VALUES)

 # let app can show preview and import all na column, as string
-NORMAL_NULL_VALUES = {'NA', 'na', 'null'}
-SPECIAL_SYMBOLS = ALL_SYMBOLS - NORMAL_NULL_VALUES - PREVIEW_ALLOWED_EXCEPTIONS
+NORMAL_NULL_VALUES = {'NA', 'na', 'n/a', 'N/A', '', 'null', 'NULL', 'nan'}
+# SPECIAL_SYMBOLS = ALL_SYMBOLS - NORMAL_NULL_VALUES - PREVIEW_ALLOWED_EXCEPTIONS

 IS_ERROR_COL = '___ERR0R___'
 ERR_COLS_NAME = '___ERR0R_C0LS___'
@@ -189,15 +194,11 @@ def check_db_con(db_type, host, port, dbname, schema, username, password):
     db_source.db_detail = db_source_detail
     db_source.type = db_type

-    # 戻り値の初期化
-    result = False
-
+    # if we cannot connect, this will raise Exception
     # コネクションをチェックする
-    with DbProxy(db_source, force_connect=True) as db_instance:
-        if db_instance.is_connected:
-            result = True
+    DbProxy.check_db_connection(db_source, force=True)

-    return result
+    return True


 @log_execution_time()
@@ -579,6 +580,12 @@ def strip_special_symbol(data, is_dict=False):
     # TODO: convert to dataframe than filter is faster , but care about generation purpose ,
     #  we just need to read some rows
+    def clean_value(val):
+        str_val = str(val)
+        if str_val in ALL_SYMBOLS or str_val.lower() in ALL_SYMBOLS:
+            return ''
+        return val
+
     if is_dict:

         def iter_func(x):
@@ -590,16 +597,10 @@ def strip_special_symbol(data, is_dict=False):
             return x

     for row in data:
-        is_ng = False
         if not row:
             continue
-        for val in iter_func(row):
-            if str(val).lower() in SPECIAL_SYMBOLS:
-                is_ng = True
-                break
-
-        if not is_ng:
-            yield row
+        new_row = {k: clean_value(v) for k, v in row.items()} if is_dict else [clean_value(v) for v in row]
+        yield new_row
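The rewritten strip_special_symbol no longer throws away whole rows: previously a row was skipped if any cell held a special symbol, while clean_value now blanks just the offending cell. In miniature (abbreviated symbol set):

    ALL_SYMBOLS = {'-', '--', 'nan', 'null', 'inf'}  # abbreviated stand-in

    def clean_value(val):
        str_val = str(val)
        if str_val in ALL_SYMBOLS or str_val.lower() in ALL_SYMBOLS:
            return ''
        return val

    print([clean_value(v) for v in ['12.5', '-', 'OK']])  # ['12.5', '', 'OK'] — row survives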
+695,7 @@ def validate_data(df: DataFrame, dic_use_cols, na_vals, exclude_cols=None): :param exclude_cols: :return: """ + from ap.api.setting_module.services.csv_import import convert_eu_decimal init_is_error_col(df) @@ -719,6 +721,7 @@ def validate_data(df: DataFrame, dic_use_cols, na_vals, exclude_cols=None): # data type that user chose user_data_type = dic_use_cols[col_name].data_type + d_type = dic_use_cols[col_name].predict_type # do nothing with float column if col_name in float_cols and user_data_type != DataType.INTEGER.name: @@ -736,13 +739,25 @@ def validate_data(df: DataFrame, dic_use_cols, na_vals, exclude_cols=None): # strip quotes and spaces dtype_name = df[col_name].dtype.name - if user_data_type in [DataType.INTEGER.name, DataType.REAL.name]: + if dtype_name == 'boolean': + df[col_name] = df[col_name].astype(pd.StringDtype()).str.lower() + + # convert K sep Int|Real data type + if dic_use_cols[col_name].predict_type and DataType[d_type] in [ + DataType.REAL_SEP, + DataType.INTEGER_SEP, + DataType.EU_REAL_SEP, + DataType.EU_INTEGER_SEP, + ]: + convert_eu_decimal(df, col_name, d_type) + + if user_data_type in [DataType.INTEGER.name, DataType.REAL.name, DataType.BOOLEAN.name]: vals = df[col_name].copy() # convert numeric values numerics = pd.to_numeric(vals, errors='coerce') + numerics = convert_numeric_by_type(numerics, user_data_type) df[col_name] = numerics - # strip quote space then convert non numeric values non_num_idxs = numerics.isna() non_numerics = vals.loc[non_num_idxs].dropna() @@ -752,6 +767,7 @@ def validate_data(df: DataFrame, dic_use_cols, na_vals, exclude_cols=None): # convert non numeric again numerics = pd.to_numeric(non_numerics, errors='coerce') + numerics = convert_numeric_by_type(numerics, user_data_type) df.loc[non_num_idxs, col_name] = numerics # set error for non numeric values @@ -781,28 +797,25 @@ def validate_data(df: DataFrame, dic_use_cols, na_vals, exclude_cols=None): df.loc[df[col_name].isin([float('inf'), float('-inf')]), col_name] = nan elif user_data_type == DataType.TEXT.name: - if dtype_name == 'boolean': - df[col_name] = df[col_name].replace({True: 'True', False: 'False'}) + idxs = df[col_name].dropna().index + if dtype_name == 'object': + df.loc[idxs, col_name] = df.loc[idxs, col_name].astype(str).str.strip("'").str.strip() + elif dtype_name == 'string': + df.loc[idxs, col_name] = df.loc[idxs, col_name].str.strip("'").str.strip() else: - idxs = df[col_name].dropna().index - if dtype_name == 'object': - df.loc[idxs, col_name] = df.loc[idxs, col_name].astype(str).str.strip("'").str.strip() - elif dtype_name == 'string': - df.loc[idxs, col_name] = df.loc[idxs, col_name].str.strip("'").str.strip() - else: - # convert to string before insert to database - df.loc[idxs, col_name] = df.loc[idxs, col_name].astype(str) - continue - - if len(idxs): - conditions = [ - df[col_name].isin(na_vals), - df[col_name].isin(INF_VALUES), - df[col_name].isin(INF_NEG_VALUES), - ] - return_vals = [nan, inf_val, inf_neg_val] - - df[col_name] = np.select(conditions, return_vals, df[col_name]) + # convert to string before insert to database + df.loc[idxs, col_name] = df.loc[idxs, col_name].astype(str) + continue + + if len(idxs): + conditions = [ + df[col_name].isin(na_vals), + df[col_name].isin(INF_VALUES), + df[col_name].isin(INF_NEG_VALUES), + ] + return_vals = [nan, inf_val, inf_neg_val] + + df[col_name] = np.select(conditions, return_vals, df[col_name]) df.head() @@ -965,14 +978,14 @@ def convert_df_col_to_utc(df, get_date_col, is_timezone_inside, 
db_time_zone, ut @log_execution_time() def convert_df_datetime_to_str(df: DataFrame, get_date_col): - return df[df[get_date_col].notnull()][get_date_col].dt.strftime(DATE_FORMAT_STR) + return df[df[get_date_col].notnull()][get_date_col].dt.strftime(DATE_FORMAT_STR).astype(pd.StringDtype()) @log_execution_time() def validate_datetime(df: DataFrame, date_col, is_strip=True, add_is_error_col=True, null_is_error=True): dtype_name = df[date_col].dtype.name if dtype_name == 'object': - df[date_col] = df[date_col].astype(str) + df[date_col] = df[date_col].astype(pd.StringDtype()) elif dtype_name != 'string': return @@ -1199,34 +1212,84 @@ def get_df_first_n_last(df: DataFrame, first_count=10, last_count=10): @log_execution_time() -def save_proc_data_count(db_instance, df: DataFrame, proc_id, get_date_col): - if not df.size or not get_date_col: - return None +def save_proc_data_count(db_instance, df, proc_id, get_date_col): + save_proc_data_count_multiple_dfs( + db_instance, + proc_id=proc_id, + get_date_col=get_date_col, + dfs_push_to_db=df, + ) - s = pd.to_datetime(df[get_date_col], errors='coerce') - s: Series = (s.dt.year * 1_00_00_00 + s.dt.month * 1_00_00 + s.dt.day * 1_00 + s.dt.hour).value_counts() - s.rename(DataCountTable.count.name, inplace=True) - count_df = s.reset_index(name=DataCountTable.count.name) - count_df.rename(columns={'index': DataCountTable.datetime.name}, inplace=True) - count_df[DataCountTable.datetime.name] = pd.to_datetime( - count_df[DataCountTable.datetime.name], - format='%Y%m%d%H', - ).dt.strftime(DATE_FORMAT_FOR_ONE_HOUR) - # # rename - # df = df.rename(columns={get_date_col: DataCountTable.get_date_col()}) - # - # # group data by datetime time - # df[get_date_col] = df[get_date_col].apply( - # lambda x: '{}'.format(datetime.strptime(x, DATE_FORMAT_STR).strftime(DATE_FORMAT_FOR_ONE_HOUR)), - # ) - # count_df = df.groupby([get_date_col]).value_counts() - # count_df = count_df.to_frame(name=DataCountTable.count.name).reset_index() +def save_proc_data_count_multiple_dfs( + db_instance, + *, + proc_id, + get_date_col, + dfs_push_to_db: list[pd.DataFrame] | pd.DataFrame = None, + dfs_pop_from_db: list[pd.DataFrame] | pd.DataFrame = None, + dfs_push_to_file: list[pd.DataFrame] | pd.DataFrame = None, + dfs_pop_from_file: list[pd.DataFrame] | pd.DataFrame = None, +): + def check_args(dfs): + if dfs is None: + return [] + if not isinstance(dfs, list): + return [dfs] + return dfs + + dfs_push_to_db = check_args(dfs_push_to_db) + dfs_pop_from_db = check_args(dfs_pop_from_db) + dfs_push_to_file = check_args(dfs_push_to_file) + dfs_pop_from_file = check_args(dfs_pop_from_file) + + get_proc_data_count_df_func = functools.partial( + get_proc_data_count_df, + get_date_col=get_date_col, + ) + # TODO: aggregate to one df instead of run 4 times + aggregated_df = pd.DataFrame() + + for df in dfs_push_to_db: + count_df = get_proc_data_count_df_func(df, is_db=True, decrease=False) + aggregated_df = pd.concat([aggregated_df, count_df]) + + for df in dfs_pop_from_db: + count_df = get_proc_data_count_df_func(df, is_db=True, decrease=True) + aggregated_df = pd.concat([aggregated_df, count_df]) + + for df in dfs_push_to_file: + count_df = get_proc_data_count_df_func(df, is_db=False, decrease=False) + aggregated_df = pd.concat([aggregated_df, count_df]) + + for df in dfs_pop_from_file: + count_df = get_proc_data_count_df_func(df, is_db=False, decrease=True) + aggregated_df = pd.concat([aggregated_df, count_df]) + + if aggregated_df.empty: + return + + agg_keys = {DataCountTable.count.name: 
'sum', DataCountTable.count_file.name: 'sum'} + aggregated_df = aggregated_df.groupby(DataCountTable.datetime.name).agg(agg_keys).reset_index() + sql_vals = aggregated_df.to_records(index=False).tolist() sql_params = get_insert_params(DataCountTable.get_keys()) sql_insert = gen_bulk_insert_sql(DataCountTable.get_table_name(proc_id), *sql_params) - sql_vals = count_df.to_records(index=False).tolist() + insert_data(db_instance, sql_insert, sql_vals) + set_all_cache_expired(CacheType.TRANSACTION_DATA) + + +def get_proc_data_count_df(df, *, get_date_col, decrease: bool, is_db: bool): + if df.empty: + return pd.DataFrame() + + count_column = DataCountTable.count.name if is_db else DataCountTable.count_file.name + count_df = calculate_value_counts_per_hours(df, get_date_col, count_column=count_column) + if decrease: + count_df[count_column] = -count_df[count_column] + + return count_df @log_execution_time() @@ -1280,3 +1343,29 @@ def save_failed_import_history(proc_id, job_info, error_type): # save import history before return # insert import history save_import_history(proc_id, job_info) + + +@log_execution_time() +def calculate_value_counts_per_hours( + df: DataFrame, + get_date_col, + count_column: Literal['count', 'count_file'] = DataCountTable.count.name, +): + if not df.size or not get_date_col: + return None + + s = pd.to_datetime(df[get_date_col], errors='coerce') + s: Series = (s.dt.year * 1_00_00_00 + s.dt.month * 1_00_00 + s.dt.day * 1_00 + s.dt.hour).value_counts() + count_df = s.rename(count_column).reset_index(name=count_column) + count_df.rename(columns={'index': DataCountTable.datetime.name}, inplace=True) + count_df[DataCountTable.datetime.name] = pd.to_datetime( + count_df[DataCountTable.datetime.name], + format='%Y%m%d%H', + ).dt.strftime(DATE_FORMAT_FOR_ONE_HOUR) + + if count_column == DataCountTable.count.name: + count_df[DataCountTable.count_file.name] = 0 + else: + count_df[DataCountTable.count.name] = 0 + + return count_df diff --git a/ap/api/setting_module/services/equations.py b/ap/api/setting_module/services/equations.py index 1bebea3..5d631c9 100644 --- a/ap/api/setting_module/services/equations.py +++ b/ap/api/setting_module/services/equations.py @@ -44,21 +44,28 @@ def sample_data(self) -> EquationSampleDataResponse: equation = get_function_class_by_id(equation_id=self.equation_id).from_kwargs( **self.model_dump(), ) - sample_data = equation.evaluate( + + evaluate = equation.evaluate( df, out_col=SAMPLE_DATA, x_col=ARRAY_X, y_col=ARRAY_Y, x_dtype=self.x_data_type, y_dtype=self.y_data_type, - )[SAMPLE_DATA] + ) + sample_data = evaluate[SAMPLE_DATA] output_type = equation.get_output_type(x_data_type=self.x_data_type, y_data_type=self.y_data_type) # orjson can't parse infinity # https://github.com/ijl/orjson?tab=readme-ov-file#float sample_data = sample_data.replace({np.inf: 'inf', -np.inf: '-inf'}) + if pd.api.types.is_bool_dtype(sample_data): + sample_data = sample_data.astype(pd.StringDtype()).str.lower() - return EquationSampleDataResponse(sample_data=sample_data.tolist(), output_type=output_type.value) + return EquationSampleDataResponse( + sample_data=sample_data.tolist(), + output_type=output_type.value, + ) def get_all_normal_columns_for_functions( @@ -131,6 +138,7 @@ def validate_functions_calculation(process_id: int, validation_errors: FunctionE for function in functions: function_id = function.get(CfgProcessFunctionColumn.function_id.key) + process_column_id = function.get(CfgProcessFunctionColumn.process_column_id.key) var_x = 
function.get(CfgProcessFunctionColumn.var_x.key) var_y = function.get(CfgProcessFunctionColumn.var_y.key) @@ -147,7 +155,9 @@ def validate_functions_calculation(process_id: int, validation_errors: FunctionE **function, ) try: - equation_sample_data.sample_data() + sample_data = equation_sample_data.sample_data() + # update data type + raw_data_types[process_column_id] = sample_data.output_type except FunctionFieldError as err: validation_errors.add_function_error(err.with_id(function_column_id)) diff --git a/ap/api/setting_module/services/factory_import.py b/ap/api/setting_module/services/factory_import.py index 390a868..f87e1cf 100644 --- a/ap/api/setting_module/services/factory_import.py +++ b/ap/api/setting_module/services/factory_import.py @@ -22,6 +22,10 @@ write_error_import, write_error_trace, ) +from ap.api.setting_module.services.software_workshop_etl_services import ( + get_transaction_data_stmt, + transform_df_for_software_workshop, +) from ap.api.trace_data.services.proc_link import add_gen_proc_link_job from ap.common.common_utils import ( DATE_FORMAT_STR_FACTORY_DB, @@ -36,9 +40,9 @@ DATA_TYPE_ERROR_EMPTY_DATA, DATA_TYPE_ERROR_MSG, DATETIME_DUMMY, - MSG_DB_CON_FAILED, PAST_IMPORT_LIMIT_DATA_COUNT, DataType, + DBType, JobStatus, JobType, ) @@ -118,7 +122,7 @@ def import_factory(proc_id): data_src: CfgDataSourceDB = CfgDataSourceDB.query.get(proc_cfg.data_source_id) # check db connection - check_db_connection(data_src) + DbProxy.check_db_connection(data_src) trans_data = TransactionData(proc_cfg.id) with DbProxy(gen_data_source_of_universal_db(proc_id), True, immediate_isolation_level=True) as db_instance: @@ -195,7 +199,6 @@ def import_factory(proc_id): # validate import date range if end_time >= fac_max_date: - end_time = fac_max_date is_import = False # get data from factory @@ -208,12 +211,22 @@ def import_factory(proc_id): remain_rows = () error_type = None for _rows in data: + # Reassign attribute + get_date_col = proc_cfg.get_date_col() + proc_id = proc_cfg.id + cfg_columns = proc_cfg.columns + is_import, rows, remain_rows = gen_import_data(_rows, remain_rows, auto_increment_idx) if not is_import: continue # dataframe df = pd.DataFrame(rows, columns=cols) + # to save into import history + imported_end_time = str(df[auto_increment_col].max()) + # pivot if this is vertical data + if proc_cfg.data_source.type == DBType.SOFTWARE_WORKSHOP.name: + df = transform_df_for_software_workshop(df, proc_cfg.data_source_id, proc_cfg.process_factid) # no records if not len(df): @@ -275,7 +288,10 @@ def import_factory(proc_id): cfg_parent_proc: CfgProcess = CfgProcess.get_proc_by_id(parent_id) dic_parent_cfg_cols = {cfg_col.id: cfg_col for cfg_col in cfg_parent_proc.columns} dic_cols = {cfg_col.column_name: cfg_col.parent_id for cfg_col in cfg_columns} - dic_rename = {col: dic_parent_cfg_cols[dic_cols[col]].column_name for col in df.columns} + dic_rename = {} + for col in df.columns: + if dic_cols.get(col): + dic_rename[col] = dic_parent_cfg_cols[dic_cols[col]].column_name df = df.rename(columns=dic_rename) orig_df = orig_df.rename(columns=dic_rename) df_error = df_error.rename(columns=dic_rename) @@ -300,9 +316,6 @@ def import_factory(proc_id): # import data job_info.import_type = JobType.FACTORY_IMPORT.name - # to save into import history - imported_end_time = rows[-1][auto_increment_idx] - # to save into job_info.import_from = start_time job_info.import_to = imported_end_time @@ -311,6 +324,8 @@ def import_factory(proc_id): if error_type: job_info.status = JobStatus.FAILED.name 
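For reference, the vertical-to-horizontal pivot that transform_df_for_software_workshop applies to SOFTWARE_WORKSHOP sources (implemented in software_workshop_etl_services.py later in this patch) can be pictured with a minimal pandas sketch; the column names and values here are illustrative, not taken from the real schema:

import pandas as pd

# Hypothetical vertical rows: one row per (serial_no, code) measurement.
df_vertical = pd.DataFrame({
    'serial_no': ['S1', 'S1', 'S2', 'S2'],
    'code': ['DIC001', 'DIC002', 'DIC001', 'DIC002'],
    'value': ['1.2', '3.4', '1.3', '3.5'],
})

# De-duplicate on the index/pivot keys, then pivot codes into columns,
# mirroring transform_transaction_data_to_horizontal further down.
df_horizontal = (
    df_vertical.drop_duplicates(subset=['serial_no', 'code'], keep='last')
    .pivot(index='serial_no', columns='code', values='value')
    .reset_index()
)
# df_horizontal now has one row per serial_no, with DIC001/DIC002 as columns.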
job_info.err_msg = error_type + + df = remove_non_exist_columns_in_df(df, [col.column_name for col in cfg_columns]) save_res = import_data(df, proc_id, get_date_col, cfg_columns, job_info) gen_import_job_info(job_info, save_res, start_time, imported_end_time, err_cnt=df_error_cnt) @@ -445,8 +460,7 @@ def get_sql_range_time( return start_time, end_time, filter_time -@log_execution_time('[FACTORY DATA IMPORT SELECT SQL]') -def get_data_by_range_time(db_instance, get_date_col, column_names, table_name, start_time, end_time, sql_limit): +def get_data_by_range_time_sql(db_instance, get_date_col, column_names, table_name, start_time, end_time, sql_limit): if isinstance(db_instance, mysql.MySQL): sel_cols = ','.join(column_names) else: @@ -477,8 +491,36 @@ def get_data_by_range_time(db_instance, get_date_col, column_names, table_name, else: sql = f'SELECT {sql} LIMIT {sql_limit}' + return sql, None + + +@log_execution_time('[FACTORY DATA IMPORT SELECT SQL]') +def get_data_by_range_time( + db_instance, + proc_cfg, + get_date_col, + column_names, + table_name, + start_time, + end_time, + sql_limit, +): + if proc_cfg.data_source.type == DBType.SOFTWARE_WORKSHOP.name: + stmt = get_transaction_data_stmt(proc_cfg.process_factid, start_time, end_time, limit=sql_limit) + sql, params = db_instance.gen_sql_and_params(stmt) + else: + sql, params = get_data_by_range_time_sql( + db_instance, + get_date_col, + column_names, + table_name, + start_time, + end_time, + sql_limit, + ) + logger.info(f'sql: {sql}') - data = db_instance.fetch_many(sql, FETCH_MANY_SIZE) + data = db_instance.fetch_many(sql, FETCH_MANY_SIZE, params=params) if not data: return None @@ -498,6 +540,7 @@ def get_factory_data(proc_cfg, column_names, auto_increment_col, start_time, end with DbProxy(proc_cfg.data_source) as db_instance: data = get_data_by_range_time( db_instance, + proc_cfg, auto_increment_col, column_names, proc_cfg.table_name, @@ -523,12 +566,13 @@ def get_factory_min_max_date(proc_cfg): # gen sql agg_results = [] get_date_col = add_double_quotes(proc_cfg.get_auto_increment_col_else_get_date()) - orig_tblname = proc_cfg.table_name.strip('"') + orig_tblname = proc_cfg.table_name_for_query_datetime().strip('"') table_name = add_double_quotes(orig_tblname) if not isinstance(db_instance, mysql.MySQL) else orig_tblname for agg_func in ['MIN', 'MAX']: sql = f'select {agg_func}({get_date_col}) from {table_name}' if isinstance(db_instance, mssqlserver.MSSQLServer): sql = f'select convert(varchar(30), {agg_func}({get_date_col}), 127) from {table_name}' + sql = proc_cfg.filter_for_query_datetime(sql) _, rows = db_instance.run_sql(sql, row_is_dict=False) if not rows: @@ -614,7 +658,7 @@ def factory_past_data_transform(proc_id): data_src: CfgDataSourceDB = CfgDataSourceDB.query.get(proc_cfg.data_source_id) # check db connection - check_db_connection(data_src) + DbProxy.check_db_connection(data_src) # columns info proc_name = proc_cfg.name @@ -629,6 +673,7 @@ def factory_past_data_transform(proc_id): trans_data = TransactionData(proc_id) with DbProxy(gen_data_source_of_universal_db(proc_id), True, immediate_isolation_level=True) as db_instance: + trans_data.create_table(db_instance) # last import date last_import = trans_data.get_import_history_last_import(db_instance, JobType.FACTORY_PAST_IMPORT.name) @@ -714,6 +759,9 @@ def factory_past_data_transform(proc_id): # dataframe df = pd.DataFrame(rows, columns=cols) + # pivot if this is vertical data + if proc_cfg.data_source.type == DBType.SOFTWARE_WORKSHOP.name: + df = 
transform_df_for_software_workshop(df, proc_cfg.data_source_id, proc_cfg.process_factid) # no records if not len(df): @@ -789,6 +837,8 @@ def factory_past_data_transform(proc_id): # save import history job_info.status = JobStatus.FAILED.name job_info.err_msg = error_type + + df = remove_non_exist_columns_in_df(df, [col.column_name for col in cfg_columns]) save_res = import_data(df, proc_id, get_date_col, cfg_columns, job_info) # update job info @@ -820,12 +870,17 @@ def factory_past_data_transform(proc_id): yield job_info +def remove_non_exist_columns_in_df(df, required_columns: list[str]) -> pd.DataFrame: + good_columns = [col for col in df.columns if col in required_columns] + return df[good_columns] + + @log_execution_time() def handle_time_zone(proc_cfg, get_date_col): # convert utc time func get_date, tzoffset_str, db_timezone = get_tzoffset_of_random_record( proc_cfg.data_source, - proc_cfg.table_name, + proc_cfg.table_name_for_query_datetime(), get_date_col, ) @@ -849,14 +904,6 @@ def handle_time_zone(proc_cfg, get_date_col): return is_timezone_inside, db_time_zone, utc_offset -@log_execution_time() -def check_db_connection(data_src): - # check db connection - with DbProxy(data_src) as db_instance: - if not db_instance.is_connected: - raise Exception(MSG_DB_CON_FAILED) - - @log_execution_time() def gen_import_data(rows, remain_rows, auto_increment_idx): is_allow_import = True diff --git a/ap/api/setting_module/services/master_data_transform_pattern.py b/ap/api/setting_module/services/master_data_transform_pattern.py new file mode 100644 index 0000000..cbc056b --- /dev/null +++ b/ap/api/setting_module/services/master_data_transform_pattern.py @@ -0,0 +1,104 @@ +import re +from typing import Union + +import pandas as pd +from pandas._libs.missing import NAType + +from ap.common.constants import EMPTY_STRING, HALF_WIDTH_SPACE + +REMOVE_CHARACTERS_PATTERN = r'\\+[nrt]' + + +class RegexRule: + regex_dict = None + + @classmethod + def extract_data(cls, *args, **kwargs) -> Union[tuple, str, None, NAType]: + raise NotImplementedError('Method not implemented!') + + # @classmethod + # def pattern_regexes(cls, data: str, **extend_args) -> Union[tuple, str, None, NAType]: + # for pattern_no, regex in cls.regex_dict.items(): + # result = cls.extract_data(data, regex, pattern_no, **extend_args) + # if result is None: + # continue + # return result + # return None + + @classmethod + def is_not_data(cls, data: str) -> bool: + return pd.isnull(data) or data is None + + +class ColumnRawNameRule(RegexRule): + # V2 eFA + # 計測項目ID 計測項目名 DATA_ID DATA_NAME + # LNSCXX4302_M00022 △q/△SP荷重 [mm3/str/N] DIC14028 7孔Z軸補正量指令値 + # LNSCXX4302_M00042 △q/△SP荷重 [mm3/str/N] DIC14128 7孔Z軸補正量指令値 + + # In abnormal case + # No 計測項目名 data name unit + # ① AJP圧入荷重[N](最終荷重) -> AJP圧入荷重 最終荷重 N + # ② SPバネ定数[N]] -> SPバネ定数 N + # 切込み終了時荷重(ローラ①実測)[N -> 切込み終了時荷重(ローラ①実測) N + # ③ ターミナル位置寸法 L側 [m m ] -> ターミナル位置寸法 L側 m m + # ④ 調整荷重(調整終了点)(ローラ②実測)[N] -> 調整荷重(調整終了点)(ローラ②実測) N + # ④ AJP圧入荷重[N][mm](最終荷重) -> AJP圧入荷重 最終荷重 N + + regex_dict = { + 'pattern_1': r'^([^\[\]]*)[\[\s]*([^\[\]]*)[\]\s]*([^\[\]]*)$', + 'pattern_2': r'^([^\[\]]+)[\[\s]*([^\[\]]*)[\]\s]*[\[\s]*([^\[\]]*)[\]\s]*([^\[\]]*)$', + } + + @classmethod + def extract_data(cls, data: str): + column_name = data + unit = EMPTY_STRING + suffix_data_name = EMPTY_STRING + if cls.is_not_data(data): + return data, unit + + data = re.sub(r'\\+[nrt]|\(?±\)?', EMPTY_STRING, data) + is_match = False + for pattern_no, regex in cls.regex_dict.items(): + match = re.search(regex, data) + if not 
match or is_match: + continue + is_match = True + if pattern_no == 'pattern_1': + column_name, unit, suffix_data_name = match.groups() + else: + column_name, unit, _, suffix_data_name = match.groups() + + if not is_match: + return data, unit + + column_name = column_name.strip() + if len(suffix_data_name) != 0: + column_name += f'{HALF_WIDTH_SPACE}{suffix_data_name}' + + # 4. Replace any bracket. + # 4-1. Replace "\s?[(\[]\s?" to "(" + column_name = re.sub(r'\s?[(\[【「『〖〚〘⦅〔]\s?', '(', column_name) + # 4-2. Replace "\s?[)\]" to ")" + column_name = re.sub(r'\s?[)\]】」』〗〛〙⦆〕]\s?', ')', column_name) + + # handle case 'μm' and 'µm' + unit = unit.replace('μ', 'µ').replace('KPa', 'kPa').replace('cm^3/min', 'cm3/min').strip() + + # 4-3. Remove "()" + column_name = re.sub(r'[()]', HALF_WIDTH_SPACE, column_name) + column_name = re.sub(r'\s+', HALF_WIDTH_SPACE, column_name) + + # 5. Replace `No.`=> `No` by removing `.` + column_name = column_name.replace('No.', 'No') + column_name = column_name.replace(';', ':') # Cover case 管理マスタ値1;指示値 & 管理マスタ値1:指示値 + + # Remove 計測値:|measurement. + measurement_removes = ['計測値:', '加工値:', '加工条件:', '加工条件値:', 'その他:', 'measurement.'] + column_name = re.sub('|'.join(map(re.escape, measurement_removes)), EMPTY_STRING, column_name) + + # 6. Strip + column_name = column_name.strip() + + return column_name, unit diff --git a/ap/api/setting_module/services/polling_frequency.py b/ap/api/setting_module/services/polling_frequency.py index 8ac024c..5b136c7 100644 --- a/ap/api/setting_module/services/polling_frequency.py +++ b/ap/api/setting_module/services/polling_frequency.py @@ -45,7 +45,13 @@ def change_polling_all_interval_jobs(interval_sec=None, run_now=False, is_user_r add_import_job(proc_cfg, interval_sec=interval_sec, run_now=run_now, is_user_request=is_user_request) -def add_import_job(proc_cfg: CfgProcess, interval_sec=None, run_now=None, is_user_request: bool = False): +def add_import_job( + proc_cfg: CfgProcess, + interval_sec=None, + run_now=None, + is_user_request: bool = False, + register_by_file_request_id: str = None, +): if interval_sec is None: interval_sec = CfgConstant.get_value_by_type_first(CfgConstantType.POLLING_FREQUENCY.name, int) @@ -70,6 +76,7 @@ def add_import_job(proc_cfg: CfgProcess, interval_sec=None, run_now=None, is_use '_proc_name': proc_cfg.name, 'proc_id': proc_cfg.id, 'is_user_request': is_user_request, + 'register_by_file_request_id': register_by_file_request_id, } add_job_to_scheduler(job_id, job_name, trigger, import_func, run_now, dic_import_param) diff --git a/ap/api/setting_module/services/process_delete.py b/ap/api/setting_module/services/process_delete.py index 87d000c..bee951d 100644 --- a/ap/api/setting_module/services/process_delete.py +++ b/ap/api/setting_module/services/process_delete.py @@ -69,9 +69,9 @@ def delete_proc_cfg_and_relate_jobs(proc_id): for proc_id in deleting_process_ids: remove_jobs(target_jobs, proc_id) - # TODO: batch delete + CfgProcess.batch_delete(deleting_process_ids) + for proc_id in deleting_process_ids: - CfgProcess.delete(proc_id=proc_id) delete_transaction_db_file(proc_id) return deleting_process_ids diff --git a/ap/api/setting_module/services/show_latest_record.py b/ap/api/setting_module/services/show_latest_record.py index 7566733..45b1dfb 100644 --- a/ap/api/setting_module/services/show_latest_record.py +++ b/ap/api/setting_module/services/show_latest_record.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json import os import re import time @@ -11,6 +12,7 @@ import pandas as pd from 
flask_babel import get_locale +from ap import check_exist from ap.api.efa.services.etl import detect_file_path_delimiter, preview_data from ap.api.setting_module.services.csv_import import add_column_file_name, convert_csv_timezone, gen_dummy_header from ap.api.setting_module.services.data_import import ( @@ -18,6 +20,14 @@ strip_special_symbol, validate_datetime, ) +from ap.api.setting_module.services.master_data_transform_pattern import ColumnRawNameRule +from ap.api.setting_module.services.software_workshop_etl_services import ( + get_code_name_mapping, + get_transaction_data_stmt, + measurements_table, + quality_measurements_table, + transform_df_for_software_workshop, +) from ap.api.setting_module.services.v2_etl_services import ( get_df_v2_process_single_file, get_preview_processes_v2, @@ -28,10 +38,14 @@ rename_sub_part_no, ) from ap.common.common_utils import ( + add_suffix_for_same_column_name, get_csv_delimiter, + get_files, + get_preview_data_file_folder, get_sorted_files, get_sorted_files_by_size, get_sorted_files_by_size_and_time, + make_dir_from_file_path, remove_non_ascii_chars, ) from ap.common.constants import ( @@ -92,30 +106,34 @@ def get_latest_records( directory=None, limit=5, current_process_id: int | None = None, + is_v2_datasource=False, + filtered_process_name=None, + process_factid=None, + is_convert_datetime=True, ): - is_v2_datasource = False previewed_files = None cols_with_types = [] - filtered_process_name = False delimiter = 'Auto' - skip_head = '' + skip_head = None etl_func = '' n_rows = None is_transpose = None if data_source_id: - data_source = CfgDataSource.query.get(data_source_id) + data_source: CfgDataSource = CfgDataSource.query.get(data_source_id) if not data_source: return None is_v2_datasource = is_v2_data_source(ds_type=data_source.type) is_csv_or_v2 = data_source.type.lower() in [DBType.CSV.name.lower(), DBType.V2.name.lower()] if is_csv_or_v2: csv_detail = data_source.csv_detail - filtered_process_name = csv_detail.process_name or False + if not filtered_process_name: + filtered_process_name = csv_detail.process_name or False directory = csv_detail.directory + file_name = csv_detail.directory if csv_detail.is_file_path else None delimiter = csv_detail.delimiter etl_func = csv_detail.etl_func - skip_head = '' if (csv_detail.skip_head == 0 and not csv_detail.dummy_header) else csv_detail.skip_head + skip_head = csv_detail.skip_head n_rows = csv_detail.n_rows is_transpose = csv_detail.is_transpose else: @@ -141,20 +159,27 @@ def get_latest_records( return_df=True, max_records=1000, file_name=file_name, - line_skip=skip_head, + skip_head=skip_head, n_rows=n_rows, is_transpose=is_transpose, show_file_name_column=True, current_process_id=current_process_id, + is_convert_datetime=is_convert_datetime, ) - column_raw_name = dic_preview.get('org_headers') + column_raw_names = dic_preview.get('org_headers') headers = normalize_list(dic_preview.get('header')) headers = [normalize_str(col) for col in headers] data_types = dic_preview.get('dataType') same_values = dic_preview.get('same_values') is_v2_history = dic_preview.get('v2_type') == DBType.V2_HISTORY if headers and data_types: - cols_with_types = gen_cols_with_types(headers, data_types, same_values, is_v2_history, column_raw_name) + cols_with_types = gen_cols_with_types( + headers, + data_types, + same_values, + is_v2_history, + column_raw_names, + ) # sort columns # sorted(csv_detail.csv_columns, key=lambda c: c.order or c.id) @@ -165,11 +190,21 @@ def get_latest_records( df_rows = 
dic_preview.get('content', None) previewed_files = dic_preview.get('previewed_files') else: - cols, df_rows = get_info_from_db(data_source, table_name) + cols, df_rows, dict_column_name_and_unit = get_info_from_db( + data_source, + table_name, + process_factid=process_factid, + ) data_types = [gen_data_types(df_rows[col]) for col in cols] same_values = check_same_values_in_df(df_rows, cols) if cols and data_types: - cols_with_types = gen_cols_with_types(cols, data_types, same_values) + cols_with_types = gen_cols_with_types( + normalize_list(cols), + data_types, + same_values, + column_raw_name=cols, + dict_column_name_and_unit=dict_column_name_and_unit, + ) # format data df_rows = convert_utc_df(df_rows, cols, data_types, data_source, table_name) @@ -187,6 +222,8 @@ def get_latest_records( 'romaji': DATETIME_DUMMY, 'is_date': True, 'check_same_value': {'is_null': False, 'is_same': False}, + 'is_checked': True, + 'is_show': True, }, ) cols.insert(dummy_datetime_idx, DATETIME_DUMMY) @@ -237,8 +274,19 @@ def get_latest_records( # return [dic_col_type.get(col) for col in cols] +def get_info_from_db(data_source, table_name, process_factid: str | None = None, sql_limit: int = 2000): + if data_source.type == DBType.SOFTWARE_WORKSHOP.name: + return get_info_from_db_software_workshop( + data_source.id, + quality_measurements_table.name, + process_factid, + ) + return get_info_from_db_normal(data_source.id, table_name, sql_limit) + + @lru_cache(maxsize=20) -def get_info_from_db(data_source, table_name, sql_limit: int = 2000): +def get_info_from_db_normal(data_source_id, table_name, sql_limit: int = 2000): + data_source = CfgDataSource.query.get(data_source_id) with DbProxy(data_source) as db_instance: if not db_instance or not table_name: return [], [] @@ -255,7 +303,45 @@ def get_info_from_db(data_source, table_name, sql_limit: int = 2000): cols = normalize_list(cols) df_rows = normalize_big_rows(rows, cols, strip_quote=False) - return cols, df_rows + return cols, df_rows, {} + + +@log_execution_time() +@memoize(duration=300) +def get_info_from_db_software_workshop(data_source_id, table_name, child_equip_id, sql_limit: int = 2000): + data_source = CfgDataSource.query.get(data_source_id) + with DbProxy(data_source) as db_instance: + if not db_instance or not table_name or not child_equip_id: + return [], [] + stmt = get_transaction_data_stmt(child_equip_id, limit=sql_limit, sort_by_time=False) + sql, params = db_instance.gen_sql_and_params(stmt) + cols, rows = db_instance.run_sql(sql, row_is_dict=False, params=params) + + df = pd.DataFrame(rows, columns=cols) + df = df.sort_values(by=quality_measurements_table.c.event_time.name) + + code_col = measurements_table.c.code.name + unit_col = measurements_table.c.unit.name + + df[unit_col] = df[unit_col].str.strip() # Remove space characters in unit value + + dict_code_with_unit = ( + df.dropna(subset=[unit_col]) # do not get unit column with NA + .drop_duplicates(subset=[code_col])[[code_col, unit_col]] # only get unique code column + .set_index(code_col)[unit_col] # convert to dictionary + .to_dict() + ) + dict_code_with_name = get_code_name_mapping(data_source.id, child_equip_id) + dict_code_with_name = add_suffix_for_same_column_name(dict_code_with_name) + dict_column_name_with_unit = {dict_code_with_name[code]: unit for code, unit in dict_code_with_unit.items()} + + df = transform_df_for_software_workshop(df, data_source.id, child_equip_id) + transform_cols = df.columns.to_list() + transform_rows = df.values.tolist() + + df_rows = 
normalize_big_rows(transform_rows, transform_cols, strip_quote=False) + + return transform_cols, df_rows, dict_column_name_with_unit @log_execution_time() @@ -293,14 +379,13 @@ def save_master_vis_config(proc_id, cfg_jsons): @log_execution_time() def get_csv_data_from_files( sorted_files, - line_skip: int | None, - n_rows: int, + skip_head: int | None, + n_rows: int | None, is_transpose: bool, etl_func, csv_delimiter, max_records=5, ): - skip_head = 0 if not line_skip else int(line_skip) csv_file = sorted_files[0] skip_tail = 0 encoding = None @@ -324,7 +409,7 @@ def get_csv_data_from_files( try: data = read_data( csv_file, - skip_head=line_skip, + skip_head=skip_head, n_rows=n_rows, is_transpose=is_transpose, delimiter=csv_delimiter, @@ -401,14 +486,15 @@ def get_csv_data_from_files( else: raise ValueError('Cannot get headers_name and data_details') - # generate column name if there is not header in file + # check for header and generate column name + # TODO: We should make use of dummy_header variable of data_src if data_src already exists and not check again org_header, header_names, dummy_header, partial_dummy_header, data_details = gen_dummy_header( header_names, data_details, - line_skip, + skip_head, ) - skip_head = skip_head_detected if skip_head_detected else line_skip + skip_head = skip_head_detected if skip_head_detected else skip_head return org_header, header_names, dummy_header, partial_dummy_header, data_details, encoding, skip_tail, skip_head @@ -421,11 +507,12 @@ def preview_csv_data( return_df=False, max_records=5, file_name=None, - line_skip=None, + skip_head=None, n_rows: int | None = None, is_transpose: bool = False, show_file_name_column=False, current_process_id=None, + is_convert_datetime=True, ): csv_delimiter = get_csv_delimiter(csv_delimiter) @@ -442,11 +529,12 @@ def preview_csv_data( same_values = [] if not sorted_files: return { + 'directory': folder_url, 'file_name': csv_file, 'header': header_names, 'content': [] if return_df else data_details, 'dataType': data_types, - 'skip_head': line_skip, + 'skip_head': skip_head, 'n_rows': n_rows, 'is_transpose': is_transpose, 'skip_tail': 0, @@ -467,7 +555,7 @@ def preview_csv_data( skip_head_detected, ) = get_csv_data_from_files( sorted_files, - line_skip=line_skip, + skip_head=skip_head, n_rows=n_rows, is_transpose=is_transpose, etl_func=etl_func, @@ -488,11 +576,18 @@ def preview_csv_data( first_datetime_col_idx = None # convert utc for col, dtype in zip(header_names, data_types): + if DataType(dtype) in [DataType.DATETIME, DataType.DATE, DataType.TIME]: + df_data_details[col] = df_data_details[col].astype(pd.StringDtype()) + if DataType(dtype) is not DataType.DATETIME: continue # Convert UTC time validate_datetime(df_data_details, col, False, False) - convert_csv_timezone(df_data_details, col) + # When show sample data on Process Config, it will show raw data of datetime value. 
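The branch that follows is easier to read with a small standalone illustration of its two paths; the sample values are made up, and convert_csv_timezone's internals live elsewhere in this module:

import pandas as pd

s = pd.Series(['2023-01-02 09:00', 'not a date'])
# is_convert_datetime=True path: validate, then normalize (unparseable rows
# become NaT under errors='coerce' and are dropped afterwards).
converted = pd.to_datetime(s, errors='coerce')
# is_convert_datetime=False path: keep the raw text and only fix the dtype, so
# the preview shows datetime values exactly as they appear in the file.
raw = s.astype(pd.StringDtype())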
+ if is_convert_datetime: + convert_csv_timezone(df_data_details, col) + else: + df_data_details[col] = df_data_details[col].astype(pd.StringDtype()) df_data_details.dropna(subset=[col], inplace=True) # if not first_datetime_col_idx: @@ -514,18 +609,6 @@ def preview_csv_data( data_types, ) - # check to add generated datetime column - if current_process_id is not None: - # we check datetime column and add this datetime column into details - df_data_details, org_headers, header_names, dupl_cols, data_types = add_generated_datetime_column( - current_process_id, - df_data_details, - org_headers, - header_names, - dupl_cols, - data_types, - ) - if DataType.DATETIME.value not in data_types and DATETIME_DUMMY not in df_data_details.columns: dummy_datetime_idx = 0 df_data_details = gen_dummy_datetime(df_data_details) @@ -539,6 +622,18 @@ def preview_csv_data( org_headers = re_order_items_by_datetime_idx(first_datetime_col_idx, org_headers) df_data_details = df_data_details[header_names] + # check to add generated datetime column + if current_process_id is not None: + # we check datetime column and add this datetime column into details + df_data_details, org_headers, header_names, dupl_cols, data_types = add_generated_datetime_column( + current_process_id, + df_data_details, + org_headers, + header_names, + dupl_cols, + data_types, + ) + same_values = check_same_values_in_df(df_data_details, header_names) if not return_df: @@ -561,6 +656,7 @@ def preview_csv_data( has_dupl_cols = True return { + 'directory': folder_url, 'file_name': csv_file, 'header': header_names, 'content': df_data_details, @@ -595,7 +691,7 @@ def preview_v2_data( sorted_files = get_sorted_files_by_size(folder_url) if not file_name else [file_name] encoding = None csv_file = '' - skip_head = 0 + skip_head = None skip_tail = 0 header_names = [] data_types = [] @@ -831,7 +927,14 @@ def gen_v2_history_sub_part_no_column(column_name): @log_execution_time() @memoize(is_save_file=False) -def gen_cols_with_types(cols, data_types, same_values, is_v2_history=False, column_raw_name=[]): +def gen_cols_with_types( + cols, + data_types, + same_values, + is_v2_history=False, + column_raw_name=[], + dict_column_name_and_unit={}, +): ja_locale = False cols_with_types = [] with suppress(Exception): @@ -839,7 +942,14 @@ def gen_cols_with_types(cols, data_types, same_values, is_v2_history=False, colu has_is_get_date_col = False if not column_raw_name: column_raw_name = cols - for col_name, col_raw_name, data_type, same_value in zip(cols, column_raw_name, data_types, same_values): + cols_raw_name_added_suffix, _ = gen_colsname_for_duplicated(column_raw_name.copy()) + for ( + col_name, + col_raw_name, + col_raw_name_added_suffix, + data_type, + same_value, + ) in zip(cols, column_raw_name, cols_raw_name_added_suffix, data_types, same_values): is_date = False if has_is_get_date_col else DataType(data_type) is DataType.DATETIME if is_date: has_is_get_date_col = True @@ -847,26 +957,36 @@ def gen_cols_with_types(cols, data_types, same_values, is_v2_history=False, colu is_big_int = DataType(data_type) is DataType.BIG_INT # add to output if col_name: + mapped_col_name = normalize_str(col_raw_name_added_suffix) + column_name_extracted, unit = ColumnRawNameRule.extract_data(mapped_col_name) if ja_locale: - system_name = to_romaji(col_name) if not is_v2_history else gen_v2_history_sub_part_no_column(col_name) + system_name = ( + to_romaji(column_name_extracted) + if not is_v2_history + else gen_v2_history_sub_part_no_column(column_name_extracted) + ) else: 
system_name = ( - remove_non_ascii_chars(col_name) + remove_non_ascii_chars(column_name_extracted) if not is_v2_history - else gen_v2_history_sub_part_no_column(col_name) + else gen_v2_history_sub_part_no_column(column_name_extracted) ) + unit = dict_column_name_and_unit.get(col_raw_name, unit) cols_with_types.append( { 'column_name': col_name, 'data_type': DataType(data_type).name if not is_big_int else DataType.TEXT.name, 'name_en': system_name, # this is system_name - 'romaji': to_romaji(col_name), + 'romaji': to_romaji(column_name_extracted), 'is_get_date': is_date, 'check_same_value': same_value, 'is_big_int': is_big_int, - 'name_jp': col_name if ja_locale else '', - 'name_local': col_name if not ja_locale else '', + 'name_jp': column_name_extracted if ja_locale else '', + 'name_local': column_name_extracted if not ja_locale else '', 'column_raw_name': col_raw_name, + 'unit': unit, + 'is_checked': not same_value.get('is_null'), + 'is_show': True, }, ) @@ -900,7 +1020,10 @@ def convert_utc_df(df_rows, cols, data_types, data_source, table_name): @log_execution_time() def transform_df_to_rows(cols, df_rows, limit): df_rows.columns = normalize_list(df_rows.columns) - return [dict(zip(cols, vals)) for vals in df_rows[0:limit][cols].to_records(index=False).tolist()] + normalized_cols = normalize_list(cols) + return [ + dict(zip(normalized_cols, vals)) for vals in df_rows[0:limit][normalized_cols].to_records(index=False).tolist() + ] @log_execution_time() @@ -934,6 +1057,7 @@ def gen_v2_columns_with_types(v2_datasrc): limit=5, return_df=True, process_name=v2_csv_detail.get('process_name'), + file_name=v2_csv_detail.get('directory') if v2_csv_detail.get('is_file_path') else None, ) for i, column in enumerate(v2_data_preview.get('header')): data_type = int(v2_data_preview.get('dataType')[i]) @@ -1044,3 +1168,47 @@ def add_generated_datetime_column( # TODO: add sample data for datetime column df_data_details[column_name] = None return df_data_details, org_headers, header_names, dupl_cols, data_types + + +@log_execution_time() +def save_preview_data_file( + data_source_id: int, + data: dict, + table_name=None, + process_factid=None, +): + sample_data_file = gen_latest_record_result_file_path( + data_source_id, + table_name=table_name, + process_factid=process_factid, + ) + make_dir_from_file_path(sample_data_file) + with open(sample_data_file, 'w') as outfile: + json.dump(data, outfile) + + +def get_preview_data_files(data_source_id, table_name=None, process_factid=None): + folder_path = get_preview_data_file_folder(data_source_id) + if check_exist(folder_path): + _files = get_files(folder_path, extension=['csv', 'tsv', 'json']) + file_name = gen_latest_record_result_file_path( + data_source_id, + table_name=table_name, + process_factid=process_factid, + ) + if file_name in _files: + return file_name + + return None + + +def gen_latest_record_result_file_path(data_source_id, table_name=None, process_factid=None): + sample_data_path = get_preview_data_file_folder(data_source_id) + if table_name: + file_name = f'{data_source_id}_{table_name}.json' + elif process_factid: + file_name = f'{data_source_id}_{process_factid}.json' + else: + file_name = f'{data_source_id}.json' + sample_data_file = os.path.join(sample_data_path, file_name) + return sample_data_file diff --git a/ap/api/setting_module/services/software_workshop_etl_services.py b/ap/api/setting_module/services/software_workshop_etl_services.py new file mode 100644 index 0000000..452a951 --- /dev/null +++ 
b/ap/api/setting_module/services/software_workshop_etl_services.py @@ -0,0 +1,332 @@ +from __future__ import annotations + +import pandas as pd +import sqlalchemy as sa + +from ap.common.common_utils import add_suffix_for_same_column_name +from ap.common.constants import UNDER_SCORE +from ap.common.logger import log_execution_time +from ap.common.memoize import memoize +from ap.common.pydn.dblib.db_proxy import DbProxy +from ap.setting_module.models import CfgDataSource, CfgProcess + +factory_table = sa.Table( + 'fctries', + sa.MetaData(), + sa.Column('fctry_id', sa.TEXT), + sa.Column('fctry_name', sa.TEXT), +) + +line_groups_table = sa.Table( + 'line_grps', + sa.MetaData(), + sa.Column('fctry_id', sa.TEXT), + sa.Column('line_grp_id', sa.TEXT), +) + +lines_table = sa.Table( + 'lines', + sa.MetaData(), + sa.Column('line_id', sa.TEXT), + sa.Column('line_name', sa.TEXT), + sa.Column('line_grp_id', sa.TEXT), +) + +equips_table = sa.Table( + 'equips', + sa.MetaData(), + sa.Column('equip_id', sa.TEXT), + sa.Column('line_id', sa.TEXT), +) + +child_equips_table = sa.Table( + 'child_equips', + sa.MetaData(), + sa.Column('child_equip_id', sa.TEXT), + sa.Column('child_equip_name', sa.TEXT), + sa.Column('equip_id', sa.TEXT), +) + +child_equip_meas_items_table = sa.Table( + 'child_equip_meas_items', + sa.MetaData(), + sa.Column('child_equip_id', sa.TEXT), + sa.Column('meas_item_code', sa.TEXT), + sa.Column('meas_item_name', sa.TEXT), +) + +quality_measurements_table = sa.Table( + 'quality_measurements', + sa.MetaData(), + sa.Column('quality_measurement_id', sa.BIGINT), + sa.Column('child_equip_id', sa.TEXT), + sa.Column('event_time', sa.TIMESTAMP), + sa.Column('part_no', sa.TEXT), + sa.Column('lot_no', sa.TEXT), + sa.Column('tray_no', sa.TEXT), + sa.Column('serial_no', sa.TEXT), +) + +measurements_table = sa.Table( + 'measurements', + sa.MetaData(), + sa.Column('quality_measurement_id', sa.BIGINT), + sa.Column('code', sa.TEXT), + sa.Column('unit', sa.TEXT), + sa.Column('value', sa.REAL), +) + +string_measurements_table = sa.Table( + 'string_measurements', + sa.MetaData(), + sa.Column('quality_measurement_id', sa.BIGINT), + sa.Column('code', sa.TEXT), + sa.Column('unit', sa.TEXT), + sa.Column('value', sa.TEXT), +) + + +def get_processes_stmt(limit: int | None = None): + join_master = ( + sa.join( + left=child_equips_table, + right=equips_table, + onclause=equips_table.c.equip_id == child_equips_table.c.equip_id, + ) + .join( + right=lines_table, + onclause=lines_table.c.line_id == equips_table.c.line_id, + ) + .join( + right=line_groups_table, + onclause=line_groups_table.c.line_grp_id == lines_table.c.line_grp_id, + ) + .join( + right=factory_table, + onclause=factory_table.c.fctry_id == line_groups_table.c.fctry_id, + ) + ) + + stmt = ( + sa.select( + [ + sa.func.concat( + factory_table.c.fctry_name, + UNDER_SCORE, + lines_table.c.line_name, + UNDER_SCORE, + child_equips_table.c.child_equip_name, + ).label(CfgProcess.process_factname.name), + child_equips_table.c.child_equip_id, + ], + ) + .select_from(join_master) + .order_by(child_equips_table.c.child_equip_id) + .distinct(child_equips_table.c.child_equip_id) + ) + + if limit is not None: + stmt = stmt.limit(limit) + + return stmt + + +def get_code_name_mapping_stmt(process_factid: str): + stmt = ( + sa.select( + [ + child_equip_meas_items_table.c.meas_item_code, + child_equip_meas_items_table.c.meas_item_name, + ], + ) + .where(child_equip_meas_items_table.c.child_equip_id == process_factid) + 
.order_by(child_equip_meas_items_table.c.meas_item_code) + ) + + return stmt + + +def get_master_data_stmt( + process_factid: str | None = None, + start_date: str | None = None, + end_date: str | None = None, + limit: int | None = 2000, +): + join_master = ( + sa.join( + left=quality_measurements_table, + right=child_equips_table, + onclause=child_equips_table.c.child_equip_id == quality_measurements_table.c.child_equip_id, + ) + .join( + right=equips_table, + onclause=equips_table.c.equip_id == child_equips_table.c.equip_id, + ) + .join( + right=lines_table, + onclause=lines_table.c.line_id == equips_table.c.line_id, + ) + .join( + right=line_groups_table, + onclause=line_groups_table.c.line_grp_id == lines_table.c.line_grp_id, + ) + .join( + right=factory_table, + onclause=factory_table.c.fctry_id == line_groups_table.c.fctry_id, + ) + ) + + conditions = [] + if process_factid is not None: + conditions.append(quality_measurements_table.c.child_equip_id == process_factid) + if start_date is not None: + conditions.append(quality_measurements_table.c.event_time >= start_date) + if end_date is not None: + conditions.append(quality_measurements_table.c.event_time < end_date) + + stmt = sa.select( + [ + quality_measurements_table.c.quality_measurement_id, + quality_measurements_table.c.event_time, + quality_measurements_table.c.part_no, + quality_measurements_table.c.lot_no, + quality_measurements_table.c.tray_no, + quality_measurements_table.c.serial_no, + factory_table.c.fctry_id, + factory_table.c.fctry_name, + lines_table.c.line_id, + lines_table.c.line_name, + child_equips_table.c.child_equip_id, + child_equips_table.c.child_equip_name, + ], + ).select_from(join_master) + + if conditions: + stmt = stmt.where(sa.and_(*conditions)) + + if limit is not None: + stmt = stmt.limit(limit) + + return stmt + + +def get_transaction_data_stmt( + process_factid: str | None = None, + start_date: str | None = None, + end_date: str | None = None, + limit: int | None = None, + sort_by_time: bool = True, +): + cte = get_master_data_stmt(process_factid, start_date, end_date, limit).cte('master_data') + + measurements_stmt = sa.select( + [ + cte, + measurements_table.c.code, + measurements_table.c.unit, + # need to cast data to text in order to union + sa.cast(measurements_table.c.value, sa.sql.sqltypes.TEXT).label(measurements_table.c.value.name), + ], + ).select_from( + sa.join( + left=cte, + right=measurements_table, + onclause=cte.c.quality_measurement_id == measurements_table.c.quality_measurement_id, + ), + ) + + string_measurements_stmt = sa.select( + [ + cte, + string_measurements_table.c.code, + string_measurements_table.c.unit, + string_measurements_table.c.value, + ], + ).select_from( + sa.join( + left=cte, + right=string_measurements_table, + onclause=cte.c.quality_measurement_id == string_measurements_table.c.quality_measurement_id, + ), + ) + + stmt = measurements_stmt.union_all(string_measurements_stmt) + if sort_by_time: + stmt = stmt.order_by(stmt.c.event_time) + + return stmt + + +@memoize(is_save_file=True) +def get_code_name_mapping(data_source_id: int, process_factid: str) -> dict[str, str]: + data_source = CfgDataSource.query.get(data_source_id) + with DbProxy(data_source) as db_instance: + stmt = get_code_name_mapping_stmt(process_factid) + sql, params = db_instance.gen_sql_and_params(stmt) + cols, rows = db_instance.run_sql(sql, row_is_dict=False, params=params) + return dict(rows) + + +def transform_transaction_data_to_horizontal(software_workshop_vertical_df: pd.DataFrame) -> 
pd.DataFrame: + # all master columns in dataframe + master_columns = [ + factory_table.c.fctry_id.name, + factory_table.c.fctry_name.name, + lines_table.c.line_id.name, + lines_table.c.line_name.name, + child_equips_table.c.child_equip_id.name, + child_equips_table.c.child_equip_name.name, + ] + + # columns for getting unique records + index_columns = [ + child_equips_table.c.child_equip_id.name, + quality_measurements_table.c.event_time.name, + quality_measurements_table.c.serial_no.name, + quality_measurements_table.c.part_no.name, + ] + + # horizontal columns in vertical dataframe + horizontal_columns = [ + quality_measurements_table.c.lot_no.name, + quality_measurements_table.c.tray_no.name, + ] + + # all required columns from those columns above. We use this hack to preserve order + required_columns = list(dict.fromkeys([*master_columns, *index_columns, *horizontal_columns])) + + # columns used for pivoting + pivot_column = measurements_table.c.code.name + pivot_value = measurements_table.c.value.name + + df_pivot = ( + # only select required columns, ignore unneeded ones + software_workshop_vertical_df[[*index_columns, pivot_column, pivot_value]] + # drop duplicated columns, to make sure pivot can work properly + .drop_duplicates(subset=[*index_columns, pivot_column], keep='last') + .pivot(index=index_columns, columns=pivot_column, values=pivot_value) + .reset_index() + ) + + # merge to get master data + df_with_master = software_workshop_vertical_df[required_columns].drop_duplicates(subset=index_columns, keep='last') + df_horizontal = df_pivot.merge(right=df_with_master, on=index_columns) + + # sort vertical columns for better output, we don't want our data being shown as col_03 col_01 col_02 + sorted_vertical_columns = sorted(c for c in df_horizontal.columns if c not in required_columns) + df_horizontal = df_horizontal[[*required_columns, *sorted_vertical_columns]] + + return df_horizontal + + +@log_execution_time() +def transform_df_for_software_workshop(df: pd.DataFrame, data_source_id: int, process_factid: str) -> pd.DataFrame: + df = transform_transaction_data_to_horizontal(df) + code_name_mapping = get_code_name_mapping(data_source_id, process_factid) + code_name_mapping = add_suffix_for_same_column_name(code_name_mapping) + for code in code_name_mapping: + if code not in df: + df[code] = None + + df = df.rename(columns=code_name_mapping) + return df diff --git a/ap/api/setting_module/services/v2_etl_services.py b/ap/api/setting_module/services/v2_etl_services.py index 22005b2..cea2cf0 100644 --- a/ap/api/setting_module/services/v2_etl_services.py +++ b/ap/api/setting_module/services/v2_etl_services.py @@ -8,6 +8,7 @@ import pandas as pd from pandas import DataFrame, Series from pandas.errors import ParserError +from sqlalchemy.orm import scoped_session from ap.common.common_utils import open_with_zip from ap.common.constants import ( @@ -43,6 +44,7 @@ CfgProcessUnusedColumn, crud_config, make_session, + use_meta_session, ) from ap.setting_module.schemas import ProcessColumnSchema @@ -102,17 +104,19 @@ def add_remaining_v2_columns(df, process_id): @log_execution_time() -def get_datasource_type(process_id): - proc_cfg: CfgProcess = CfgProcess.query.get(process_id) - data_src: CfgDataSourceCSV = CfgDataSourceCSV.query.get(proc_cfg.data_source_id) +def get_datasource_type(process_id, meta_session: scoped_session = None): + proc_cfg: CfgProcess = (meta_session.query(CfgProcess) if meta_session else CfgProcess.query).get(process_id) + data_src: CfgDataSourceCSV = 
(meta_session.query(CfgDataSourceCSV) if meta_session else CfgDataSourceCSV.query).get( + proc_cfg.data_source_id, + ) if data_src: return data_src.cfg_data_source.type return None @log_execution_time() -def is_v2_data_source(ds_type=None, process_id=None): - ds_type = ds_type or get_datasource_type(process_id) +def is_v2_data_source(ds_type=None, process_id=None, meta_session: scoped_session = None): + ds_type = ds_type or get_datasource_type(process_id, meta_session=meta_session) if ds_type: return ds_type.lower() == DBType.V2.name.lower() return False @@ -369,23 +373,24 @@ def usecols_with_normalization(x): @log_execution_time() -def save_unused_columns(process_id, unused_columns): - is_v2 = is_v2_data_source(process_id=process_id) +@use_meta_session() +def save_unused_columns(process_id, unused_columns, meta_session: scoped_session = None): + is_v2 = is_v2_data_source(process_id=process_id, meta_session=meta_session) if not is_v2: return if unused_columns: unused_columns = [CfgProcessUnusedColumn(process_id=process_id, column_name=name) for name in unused_columns] - with make_session() as meta_session: - crud_config( - meta_session=meta_session, - data=unused_columns, - parent_key_names=CfgProcessUnusedColumn.process_id.key, - key_names=CfgProcessUnusedColumn.column_name.key, - model=CfgProcessUnusedColumn, - ) + crud_config( + meta_session=meta_session, + data=unused_columns, + parent_key_names=CfgProcessUnusedColumn.process_id.key, + key_names=CfgProcessUnusedColumn.column_name.key, + model=CfgProcessUnusedColumn, + autocommit=False, + ) else: - CfgProcessUnusedColumn.delete_all_columns_by_proc_id(process_id) + CfgProcessUnusedColumn.delete_all_columns_by_proc_id(process_id, meta_session=meta_session) @log_execution_time() diff --git a/ap/api/table_viewer/controllers.py b/ap/api/table_viewer/controllers.py index 29403c8..e45a351 100644 --- a/ap/api/table_viewer/controllers.py +++ b/ap/api/table_viewer/controllers.py @@ -9,6 +9,7 @@ from ap.common.constants import DBType from ap.common.pydn.dblib import mssqlserver, oracle from ap.common.pydn.dblib.db_proxy import DbProxy +from ap.common.services.csv_header_wrapr import add_suffix_if_duplicated from ap.common.services.http_content import json_dumps from ap.common.services.jp_to_romaji_utils import to_romaji from ap.common.services.sse import MessageAnnouncer @@ -110,20 +111,20 @@ def query_data(db_instance, table_name, sort_column, sort_order, limit): @MessageAnnouncer.notify_progress(50) def get_csv_data(csv_detail, sort_colum, sort_order, limit): - latest_file = get_latest_files(csv_detail.directory) + latest_file = [csv_detail.directory] if csv_detail.is_file_path else get_latest_files(csv_detail.directory) latest_file = latest_file[0:1][0] csv_delimiter = get_csv_delimiter(csv_detail.delimiter) - line_skip = '' if (csv_detail.skip_head == 0 and not csv_detail.dummy_header) else csv_detail.skip_head - + skip_head = csv_detail.skip_head # delimiter check _, encoding = detect_file_path_delimiter( latest_file, csv_delimiter, with_encoding=True, ) + # TODO: Should we use preview_csv_data for this instead? 
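add_suffix_if_duplicated (used just below) keeps duplicated display headers from colliding when the DataFrame is built. A self-contained sketch of the idea, with an assumed suffix format and without the second return value the real helper produces:

def add_suffix_if_duplicated_sketch(headers: list[str]) -> list[str]:
    # Append a counter to every repeated header name (suffix style assumed).
    seen: dict[str, int] = {}
    result = []
    for name in headers:
        count = seen.get(name, 0)
        result.append(name if count == 0 else f'{name}_{count:02d}')
        seen[name] = count + 1
    return result

# add_suffix_if_duplicated_sketch(['col', 'col', 'time']) -> ['col', 'col_01', 'time']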
org_header, header_names, _, _, data_details, encoding, skip_tail, _ = get_csv_data_from_files( [latest_file], - line_skip=line_skip, + skip_head=skip_head, n_rows=csv_detail.n_rows, is_transpose=csv_detail.is_transpose, etl_func=csv_detail.etl_func, @@ -131,7 +132,9 @@ def get_csv_data(csv_detail, sort_colum, sort_order, limit): max_records=None, ) - df_data = pd.DataFrame(columns=org_header, data=data_details) + # display header names, add suffixes to duplicate header names (including dummy header case) + header_names, _ = add_suffix_if_duplicated(header_names) + df_data = pd.DataFrame(columns=header_names, data=data_details) if sort_colum: dict_column_name = dict(zip(org_header, header_names)) diff --git a/ap/api/trace_data/controllers.py b/ap/api/trace_data/controllers.py index 9bb9386..2669397 100644 --- a/ap/api/trace_data/controllers.py +++ b/ap/api/trace_data/controllers.py @@ -7,7 +7,9 @@ from ap.api.common.services.show_graph_database import get_config_data from ap.api.common.services.show_graph_jump_function import get_jump_emd_data from ap.api.trace_data.services.data_count import get_data_count_by_time_range -from ap.api.trace_data.services.time_series_chart import gen_graph_fpp +from ap.api.trace_data.services.time_series_chart import ( + gen_graph_fpp, +) from ap.common.constants import CfgConstantType, DataCountType, MaxGraphNumber from ap.common.logger import log_execution_time from ap.common.services.form_env import ( @@ -148,10 +150,14 @@ def get_data_count(): from_date = request_data.get('from') or None to_date = request_data.get('to') or None local_tz = request_data.get('timezone') or None + count_in_file = request_data.get('count_in_file', False) + data_count = {} min_val = 0 max_val = 0 if process_id: + start_date, end_date = None, None + if from_date and to_date: start_date = get_date_from_type(from_date, query_type, local_tz) end_date = get_date_from_type(to_date, query_type, local_tz, True) @@ -162,6 +168,7 @@ def get_data_count(): end_date, query_type, local_tz, + count_in_file=count_in_file, ) out_dict = { 'from': from_date, diff --git a/ap/api/trace_data/services/csv_export.py b/ap/api/trace_data/services/csv_export.py index 50e4f45..2e0f839 100644 --- a/ap/api/trace_data/services/csv_export.py +++ b/ap/api/trace_data/services/csv_export.py @@ -14,7 +14,7 @@ gen_graph_param, produce_cyclic_terms, ) -from ap.api.common.services.show_graph_services import get_data_from_db +from ap.api.common.services.show_graph_services import get_data_from_db, judge_data_conversion from ap.common.common_utils import DATE_FORMAT_STR, DATE_FORMAT_STR_CSV, gen_sql_label from ap.common.constants import ( CLIENT_TIMEZONE, @@ -137,6 +137,8 @@ def gen_df_export(graph_param, dic_param): # get data from database df, *_ = get_data_from_db(graph_param, dic_cat_filters) + # export original value of judge variable + df = judge_data_conversion(df, graph_param, revert=True) return df @@ -191,6 +193,31 @@ def to_csv( terms=None, emd_type=None, div_col=None, +): + df_csv = export_preprocessing( + df, + graph_param, + client_timezone=client_timezone, + output_col_ids=output_col_ids, + len_of_col_name=len_of_col_name, + terms=terms, + emd_type=emd_type, + div_col=div_col, + ) + + delimiter = delimiter or ',' + return df_csv.to_csv(output_path, sep=delimiter, index=False) + + +def export_preprocessing( + df: DataFrame, + graph_param: DicParam, + client_timezone=None, + output_col_ids=None, + len_of_col_name=None, + terms=None, + emd_type=None, + div_col=None, ): # rename new_headers = [] @@ -230,11 
+257,11 @@ def to_csv( output_cols = df.columns.to_list() if div_col: output_cols = [div_col] + df.columns.to_list() - df_csv = df[output_cols] + df_output = df[output_cols] else: - df_csv = df[dic_rename] - df_csv.rename(columns=dic_rename, inplace=True) - df_csv.replace({np.nan: None}, inplace=True) + df_output = df[dic_rename] + df_output.rename(columns=dic_rename, inplace=True) + df_output.replace({np.nan: None}, inplace=True) # timezone if client_timezone: @@ -255,22 +282,19 @@ def to_csv( if start_proc_term_from: # add term datetime to df - gen_term_cols(df_csv, start_ct_col, start_proc_term_from, start_proc_term_to, terms) + gen_term_cols(df_output, start_ct_col, start_proc_term_from, start_proc_term_to, terms) # extend datetime columns get_dates.extend([start_proc_term_from, start_proc_term_to]) - for col in df_csv.columns: + for col in df_output.columns: if col not in get_dates: continue - # df_csv[col] = df_csv[col].apply(lambda v: convert_dt_str_to_timezone(client_timezone, v)) - df_csv[col] = ( - pd.to_datetime(df_csv[col], format=DATE_FORMAT_STR, utc=True) + df_output[col] = ( + pd.to_datetime(df_output[col], format=DATE_FORMAT_STR, utc=True) .dt.tz_convert(client_timezone) .dt.strftime(DATE_FORMAT_STR_CSV) ) - - delimiter = delimiter or ',' - return df_csv.to_csv(output_path, sep=delimiter, index=False) + return df_output def find_term(value, terms, is_from): diff --git a/ap/api/trace_data/services/data_count.py b/ap/api/trace_data/services/data_count.py index 20099ad..1c995d6 100644 --- a/ap/api/trace_data/services/data_count.py +++ b/ap/api/trace_data/services/data_count.py @@ -66,14 +66,14 @@ def gen_full_data_by_time(df, start_date, end_date, query_type): @log_execution_time() @memoize(cache_type=CacheType.TRANSACTION_DATA) -def get_data_count_by_time_range(proc_id, start_date, end_date, query_type, local_tz): +def get_data_count_by_time_range(proc_id, start_date, end_date, query_type, local_tz, count_in_file: bool): data = None min_val = None max_val = None with DbProxy(gen_data_source_of_universal_db(proc_id), True) as db_instance: trans_data = TransactionData(proc_id) - _, data_count = trans_data.select_data_count(db_instance, start_date, end_date) + _, data_count = trans_data.select_data_count(db_instance, start_date, end_date, count_in_file) # data_count = ProcDataCount.get_by_proc_id(proc_id, start_date, end_date) # data_count = [[r.datetime, r.count] for r in data_count] diff --git a/ap/common/assets/assets.json b/ap/common/assets/assets.json index 751d622..599c209 100644 --- a/ap/common/assets/assets.json +++ b/ap/common/assets/assets.json @@ -1,251 +1,226 @@ -{ - "all": { - "js": [ - "common/js/libs/jquery.js", - "common/js/libs/jquery-ui.min.js", - "common/js/libs/datepicker.js", - "modules/popper/umd/popper.min.js", - "common/js/libs/bootstrap.min.js", - "common/js/libs/all.min.js", - "common/js/libs/jquery.ui.datepicker-ja.min.js", - "common/js/libs/moment-with-locales.js", - "common/js/libs/loadingoverlay.min.js", - "common/js/libs/lodash.min.js", - "common/js/libs/clipboard.min.js", - "common/js/config_data_interface.js", - "common/js/libs/bootstrap-table.min.js", - "common/js/libs/bootstrap-table-filter-control.min.js", - "common/js/libs/bootstrap-table-locale-all.min.js", - "common/js/libs/html2canvas.min.js", - "common/js/modern_screenshot.js", - "common/js/take_screenshot.js", - "common/js/libs/plotly.min.js", - "common/js/libs/toastr.min.js", - "modules/js-datatables/lib/jquery.dataTables.min.js", - "common/js/libs/dataTables.fixedHeader.min.js", - 
"common/js/libs/select2.min.js", - "common/js/terms_of_use.js", - "common/js/divide_by_calendar.js", - "common/js/jump_function.js", - "common/js/base.js", - "common/js/utils.js", - "common/js/app_version_handler.js", - "common/js/summary_table.js", - "common/js/auto-update-common.js", - "common/js/data-finder.js", - "common/js/column_ordering.js", - "common/js/components.js", - "common/js/data_point_info_table.js", - "common/js/save_load_user_input.js", - "common/js/validation.js", - "common/js/clipboard_utils.js", - "common/js/libs/dragndrop.js", - "common/js/libs/d3-format.js", - "common/js/dn-custom-select.js", - "common/js/libs/popper.min.js", - "common/js/libs/shepherd.min.js", - "common/js/ap_tour.js", - "common/js/graph_nav.js", - "modules/jquery-ui-timepicker-addon/jquery-ui-timepicker-addon.min.js", - "modules/jquery-ui-timepicker-addon/i18n/jquery-ui-timepicker-ja.js", - "modules/date-range-picker/daterangepicker.js", - "modules/date-range-picker/daterangepicker-utils.js", - "common/js/libs/js.cookie.min.js", - "common/js/libs/pagination.min.js", - "common/js/datetime_label_format.js" - ], - "css": [ - "common/custom-jquery/jquery-ui.css", - "common/css/select2.min.css", - "common/css/bootstrap.min.css", - "common/css/all.min.css", - "common/css/main.css", - "common/css/components.css", - "common/css/dragndrop.css", - "common/css/bootstrap-table.min.css", - "common/css/toastr.css", - "common/css/data-finder.css", - "modules/js-datatables/lib/jquery.dataTables.min.css", - "common/css/user-setting-table.css", - "common/css/shepherd.css", - "modules/jquery-ui-timepicker-addon/jquery-ui-timepicker-addon.css", - "modules/date-range-picker/daterangepicker.css", - "common/css/pagination.css", - "common/css/graph_nav.css", - "common/css/jump_function.css", - "tile_interface/css/tile_interface.css" - ] - }, - "fpp": { - "js": [ - "common/js/libs/Chart.min.js", - "common/js/libs/chartjs-adapter-moment.min.js", - "common/js/libs/chartjs-plugin-annotation-latest.min.js", - "trace_data/js/trace_data_time_series.js", - "trace_data/js/trace_data_histogram.js", - "trace_data/js/trace_data_step_bar_chart.js", - "trace_data/js/trace_data_summary_table.js", - "trace_data/js/trace_data_cross_hair.js", - "trace_data/js/trace_data_categorical_table.js", - "trace_data/js/trace_data_scatter_plot.js", - "trace_data/js/trace_data_whisker_plot.js", - "common/js/cat_facet_label_filter_modal.js", - "trace_data/js/trace_data.js", - "trace_data/js/trace_data_histogram_with_kde.js" - ], - "css": [ - "trace_data/css/trace_data.css" - ] - }, - "stp": { - "js": [ - "categorical_plot/js/categorical_histogram_with_density_curve.js", - "categorical_plot/js/categorical_plot_utils.js", - "categorical_plot/js/categorical_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "categorical_plot/css/categorical_plot.css" - ] - }, - "rlp": { - "js": [ - "ridgeline_plot/js/rlp_template.js", - "ridgeline_plot/js/ridgeline_plot_utils.js", - "ridgeline_plot/js/ridgeline_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "ridgeline_plot/css/ridgeline.css" - ] - }, - "chm": { - "js": [ - "calendar_heatmap/js/calendar_heatmap.js", - "calendar_heatmap/js/calendar_heatmap_plotly.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "calendar_heatmap/css/calendar_heatmap.css" - ] - }, - "msp": { - "js": [ - "multiple_scatter_plot/js/multiple_scatter_histogram.js", - "multiple_scatter_plot/js/multiple_scatter_contour.js", - "multiple_scatter_plot/js/multiple_scatter_plot.js", - 
"multiple_scatter_plot/js/heatmap_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "multiple_scatter_plot/css/multiple_scatter_plot.css" - ] - }, - "scp": { - "js": [ - "common/js/libs/Chart.bundle.min.js", - "scatter_plot/js/scatter_chart.js", - "scatter_plot/js/scatter_plot.js", - "scatter_plot/js/scp_heatmap_plot.js", - "scatter_plot/js/scp_violin_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "scatter_plot/css/scatter_plot.css" - ] - }, - "hmp": { - "js": [ - "common/js/libs/Chart.bundle.min.js", - "heatmap/js/heatmap_main.js", - "heatmap/js/heatmap_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "heatmap/css/heatmap.css" - ] - }, - "pcp": { - "js": [ - "common/js/libs/Chart.bundle.min.js", - "common/js/libs/fSelect.js", - "parallel_plot/js/parallel_properties.js", - "parallel_plot/js/parallel_utils.js", - "parallel_plot/js/parallel_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "common/css/fSelect.css", - "parallel_plot/css/parallel_plot.css" - ] - }, - "skd": { - "js": [ - "sankey_plot/js/sankey_plot.js", - "sankey_plot/js/sankey_scp.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "sankey_plot/css/sankey_plot.css" - ] - }, - "cog": { - "js": [ - "modules/sigmajs/build/sigma.min.js", - "modules/sigmajs/build/plugins/sigma.renderers.edgeLabels.min.js", - "modules/sigmajs/build/plugins/sigma.plugins.dragNodes.min.js", - "modules/sigmajs/build/plugins/sigma.layout.forceAtlas2.min.js", - "modules/sigmajs/build/plugins/sigma.plugins.animate.min.js", - "modules/sigmajs/build/plugins/sigma.layout.noverlap.min.js", - "co_occurrence/js/pareto_plot.js", - "co_occurrence/js/co_occurrence_csv.js" - ], - "css": [ - "co_occurrence/css/co_occurrence_csv.css" - ] - }, - "pca": { - "js": [ - "common/js/libs/jquery.dataTables.min.js", - "common/js/libs/dataTables.bootstrap4.min.js", - "common/js/libs/dom-text.js", - "analyze/js/pca_toastr.js", - "analyze/js/generateJson.js", - "analyze/js/hotelling_common.js", - "analyze/js/hotelling_timeseries.js", - "analyze/js/hotelling_scatters.js", - "analyze/js/hotelling_biplot.js", - "analyze/js/hotelling_q_contribution.js", - "analyze/js/hotelling_t2_contribution.js", - "analyze/js/pca.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "modules/js-datatables/lib/jquery.dataTables.min.css", - "analyze/css/anomaly_detection.css" - ] - }, - "agp": { - "js": [ - "aggregate_plot/js/aggregation_chart.js", - "aggregate_plot/js/aggregate_plot.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "aggregate_plot/css/aggregate_plot.css" - ] - }, - "gl": { - "js": [ - "analyze/js/graphical_lasso.js", - "common/js/libs/sigma.min.js", - "common/js/libs/sigma.plugins.dragNodes.min.js", - "common/js/libs/sigma.renderers.edgeLabels.min.js", - "analyze/js/graphical_lasso_sigma.js", - "common/js/cat_facet_label_filter_modal.js" - ], - "css": [ - "analyze/css/graphical_lasso.css" - ] - } -} +{ + "all": { + "js": [ + "common/js/libs/jquery.js", + "common/js/libs/jquery-ui.min.js", + "common/js/libs/datepicker.js", + "modules/popper/umd/popper.min.js", + "common/js/libs/bootstrap.min.js", + "common/js/libs/all.min.js", + "common/js/libs/jquery.ui.datepicker-ja.min.js", + "common/js/libs/moment-with-locales.js", + "common/js/libs/loadingoverlay.min.js", + "common/js/libs/lodash.min.js", + "common/js/libs/clipboard.min.js", + "common/js/config_data_interface.js", + "common/js/libs/bootstrap-table.min.js", + 
"common/js/libs/bootstrap-table-filter-control.min.js", + "common/js/libs/bootstrap-table-locale-all.min.js", + "common/js/libs/html2canvas.min.js", + "common/js/modern_screenshot.js", + "common/js/take_screenshot.js", + "common/js/libs/plotly.min.js", + "common/js/libs/toastr.min.js", + "modules/js-datatables/lib/jquery.dataTables.min.js", + "common/js/libs/dataTables.fixedHeader.min.js", + "common/js/libs/select2.min.js", + "common/js/terms_of_use.js", + "common/js/divide_by_calendar.js", + "common/js/jump_function.js", + "common/js/base.js", + "common/js/utils.js", + "common/js/app_version_handler.js", + "common/js/summary_table.js", + "common/js/auto-update-common.js", + "common/js/data-finder.js", + "common/js/column_ordering.js", + "common/js/components.js", + "common/js/data_point_info_table.js", + "common/js/save_load_user_input.js", + "common/js/validation.js", + "common/js/clipboard_utils.js", + "common/js/libs/dragndrop.js", + "common/js/libs/d3-format.js", + "common/js/dn-custom-select.js", + "common/js/libs/popper.min.js", + "common/js/libs/shepherd.min.js", + "common/js/ap_tour.js", + "common/js/graph_nav.js", + "modules/jquery-ui-timepicker-addon/jquery-ui-timepicker-addon.min.js", + "modules/jquery-ui-timepicker-addon/i18n/jquery-ui-timepicker-ja.js", + "modules/date-range-picker/daterangepicker.js", + "modules/date-range-picker/daterangepicker-utils.js", + "common/js/libs/js.cookie.min.js", + "common/js/libs/pagination.min.js", + "common/js/datetime_label_format.js" + ], + "css": [ + "common/custom-jquery/jquery-ui.css", + "common/css/select2.min.css", + "common/css/bootstrap.min.css", + "common/css/all.min.css", + "common/css/main.css", + "common/css/components.css", + "common/css/dragndrop.css", + "common/css/bootstrap-table.min.css", + "common/css/toastr.css", + "common/css/data-finder.css", + "modules/js-datatables/lib/jquery.dataTables.min.css", + "common/css/user-setting-table.css", + "common/css/shepherd.css", + "modules/jquery-ui-timepicker-addon/jquery-ui-timepicker-addon.css", + "modules/date-range-picker/daterangepicker.css", + "common/css/pagination.css", + "common/css/graph_nav.css", + "common/css/jump_function.css", + "tile_interface/css/tile_interface.css" + ] + }, + "fpp": { + "js": [ + "common/js/libs/Chart.min.js", + "common/js/libs/chartjs-adapter-moment.min.js", + "common/js/libs/chartjs-plugin-annotation-latest.min.js", + "trace_data/js/trace_data_time_series.js", + "trace_data/js/trace_data_histogram.js", + "trace_data/js/trace_data_step_bar_chart.js", + "trace_data/js/trace_data_summary_table.js", + "trace_data/js/trace_data_cross_hair.js", + "trace_data/js/trace_data_categorical_table.js", + "trace_data/js/trace_data_scatter_plot.js", + "trace_data/js/trace_data_whisker_plot.js", + "common/js/cat_facet_label_filter_modal.js", + "trace_data/js/trace_data.js", + "trace_data/js/trace_data_histogram_with_kde.js" + ], + "css": ["trace_data/css/trace_data.css"] + }, + "stp": { + "js": [ + "categorical_plot/js/categorical_histogram_with_density_curve.js", + "categorical_plot/js/categorical_plot_utils.js", + "categorical_plot/js/categorical_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["categorical_plot/css/categorical_plot.css"] + }, + "rlp": { + "js": [ + "ridgeline_plot/js/rlp_template.js", + "ridgeline_plot/js/ridgeline_plot_utils.js", + "ridgeline_plot/js/ridgeline_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["ridgeline_plot/css/ridgeline.css"] + }, + "chm": { + "js": [ + 
"calendar_heatmap/js/calendar_heatmap.js", + "calendar_heatmap/js/calendar_heatmap_plotly.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["calendar_heatmap/css/calendar_heatmap.css"] + }, + "msp": { + "js": [ + "multiple_scatter_plot/js/multiple_scatter_histogram.js", + "multiple_scatter_plot/js/multiple_scatter_contour.js", + "multiple_scatter_plot/js/multiple_scatter_plot.js", + "multiple_scatter_plot/js/heatmap_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["multiple_scatter_plot/css/multiple_scatter_plot.css"] + }, + "scp": { + "js": [ + "common/js/libs/Chart.bundle.min.js", + "scatter_plot/js/scatter_chart.js", + "scatter_plot/js/scatter_plot.js", + "scatter_plot/js/scp_heatmap_plot.js", + "scatter_plot/js/scp_violin_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["scatter_plot/css/scatter_plot.css"] + }, + "hmp": { + "js": [ + "common/js/libs/Chart.bundle.min.js", + "heatmap/js/heatmap_main.js", + "heatmap/js/heatmap_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["heatmap/css/heatmap.css"] + }, + "pcp": { + "js": [ + "common/js/libs/Chart.bundle.min.js", + "common/js/libs/fSelect.js", + "parallel_plot/js/parallel_properties.js", + "parallel_plot/js/parallel_utils.js", + "parallel_plot/js/parallel_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["common/css/fSelect.css", "parallel_plot/css/parallel_plot.css"] + }, + "skd": { + "js": [ + "sankey_plot/js/sankey_plot.js", + "sankey_plot/js/sankey_scp.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["sankey_plot/css/sankey_plot.css"] + }, + "cog": { + "js": [ + "modules/sigmajs/build/sigma.min.js", + "modules/sigmajs/build/plugins/sigma.renderers.edgeLabels.min.js", + "modules/sigmajs/build/plugins/sigma.plugins.dragNodes.min.js", + "modules/sigmajs/build/plugins/sigma.layout.forceAtlas2.min.js", + "modules/sigmajs/build/plugins/sigma.plugins.animate.min.js", + "modules/sigmajs/build/plugins/sigma.layout.noverlap.min.js", + "co_occurrence/js/pareto_plot.js", + "co_occurrence/js/co_occurrence_csv.js" + ], + "css": ["co_occurrence/css/co_occurrence_csv.css"] + }, + "pca": { + "js": [ + "common/js/libs/jquery.dataTables.min.js", + "common/js/libs/dataTables.bootstrap4.min.js", + "common/js/libs/dom-text.js", + "analyze/js/pca_toastr.js", + "analyze/js/generateJson.js", + "analyze/js/hotelling_common.js", + "analyze/js/hotelling_timeseries.js", + "analyze/js/hotelling_scatters.js", + "analyze/js/hotelling_biplot.js", + "analyze/js/hotelling_q_contribution.js", + "analyze/js/hotelling_t2_contribution.js", + "analyze/js/pca.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": [ + "modules/js-datatables/lib/jquery.dataTables.min.css", + "analyze/css/anomaly_detection.css" + ] + }, + "agp": { + "js": [ + "aggregate_plot/js/aggregation_chart.js", + "aggregate_plot/js/aggregate_plot.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["aggregate_plot/css/aggregate_plot.css"] + }, + "gl": { + "js": [ + "analyze/js/graphical_lasso.js", + "common/js/libs/sigma.min.js", + "common/js/libs/sigma.plugins.dragNodes.min.js", + "common/js/libs/sigma.renderers.edgeLabels.min.js", + "analyze/js/graphical_lasso_sigma.js", + "common/js/cat_facet_label_filter_modal.js" + ], + "css": ["analyze/css/graphical_lasso.css"] + } +} diff --git a/ap/common/common_utils.py b/ap/common/common_utils.py index 71e86f0..e9210f4 100644 --- a/ap/common/common_utils.py +++ b/ap/common/common_utils.py @@ -11,11 +11,12 @@ import 
zipfile from collections import OrderedDict from datetime import datetime, timedelta +from enum import Enum from io import IOBase from itertools import permutations from multiprocessing import Manager from pathlib import Path -from typing import IO, List, TextIO, Union +from typing import IO, Any, List, TextIO, Union import chardet import numpy as np @@ -26,7 +27,9 @@ from dateutil.relativedelta import relativedelta from flask import g from flask_assets import Bundle, Environment -from pandas import DataFrame +from pandas import DataFrame, Series +from pandas.io import parquet +from pyarrow import feather from ap.common.constants import ( ANALYSIS_INTERFACE_ENV, @@ -34,9 +37,13 @@ ENCODING_SHIFT_JIS, ENCODING_UTF_8, ENCODING_UTF_8_BOM, + INT64_MAX, + INT64_MIN, LANGUAGES, PROCESS_QUEUE, SAFARI_SUPPORT_VER, + SCP_HMP_X_AXIS, + SCP_HMP_Y_AXIS, SQL_COL_PREFIX, UNIVERSAL_DB_FILE, ZERO_FILL_PATTERN, @@ -45,6 +52,8 @@ AppEnv, CsvDelimiter, CSVExtTypes, + DataType, + FileExtension, FilterFunc, FlaskGKey, ListenNotifyType, @@ -76,6 +85,18 @@ def get_current_timestamp(format_str=DATE_FORMAT_STR): return datetime.utcnow().strftime(format_str) +class PostgresFormatStrings(Enum): + DATE = '%Y-%m-%d' + TIME = '%H:%M:%S' + DATETIME = '%Y-%m-%d %H:%M:%S.%f' + + +class SQLiteFormatStrings(Enum): + DATE = '%Y-%m-%d' + TIME = '%H:%M:%S' + DATETIME = '%Y-%m-%d %H:%M:%S' + + def parse_int_value(value): """ Parse integral value from text or numeric data @@ -341,6 +362,21 @@ def add_years(time=datetime.utcnow(), years=0): return time + relativedelta(years=years) +def add_months(time=datetime.utcnow(), months=0): + """add months + + Keyword Arguments: + time {[type]} -- [description] (default: {datetime.now()}) + months {int} -- [description] (default: {0}) + + Returns: + [type] -- [description] + """ + if isinstance(time, str): + time = parser.parse(time) + return time + relativedelta(months=months) + + @log_execution_time() def get_files(directory, depth_from=1, depth_to=2, extension=[''], file_name_only=False): """get files in folder @@ -560,6 +596,16 @@ def get_data_path(abs=True, is_log=False): return resource_path(folder_name, level=AbsPath.SHOW) if abs else folder_name +def get_instance_path(abs=True): + """get data folder path + + Returns: + [type] -- [description] + """ + folder_name = 'instance' + return resource_path(folder_name, level=AbsPath.SHOW) if abs else folder_name + + def get_error_trace_path(abs=True): """get import folder path @@ -684,6 +730,19 @@ def get_etl_path(*sub_paths): return resource_path(data_folder, folder_name, *sub_paths, level=AbsPath.SHOW) +def get_backup_data_path(): + folder_name = 'backup_data' + data_folder = get_data_path() + return resource_path(data_folder, folder_name, level=AbsPath.SHOW) + + +def get_backup_data_folder(process_id): + folder = get_backup_data_path() + if not check_exist(folder): + os.makedirs(folder) + return os.path.join(folder, str(process_id)) + + # def df_chunks(df, size): # """Yield n-sized chunks from dataframe.""" # if df.columns.size == 0: @@ -717,9 +776,7 @@ def get_base_dir(path, is_file=True): def make_dir(dir_path): - if not os.path.exists(dir_path): - os.makedirs(dir_path) - + os.makedirs(dir_path, exist_ok=True) return True @@ -761,6 +818,43 @@ def set_sqlite_params(conn): cursor.close() +# get x_y info for scp and heatmap page +def get_x_y_info(array_formval, dic_param_common): + scatter_xy_ids = [] + scatter_proc_ids = [] + scatter_xy_names = [] + + x_axis = dic_param_common.get(SCP_HMP_X_AXIS) + y_axis = 
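# Usage sketch for add_months above: string timestamps are parsed with dateutil
# first, so both datetime and string inputs work; relativedelta clamps to the
# end of shorter months.
from datetime import datetime
from dateutil.relativedelta import relativedelta

assert datetime(2024, 1, 31) + relativedelta(months=1) == datetime(2024, 2, 29)
# add_months('2024-01-31', months=1) parses the string and returns the same result.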
dic_param_common.get(SCP_HMP_Y_AXIS) + + # if no x_axis or y_axis in payload, no switch XY (may not have this case) + if not x_axis or not y_axis: + for proc in array_formval: + scatter_proc_ids.append(proc.proc_id) + scatter_xy_ids = scatter_xy_ids + proc.col_ids + scatter_xy_names = scatter_xy_names + proc.col_names + else: + x_proc, x_column_id = map(int, x_axis.split('-')) + y_proc, y_column_id = map(int, y_axis.split('-')) + + scatter_proc_ids += [x_proc, y_proc] + scatter_xy_ids += [x_column_id, y_column_id] + + # get x_y name based on proc_id and column_id in dict_array + def get_name(dict_array, find_proc_id, find_column_id): + for proc in dict_array: + if proc.proc_id == find_proc_id: + index = proc.col_ids.index(find_column_id) + return proc.col_names[index] + + x_name = get_name(array_formval, x_proc, x_column_id) + y_name = get_name(array_formval, y_proc, y_column_id) + + scatter_xy_names += [x_name, y_name] + + return scatter_xy_ids, scatter_xy_names, scatter_proc_ids + + def gen_sql_label(*args): return SQL_COL_PREFIX + SQL_COL_PREFIX.join([str(name).strip(SQL_COL_PREFIX) for name in args if name is not None]) @@ -981,6 +1075,56 @@ def write_to_pickle(data, file): return file +def read_feather_file(file): + # df = pd.read_feather(file) + df = feather.read_feather(file) + return df + + +def read_parquet_file(file): + df = parquet.read_parquet(file) + return df + + +def write_feather_file(df: DataFrame, file): + make_dir_from_file_path(file) + # df.reset_index(drop=True, inplace=True) + # df.to_feather(file, compression='lz4') + # Use LZ4 explicitly + if len(file) > 255: + file = f'{file[:250]}.{FileExtension.Feather.value}' + + try: + feather.write_feather(df.reset_index(drop=True), file, compression='lz4') + except Exception as e: + print(str(e)) + for col in df.columns: + if df[col].dtype.name in ('object', 'category'): + df[col] = np.where(pd.isnull(df[col]), None, df[col].astype(str)) + # df[col] = df[col].astype('category') # error in some cases + feather.write_feather(df.reset_index(drop=True), file, compression='lz4') + + return file + + +def write_parquet_file(df: DataFrame, file): + make_dir_from_file_path(file) + if len(file) > 255: + file = f'{file[:250]}.{FileExtension.Feather.value}' + + try: + parquet.to_parquet(df.reset_index(drop=True), file, compression='gzip') + except Exception as e: + print(str(e)) + for col in df.columns: + if df[col].dtype.name in ('object', 'category'): + df[col] = np.where(pd.isnull(df[col]), None, df[col].astype(str)) + # df[col] = df[col].astype('category') # error in some cases + parquet.to_parquet(df.reset_index(drop=True), file, compression='gzip') + + return file + + def get_debug_g_dict(): return g.setdefault(FlaskGKey.DEBUG_SHOW_GRAPH, {}) @@ -1323,3 +1467,95 @@ def get_type_all_columns(db_instance, table_name: str): types = df['type'].tolist() dict_name_type = dict(zip(names, types)) return dict_name_type + + +def get_month_diff(str_min_datetime, str_max_datetime): + min_datetime = parser.parse(str_min_datetime) + max_datetime = parser.parse(str_max_datetime) + diff = relativedelta(max_datetime, min_datetime) + return diff.years * 12 + diff.months + + +def is_boolean(data: Series): + return (data >= 0) & (data <= 1) + + +def is_int_64(data: Series): + return (data >= INT64_MIN) & (data <= INT64_MAX) + + +def convert_numeric_by_type(data: Series, provided_data_type): + s = data[data.notnull()] + if provided_data_type == DataType.BOOLEAN.name: + s = s[((np.mod(s, 1) == 0) & is_boolean(s))] + data = data.where(data.isin(s), 
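# Worked example for get_x_y_info above. The SCP/HMP axis payload encodes
# '<process id>-<column id>' (the values here are made up for illustration):
x_axis = '5-102'
x_proc, x_column_id = map(int, x_axis.split('-'))
assert (x_proc, x_column_id) == (5, 102)
# The display name is then recovered from array_formval by matching proc_id
# and indexing col_names with col_ids.index(x_column_id).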
np.nan) + elif provided_data_type in DataType.INTEGER.name: + s = s[((np.mod(s, 1) == 0) & is_int_64(s))] + data = data.where(data.isin(s), np.nan) + return data + + +def find_duplicate_values(key_value_dict: dict[Any, str]) -> dict[str, Any]: + """Find duplicate values in dictionary + + Args: + key_value_dict: dictionary with duplicate values + + Returns: + a dictionary with + + - key: duplicate value of input dictionary + + - value: list of keys that contain duplicate values + """ + + # find duplicate values + values = list(key_value_dict.values()) + duplicate_values = [value for value in values if values.count(value) > 1] + + # find keys belongs to duplicate values + duplicate_value_item = {} + for key, value in key_value_dict.items(): + if value in duplicate_values: + duplicate_value_item[value] = (duplicate_value_item.get(value) or []) + [key] + + # sort keys by alphabet + for key in duplicate_value_item.keys(): + duplicate_value_item[key].sort() + + return duplicate_value_item + + +def add_suffix_for_same_column_name(key_value_dict: dict[Any, str]): + """Add suffix for duplicate names in dictionary + + Args: + key_value_dict: dictionary with duplicate names + + Returns: + dictionary with unique names (added suffix for duplicate names) + """ + + output = copy.deepcopy(key_value_dict) + duplicate_value_item = find_duplicate_values(key_value_dict) + for duplicate_value, keys in duplicate_value_item.items(): + suffix_index = 1 + for key in keys[1:]: # skip first element in list + output[key] = f'{output[key]}_{suffix_index:02}' + suffix_index += 1 + + return output + + +def get_preview_data_file_folder(data_source_id): + folder = get_preview_data_path() + if not check_exist(folder): + os.makedirs(folder) + + return os.path.join(folder, str(data_source_id)) + + +def get_preview_data_path(): + folder_name = 'preview_data' + data_folder = get_instance_path() + return resource_path(data_folder, folder_name, level=AbsPath.SHOW) diff --git a/ap/common/constants.py b/ap/common/constants.py index a155641..129591f 100644 --- a/ap/common/constants.py +++ b/ap/common/constants.py @@ -17,10 +17,13 @@ VAR_Y = 'Y' MULTIPLE_VALUES_CONNECTOR = '|' DEFAULT_NONE_VALUE = pd.NA +HALF_WIDTH_SPACE = ' ' +UNIXEPOCH = 'unixepoch' SQL_COL_PREFIX = '__' SQL_IN_MAX = 900 SQL_LIMIT = 5_000_000 +DATABASE_LOGIN_TIMEOUT = 3 ACTUAL_RECORD_NUMBER = 'actual_record_number' ACTUAL_RECORD_NUMBER_TRAIN = 'actual_record_number_train' ACTUAL_RECORD_NUMBER_TEST = 'actual_record_number_test' @@ -78,6 +81,7 @@ DATA_NAME_V2_SUFFIX = '01' CONSTRAINT_RANGE = 'constraint_range' SELECTED = 'selected' +ONLY_EXPORT_DATA_SELECTED = 'only_export_data_selected' class ApLogLevel(Enum): @@ -114,6 +118,7 @@ class DBType(Enum): V2 = 'v2' V2_MULTI = 'v2_multi' V2_HISTORY = 'v2_history' + SOFTWARE_WORKSHOP = 'software_workshop' @classmethod def from_str(cls, s: str) -> Optional['DBType']: @@ -128,6 +133,7 @@ def is_db(self): DBType.SQLITE, DBType.ORACLE, DBType.MYSQL, + DBType.SOFTWARE_WORKSHOP, ] @@ -218,6 +224,10 @@ class ErrorMsg(Enum): EMD_TYPE = 'emdType' DUPLICATE_SERIAL_SHOW = 'duplicated_serial' DUPLICATED_SERIALS_COUNT = 'dup_check' +RECENT_TIME_INTERVAL = 'recentTimeInterval' +DIC_SCP = 'dic_scp' +IS_CLASSIF = 'is_classif' +ACTUAL = 'actual' UNLINKED_IDXS = 'unlinked_idxs' NONE_IDXS = 'none_idxs' @@ -371,6 +381,7 @@ class ErrorMsg(Enum): COL_TYPE = 'type' ORG_ARRAY_Y = 'org_array_y' CAT_ON_DEMAND = 'cat_on_demand' +UNIT = 'unit' # Cat Expansion CAT_EXP_BOX = 'catExpBox' @@ -420,6 +431,11 @@ class ErrorMsg(Enum): DATA_GROUP_TYPE = 
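# Worked trace of the helpers above (values made up). find_duplicate_values
# groups keys by duplicated value; add_suffix_for_same_column_name keeps the
# alphabetically first key unchanged and suffixes the rest:
columns = {'c1': 'temp', 'c2': 'temp', 'c3': 'pressure'}
# find_duplicate_values(columns)           -> {'temp': ['c1', 'c2']}
# add_suffix_for_same_column_name(columns) -> {'c1': 'temp', 'c2': 'temp_01', 'c3': 'pressure'}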
'data_group_type' IS_SERIAL_NO = 'is_serial_no' IS_INT_CATEGORY = 'is_int_category' +IS_JUDGE = 'is_judge' + +# X-Y Axis for SCP and HMp +SCP_HMP_X_AXIS = 'SCP_HMP_X_AXIS' +SCP_HMP_Y_AXIS = 'SCP_HMP_Y_AXIS' class HMFunction(Enum): @@ -619,7 +635,7 @@ def __str__(self): UN_AVAILABLE = 'unavailable' ALL_TILES = 'all' TILES = 'tiles' -# UNDER_SCORE = '_' +UNDER_SCORE = '_' TITLE = 'title' HOVER = 'hover' DESCRIPTION = 'description' @@ -704,6 +720,7 @@ class DataImportErrorTypes(Enum): TABLE_NOT_FOUND = 2 EMPTY_DATA_FILE = 3 DB_LOCKED = 4 + DB_CONNECTION_FAILED = 5 UNKNOWN = 100 @@ -715,6 +732,7 @@ class DataImportErrorTypes(Enum): DataImportErrorTypes.TABLE_NOT_FOUND: 'The table could not be found.', DataImportErrorTypes.EMPTY_DATA_FILE: 'The data file is empty.', DataImportErrorTypes.DB_LOCKED: 'The database is locked and data cannot be written.', + DataImportErrorTypes.DB_CONNECTION_FAILED: 'Connection to database failed.', } ErrorMsgFromDB = { @@ -907,6 +925,14 @@ class EMDType(Enum): both = [False, True] +class FileExtension(Enum): + Feather = 'ftr' + Parquet = 'parquet' + Pickle = 'pkl' + Csv = 'csv' + Tsv = 'tsv' + + USE_CONTOUR = 'use_contour' USE_HEATMAP = 'use_heatmap' COL_ID = 'column_id' @@ -1065,7 +1091,6 @@ class DataGroupType(BaseEnum): # LINE_GROUP_NAME = 31 # PART_FULL = 32 # EQUIP_ID = 33 - # HORIZONTAL_DATA = 34 # Type for horizontal columns that are sensor columns # PART_LOG # FORGING_DATE = 35 @@ -1073,6 +1098,9 @@ class DataGroupType(BaseEnum): # PRODUCT_ID = 35 + HORIZONTAL_DATA = 67 + FileName = 97 + @classmethod def v2_pivottable_group(cls) -> list['DataGroupType']: return [ @@ -1402,6 +1430,25 @@ def v2_horizontal_group(cls) -> list['DataGroupType']: ZERO_FILL_PATTERN = r'^{\:(0)([<>]?)([1-9]\d*)d?\}$' # {:010}, {:010d} ZERO_FILL_PATTERN_2 = r'^{\:([1-9])([<>])(\d+)d?\}$' # {:1>10}, {:1>10}, {:1<10d} + +class MasterDBType(BaseEnum): + EFA = auto() + EFA_HISTORY = auto() + V2 = auto() + V2_MULTI = auto() + V2_HISTORY = auto() + OTHERS = auto() + V2_MULTI_HISTORY = auto() + + @classmethod + def is_v2_group(cls, master_type: str): + return master_type in [cls.V2.name, cls.V2_MULTI.name, cls.V2_HISTORY.name, cls.V2_MULTI_HISTORY.name] + + @classmethod + def is_efa_group(cls, master_type: str): + return master_type in [cls.EFA.name, cls.EFA_HISTORY.name] + + OSERR = {22: 'Access denied', 2: 'Folder not found', 20: 'Not a folder'} # Browser support @@ -1476,6 +1523,12 @@ class RawDataTypeDB(BaseEnum): # data type user select # CATEGORY_TEXT = 'T' CATEGORY = 'CATEGORY' # Rainbow treat category as integer + def __repr__(self) -> str: + return self.value + + def __str__(self): + return self.value + @staticmethod def is_integer_data_type(data_type_db_value: str): return data_type_db_value in ( @@ -1669,6 +1722,7 @@ class FunctionCastDataType(BaseEnum): END_DATE_ID = 'endDate' END_TIME_ID = 'endTime' + EXAMPLE_VALUE = 3 @@ -1716,20 +1770,22 @@ def __str__(self): # 1,2 is priority DEL_PROCESS = 1 - CSV_IMPORT = 2 - FACTORY_IMPORT = 3 - GEN_GLOBAL = 4 - # CLEAN_DATA = 5 - FACTORY_PAST_IMPORT = 6 - IDLE_MORNITORING = 7 - SHUTDOWN_APP = 8 - BACKUP_DATABASE = 9 - PROC_LINK_COUNT = 10 - ZIP_LOG = 11 - CLEAN_ZIP = 12 - RESTRUCTURE_INDEXES = 13 - CLEAN_EXPIRED_REQUEST = 14 - PROCESS_COMMUNICATE = 15 + USER_BACKUP_DATABASE = 2 + USER_RESTORE_DATABASE = 3 + CSV_IMPORT = 4 + FACTORY_IMPORT = 5 + GEN_GLOBAL = 6 + # CLEAN_DATA = 7 + FACTORY_PAST_IMPORT = 8 + IDLE_MORNITORING = 9 + SHUTDOWN_APP = 10 + BACKUP_DATABASE = 11 + PROC_LINK_COUNT = 12 + ZIP_LOG = 13 + CLEAN_ZIP = 14 + 
RESTRUCTURE_INDEXES = 15 + CLEAN_EXPIRED_REQUEST = 16 + PROCESS_COMMUNICATE = 17 SEQUENCE_CACHE = 1000 @@ -1767,7 +1823,8 @@ class DataColumnType(BaseEnum): PART_NAME = 24 PART_NO = 25 ST_NO = 26 - + JUDGE = 76 + BOOLEAN = 77 GENERATED = 99 GENERATED_EQUATION = 100 @@ -1801,6 +1858,7 @@ class ColumnDTypeToSQLiteDType(BaseEnum): EU_INTEGER_SEP = 'integer' K_SEP_NULL = 'null' BIG_INT = 'string' + BOOLEAN = 'boolean' # if nchar(header) > 90%: generate column name @@ -1831,3 +1889,24 @@ class DataRegisterStage(BaseEnum): # Limit range time to check new version LIMIT_CHECKING_NEWER_VERSION_TIME: int = 60 # unit: seconds + +DATE_TYPE_REGEX = ( + '^' + r'(?P<year>\d{2}|\d{4})' + r'(?:[\-\.\s\/\\年]?(?P<month>\d|0\d|1[0-2])[月]?)' + r'(?:[\-\.\s\/\\]?(?P<day>\d|[0-2]\d|3[0-1])[日]?)?' + '$' +) +TIME_TYPE_REGEX = ( + '^' + r'(?P<hour>\d|[01]\d|2[0-3])' + r'(?:[:\-\.\s時]?(?P<minute>\d|[0-5]\d)[分]?)' + r'(?:[:\-\.\s]?(?P<second>\d|[0-5]\d)[秒]?)?' + '$' +) + + +# Judge definition, show OK/NG in UI instead of raw value +class JudgeDefinition(BaseEnum): + OK = 1 + NG = 0 diff --git a/ap/common/datetime_format_utils.py b/ap/common/datetime_format_utils.py new file mode 100644 index 0000000..2d608bf --- /dev/null +++ b/ap/common/datetime_format_utils.py @@ -0,0 +1,71 @@ +class DateTimeFormatUtils(object): + datetime_format: str + date_format: str + time_format: str + + def __init__(self, dic: dict): + self.datetime_format = dic['datetime'] + self.date_format = dic['date'] + self.time_format = dic['time'] + + HOUR_FORMAT_CODES = ['%H', '%I'] + DATE_FORMAT_CODES = [ + '%a', + '%A', + '%w', + '%d', + '%b', + '%B', + '%m', + '%y', + '%Y', + '%z', + '%Z', + '%j', + '%U', + '%W', + '%c', + '%x', + '%G', + '%u', + '%V', + ] + + @staticmethod + def get_datetime_format(datetime_format_str: str): + format_dict = { + 'datetime': datetime_format_str if datetime_format_str is not None and datetime_format_str != '' else None, + 'date': None, + 'time': None, + } + + if format_dict['datetime'] is None: + return DateTimeFormatUtils(format_dict) + + def get_start_index_by_codes(format_codes): + indexes = [datetime_format_str.index(code) for code in format_codes if code in datetime_format_str] + return min(indexes) if indexes else None + + time_start_index = get_start_index_by_codes(DateTimeFormatUtils.HOUR_FORMAT_CODES) + date_start_index = get_start_index_by_codes(DateTimeFormatUtils.DATE_FORMAT_CODES) + + if time_start_index is not None and date_start_index is not None: + # These are valid indexes if and only if one of them is zero + if time_start_index == 0: + format_dict['time'] = datetime_format_str[:date_start_index].strip() + format_dict['date'] = datetime_format_str[date_start_index:].strip() + elif date_start_index == 0: + format_dict['time'] = datetime_format_str[time_start_index:].strip() + format_dict['date'] = datetime_format_str[:time_start_index].strip() + elif time_start_index is not None and date_start_index is None: + format_dict['time'] = datetime_format_str[time_start_index:] + elif time_start_index is None and date_start_index is not None: + format_dict['date'] = datetime_format_str[date_start_index:] + else: + DateTimeFormatUtils.notify_invalid_format() + + return DateTimeFormatUtils(format_dict) + + @staticmethod + def notify_invalid_format(): + raise Exception('Invalid datetime format!!!') diff --git a/ap/common/memoize.py b/ap/common/memoize.py index 11ed16c..4621cb8 100644 --- a/ap/common/memoize.py +++ b/ap/common/memoize.py @@ -135,7 +135,8 @@ def memoize2(*args, **kwargs): if check_exist(file_name): return read_pickle_file(file_name)
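# Usage sketch for the definitions above, assuming the named groups restored
# in DATE_TYPE_REGEX (year/month/day) and the splitting rule of
# DateTimeFormatUtils.get_datetime_format:
import re

assert re.match(DATE_TYPE_REGEX, '2024年09月18日').groupdict() == {
    'year': '2024', 'month': '09', 'day': '18',
}

fmt = DateTimeFormatUtils.get_datetime_format('%Y-%m-%d %H:%M:%S')
# The date part starts at index 0, so the string splits at the first hour code:
# fmt.date_format == '%Y-%m-%d', fmt.time_format == '%H:%M:%S'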
else: - return cache[key]['value'] + # Must use deepcopy so callers cannot overwrite the cached value later through a shared reference + return deepcopy(cache[key]['value']) result = fn(*args, **kwargs) @@ -149,10 +150,11 @@ def memoize2(*args, **kwargs): cache[key] = {'value': deepcopy(result), 'time': time.time()} # resize - while len(cache) > cache_max_size: - key, dic_val = cache.popitem(last=False) - if dic_val.get('file'): - delete_cache_file(key) + with lock: + while len(cache) > cache_max_size: + key, dic_val = cache.popitem(last=False) + if dic_val.get('file'): + delete_cache_file(key) return result diff --git a/ap/common/pydn/dblib/db_common.py b/ap/common/pydn/dblib/db_common.py index a6c334c..5e7c13c 100644 --- a/ap/common/pydn/dblib/db_common.py +++ b/ap/common/pydn/dblib/db_common.py @@ -17,6 +17,7 @@ class SqlComparisonOperator(Enum): LIKE = 'LIKE' BETWEEN = 'BETWEEN' IN = 'IN' # Ex: cls.Columns.status.name: [(SqlComparisonOperator.IN, tuple(job_statuses))] + NOT_IN = 'NOT IN' # Ex: cls.Columns.status.name: [(SqlComparisonOperator.NOT_IN, tuple(job_statuses))] # class AggregateFunction(Enum): diff --git a/ap/common/pydn/dblib/db_proxy.py b/ap/common/pydn/dblib/db_proxy.py index ee43eea..efc347a 100644 --- a/ap/common/pydn/dblib/db_proxy.py +++ b/ap/common/pydn/dblib/db_proxy.py @@ -48,19 +48,31 @@ def __init__( self.db_basic = CfgDataSource.query.get(data_src) self.db_detail = self.db_basic.db_detail + def check_latest_failed_connection(self): + last_failed_time = DbProxy.dic_last_connect_failed_time.get(self.db_basic.id) + if last_failed_time is not None and last_failed_time > add_seconds(seconds=-180): + raise Exception(MSG_DB_CON_FAILED) + + def add_latest_failed_connection(self): + DbProxy.dic_last_connect_failed_time[self.db_basic.id] = datetime.utcnow() + + def remove_latest_failed_connection(self): + DbProxy.dic_last_connect_failed_time.pop(self.db_basic.id, None) + def __enter__(self): if not self.force_connect: - last_failed_time = DbProxy.dic_last_connect_failed_time.get(self.db_basic.id) - if last_failed_time and last_failed_time > add_seconds(seconds=-180): - raise Exception(MSG_DB_CON_FAILED) + self.check_latest_failed_connection() self.db_instance = self._get_db_instance() conn = self.db_instance.connect() if conn in (None, False): - DbProxy.dic_last_connect_failed_time[self.db_basic.id] = datetime.utcnow() + self.add_latest_failed_connection() raise Exception(MSG_DB_CON_FAILED) + # connected successfully, so clear any recorded connection failure + self.remove_latest_failed_connection() + if self.is_universal_db and self.isolation_level: set_sqlite_params(conn) @@ -102,6 +114,8 @@ def _get_db_instance(self): target_db_class = MySQL elif db_type == DBType.MSSQLSERVER.value.lower(): target_db_class = MSSQLServer + elif db_type == DBType.SOFTWARE_WORKSHOP.value.lower(): + target_db_class = PostgreSQL else: raise Exception(MSG_NOT_SUPPORT_DB) @@ -120,6 +134,12 @@ def _get_db_instance(self): return db_instance + @classmethod + def check_db_connection(cls, data_src, force: bool = False): + with cls(data_src, force_connect=force) as db_instance: + if not db_instance.is_connected: + raise Exception(MSG_DB_CON_FAILED) + def gen_data_source_of_universal_db(proc_id=None): """ diff --git a/ap/common/pydn/dblib/mssqlserver.py b/ap/common/pydn/dblib/mssqlserver.py index c72c217..3a01ab4 100644 --- a/ap/common/pydn/dblib/mssqlserver.py +++ b/ap/common/pydn/dblib/mssqlserver.py @@ -2,20 +2,22 @@ # -*- coding: utf-8 -*- # Author: Masato Yasuda (2018/01/04) +import threading import traceback from dateutil import
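# Why the deepcopy in memoize2 above matters, as a minimal standalone sketch:
# handing out the cached object itself lets a caller mutate the cache in place,
# so the next cache hit would see the mutated value.
from copy import deepcopy

cache = {'k': {'value': {'rows': [1, 2]}}}

hit = cache['k']['value']       # old behaviour: shared reference
hit['rows'].append(3)           # caller mutation corrupts the cache
assert cache['k']['value']['rows'] == [1, 2, 3]

safe = deepcopy(cache['k']['value'])  # patched behaviour
safe['rows'].append(4)
assert cache['k']['value']['rows'] == [1, 2, 3]  # cache unchanged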
parser from pymssql import connect as mssqlconnect from ap.common.common_utils import strip_all_quote +from ap.common.constants import DATABASE_LOGIN_TIMEOUT # import pyodbc class MSSQLServer: - def __init__(self, host, dbname, username, password): + def __init__(self, host, dbname, username, password, port=1433): self.host = host - self.port = 1433 + self.port = port self.dbname = dbname self.username = username self.password = password @@ -49,6 +51,25 @@ def dump(self): print('self.is_connected: ', self.is_connected) print('=======================') + def try_connect(self): + def try_connect_inner(): + self.connection = None + self.connection = mssqlconnect( + self.host, + self.username, + self.password, + self.dbname, + port=self.port, + login_timeout=DATABASE_LOGIN_TIMEOUT, + ) + + connection_thread = threading.Thread(target=try_connect_inner) + connection_thread.start() + connection_thread.join(DATABASE_LOGIN_TIMEOUT + 1) + + if self.connection is None: + raise TimeoutError('Could not connect to sql server') + def connect(self): # dsn = "Driver={{ODBC Driver 17 for SQL Server}};Server={0:s};".format(self.host) # dsn += "Database={0:s};".format(self.dbname) @@ -59,14 +80,7 @@ def connect(self): # dsn += ';Trusted_Connection=No;' try: # self.connection = pyodbc.connect(dsn) - self.connection = mssqlconnect( - self.host, - self.username, - self.password, - self.dbname, - port=self.port, - login_timeout=3, - ) + self.try_connect() # if already using the default schema, set it to None to avoid replacing the schema self.is_connected = True diff --git a/ap/common/pydn/dblib/mysql.py b/ap/common/pydn/dblib/mysql.py index 01e948d..7f39caf 100644 --- a/ap/common/pydn/dblib/mysql.py +++ b/ap/common/pydn/dblib/mysql.py @@ -214,13 +214,13 @@ def run_sql(self, sql, row_is_dict=True): cur.close() return cols, rows - def fetch_many(self, sql, size=10_000): + def fetch_many(self, sql, size=10_000, params=None): if not self._check_connection(): return False cur = self.connection.cursor() sql = sql.replace('"', '`') - cur.execute(sql) + cur.execute(sql, params) cols = [column[0] for column in cur.description] yield cols while True: diff --git a/ap/common/pydn/dblib/oracle.py b/ap/common/pydn/dblib/oracle.py index 6131c52..7c260c4 100644 --- a/ap/common/pydn/dblib/oracle.py +++ b/ap/common/pydn/dblib/oracle.py @@ -233,14 +233,15 @@ def run_sql(self, sql, row_is_dict=True): cursor.close() return cols, rows - def fetch_many(self, sql, size=10_000): + def fetch_many(self, sql, size=10_000, params=None): if not self._check_connection(): return False cur = self._create_cursor_with_date_time_format() sql = Oracle.convert_sql(sql) - cursor = cur.execute(sql) + cursor = cur.execute(sql, **params) if params is not None else cur.execute(sql) + cols = [column[0] for column in cursor.description] yield cols while True: diff --git a/ap/common/pydn/dblib/postgresql.py b/ap/common/pydn/dblib/postgresql.py index 858c700..d0e4b46 100644 --- a/ap/common/pydn/dblib/postgresql.py +++ b/ap/common/pydn/dblib/postgresql.py @@ -7,8 +7,11 @@ import psycopg2 import psycopg2.extras +from sqlalchemy.dialects import postgresql +from sqlalchemy.sql import Select from ap.common.common_utils import strip_all_quote +from ap.common.logger import log_execution_time logger = logging.getLogger(__name__) @@ -238,7 +241,8 @@ def insert_table_records(self, tblname, names, values, add_comma_to_value=True): # Returns cols together with rows in dict form # Called as: cols, rows = db1.run_sql("select * from tbl01") - def run_sql(self, sql, row_is_dict=True):
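# The pattern behind MSSQLServer.try_connect above, sketched generically:
# run the blocking connect in a worker thread and give up if no connection
# appears shortly after the driver's own login timeout. connect_with_deadline
# is a hypothetical helper, not part of the codebase; unlike the patch it marks
# the worker as daemon so an abandoned connect cannot block interpreter shutdown.
import threading

def connect_with_deadline(connect_fn, timeout_seconds):
    holder = {'conn': None}

    def worker():
        holder['conn'] = connect_fn()  # blocking driver call

    t = threading.Thread(target=worker, daemon=True)
    t.start()
    t.join(timeout_seconds + 1)  # small grace period over the driver timeout
    if holder['conn'] is None:
        raise TimeoutError('Could not connect within the deadline')
    return holder['conn']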
@log_execution_time('POSTGRES') + def run_sql(self, sql, row_is_dict=True, params=None): if not self._check_connection(): return False cur = self.connection.cursor() @@ -246,7 +250,7 @@ def run_sql(self, sql, row_is_dict=True): # how-do-i-get-a-list-of-column-names-from-a-psycopg2-cursor # Changed the handling so that renamed column names are also supported print(sql) - cur.execute(sql) + cur.execute(sql, params) # cursor.description is an array of columns # extract each name (column[0]) from it and collect them into the cols list cols = [column[0] for column in cur.description] @@ -258,12 +262,12 @@ cur.close() return cols, rows - def fetch_many(self, sql, size=10_000): + def fetch_many(self, sql, size=10_000, params=None): if not self._check_connection(): return False cur = self.connection.cursor() - cur.execute(sql) + cur.execute(sql, params) cols = [column[0] for column in cur.description] yield cols while True: @@ -317,3 +321,8 @@ def is_timezone_hold_column(self, tbl, col): return True return False + + @staticmethod + def gen_sql_and_params(stmt: Select) -> tuple[str, dict[str, str]]: + compiled_stmt = stmt.compile(dialect=postgresql.dialect()) + return compiled_stmt.string, compiled_stmt.params diff --git a/ap/common/pydn/dblib/sqlite.py b/ap/common/pydn/dblib/sqlite.py index 827cd48..2f86080 100644 --- a/ap/common/pydn/dblib/sqlite.py +++ b/ap/common/pydn/dblib/sqlite.py @@ -1,6 +1,7 @@ #!/usr/bin/python3 # -*- coding: utf-8 -*- # Author: Masato Yasuda (2019/04/10) +from __future__ import annotations import logging import sqlite3 @@ -200,6 +201,23 @@ def insert_table_records(self, tblname, names, values): self.connection.commit() print('Dummy data was inserted to {}!'.format(tblname)) + def get_column_type(self, tblname, colname) -> str | None: + sql = f'PRAGMA TABLE_INFO({tblname})' + cur = self.connection.cursor() + cur.execute(sql) + rows = cur.fetchall() + cur.close() + + def is_good_column(row): + # cid, name, type, notnull, dflt_value, pk + _, name, *_ = row + return name == colname + + good_rows = filter(is_good_column, rows) + column_types = (x[2] for x in good_rows) + column_type = next(column_types, None) + return column_type + # Execute SQL as-is. # Called as: cols, rows = db1.run_sql("select * from tbl01") diff --git a/ap/common/scheduler.py b/ap/common/scheduler.py index baac99e..1a98399 100644 --- a/ap/common/scheduler.py +++ b/ap/common/scheduler.py @@ -1,7 +1,11 @@ +from __future__ import annotations + +import contextlib from datetime import datetime from functools import wraps from threading import Lock +from apscheduler.jobstores.base import JobLookupError from pytz import utc from ap import ListenNotifyType, close_sessions, scheduler @@ -46,6 +50,12 @@ # (JobType.RESTRUCTURE_INDEXES.name, JobType.FACTORY_PAST_IMPORT.name), } +# jobs with `_id` suffixes +EXCLUSIVE_JOBS_WITH_IDS = { + JobType.USER_BACKUP_DATABASE.name, + JobType.USER_RESTORE_DATABASE.name, +} + @log_execution_time(logging_exception=True) def scheduler_app_context(fn): @@ -106,6 +116,20 @@ def inner(*args, **kwargs): # return False +def is_job_existed_in_exclusive_jobs_with_ids(job: str, running_job_name: str): + def extract_id_from_job(job_name: str) -> int | None: + for exclusive_job_with_id in EXCLUSIVE_JOBS_WITH_IDS: + if job_name.startswith(exclusive_job_with_id): + id_from_job = job_name[len(exclusive_job_with_id) + 1 :] + with contextlib.suppress(ValueError): + return int(id_from_job) + return None + + job_id = extract_id_from_job(job) + running_job_id = extract_id_from_job(running_job_name) + return job_id is not None and running_job_id is not
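# Usage sketch for PostgreSQL.gen_sql_and_params above, with a throwaway table
# (SQLAlchemy 1.4-style select() assumed): the compiled statement carries
# pyformat placeholders and a matching params dict, ready for cur.execute(sql, params).
from sqlalchemy import Column, Integer, MetaData, Table, select

t = Table('t_example', MetaData(), Column('id', Integer))
stmt = select(t).where(t.c.id == 10)
sql, params = PostgreSQL.gen_sql_and_params(stmt)
# sql    -> 'SELECT t_example.id \nFROM t_example \nWHERE t_example.id = %(id_1)s'
# params -> {'id_1': 10}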
None and job_id == running_job_id + + def scheduler_check_before_run(job_id, job_name, proc_id, dic_running_job_param): """check if job can run parallel with other jobs""" @@ -118,6 +142,9 @@ def scheduler_check_before_run(job_id, job_name, proc_id, dic_running_job_param) return False for running_job_name, running_proc_id, *_ in dic_running_job_param.values(): + if is_job_existed_in_exclusive_jobs_with_ids(job_name, running_job_name): + return False + if (job_name, running_job_name) in CONFLICT_PAIR or (running_job_name, job_name) in CONFLICT_PAIR: print(f'{job_name} job can not run parallel with {running_job_name}') return False @@ -188,7 +215,8 @@ def remove_jobs(target_job_names, proc_id=None): job.remove() else: job.remove() - + except JobLookupError: + pass finally: scheduler.resume() diff --git a/ap/common/services/csv_content.py b/ap/common/services/csv_content.py index fc984a3..845becc 100644 --- a/ap/common/services/csv_content.py +++ b/ap/common/services/csv_content.py @@ -54,10 +54,10 @@ def get_encoding_name(encoding): return encoding -def get_delimiter_encoding(f_name, preview=False, skip_line: int | None = None): +def get_delimiter_encoding(f_name, preview=False, skip_head: int | None = None): with open_with_zip(f_name, 'rb') as f: - if skip_line is not None and skip_line != 0: - f.readlines(skip_line) + if skip_head is not None and skip_head != 0: + f.readlines(skip_head) metadata = get_metadata(f, is_full_scan_metadata=True, default_csv_delimiter=',') delimiter = metadata.get(DELIMITER_KW) encoding = metadata.get(ENCODING_KW) @@ -67,6 +67,29 @@ def get_delimiter_encoding(f_name, preview=False, skip_line: int | None = None): return delimiter, encoding +def get_limit_records( + is_transpose: bool = False, + n_rows: int | None = None, + user_limit: int | None = None, +) -> int | None: + nrows = get_number_of_reading_lines(n_rows, user_limit) + + # do not use user provided limit if we need to transpose + if is_transpose: + return nrows + + if nrows is None: + return user_limit + + # need to escape 1 records because of the header + nrows = nrows - 1 + + if user_limit is None: + return nrows + + return min(nrows, user_limit) + + def get_number_of_reading_lines(n_rows: int | None = None, limit: int | None = None) -> int | None: """ @param n_rows: @@ -102,7 +125,7 @@ def read_data( normalize_func = normalize_list if do_normalize else lambda x: x - delimiter, encoding = get_delimiter_encoding(f_name, skip_line=skip_head) + delimiter, encoding = get_delimiter_encoding(f_name, skip_head=skip_head) with open_with_zip(f_name, 'r', encoding=encoding) as f: _f = f diff --git a/ap/common/services/csv_header_wrapr.py b/ap/common/services/csv_header_wrapr.py index 5dad020..1a83eb4 100644 --- a/ap/common/services/csv_header_wrapr.py +++ b/ap/common/services/csv_header_wrapr.py @@ -619,10 +619,12 @@ def transform_duplicated_col_suffix_to_pandas_col(dic_valid_csv_cols, dic_origin if len(matched) > 1 and matched[-1].isdigit(): digit = int(matched[-1]) if (digit - 1) > 0: + # the rest of the column name except for the suffix s = '_'.join(matched[0:-1]) col_names.append(f'{s}.{digit-1}') else: - col_names.append(matched[0]) + # case a_01 -> a + col_names.append('_'.join(matched[0:-1])) else: col_names.append(org_col_name) else: diff --git a/ap/common/services/data_type.py b/ap/common/services/data_type.py index 276f66f..eb15255 100644 --- a/ap/common/services/data_type.py +++ b/ap/common/services/data_type.py @@ -24,7 +24,9 @@ def gen_data_types(series: Series, is_v2=False): """ series = 
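# Worked example for the exclusive-job check above: the id is the text after
# the job-type prefix plus one separator character.
# extract_id_from_job('USER_BACKUP_DATABASE_12')  -> 12
# extract_id_from_job('USER_RESTORE_DATABASE_12') -> 12
# extract_id_from_job('CSV_IMPORT_12')            -> None (not an exclusive job type)
# So a USER_BACKUP_DATABASE_12 job is blocked while USER_RESTORE_DATABASE_12 is
# running, but the same job types for another id (e.g. _13) may still proceed.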
series.drop_duplicates().dropna() # drop 'NA' in series if series is ('1.1', 'NA') - series = series.replace(na_values, pd.NA).dropna() + # BUG: BooleanArray raising on comparison to string: https://github.com/pandas-dev/pandas/pull/44533 + if series.dtypes.name != 'boolean': + series = series.replace(na_values, pd.NA).dropna() # try to convert dtypes from float to int # if data=[1.0, 2.0] (to avoid wrong data-type prediction) @@ -200,6 +202,8 @@ def check_large_int_type(val): def check_data_type_series(orig_series: Series): + from ap.common.common_utils import is_boolean + series: Series = convert_df_str_to_others(orig_series) # all items in series are NA @@ -217,6 +221,9 @@ def check_data_type_series(orig_series: Series): return check_float_type(orig_series, series) if 'int' in series_type: + if sum(is_boolean(series)) == len(series): + return DataType.BOOLEAN.value + # BIG INT if series.max() > MAX_SAFE_INTEGER: return DataType.BIG_INT.value diff --git a/ap/common/services/error_message_handler.py b/ap/common/services/error_message_handler.py index 59639fe..4f09be7 100644 --- a/ap/common/services/error_message_handler.py +++ b/ap/common/services/error_message_handler.py @@ -2,7 +2,13 @@ import pandas as pd -from ap.common.constants import UNKNOWN_ERROR_TEXT, DataImportErrorTypes, ErrorMsgFromDB, ErrorMsgText +from ap.common.constants import ( + MSG_DB_CON_FAILED, + UNKNOWN_ERROR_TEXT, + DataImportErrorTypes, + ErrorMsgFromDB, + ErrorMsgText, +) class ErrorMessageHandler: @@ -10,13 +16,21 @@ def __init__(self): self.msg = UNKNOWN_ERROR_TEXT def msg_from_exception(self, exception: Exception): + exception_message = str(exception) + # default case + self.msg = f'{UNKNOWN_ERROR_TEXT} Detail: {exception_message}' + # known exception types if isinstance(exception, KeyError): - self.msg = f'{ErrorMsgText[DataImportErrorTypes.COL_NOT_FOUND]} Detail:{str(exception)}' - if isinstance(exception, pd.errors.EmptyDataError): - self.msg = f'{ErrorMsgText[DataImportErrorTypes.EMPTY_DATA_FILE]} Detail:{str(exception)}' - if isinstance(exception, sqlite3.OperationalError): - if ErrorMsgFromDB[DataImportErrorTypes.DB_LOCKED] in str(exception): - self.msg = f'{ErrorMsgText[DataImportErrorTypes.DB_LOCKED]} Detail:{str(exception)}' - elif ErrorMsgFromDB[DataImportErrorTypes.TABLE_NOT_FOUND] in str(exception): - self.msg = f'{ErrorMsgText[DataImportErrorTypes.TABLE_NOT_FOUND]} Detail:{str(exception)}' + self.msg = f'{ErrorMsgText[DataImportErrorTypes.COL_NOT_FOUND]} Detail: {exception_message}' + elif isinstance(exception, pd.errors.EmptyDataError): + self.msg = f'{ErrorMsgText[DataImportErrorTypes.EMPTY_DATA_FILE]} Detail: {exception_message}' + elif isinstance(exception, sqlite3.OperationalError): + if ErrorMsgFromDB[DataImportErrorTypes.DB_LOCKED] in exception_message: + self.msg = f'{ErrorMsgText[DataImportErrorTypes.DB_LOCKED]} Detail: {exception_message}' + elif ErrorMsgFromDB[DataImportErrorTypes.TABLE_NOT_FOUND] in exception_message: + self.msg = f'{ErrorMsgText[DataImportErrorTypes.TABLE_NOT_FOUND]} Detail: {exception_message}' + # custom exceptions + elif exception_message == MSG_DB_CON_FAILED: + self.msg = f'{ErrorMsgText[DataImportErrorTypes.DB_CONNECTION_FAILED]} Detail: {exception_message}' + return self.msg diff --git a/ap/common/services/form_env.py b/ap/common/services/form_env.py index cdb3bf9..6f7124f 100644 --- a/ap/common/services/form_env.py +++ b/ap/common/services/form_env.py @@ -14,6 +14,7 @@ AGP_COLOR_VARS, ARRAY_FORMVAL, ARRAY_PLOTDATA, + BOOKMARK_ID, CAT_EXP_BOX, CAT_ON_DEMAND, 
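# Usage sketch for ErrorMessageHandler above: the handler now sets a default
# unknown-error message first, then overrides it for known exception types.
import pandas as pd

handler = ErrorMessageHandler()
msg = handler.msg_from_exception(pd.errors.EmptyDataError('no rows'))
# msg == f'{ErrorMsgText[DataImportErrorTypes.EMPTY_DATA_FILE]} Detail: no rows'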
CATE_PROC, @@ -84,6 +85,7 @@ NOT_EXACT_MATCH_FILTER_IDS, OBJ_VAR, PROCS, + RECENT_TIME_INTERVAL, REMOVE_OUTLIER_EXPLANATORY_VAR, REMOVE_OUTLIER_OBJECTIVE_VAR, REMOVE_OUTLIER_REAL_ONLY, @@ -93,6 +95,8 @@ RL_CATES, RL_EMD, RL_XAXIS, + SCP_HMP_X_AXIS, + SCP_HMP_Y_AXIS, SELECT_ALL, SERIAL_COLUMN, SERIAL_ORDER, @@ -207,8 +211,12 @@ IS_NOMINAL_SCALE, NOMINAL_VARS, REQ_ID, + BOOKMARK_ID, + RECENT_TIME_INTERVAL, FINE_SELECT, REQUEST_PARAMS, + SCP_HMP_X_AXIS, + SCP_HMP_Y_AXIS, ) conds_startwith_keys = ('filter-', 'cond_', 'machine_id_multi') diff --git a/ap/common/services/jp_to_romaji_utils.py b/ap/common/services/jp_to_romaji_utils.py index c2c557c..2087b94 100644 --- a/ap/common/services/jp_to_romaji_utils.py +++ b/ap/common/services/jp_to_romaji_utils.py @@ -73,6 +73,47 @@ def replace_special_symbols(input_str): return normalized_input + +def replace_special_symbols_old(input_str): + normalized_input = input_str + # remove spaces, tabs and punctuation + normalized_input = re.sub(r"[\s\t\+\*…・:;!\?\$\&\"\'\`\=\@\#\\\/。、\.,~\|]", '', normalized_input) + + # `[\(\)\[\]<>\{\}【】]` in string in `English Name` should be replaced into `_`. + normalized_input = re.sub(r'[\(\)\[\]<>\{\}【】]', '_', normalized_input) + + # hyphen, underscore: collapse repeats + normalized_input = re.sub(r'-+', '-', normalized_input) + normalized_input = re.sub(r'_+', '_', normalized_input) + + # hyphen, underscore: remove first and last + normalized_input = re.sub(r'^[_-]', '', normalized_input) + normalized_input = re.sub(r'[_-]$', '', normalized_input) + + # `[℃°]` in `English Name` should be replaced into `deg`. + normalized_input = re.sub(r'[℃°]', 'deg', normalized_input) + + # `℉` in `English Name` should be replaced into `degF`, `%` into `pct`. + normalized_input = re.sub(r'℉', 'degF', normalized_input) + normalized_input = re.sub(r'%', 'pct', normalized_input) + + # `[Δ, △]` in English Name should be replaced into `d`. + normalized_input = re.sub(r'[Δ△]', 'd', normalized_input) + + # `Ω` in English Name should be replaced into `ohm`. + normalized_input = re.sub(r'Ω', 'ohm', normalized_input) + + # convert postal marks in string to `post` + normalized_input = re.sub(r'[\u3012\u3020\u3036]', 'post', normalized_input) + + # replace for μµ + normalized_input = re.sub(r'Uu|uu', 'u', normalized_input) + + # `Mm` in English Name should be replaced into `mm`.
+ normalized_input = re.sub(r'Mm', 'mm', normalized_input) + + return normalized_input + + # def remove_irregular(input_str): # """ # Use this function to remove irregular string diff --git a/ap/common/services/sse.py b/ap/common/services/sse.py index 2f72386..5f48ad0 100644 --- a/ap/common/services/sse.py +++ b/ap/common/services/sse.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import queue from datetime import datetime from enum import Enum, auto @@ -18,6 +20,8 @@ class AnnounceEvent(Enum): SHOW_GRAPH = auto() DISK_USAGE = auto() DATA_REGISTER = auto() + BACKUP_DATA_FINISHED = auto() + RESTORE_DATA_FINISHED = auto() class MessageAnnouncer: @@ -92,13 +96,21 @@ def format_sse(data, event=None) -> str: msg = f'event: {event}\n{msg}' return msg - def announce(self, data, event): + def announce(self, data, event, job_id: str | int = None): + """ + Send message data to front-end by EVENT name + + :param data: data want to send + :param event: event name + :param job_id: a job id to identity EVENT each other + """ if not dic_config[MAIN_THREAD]: if self.dic_progress is None: process_queue = read_pickle_file(get_multiprocess_queue_file()) self.dic_progress = process_queue[ListenNotifyType.JOB_PROGRESS.name] - self.dic_progress[event] = (data, event) + event_job_id = job_id if job_id else event + self.dic_progress[event_job_id] = (data, event) return # We go in reverse order because we might have to delete an element, which will shift the diff --git a/ap/config/basic_config.yml b/ap/config/basic_config.yml index 91646fb..1ad46e9 100644 --- a/ap/config/basic_config.yml +++ b/ap/config/basic_config.yml @@ -1,11 +1,11 @@ -!!omap -- info: !!omap - - version: '1' - - port-no: 7770 - - hide-setting-page: false - - r-path: - - auto-backup-universal: false - - language: en - - proxy: +!!omap +- info: !!omap + - version: '1' + - port-no: 7770 + - hide-setting-page: false + - r-path: + - auto-backup-universal: false + - language: en + - proxy: - log_level: INFO diff --git a/ap/equations/core.py b/ap/equations/core.py index 28d53b7..e38b161 100644 --- a/ap/equations/core.py +++ b/ap/equations/core.py @@ -1,7 +1,6 @@ from __future__ import annotations import contextlib -import locale import re from abc import abstractmethod from typing import Any, ClassVar, Optional @@ -15,7 +14,7 @@ from ap.common.constants import EMPTY_STRING, MULTIPLE_VALUES_CONNECTOR, DataTypeEncode, RawDataTypeDB from ap.common.memoize import memoize -from ap.equations.error import INVALID_VALUE_MSG, ErrorField, FunctionFieldError +from ap.equations.error import INVALID_VALUE_MSG, MUST_HAVE_THE_SAME_TYPE_MSG, ErrorField, FunctionFieldError from ap.setting_module.models import MFunction BOOLEAN_DICT_VALUES = { @@ -63,6 +62,9 @@ def cast_value_based_on_series(series: pd.Series, value: Any) -> tuple[Any, bool @param value: @return: cast value (if possible) and boolean result indicated that it is cast to series or not """ + if pd.api.types.is_bool_dtype(series) and value in BOOLEAN_DICT_VALUES: + return BOOLEAN_DICT_VALUES[value], True + if pd.api.types.is_float_dtype(series): with contextlib.suppress(ValueError): return float(value), True @@ -86,7 +88,13 @@ def cast_value_based_on_series(series: pd.Series, value: Any) -> tuple[Any, bool def try_cast_series_pd_types(series: pd.Series, pd_types: list[pd.ExtensionType]) -> pd.Series | None: for dtype in pd_types: - result_series = series.replace(BOOLEAN_DICT_VALUES) if pd.api.types.is_bool_dtype(dtype) else series + result_series = series + + current_type_is_not_boolean = not 
+        type_should_be_boolean = pd.api.types.is_bool_dtype(dtype)
+        if current_type_is_not_boolean and type_should_be_boolean:
+            result_series = result_series.replace(BOOLEAN_DICT_VALUES)
+
         with contextlib.suppress(TypeError, ValueError, OverflowError):
             return result_series.astype(dtype)
     with contextlib.suppress(TypeError, ValueError, OverflowError):
@@ -123,10 +131,10 @@ def try_cast_series(series: pd.Series, raw_data_types: list[RawDataTypeDB | None

 def get_data_encoding_type_from_series(series: pd.Series) -> DataTypeEncode:
-    if pd.api.types.is_int64_dtype(series):
-        return DataTypeEncode.BIG_INT
     if pd.api.types.is_integer_dtype(series):
         return DataTypeEncode.INTEGER
+    # if pd.api.types.is_int64_dtype(series):  # ES does not use BIG_INT
+    #     return DataTypeEncode.BIG_INT
     if pd.api.types.is_float_dtype(series):
         return DataTypeEncode.REAL
     if pd.api.types.is_string_dtype(series):
@@ -212,9 +220,9 @@ def get_output_type_cast(self) -> str | None:
         if output_type_cast == DataTypeEncode.REAL.value:
             return RawDataTypeDB.REAL.value

-        # TODO: check if return bigint or smallint
-        if output_type_cast == DataTypeEncode.BIG_INT.value:
-            return RawDataTypeDB.BIG_INT.value
+        # TODO: ES does not use BIG_INT
+        # if output_type_cast == DataTypeEncode.BIG_INT.value:
+        #     return RawDataTypeDB.BIG_INT.value

         if output_type_cast == DataTypeEncode.INTEGER.value:
             return RawDataTypeDB.INTEGER.value
@@ -242,12 +250,6 @@ def get_output_type(self, x_data_type: str | None = None, y_data_type: str | Non
         )
         return output_data_type

-    @staticmethod
-    def force_convert_boolean_to_integer(series: pd.Series, raw_data_type: RawDataTypeDB):
-        if raw_data_type == RawDataTypeDB.BOOLEAN:
-            return series.astype(pd.Int16Dtype())
-        return series
-
     def evaluate(
         self,
         df: pd.DataFrame,
@@ -300,7 +302,7 @@ def evaluate(
         if raw_output_type == RawDataTypeDB.TEXT:
             result_series = result_series.replace(to_replace=EMPTY_STRING, value=pd.NA)

-        df[out_col] = self.force_convert_boolean_to_integer(result_series, raw_output_type)
+        df[out_col] = result_series

         return df

@@ -342,7 +344,7 @@ def eval_to_series(
         series_x: pd.Series | None = None,
         series_y: pd.Series | None = None,
     ) -> pd.Series | np.NDArray:
-        return self.a * series_x * series_y + self.c
+        return self.a * (series_x * series_y) + self.c


 class Ratio(BaseFunction):
@@ -356,7 +358,20 @@ def eval_to_series(
         series_x: pd.Series | None = None,
         series_y: pd.Series | None = None,
     ) -> pd.Series | np.NDArray:
-        return self.a * series_x / (self.b * series_y) + self.c
+        """
+        https://github.com/pandas-dev/pandas/issues/30188
+        Running numpy calculations on a Float64 dataframe can leave np.nan values inside a
+        Float64 series, which causes incorrect behavior, so we handle these cases separately
+        """
+        a_mul = self.a * series_x
+        b_mul = self.b * series_y
+        result = a_mul / b_mul + self.c
+
+        # explicitly set NA where both numerator and denominator are zero (0/0)
+        both_zeroes = (a_mul == 0) & (b_mul == 0)
+        result[both_zeroes] = pd.NA
+
+        return result


 class ExpTransform(BaseFunction):
@@ -398,7 +413,20 @@ def eval_to_series(
         series_x: pd.Series | None = None,
         series_y: pd.Series | None = None,
     ) -> pd.Series | np.NDArray:
-        return self.a * np.log10(series_x - self.b) + self.c
+        """
+        https://github.com/pandas-dev/pandas/issues/30188
+        Running numpy calculations on a Float64 dataframe can leave np.nan values inside a
+        Float64 series, which causes incorrect behavior, so we handle these cases separately
+        """
+
+        minus_b = series_x - self.b
+        result = self.a * np.log10(minus_b) + self.c
+
+        # log10(x) is NaN for x < 0
+        is_negative = minus_b < 0
+        result[is_negative] = pd.NA
+
+        return result


 class HexToDec(BaseFunction):
@@ -478,7 +506,9 @@ def eval_to_series(
         series_x: pd.Series | None = None,
         series_y: pd.Series | None = None,
     ) -> pd.Series | np.NDArray:
-        return np.arctan2(series_y - self.b, series_x - self.a) * 180 / np.pi - self.c
+        result = np.arctan2(series_y - self.b, series_x - self.a)
+        result = np.rad2deg(result) - self.c
+        return result


 class StringExtraction(BaseFunction):
@@ -738,18 +768,14 @@ def eval_to_series(
     ) -> pd.Series | np.NDArray:
         self.custom_validate()

-        use_japanese_locale = self.n >= 2
+        locale_translator = {
+            0: {0: 'Mon', 1: 'Tue', 2: 'Wed', 3: 'Thu', 4: 'Fri', 5: 'Sat', 6: 'Sun'},
+            1: {0: 'Monday', 1: 'Tuesday', 2: 'Wednesday', 3: 'Thursday', 4: 'Friday', 5: 'Saturday', 6: 'Sunday'},
+            2: {0: '月', 1: '火', 2: '水', 3: '木', 4: '金', 5: '土', 6: '日'},
+            3: {0: '月曜日', 1: '火曜日', 2: '水曜日', 3: '木曜日', 4: '金曜日', 5: '土曜日', 6: '日曜日'},
+        }

-        current_locale = locale.getlocale(locale.LC_TIME)
-
-        if use_japanese_locale:
-            locale.setlocale(locale.LC_TIME, 'ja_JP')
-
-        result = series_x.dt.strftime('%a') if self.n % 2 == 0 else series_x.dt.strftime('%A')
-
-        # set back current locale
-        if use_japanese_locale:
-            locale.setlocale(locale.LC_TIME, current_locale)
+        result = series_x.dt.weekday.replace(locale_translator[self.n])

         return result

@@ -907,11 +933,10 @@ def custom_validate(self):
                 ErrorField(function_type=self.function_type(), field='s', msg='Invalid regex'),
             ) from exc

-        # extracting groups must be present
+        # if no group is present, wrap the whole pattern in one,
+        # because pandas .str.extract requires the pattern to contain capture groups
         if regex.groups == 0:
-            raise FunctionFieldError(INVALID_VALUE_MSG).add_error(
-                ErrorField(function_type=self.function_type(), field='s', msg='Regex must provide capture groups'),
-            )
+            self.s = f'({self.s})'

     def eval_to_series(
         self,
@@ -983,11 +1008,18 @@ def eval_to_series(
         series_x: pd.Series | None = None,
         series_y: pd.Series | None = None,
     ) -> pd.Series | np.NDArray:
-        result = series_x.combine_first(series_y)
+        if series_x.dtype != series_y.dtype:
+            raise FunctionFieldError(MUST_HAVE_THE_SAME_TYPE_MSG).add_error(
+                ErrorField(function_type=self.function_type(), field='X', msg=MUST_HAVE_THE_SAME_TYPE_MSG),
+                ErrorField(function_type=self.function_type(), field='Y', msg=MUST_HAVE_THE_SAME_TYPE_MSG),
+            )

-        # keep original type if `self.t` is not defined
-        self.set_type_cast(self.t if self.t else get_data_encoding_type_from_series(series_x).value)
+        if pd.api.types.is_string_dtype(series_x):
+            series_x = series_x.replace({EMPTY_STRING: pd.NA})
+            series_y = series_y.replace({EMPTY_STRING: pd.NA})

+        result = series_x.combine_first(series_y)
+        self.set_type_cast(self.t)
         type_converter = TypeConvert.from_kwargs(t=self.type_cast)
         return type_converter.eval_to_series(series_x=result)

@@ -1071,6 +1103,16 @@ def eval_to_series(
         type_cast = self.get_output_type_cast()
         pd_type = RawDataTypeDB.get_pandas_dtype(type_cast)

+        if pd.api.types.is_bool_dtype(series_x):
+            if pd.api.types.is_float_dtype(pd_type):
+                series_x = series_x.astype('float64')  # Float64 cannot be cast directly from a boolean series containing pd.NA
+            elif pd_type == pd.StringDtype():
+                series_x = series_x.astype(pd_type).str.lower()
+            else:
+                series_x = series_x.astype(pd_type)
+
+            return series_x
+
         if pd.api.types.is_numeric_dtype(pd_type):
             casted_x = pd.to_numeric(series_x, errors='coerce')

@@ -1119,7 +1161,9 @@ def eval_to_series(
         if convertible:
             result = series_x.shift(t,
fill_value=s) else: - if isinstance(s, float): + if isinstance(s, int): + casted_series = series_x.astype(pd.Int64Dtype()) + elif isinstance(s, float): casted_series = series_x.astype(pd.Float64Dtype()) elif isinstance(s, str): casted_series = series_x.astype(pd.StringDtype()) diff --git a/ap/equations/error.py b/ap/equations/error.py index 5734a5f..ad2b25a 100644 --- a/ap/equations/error.py +++ b/ap/equations/error.py @@ -6,6 +6,7 @@ from pydantic import BaseModel INVALID_VALUE_MSG = 'Invalid value' +MUST_HAVE_THE_SAME_TYPE_MSG = 'X and Y must have the same type' class ErrorField(BaseModel): @@ -34,6 +35,12 @@ def add_error(self, *errors: ErrorField) -> FunctionFieldError: def parse(self) -> dict[str, Any]: return {'id': self.id, 'errors': self.errors} + def __repr__(self) -> str: + return '\n'.join(err.msg for err in self.errors) + + def __str__(self) -> str: + return self.__repr__() + @classmethod def from_pydantic_validation_error( cls, diff --git a/ap/script/migrate_cfg_data_source_csv.py b/ap/script/migrate_cfg_data_source_csv.py index d346168..ab2cfec 100644 --- a/ap/script/migrate_cfg_data_source_csv.py +++ b/ap/script/migrate_cfg_data_source_csv.py @@ -1,10 +1,19 @@ +from ap.common.constants import DBType, RelationShip from ap.common.pydn.dblib import sqlite -from ap.setting_module.models import CfgDataSourceCSV, CfgProcessColumn, CfgProcess +from ap.setting_module.models import ( + CfgDataSourceCSV, + CfgProcessColumn, + CfgProcess, + make_session, + insert_or_update_config, + CfgDataSource, +) process_name_column = """alter table cfg_data_source_csv add process_name text;""" dummy_header_column = """alter table cfg_data_source_csv add dummy_header boolean;""" n_rows_column = """alter table cfg_data_source_csv add column n_rows integer;""" is_transpose_column = """alter table cfg_data_source_csv add column is_transpose boolean;""" +is_file_path_column = """alter table cfg_data_source_csv add column is_file_path boolean;""" def migrate_cfg_data_source_csv(app_db_src): @@ -20,6 +29,9 @@ def migrate_cfg_data_source_csv(app_db_src): is_is_transpose_column_existing = app_db.is_column_existing( CfgDataSourceCSV.__table__.name, CfgDataSourceCSV.is_transpose.name ) + is_file_path_column_existing = app_db.is_column_existing( + CfgDataSourceCSV.__table__.name, CfgDataSourceCSV.is_file_path.name + ) if not is_process_name_existing: app_db.execute_sql(process_name_column) if not is_dummy_header_existing: @@ -28,6 +40,8 @@ def migrate_cfg_data_source_csv(app_db_src): app_db.execute_sql(n_rows_column) if not is_is_transpose_column_existing: app_db.execute_sql(is_transpose_column) + if not is_file_path_column_existing: + app_db.execute_sql(is_file_path_column) migrate_cfg_process_column(app_db) app_db.disconnect() @@ -70,3 +84,20 @@ def migrate_cfg_process_column(app_db): # insert to_romaji value if not is_process_name_local_existing: app_db.execute_sql("""ALTER TABLE cfg_process ADD name_local text;""") + + +def migrate_skip_head_value(): + with make_session() as meta_session: + data_sources = meta_session.query(CfgDataSource).filter(CfgDataSource.type == DBType.CSV.value.upper()).all() + for data_source in data_sources: + csv_detail = data_source.csv_detail + # for existing csv data sources, convert skip_head from 0 to None if there is no dummy header generated + if not csv_detail.dummy_header and csv_detail.skip_head == 0: + csv_detail.skip_head = None + insert_or_update_config( + meta_session, + csv_detail, + parent_obj=data_source, + parent_relation_key=CfgDataSource.csv_detail.key, + 
parent_relation_type=RelationShip.ONE, + ) diff --git a/ap/script/migrate_cfg_process.py b/ap/script/migrate_cfg_process.py index fb03926..db51659 100644 --- a/ap/script/migrate_cfg_process.py +++ b/ap/script/migrate_cfg_process.py @@ -3,13 +3,28 @@ create_is_show_file_name = """ALTER TABLE cfg_process ADD COLUMN is_show_file_name BOOLEAN;""" update_is_show_file_name = """UPDATE cfg_process SET is_show_file_name = 0;""" +create_process_factid = """ALTER TABLE cfg_process ADD COLUMN process_factid TEXT;""" +create_process_factname = """ALTER TABLE cfg_process ADD COLUMN process_factname TEXT;""" -def migrate_cfg_process_add_is_show_file_name(app_db_src): +def migrate_cfg_process(app_db_src): app_db = sqlite.SQLite3(app_db_src) app_db.connect() + migrate_cfg_process_add_is_show_file_name(app_db) + migrate_cfg_process_add_process_factid_and_process_factname(app_db) + app_db.disconnect() + + +def migrate_cfg_process_add_is_show_file_name(app_db): is_col_existing = app_db.is_column_existing(CfgProcess.__table__.name, CfgProcess.is_show_file_name.name) if not is_col_existing: app_db.execute_sql(create_is_show_file_name) app_db.execute_sql(update_is_show_file_name) - app_db.disconnect() + + +def migrate_cfg_process_add_process_factid_and_process_factname(app_db): + is_col_process_factid = app_db.is_column_existing(CfgProcess.__table__.name, CfgProcess.process_factid.name) + is_col_process_factname = app_db.is_column_existing(CfgProcess.__table__.name, CfgProcess.process_factname.name) + if not is_col_process_factid and not is_col_process_factname: + app_db.execute_sql(create_process_factid) + app_db.execute_sql(create_process_factname) diff --git a/ap/script/migrate_cfg_process_column.py b/ap/script/migrate_cfg_process_column.py new file mode 100644 index 0000000..ed5def3 --- /dev/null +++ b/ap/script/migrate_cfg_process_column.py @@ -0,0 +1,29 @@ +from ap.common.pydn.dblib import sqlite +from ap.setting_module.models import CfgProcessColumn + +delete_operator_column = """ALTER TABLE cfg_process_column DROP COLUMN operator""" +delete_coef_column = """ALTER TABLE cfg_process_column DROP COLUMN coef""" +add_unit_column = """ALTER TABLE cfg_process_column ADD COLUMN unit TEXT;""" + + +def migrate_cfg_process_column(app_db_src): + app_db = sqlite.SQLite3(app_db_src) + app_db.connect() + del_operator_and_coef(app_db) + add_column_unit(app_db) + app_db.disconnect() + + +def del_operator_and_coef(app_db): + is_col_operator_existing = app_db.is_column_existing(CfgProcessColumn.__table__.name, 'operator') + is_col_coef_existing = app_db.is_column_existing(CfgProcessColumn.__table__.name, 'coef') + if is_col_operator_existing: + app_db.execute_sql(delete_operator_column) + if is_col_coef_existing: + app_db.execute_sql(delete_coef_column) + + +def add_column_unit(app_db): + is_col_unit_existing = app_db.is_column_existing(CfgProcessColumn.__table__.name, CfgProcessColumn.unit.name) + if not is_col_unit_existing: + app_db.execute_sql(add_unit_column) diff --git a/ap/script/migrate_delta_time.py b/ap/script/migrate_delta_time.py index 10acc35..d6d9489 100644 --- a/ap/script/migrate_delta_time.py +++ b/ap/script/migrate_delta_time.py @@ -2,6 +2,11 @@ def migrate_delta_time_in_cfg_trace_key(app_db_src): + create_delta_time_and_cutoff(app_db_src) + change_delta_time_and_cut_off_data_type(app_db_src) + + +def create_delta_time_and_cutoff(app_db_src): from ap.setting_module.models import CfgTraceKey app_db = sqlite.SQLite3(app_db_src) @@ -17,3 +22,39 @@ def migrate_delta_time_in_cfg_trace_key(app_db_src): if not 
is_cutoff_col_existing: app_db.execute_sql(create_cutoff_column) app_db.disconnect() + + +def change_delta_time_and_cut_off_data_type(app_db_src): + from ap.setting_module.models import CfgTraceKey + + app_db = sqlite.SQLite3(app_db_src) + app_db.connect() + + delta_time_column_type = app_db.get_column_type(CfgTraceKey.__tablename__, CfgTraceKey.delta_time.name) + cut_off_column_type = app_db.get_column_type(CfgTraceKey.__tablename__, CfgTraceKey.cut_off.name) + + def change_column_to_real_sql(table, old_column, new_column): + return [ + f'ALTER TABLE {table} ADD COLUMN {new_column} REAL;', + f'UPDATE {table} SET {new_column} = CAST({old_column} AS REAL);', + f'ALTER TABLE {table} DROP COLUMN {old_column};', + f'ALTER TABLE {table} RENAME COLUMN {new_column} TO {old_column};', + ] + + if delta_time_column_type != 'REAL': + for sql in change_column_to_real_sql( + CfgTraceKey.__tablename__, + CfgTraceKey.delta_time.name, + 'delta_time_20240711', + ): + app_db.execute_sql(sql) + + if cut_off_column_type != 'REAL': + for sql in change_column_to_real_sql( + CfgTraceKey.__tablename__, + CfgTraceKey.cut_off.name, + 'cut_off_20240711', + ): + app_db.execute_sql(sql) + + app_db.disconnect() diff --git a/ap/script/migrate_m_function.py b/ap/script/migrate_m_function.py index ed0ac6e..c78bc1e 100644 --- a/ap/script/migrate_m_function.py +++ b/ap/script/migrate_m_function.py @@ -2,11 +2,10 @@ import numpy as np import pandas as pd - -from ap.common.common_utils import get_dummy_data_path from sqlalchemy import create_engine from sqlalchemy.orm import Session +from ap.common.common_utils import get_dummy_data_path from ap.common.pydn.dblib import sqlite @@ -49,6 +48,24 @@ def migrate_existing_m_funcions(app_db_src): app_db.disconnect() +def update_data_type_for_function_column_and_remove_unused_column(app_db_src): + from ap.setting_module.models import CfgProcessFunctionColumn + + app_db = sqlite.SQLite3(app_db_src) + app_db.connect() + # remove unused column + for col in ['coe_a_n_s', 'coe_b_k_t', 'coe_c']: + table = CfgProcessFunctionColumn.__tablename__ + is_col_existing = app_db.is_column_existing(table, col) + if is_col_existing: + app_db.execute_sql( + f''' + ALTER TABLE {table} DROP COLUMN {col}; + ''', + ) + app_db.disconnect() + + def migrate_m_function_data(app_db_src): from ap.setting_module.models import MFunction @@ -88,3 +105,5 @@ def migrate_m_function_data(app_db_src): session.add_all([MFunction(**r) for r in records]) session.commit() session.close() + + update_data_type_for_function_column_and_remove_unused_column(app_db_src) diff --git a/ap/script/migrate_process_file_name_column.py b/ap/script/migrate_process_file_name_column.py index c4b4e51..bee4b9a 100644 --- a/ap/script/migrate_process_file_name_column.py +++ b/ap/script/migrate_process_file_name_column.py @@ -5,6 +5,7 @@ create_column_raw_name = """ALTER TABLE cfg_process_column ADD COLUMN column_raw_name TEXT;""" update_column_raw_name = """UPDATE cfg_process_column SET column_raw_name = column_name;""" create_column_type = """ALTER TABLE cfg_process ADD COLUMN column_type INTEGER;""" +create_datetime_column = """ALTER TABLE cfg_process ADD COLUMN datetime_format TEXT;""" def migrate_cfg_process_add_file_name(app_db_src): @@ -14,9 +15,17 @@ def migrate_cfg_process_add_file_name(app_db_src): if not is_col_existing: app_db.execute_sql(create_file_name) + + migrate_cfg_process_add_datetime_format(app_db) app_db.disconnect() +def migrate_cfg_process_add_datetime_format(app_db): + is_col_existing = 
app_db.is_column_existing(CfgProcess.__table__.name, CfgProcess.datetime_format.name)
+    if not is_col_existing:
+        app_db.execute_sql(create_datetime_column)
+
+
 def migrate_cfg_process_column_add_column_raw_name(app_db_src):
     app_db = sqlite.SQLite3(app_db_src)
     app_db.connect()
diff --git a/ap/script/setup_for_e2e.py b/ap/script/setup_for_e2e.py
new file mode 100644
index 0000000..85059ff
--- /dev/null
+++ b/ap/script/setup_for_e2e.py
@@ -0,0 +1,98 @@
+import re
+import os
+import shutil
+import socket
+import sys
+import time
+from pathlib import Path
+import unicodedata
+
+SCREEN_SHOT_FOLDER = Path('C:/') / 'workspace' / 'CICD' / 'frontend' / 'screenshot'
+ROOT_PATH = Path(__file__).parent.parent.parent
+INIT_BASIC_CONFIG_FILE = ROOT_PATH / 'init' / 'basic_config.yml'
+BASIC_CONFIG_FILE = ROOT_PATH / 'ap' / 'config' / 'basic_config.yml'
+E2E_CONFIG_FILE = ROOT_PATH / 'tests' / 'e2e' / 'cypress.config.js'
+E2E_INSTANCE_FOLDER = ROOT_PATH / 'tests' / 'e2e' / 'instance'
+INSTANCE_FOLDER = ROOT_PATH / 'instance'
+
+
+def clear_old_screenshot_folders():
+    for folder in SCREEN_SHOT_FOLDER.glob('*'):
+        if not folder.is_dir():
+            continue
+        access_time = folder.stat().st_atime
+        current_time = time.time()
+        one_day = 24 * 60 * 60  # seconds
+        if current_time - access_time > one_day:
+            shutil.rmtree(folder)
+
+
+def create_screenshot_folder(branch_name: str):
+    # https://github.com/django/django/blob/cdcd604ef8f650533eff6bd63a517ebb4ffddf96/django/utils/text.py#L452C1-L469C53
+    sanitized_name = str(branch_name)
+    sanitized_name = unicodedata.normalize("NFKC", sanitized_name)
+    sanitized_name = re.sub(r"[^\w\s-]", "", sanitized_name.lower())
+    sanitized_name = re.sub(r"[-\s]+", "-", sanitized_name).strip("-_")
+
+    screenshot_folder = SCREEN_SHOT_FOLDER / sanitized_name
+    screenshot_folder.mkdir(parents=True, exist_ok=True)
+    print(f"Screenshot folder is: {screenshot_folder}")
+
+    # change this to cypress config
+    with E2E_CONFIG_FILE.open('r+') as f:
+        text = f.read()
+        # the second argument of re.sub is a replacement template, not a pattern, so it must not be re.escape()d
+        text = re.sub(
+            r"screenshotsFolder: .*,", f"screenshotsFolder: '{screenshot_folder.as_posix()}',", text
+        )
+        f.seek(0)
+        f.write(text)
+        f.truncate()  # drop any leftover tail if the new content is shorter
+
+
+def find_unused_port() -> int:
+    # close the socket right away so the port can be reused by the web server
+    with socket.socket() as sock:
+        sock.bind(('', 0))
+        return sock.getsockname()[1]
+
+
+def change_port_for_web() -> None:
+    unused_port = find_unused_port()
+    with INIT_BASIC_CONFIG_FILE.open('r+') as f:
+        text = f.read()
+        text = re.sub(r'- port-no: \d+', f'- port-no: {unused_port}', text)
+        f.seek(0)
+        f.write(text)
+        f.truncate()
+
+    with E2E_CONFIG_FILE.open('r+') as f:
+        text = f.read()
+        text = re.sub(r"baseUrl: 'http://localhost:\d+'", f"baseUrl: 'http://localhost:{unused_port}'", text)
+        f.seek(0)
+        f.write(text)
+        f.truncate()
+
+
+def replace_basic_config_file() -> None:
+    shutil.copy(INIT_BASIC_CONFIG_FILE, BASIC_CONFIG_FILE)
+
+
+def copy_tests_instance_to_instance_folder() -> None:
+    if os.path.isdir(INSTANCE_FOLDER):
+        shutil.rmtree(INSTANCE_FOLDER)
+
+    shutil.copytree(E2E_INSTANCE_FOLDER, INSTANCE_FOLDER)
+
+
+def setup(branch_name: str) -> None:
+    clear_old_screenshot_folders()
+    create_screenshot_folder(branch_name)
+    change_port_for_web()
+    replace_basic_config_file()
+    copy_tests_instance_to_instance_folder()
+
+
+if __name__ == '__main__':
+    branch_name = sys.argv[1]
+    setup(branch_name)
diff --git a/ap/setting_module/controllers.py b/ap/setting_module/controllers.py
index 6ef6326..ba4eff0 100644
--- a/ap/setting_module/controllers.py
+++ b/ap/setting_module/controllers.py
@@ -90,7 +90,7 @@ def config_screen():

 @setting_module_blueprint.route('/config/filter')
 def filter_config():
-    processes = get_all_process_no_nested()
+    processes = get_all_process_no_nested(with_parent=False)
     # generate english name for process
     for proc_data in processes:
         if not proc_data['name_en']:
@@ -138,7 +138,7 @@ def term_of_use():

 @setting_module_blueprint.route('/config/master')
 def master_config():
-    processes = get_all_process_no_nested()
+    processes = get_all_process_no_nested(with_parent=False)
     # generate english name for process
     for proc_data in processes:
         if not proc_data['name_en']:
diff --git a/ap/setting_module/models.py b/ap/setting_module/models.py
index 4e445b4..107050d 100644
--- a/ap/setting_module/models.py
+++ b/ap/setting_module/models.py
@@ -2,6 +2,7 @@
 import json
 from contextlib import contextmanager
+from functools import wraps
 from typing import Any, Dict, List, Optional, Union

 import pandas as pd
@@ -11,7 +12,7 @@
 from sqlalchemy import asc, desc, event, func, null, or_
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.inspection import inspect
-from sqlalchemy.orm import RelationshipProperty, load_only
+from sqlalchemy.orm import RelationshipProperty, load_only, scoped_session
 from typing_extensions import Self

 from ap import Session, db
@@ -47,10 +48,10 @@
     MaxGraphNumber,
     RawDataTypeDB,
     RelationShip,
-    dict_dtype,
     max_graph_number,
 )
 from ap.common.cryptography_utils import decrypt_pwd
+from ap.common.datetime_format_utils import DateTimeFormatUtils
 from ap.common.memoize import set_all_cache_expired
 from ap.common.services.http_content import json_dumps
 from ap.common.services.jp_to_romaji_utils import to_romaji
@@ -76,6 +77,44 @@ def make_session():
         raise e


+def use_meta_session(meta_session_argument_name: str = 'meta_session'):
+    """Decorator that automatically creates a DB session when none is passed in the arguments"""
+
+    def decorator(fn):
+        @wraps(fn)
+        def inner(*args, **kwargs):
+            meta_session: scoped_session = kwargs.get(meta_session_argument_name)
+            if meta_session is None:
+                with make_session() as new_meta_session:
+                    kwargs[meta_session_argument_name] = new_meta_session
+                    return fn(*args, **kwargs)
+            else:
+                return fn(*args, **kwargs)
+
+        return inner
+
+    return decorator
+
+
+def use_meta_session_generator(meta_session_argument_name: str = 'db_instance'):
+    """Decorator that automatically creates a DB session when none is passed in the arguments"""
+
+    def decorator(fn):
+        @wraps(fn)
+        def inner(*args, **kwargs):
+            meta_session: scoped_session = kwargs.get(meta_session_argument_name)
+            if meta_session is None:
+                with make_session() as new_meta_session:
+                    kwargs[meta_session_argument_name] = new_meta_session
+                    return (yield from fn(*args, **kwargs))
+            else:
+                return (yield from fn(*args, **kwargs))
+
+        return inner
+
+    return decorator
+
+
 class CommonModel(db.Model):
     __abstract__ = True

@@ -501,10 +540,12 @@ def get_all_unused_columns_by_process_id(cls, process_id):
         return [col.column_name for col in cls.query.filter(cls.process_id == process_id).all()]

     @classmethod
-    def delete_all_columns_by_proc_id(cls, proc_id):
-        with make_session() as meta_session:
-            meta_session.query(cls).filter(cls.process_id == proc_id).delete()
-            meta_session.commit()
+    def delete_all_columns_by_proc_id(cls, proc_id, meta_session: scoped_session = None):
+        (meta_session.query(cls) if meta_session else CfgProcessUnusedColumn.query).filter(
+            cls.process_id == proc_id,
+        ).delete()
+        if meta_session:
+            meta_session.flush()


 class CfgProcessColumn(db.Model):
@@ -523,8 +564,8 @@ class CfgProcessColumn(db.Model):
     raw_data_type = db.Column(db.Text())
     column_type = db.Column(db.Integer())
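+    # NOTE: operator and coef are dropped from cfg_process_column by migrate_cfg_process_column.py;
+    # they appear below only as commented-out references (presumably superseded by function columns).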
predict_type = db.Column(db.Text()) - operator = db.Column(db.Text()) - coef = db.Column(db.Text()) + # operator = db.Column(db.Text()) + # coef = db.Column(db.Text()) is_serial_no = db.Column(db.Boolean(), default=False) is_get_date = db.Column(db.Boolean(), default=False) is_dummy_datetime = db.Column(db.Boolean(), default=False) @@ -532,6 +573,7 @@ class CfgProcessColumn(db.Model): parent_id = db.Column(db.Integer(), db.ForeignKey(id, ondelete='CASCADE'), nullable=True) # parent_column_id = db.Column(db.Integer()) order = db.Column(db.Integer()) + unit = db.Column(db.Text()) function_details: list['CfgProcessFunctionColumn'] = db.relationship( 'CfgProcessFunctionColumn', lazy='joined', @@ -605,6 +647,10 @@ def is_int_category(self): self.column_type in DataColumnType.category_int_types() or self.is_serial_no ) + @hybrid_property + def is_judge(self): + return self.column_type == DataColumnType.JUDGE.value + @classmethod def get_function_col_ids(cls, process_id): recs = ( @@ -767,9 +813,12 @@ class CfgProcess(db.Model): comment = db.Column(db.Text()) is_show_file_name = db.Column(db.Boolean(), default=None) file_name = db.Column(db.Text()) + process_factid = db.Column(db.Text()) + process_factname = db.Column(db.Text()) parent_id = db.Column(db.Integer(), db.ForeignKey(id, ondelete='CASCADE'), nullable=True) # parent_process_id = db.Column(db.Integer()) + datetime_format = db.Column(db.Text(), default=None) order = db.Column(db.Integer()) created_at = db.Column(db.Text(), default=get_current_timestamp) updated_at = db.Column(db.Text(), default=get_current_timestamp, onupdate=get_current_timestamp) @@ -920,6 +969,22 @@ def get_cols_by_data_type(self, data_type: DataType, column_name_only=True): return cols + def get_time_format(self) -> Optional[str]: + """ + Extract time format from datetime_format value + :return: time format + """ + datetime_format = DateTimeFormatUtils.get_datetime_format(self.datetime_format) + return datetime_format.time_format + + def get_date_format(self) -> Optional[str]: + """ + Extract date format from datetime_format value + :return: date format + """ + datetime_format = DateTimeFormatUtils.get_datetime_format(self.datetime_format) + return datetime_format.date_format + @classmethod def get_all(cls, with_parent=False): query = cls.query @@ -962,9 +1027,9 @@ def get_all_parents_and_children_processes(cls, proc_id: int) -> list[CfgProcess proc = cls.query.get(proc_id) parent = cls.get_parent(proc_id) children = cls.get_children(proc_id) - children_of_parent = cls.get_children(parent.id) if parent is not None else [] + siblings = cls.get_children(parent.id) if parent is not None else [] - processes = [proc] + [parent] + children + children_of_parent + processes = [proc] + [parent] + children + siblings return list({process.id: process for process in processes if process is not None}.values()) @classmethod @@ -989,6 +1054,23 @@ def delete(cls, proc_id): return True + @classmethod + def batch_delete(cls, proc_ids): + with make_session() as meta_session: + meta_session.query(cls).filter(cls.id.in_(proc_ids)).delete(synchronize_session=False) + # delete traces manually + meta_session.query(CfgTrace).filter( + or_(CfgTrace.self_process_id.in_(proc_ids), CfgTrace.target_process_id.in_(proc_ids)), + ).delete( + synchronize_session=False, + ) + # delete linking prediction manually + meta_session.query(ProcLinkCount).filter( + or_(ProcLinkCount.process_id.in_(proc_ids), ProcLinkCount.target_process_id.in_(proc_ids)), + ).delete( + synchronize_session=False, + ) + 
@classmethod def update_order(cls, meta_session, process_id, order): meta_session.query(cls).filter(cls.id == process_id).update({cls.order: order}) @@ -1000,11 +1082,27 @@ def get_list_of_process(cls): @classmethod def check_duplicated_name(cls, name_en, name_jp, name_local): - check_name_en = len(cls.query.filter(cls.name_en == name_en).all()) != 0 - check_name_jp = len(cls.query.filter(cls.name_jp == name_jp).all()) != 0 - check_name_local = len(cls.query.filter(cls.name_local == name_local).all()) != 0 + check_name_en = len(cls.query.filter(cls.name_en == name_en).all()) != 0 if name_en else False + check_name_jp = len(cls.query.filter(cls.name_jp == name_jp).all()) != 0 if name_jp else False + check_name_local = len(cls.query.filter(cls.name_local == name_local).all()) != 0 if name_local else False return check_name_en, check_name_jp, check_name_local + def table_name_for_query_datetime(self): + from ap.api.setting_module.services.software_workshop_etl_services import quality_measurements_table + + if self.data_source.type == DBType.SOFTWARE_WORKSHOP.name: + return quality_measurements_table.name + return self.table_name + + def filter_for_query_datetime(self, sql: str) -> str: + if self.data_source.type == DBType.SOFTWARE_WORKSHOP.name: + from ap.api.setting_module.services.software_workshop_etl_services import quality_measurements_table + + # software workshop import by process must filter by process name here + # because this is vertical database + return f"{sql} WHERE {quality_measurements_table.c.child_equip_id.name} = '{self.process_factid}'" + return sql + class CfgProcessFunctionColumn(db.Model): __tablename__ = 'cfg_process_function_column' @@ -1197,8 +1295,8 @@ class CfgTraceKey(db.Model): target_column_substr_from = db.Column(db.Integer()) target_column_substr_to = db.Column(db.Integer()) - delta_time = db.Column(db.Integer()) - cut_off = db.Column(db.Integer()) + delta_time = db.Column(db.Float()) + cut_off = db.Column(db.Float()) order = db.Column(db.Integer()) @@ -1548,6 +1646,7 @@ def insert_or_update_config( parent_relation_key=None, parent_relation_type=None, exclude_columns=None, + autocommit=True, ): """ @@ -1559,6 +1658,7 @@ def insert_or_update_config( :param parent_obj: :param parent_relation_key: :param parent_relation_type: + :param autocommit: :return: """ excludes = ['created_at', 'updated_at'] @@ -1597,6 +1697,7 @@ def insert_or_update_config( rec = model() if not parent_obj: meta_session.add(rec) + meta_session.flush() elif parent_relation_type is RelationShip.MANY: objs = getattr(parent_obj, parent_relation_key) if objs is None: @@ -1629,7 +1730,9 @@ def insert_or_update_config( setattr(rec, key, val) - meta_session.commit() + meta_session.flush() + if autocommit: + meta_session.commit() return rec @@ -1671,6 +1774,7 @@ def crud_config( parent_obj: db.Model = None, parent_relation_key=None, parent_relation_type=RelationShip.MANY, + autocommit=True, ): """ @@ -1682,6 +1786,7 @@ def crud_config( :param parent_obj: :param parent_relation_key: :param parent_relation_type: + :param autocommit: :return: """ # get model @@ -1736,9 +1841,10 @@ def crud_config( parent_obj=parent_obj, parent_relation_key=parent_relation_key, parent_relation_type=parent_relation_type, + autocommit=autocommit, ) else: - rec = insert_or_update_config(meta_session, row, key_names, model=model) + rec = insert_or_update_config(meta_session, row, key_names, model=model, autocommit=autocommit) key = tuple(getattr(rec, key) for key in key_names) set_active_keys.add(key) @@ -1750,8 +1856,11 
@@ def crud_config( continue meta_session.delete(current_rec) + meta_session.flush() - meta_session.commit() + meta_session.flush() + if autocommit: + meta_session.commit() return True @@ -1762,7 +1871,7 @@ class CfgDataSourceCSV(db.Model): __table_args__ = {'sqlite_autoincrement': True} id = db.Column(db.Integer(), db.ForeignKey('cfg_data_source.id', ondelete='CASCADE'), primary_key=True) directory = db.Column(db.Text()) - skip_head = db.Column(db.Integer(), default=0) + skip_head = db.Column(db.Integer(), default=None) skip_tail = db.Column(db.Integer(), default=0) n_rows = db.Column(db.Integer(), nullable=True) is_transpose = db.Column(db.Boolean(), nullable=True) @@ -1770,6 +1879,7 @@ class CfgDataSourceCSV(db.Model): etl_func = db.Column(db.Text()) process_name = db.Column(db.Text()) dummy_header = db.Column(db.Boolean(), default=False) + is_file_path = db.Column(db.Boolean(), nullable=True, default=False) created_at = db.Column(db.Text(), default=get_current_timestamp) updated_at = db.Column(db.Text(), default=get_current_timestamp, onupdate=get_current_timestamp) # TODO check fetch all @@ -1926,6 +2036,12 @@ def get_option(cls, option_id): def get_options(cls, req_id): return cls.query.filter(cls.req_id == req_id).all() + @classmethod + def get_option_ids(cls, req_id): + records = cls.query.options(load_only(cls.id)).filter(cls.req_id == req_id).all() + ids = [rec.id for rec in records] + return ids + # class ProcDataCount(db.Model): # __tablename__ = 't_proc_data_count' @@ -2009,16 +2125,13 @@ def get_variables(self) -> list[str]: return variables def get_string_x_types(self) -> list[str]: - dtypes = self.x_type.split(',') if self.has_x() else [] - return self.convert_data_type(dtypes) + return self.x_type.split(',') if self.has_x() else [] def get_string_y_types(self) -> list[str]: - dtypes = self.y_type.split(',') if self.has_y() else [] - return self.convert_data_type(dtypes) + return self.y_type.split(',') if self.has_y() else [] def get_string_return_types(self): - dtypes = self.return_type.split(',') if self.return_type else [] - return self.convert_data_type(dtypes) + return self.return_type.split(',') if self.return_type else [] def get_possible_x_types(self) -> list[RawDataTypeDB]: if not self.has_x(): @@ -2084,27 +2197,6 @@ def get_output_data_type( return None - def convert_data_type(self, data_types: List): - # dict_dtype = { - # '': DataType.NULL.name, - # 'r': DataType.REAL.name, - # 'i': DataType.INTEGER.name, - # 't': DataType.TEXT.name, - # 'd': DataType.DATETIME.name, - # 'b': DataType.BOOLEAN.name, - # 'rs': DataType.REAL_SEP.name, - # 'is': DataType.INTEGER_SEP.name, - # 'ers': DataType.EU_REAL_SEP.name, - # 'eis': DataType.EU_INTEGER_SEP.name, - # 'ksn': DataType.K_SEP_NULL.name, - # 'date': DataType.DATE.name, - # 'time': DataType.TIME.name, - # 'cast': FunctionCastDataType.CAST.value, - # 'x': FunctionCastDataType.SAME_AS_X.value, - # 'b_i': DataType.BIG_INT.name, - # } - return [dict_dtype.get(data_type) for data_type in data_types] - def get_models(): all_sub_classes = db.Model.__subclasses__() diff --git a/ap/setting_module/schemas.py b/ap/setting_module/schemas.py index cd4a503..83f5e3e 100644 --- a/ap/setting_module/schemas.py +++ b/ap/setting_module/schemas.py @@ -138,7 +138,9 @@ class Meta: name = fields.String(required=False, allow_none=True) is_category = fields.Boolean(required=False, allow_none=True) is_int_category = fields.Boolean(required=False, allow_none=True) + is_judge = fields.Boolean(required=False, allow_none=True) is_linking_column = 
fields.Boolean(required=False, allow_none=True) + unit = fields.String(required=False, allow_none=True) parent_column = Nested('ProcessColumnSchema', many=False, required=False) # This field to sever store function config of this column @@ -215,6 +217,9 @@ class Meta: name_local = fields.String(required=False, allow_none=True) shown_name = fields.String(required=False, allow_none=True) is_show_file_name = fields.Boolean(required=False, allow_none=True) + process_factid = fields.String(required=False, allow_none=True) + process_factname = fields.String(required=False, allow_none=True) + datetime_format = fields.String(required=False, allow_none=True) class ProcessFullSchema(ma.SQLAlchemyAutoSchema): diff --git a/ap/setting_module/services/background_process.py b/ap/setting_module/services/background_process.py index 0dfeb3f..5aff30c 100644 --- a/ap/setting_module/services/background_process.py +++ b/ap/setting_module/services/background_process.py @@ -288,7 +288,7 @@ def send_processing_info( 'process_id': job.process_id, 'is_first_imported': False, } - dic_progress[job.id] = (dic_register_progress, AnnounceEvent.DATA_REGISTER.name) + dic_progress[f'{job.id}_register_by_file'] = (dic_register_progress, AnnounceEvent.DATA_REGISTER.name) def update_job_management(job, err=None): diff --git a/bridge/services/sql/sql_generator.py b/ap/setting_module/services/backup_and_restore/__init__.py similarity index 100% rename from bridge/services/sql/sql_generator.py rename to ap/setting_module/services/backup_and_restore/__init__.py diff --git a/ap/setting_module/services/backup_and_restore/backup.py b/ap/setting_module/services/backup_and_restore/backup.py new file mode 100644 index 0000000..8d507f0 --- /dev/null +++ b/ap/setting_module/services/backup_and_restore/backup.py @@ -0,0 +1,86 @@ +import pandas as pd + +from ap.api.setting_module.services.data_import import save_proc_data_count_multiple_dfs +from ap.common.pydn.dblib.db_proxy import DbProxy, gen_data_source_of_universal_db +from ap.common.services.sse import AnnounceEvent, background_announcer +from ap.setting_module.services.backup_and_restore.backup_file_manager import BackupKey, BackupKeysManager +from ap.setting_module.services.backup_and_restore.duplicated_check import ( + get_df_insert_and_duplicated_ids, + remove_unused_columns_and_add_missing_columns, +) +from ap.trace_data.transaction_model import TransactionData + + +def backup_db_data(process_id: int, start_time: str, end_time: str): + backup_keys_manager = BackupKeysManager(process_id=process_id, start_time=start_time, end_time=end_time) + + # TODO: get min max date in database before running this + backup_keys = backup_keys_manager.get_backup_keys_by_day() + total_backup_keys = len(backup_keys) + + if total_backup_keys == 0: + # nothing to do + yield 100 + + # create transaction outside to avoid looping, we only modify `t_table` so this is fine + transaction_data = TransactionData(process_id) + for i, backup_key in enumerate(backup_keys): + backup_db_data_from_key(transaction_data, backup_keys_manager, backup_key) + yield (i + 1) * 100 / total_backup_keys + + background_announcer.announce(True, AnnounceEvent.BACKUP_DATA_FINISHED.name) + + +def backup_db_data_from_key( + transaction_data: TransactionData, + backup_keys_manager: BackupKeysManager, + backup_key: BackupKey, +): + with DbProxy( + gen_data_source_of_universal_db(proc_id=transaction_data.process_id), + True, + immediate_isolation_level=True, + ) as db_instance: + get_date_col = 
transaction_data.getdate_column.bridge_column_name
+        df_from_db: pd.DataFrame = transaction_data.get_transaction_by_time_range(
+            db_instance,
+            backup_keys_manager.get_start_time(backup_key),
+            backup_keys_manager.get_end_time(backup_key),
+        )
+        if df_from_db.empty:
+            return
+
+        # remove data in transaction table
+        transaction_data.remove_transaction_by_time_range(
+            db_instance,
+            backup_keys_manager.get_start_time(backup_key),
+            backup_keys_manager.get_end_time(backup_key),
+        )
+
+        df_file = backup_key.read_file()
+        # overwrite columns from database to file
+        df_file = remove_unused_columns_and_add_missing_columns(df_file, df_from_db.columns)
+
+        df_insert = get_df_insert_and_duplicated_ids(
+            transaction_data,
+            df_insert=df_from_db,
+            df_old=df_file,
+        )
+
+        df_file_overwrite = pd.concat(
+            [
+                # df_insert already excludes rows that duplicate `df_file`
+                df_file,
+                df_insert,
+            ],
+        )
+
+        save_proc_data_count_multiple_dfs(
+            db_instance,
+            proc_id=backup_key.process_id,
+            get_date_col=get_date_col,
+            dfs_pop_from_db=df_from_db,
+            dfs_push_to_file=df_insert,
+        )
+
+        backup_key.write_file(df_file_overwrite)
diff --git a/ap/setting_module/services/backup_and_restore/backup_file_manager.py b/ap/setting_module/services/backup_and_restore/backup_file_manager.py
new file mode 100644
index 0000000..c9bb2eb
--- /dev/null
+++ b/ap/setting_module/services/backup_and_restore/backup_file_manager.py
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+import contextlib
+from datetime import datetime, timedelta
+from pathlib import Path
+from typing import Any, ClassVar
+
+import pandas as pd
+from pydantic import BaseModel
+
+from ap.common.common_utils import (
+    get_backup_data_folder,
+    read_parquet_file,
+    write_parquet_file,
+)
+from ap.common.constants import UNDER_SCORE, CsvDelimiter, FileExtension
+
+
+class BackupKey(BaseModel):
+    FILE_EXTENSION: ClassVar[str] = FileExtension.Parquet.value
+    FILE_SEP: ClassVar[str] = CsvDelimiter.TSV.value
+    FILE_FORMAT: ClassVar[str] = '%Y%m%d'
+
+    process_id: int
+    start_time: datetime
+    end_time: datetime
+
+    @classmethod
+    def from_file_name(cls, process_id: int, file_name: str) -> 'BackupKey' | None:
+        start_time_str, end_time_str, *_ = file_name.split(UNDER_SCORE)
+        with contextlib.suppress(ValueError):
+            start_time = datetime.strptime(start_time_str, cls.FILE_FORMAT)
+            end_time = datetime.strptime(end_time_str, cls.FILE_FORMAT)
+            return BackupKey(process_id=process_id, start_time=start_time, end_time=end_time)
+        return None
+
+    @property
+    def backup_folder(self):
+        return Path(get_backup_data_folder(self.process_id))
+
+    @property
+    def filename(self) -> Path:
+        min_time = self.start_time.strftime(self.FILE_FORMAT)
+        max_time = self.end_time.strftime(self.FILE_FORMAT)
+        file_name = f'{min_time}{UNDER_SCORE}{max_time}.{self.FILE_EXTENSION}'
+        return self.backup_folder / file_name
+
+    def make_backup_dir(self):
+        self.backup_folder.mkdir(exist_ok=True)
+
+    def delete_file(self):
+        self.filename.unlink(missing_ok=True)
+
+    def read_file(self) -> pd.DataFrame:
+        if not self.filename.exists():
+            return pd.DataFrame()
+        return read_parquet_file(str(self.filename))
+
+    def write_file(self, dataframe: pd.DataFrame) -> None:
+        if dataframe.empty:
+            self.delete_file()
+        else:
+            self.make_backup_dir()
+            write_parquet_file(dataframe, str(self.filename))
+
+
+class BackupKeysManager(BaseModel):
+    """Handle multiple backup keys; keys must not overlap"""
+
+    process_id: int
+    start_time: datetime
+    end_time: datetime
+
+    def model_post_init(self, __context: Any) -> None:
+        # add 1 second so the user-supplied end_time is included (time-range ends are exclusive)
+        self.end_time = self.end_time + timedelta(seconds=1)
+
+    def get_start_time(self, backup_key: BackupKey) -> datetime:
+        return pd.to_datetime(max(self.start_time, backup_key.start_time), utc=True)
+
+    def get_end_time(self, backup_key: BackupKey) -> datetime:
+        return pd.to_datetime(min(self.end_time, backup_key.end_time), utc=True)
+
+    def get_backup_keys_by_day(self) -> list[BackupKey]:
+        """Get all non-overlapping backup keys, separated by day"""
+        backup_keys = []
+
+        current_date = self.start_time.date()
+        end_date = self.end_time.date()
+        while current_date <= end_date:
+            next_date = current_date + timedelta(days=1)
+            backup_keys.append(
+                BackupKey(
+                    process_id=self.process_id,
+                    start_time=current_date,
+                    end_time=next_date,
+                ),
+            )
+            current_date = next_date
+
+        return backup_keys
diff --git a/ap/setting_module/services/backup_and_restore/duplicated_check.py b/ap/setting_module/services/backup_and_restore/duplicated_check.py
new file mode 100644
index 0000000..ff50d58
--- /dev/null
+++ b/ap/setting_module/services/backup_and_restore/duplicated_check.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import pandas as pd
+
+from ap.setting_module.models import CfgProcessColumn
+from ap.trace_data.transaction_model import TransactionData
+
+
+def get_df_insert_and_duplicated_ids(
+    transaction_data: TransactionData,
+    *,
+    df_insert: pd.DataFrame,
+    df_old: pd.DataFrame,
+) -> pd.DataFrame:
+    """Return the rows of df_insert that are not already present in df_old"""
+    if df_old.empty or df_insert.empty:
+        return df_insert
+
+    # convert datetime columns (because the edge server handles datetime columns as strings)
+    get_date_col = transaction_data.getdate_column.bridge_column_name
+    df_insert[get_date_col] = pd.to_datetime(df_insert[get_date_col])
+    df_old[get_date_col] = pd.to_datetime(df_old[get_date_col])
+
+    drop_duplicated_columns = get_drop_duplicated_columns(transaction_data)
+    df_insert_indexes = df_insert.set_index(drop_duplicated_columns).index
+    df_old_indexes = df_old.set_index(drop_duplicated_columns).index
+
+    df_insert = df_insert[~df_insert_indexes.isin(df_old_indexes)]
+    return df_insert
+
+
+def remove_unused_columns_and_add_missing_columns(
+    df: pd.DataFrame,
+    required_columns: list[str] | pd.Index,
+) -> pd.DataFrame:
+    columns = set(required_columns)
+
+    # keep only required columns, in their given order (a raw set is not a valid pandas indexer)
+    intersected_columns = [col for col in required_columns if col in df.columns]
+    df = df[intersected_columns]
+
+    # add missing columns
+    for missing_column in columns.difference(df.columns):
+        df[missing_column] = None
+
+    return df
+
+
+def get_drop_duplicated_columns(transaction_data: TransactionData) -> list[str]:
+    def is_good_column(column: CfgProcessColumn) -> bool:
+        # do not include function columns
+        if column.function_details:
+            return False
+        return True
+
+    good_columns = [col.bridge_column_name for col in filter(is_good_column, transaction_data.cfg_process_columns)]
+
+    return good_columns
diff --git a/ap/setting_module/services/backup_and_restore/jobs.py b/ap/setting_module/services/backup_and_restore/jobs.py
new file mode 100644
index 0000000..971d396
--- /dev/null
+++ b/ap/setting_module/services/backup_and_restore/jobs.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from datetime import datetime
+
+from apscheduler.triggers.date import DateTrigger
+from pytz import utc
+
+from ap import scheduler
+from ap.api.trace_data.services.proc_link import add_gen_proc_link_job
+from ap.common.constants import JobType
+from ap.common.scheduler import scheduler_app_context
+from
ap.setting_module.services.background_process import send_processing_info +from ap.setting_module.services.backup_and_restore.backup import backup_db_data +from ap.setting_module.services.backup_and_restore.restore import restore_db_data + + +def add_backup_data_job(process_id, start_time, end_time): + job_name = job_id = f'{JobType.USER_BACKUP_DATABASE.name}_{process_id}' + dic_params = { + '_job_id': job_id, + '_job_name': job_name, + 'process_id': process_id, + 'start_time': start_time, + 'end_time': end_time, + } + scheduler.add_job( + job_id, + backup_data_job, + trigger=DateTrigger(run_date=datetime.now().astimezone(utc), timezone=utc), + replace_existing=True, + kwargs=dic_params, + ) + + +@scheduler_app_context +def backup_data_job(_job_id, _job_name, *args, **kwargs): + """ + :param _job_id: + :param _job_name: + :param args: + :param kwargs: + :return: + """ + process_id = kwargs.get('process_id') + gen = backup_db_data(*args, **kwargs) + send_processing_info(gen, JobType.USER_BACKUP_DATABASE, process_id=process_id) + add_gen_proc_link_job(process_id=process_id, is_user_request=True) + + +def add_restore_data_job(process_id, start_time, end_time): + job_name = job_id = f'{JobType.USER_RESTORE_DATABASE.name}_{process_id}' + dic_params = { + '_job_id': job_id, + '_job_name': job_name, + 'process_id': process_id, + 'start_time': start_time, + 'end_time': end_time, + } + scheduler.add_job( + job_id, + restore_data_job, + trigger=DateTrigger(run_date=datetime.now().astimezone(utc), timezone=utc), + replace_existing=True, + kwargs=dic_params, + ) + + +@scheduler_app_context +def restore_data_job(_job_id, _job_name, *args, **kwargs): + """ + :param _job_id: + :param _job_name: + :param args: + :param kwargs: + :return: + """ + process_id = kwargs.get('process_id') + gen = restore_db_data(*args, **kwargs) + send_processing_info(gen, JobType.USER_RESTORE_DATABASE, process_id=process_id) + add_gen_proc_link_job(process_id=process_id, is_user_request=True) diff --git a/ap/setting_module/services/backup_and_restore/restore.py b/ap/setting_module/services/backup_and_restore/restore.py new file mode 100644 index 0000000..2064d24 --- /dev/null +++ b/ap/setting_module/services/backup_and_restore/restore.py @@ -0,0 +1,100 @@ +import pandas as pd + +from ap import DATE_FORMAT_STR +from ap.api.setting_module.services.data_import import ( + gen_bulk_insert_sql, + get_insert_params, + insert_data, + save_proc_data_count_multiple_dfs, +) +from ap.common.pydn.dblib.db_proxy import DbProxy, gen_data_source_of_universal_db +from ap.common.services.sse import AnnounceEvent, background_announcer +from ap.setting_module.services.backup_and_restore.backup_file_manager import BackupKey, BackupKeysManager +from ap.setting_module.services.backup_and_restore.duplicated_check import ( + get_df_insert_and_duplicated_ids, + remove_unused_columns_and_add_missing_columns, +) +from ap.trace_data.transaction_model import TransactionData + + +def restore_db_data(process_id, start_time, end_time): + backup_keys_manager = BackupKeysManager(process_id=process_id, start_time=start_time, end_time=end_time) + + # TODO: get min max date in database before running this + backup_keys = backup_keys_manager.get_backup_keys_by_day() + total_backup_keys = len(backup_keys) + + if total_backup_keys == 0: + # nothing to do + yield 100 + + # create transaction outside to avoid looping, we only modify `t_table` so this is fine + transaction_data = TransactionData(process_id) + for i, backup_key in enumerate(backup_keys): + 
restore_db_data_from_file(transaction_data, backup_keys_manager, backup_key) + yield (i + 1) * 100 / total_backup_keys + + background_announcer.announce(True, AnnounceEvent.RESTORE_DATA_FINISHED.name) + + +def restore_db_data_from_file( + transaction_data: TransactionData, + backup_keys_manager: BackupKeysManager, + backup_key: BackupKey, +): + df_file = backup_key.read_file() + + if df_file.empty: + backup_key.delete_file() + return + + with DbProxy( + gen_data_source_of_universal_db(proc_id=transaction_data.process_id), + True, + immediate_isolation_level=True, + ) as db_instance: + get_date_col = transaction_data.getdate_column.bridge_column_name + df_file[get_date_col] = pd.to_datetime(df_file[get_date_col]) + + is_between = (df_file[get_date_col] >= backup_keys_manager.get_start_time(backup_key)) & ( + df_file[get_date_col] < backup_keys_manager.get_end_time(backup_key) + ) + + df_insert = df_file[is_between] + if df_insert.empty: + # nothing to insert + return + + # get data from db to drop duplicates + df_from_db: pd.DataFrame = transaction_data.get_transaction_by_time_range( + db_instance, + backup_keys_manager.get_start_time(backup_key), + backup_keys_manager.get_end_time(backup_key), + ) + + # overwrite columns from database to file + df_insert = remove_unused_columns_and_add_missing_columns(df_insert, df_from_db.columns) + + df_insert = get_df_insert_and_duplicated_ids( + transaction_data, + df_insert=df_insert, + df_old=df_from_db, + ) + + if not df_insert.empty: + sql_params = get_insert_params(df_insert.columns) + sql_insert = gen_bulk_insert_sql(transaction_data.table_name, *sql_params) + # need to convert to correct datetime format before inserting to database + df_insert[get_date_col] = df_insert[get_date_col].dt.strftime(DATE_FORMAT_STR) + insert_data(db_instance, sql_insert, df_insert.values.tolist()) + + save_proc_data_count_multiple_dfs( + db_instance, + proc_id=backup_key.process_id, + get_date_col=get_date_col, + dfs_push_to_db=df_insert, + dfs_pop_from_file=df_file[is_between], + ) + + df_file_remaining = df_file[~is_between] + backup_key.write_file(df_file_remaining) diff --git a/ap/setting_module/services/process_config.py b/ap/setting_module/services/process_config.py index 6205d1b..9555e02 100644 --- a/ap/setting_module/services/process_config.py +++ b/ap/setting_module/services/process_config.py @@ -1,20 +1,28 @@ +from __future__ import annotations + from functools import lru_cache -from typing import Union +import pandas as pd from sqlalchemy.orm import scoped_session from ap.api.common.services.show_graph_database import preprocess_column +from ap.api.setting_module.services.software_workshop_etl_services import ( + child_equips_table, + get_processes_stmt, +) from ap.api.setting_module.services.v2_etl_services import is_v2_data_source, save_unused_columns from ap.common.constants import ( ID, DataColumnType, DataType, DBType, + MasterDBType, ProcessCfgConst, ) from ap.common.pydn.dblib.db_proxy import DbProxy, gen_data_source_of_universal_db from ap.common.pydn.dblib.postgresql import PostgreSQL from ap.common.services.jp_to_romaji_utils import to_romaji +from ap.common.services.normalization import normalize_list from ap.equations.utils import get_all_functions_info from ap.setting_module.models import ( CfgDataSource, @@ -26,6 +34,7 @@ crud_config, insert_or_update_config, make_session, + use_meta_session, ) from ap.setting_module.schemas import ( FilterSchema, @@ -91,99 +100,139 @@ def get_all_visualizations(): # return process_schema.dump(processes, 
many=True) -def create_or_update_process_cfg(proc_data, unused_columns): +@use_meta_session() +def create_or_update_process_cfg(proc_data, unused_columns, meta_session: scoped_session = None): need_to_deleted_cols = [] transaction_ds = gen_data_source_of_universal_db(proc_data[ID]) # get existing columns from transaction table if proc_data[ID] and len(unused_columns) and is_v2_data_source(process_id=proc_data[ID]): with DbProxy(transaction_ds, True, immediate_isolation_level=False): - origin_proc_data = TransactionData(proc_data[ID]) + origin_proc_data = TransactionData(proc_data[ID], meta_session=meta_session) for col in unused_columns: col_dat = origin_proc_data.get_cfg_column_by_name(col, is_compare_bridge_column_name=False) if col_dat: need_to_deleted_cols.append(col_dat.bridge_column_name) - with make_session() as meta_session: - # save process config - process: CfgProcess = insert_or_update_config( - meta_session=meta_session, - data=proc_data, - key_names=CfgProcess.id.key, - model=CfgProcess, - ) - meta_session.commit() - - # create column alchemy object + assign process id - columns = proc_data[ProcessCfgConst.PROC_COLUMNS.value] - for proc_column in columns: - # transform data type - proc_column.predict_type = proc_column.data_type - if proc_column.data_type in (DataType.EU_REAL_SEP.name, DataType.REAL_SEP.name): - proc_column.data_type = DataType.REAL.name - if proc_column.data_type in (DataType.EU_INTEGER_SEP.name, DataType.INTEGER_SEP.name): - proc_column.data_type = DataType.INTEGER.name - - proc_column.process_id = process.id - # transform english name - if not proc_column.name_en: - proc_column.name_en = to_romaji(proc_column.column_name) - - # modify column_type (DataColumnType) for main::date and main::time - # if proc_column.data_type in DataColumnType.get_keys(): - # proc_column.column_type = DataColumnType[proc_column.data_type].value - - # re-fill function columns to avoid deleting it - function_columns = CfgProcessColumn.get_by_column_types( - [DataColumnType.GENERATED_EQUATION.value], - proc_ids=[process.id], - session=meta_session, - ) - columns.extend(function_columns) - - # save columns - crud_config( - meta_session=meta_session, - data=columns, - parent_key_names=CfgProcessColumn.process_id.key, - key_names=CfgProcessColumn.column_name.key, - model=CfgProcessColumn, - ) + + # save process config + process: CfgProcess = insert_or_update_config( + meta_session=meta_session, + data=proc_data, + key_names=CfgProcess.id.key, + model=CfgProcess, + autocommit=False, + ) + + # create column alchemy object + assign process id + columns = proc_data[ProcessCfgConst.PROC_COLUMNS.value] + for proc_column in columns: + # transform data type + proc_column.predict_type = proc_column.data_type + if proc_column.data_type in (DataType.EU_REAL_SEP.name, DataType.REAL_SEP.name): + proc_column.data_type = DataType.REAL.name + if proc_column.data_type in (DataType.EU_INTEGER_SEP.name, DataType.INTEGER_SEP.name): + proc_column.data_type = DataType.INTEGER.name + + proc_column.process_id = process.id + # transform english name + if not proc_column.name_en: + proc_column.name_en = to_romaji(proc_column.column_name) + + # re-fill function columns to avoid deleting it + function_columns = CfgProcessColumn.get_by_column_types( + [DataColumnType.GENERATED_EQUATION.value], + proc_ids=[process.id], + session=meta_session, + ) + columns.extend(function_columns) + + # save columns + crud_config( + meta_session=meta_session, + data=columns, + parent_key_names=CfgProcessColumn.process_id.key, + 
+        key_names=CfgProcessColumn.column_name.key,
+        model=CfgProcessColumn,
+        autocommit=False,
+    )

     # save uncheck cols of v2 only
-    save_unused_columns(process.id, unused_columns)
+    save_unused_columns(process.id, unused_columns, meta_session=meta_session)

     # create table transaction_process
     with DbProxy(transaction_ds, True, immediate_isolation_level=True) as db_instance:
-        trans_data = TransactionData(process.id)
+        trans_data = TransactionData(process.id, meta_session=meta_session)
         trans_data.create_table(db_instance)
         # delete unused columns
         if len(need_to_deleted_cols):
             # todo: upgrade sqlite3 into 3.40 to delete column
             trans_data.delete_columns(db_instance, need_to_deleted_cols)
-    # cols = [{'name': col.bridge_column_name, 'type': col.data_type} for col in columns]
-    # with DbProxy(gen_data_source_of_universal_db(proc_id), True, immediate_isolation_level=True) as db_instance:
-    #     # create table for transaction data
-    #     db_instance.create_table(gen_transaction_table_name(process.id), cols)

     return process


-def query_database_tables(db_id):
+def query_database_tables(db_id, process=None):
     with make_session() as mss:
         data_source = mss.query(CfgDataSource).get(db_id)
-        if not data_source:
-            return None
-
-        output = {'ds_type': data_source.type, 'tables': []}
-        # return None if CSV
-        if data_source.type.lower() in [DBType.CSV.name.lower(), DBType.V2.name.lower()]:
-            return output
-        updated_at = data_source.db_detail.updated_at
+        if not data_source:
+            return None
+
+        output = {'ds_type': data_source.type, 'tables': [], 'process_factids': [], 'process_factnames': []}
+        if process:
+            # if the process is already registered, return only its selected table
+            output['tables'] = [process.get('table_name', '')]
+            output['process_factids'] = [process.get('process_factid', '')]
+            output['process_factnames'] = [process.get('process_factname', '')]
+            return output
+        # CSV/V2 data sources have no database tables to list
+        if data_source.type.lower() in [DBType.CSV.name.lower(), DBType.V2.name.lower()]:
+            return output
+
+        updated_at = data_source.db_detail.updated_at
+        if data_source.type == DBType.SOFTWARE_WORKSHOP.name:
+            tables, process_fact_ids, process_fact_names = get_list_process_software_workshop(data_source.id)
+            output['tables'] = tables
+            output['process_factids'] = process_fact_ids
+            output['process_factnames'] = process_fact_names
+        else:
         output['tables'] = get_list_tables_and_views(data_source.id, updated_at)
     return output


+def query_database_tables_core(data_source: CfgDataSource, table_prefix):
+    if not data_source:
+        return None
+
+    detail_master_types = []
+    output = {'ds_type': data_source.type, 'master_type': data_source.master_type, 'tables': []}
+    # CSV/V2 data sources have no database tables to list
+    if data_source.type.lower() in [DBType.CSV.name.lower(), DBType.V2.name.lower()]:
+        if data_source.csv_detail.directory:
+            detail_master_types.append(MasterDBType.V2.name)
+        if data_source.csv_detail.second_directory:
+            detail_master_types.append(MasterDBType.V2_HISTORY.name)
+        output['detail_master_types'] = detail_master_types
+        return output
+
+    updated_at = data_source.db_detail.updated_at
+    tables = get_list_tables_and_views(data_source, updated_at)
+    partitions = None
+    # Edge server does not implement the EFA logic
+    # if data_source.master_type == MasterDBType.EFA.name:
+    #     if table_prefix or data_source.is_direct_import:
+    #         table_name, partitions, _ = get_efa_partitions(tables, table_prefix)
+    #         tables = [table_name]
+    #     else:
+    #         partitions = []
+    #         tables = EFA_TABLES
+
+    output['tables'] = tables
+    output['partitions'] = partitions
+
+    return output
+
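+# query_database_tables_core returns, for example (illustrative values only):
+#   RDB source:    {'ds_type': ..., 'master_type': ..., 'tables': ['table_1', 'view_1'], 'partitions': None}
+#   CSV/V2 source: {'ds_type': 'CSV', 'master_type': ..., 'tables': [], 'detail_master_types': ['V2', 'V2_HISTORY']}
+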
 
 
 def convert2serialize(obj):
@@ -241,7 +298,7 @@ def get_ct_range(proc_id, columns):
     return []
 
 
-def gen_function_column(process_columns, session: Union[scoped_session, PostgreSQL]):
+def gen_function_column(process_columns, session: scoped_session | PostgreSQL):
     for process_column in process_columns:
         if process_column.function_config is None:
             continue
diff --git a/ap/setting_module/services/register_from_file.py b/ap/setting_module/services/register_from_file.py
index e747c75..2914156 100644
--- a/ap/setting_module/services/register_from_file.py
+++ b/ap/setting_module/services/register_from_file.py
@@ -1,10 +1,52 @@
 # import tkinter as tk
 # from tkinter import filedialog
+# import datetime
+import datetime
 
-from ap.common.common_utils import API_DATETIME_FORMAT, convert_time
-from ap.common.constants import PagePath
+from sqlalchemy.orm import scoped_session
+
+from ap.api.common.services.utils import get_specific_v2_type_based_on_column_names
+from ap.api.setting_module.services.polling_frequency import add_import_job
+from ap.api.setting_module.services.show_latest_record import (
+    gen_preview_data_check_dict,
+    get_latest_records,
+    preview_csv_data,
+)
+from ap.api.setting_module.services.v2_etl_services import get_preview_processes_v2
+from ap.common.common_utils import (
+    API_DATETIME_FORMAT,
+    add_months,
+    convert_time,
+    get_month_diff,
+    get_sorted_files,
+    is_empty,
+)
+from ap.common.constants import (
+    FILE_NAME,
+    MAXIMUM_V2_PREVIEW_ZIP_FILES,
+    CacheType,
+    DataColumnType,
+    DBType,
+    MasterDBType,
+    PagePath,
+    RelationShip,
+)
+from ap.common.memoize import set_all_cache_expired
 from ap.common.pydn.dblib.db_proxy import DbProxy, gen_data_source_of_universal_db
-from ap.setting_module.models import CfgProcess
+from ap.common.services.jp_to_romaji_utils import to_romaji
+
+# from ap.common.pydn.dblib.postgresql import PostgreSQL
+from ap.setting_module.models import (
+    CfgCsvColumn,
+    CfgDataSource,
+    CfgDataSourceCSV,
+    CfgProcess,
+    crud_config,
+    insert_or_update_config,
+    make_session,
+)
+from ap.setting_module.schemas import DataSourceSchema, ProcessSchema
+from ap.setting_module.services.process_config import create_or_update_process_cfg
 from ap.trace_data.transaction_model import TransactionData
 
 # def browse(resource_type):
@@ -19,23 +61,594 @@
 #     return f_path, resource_type
 
-def get_chm_url_to_redirect(request, proc_id):
-    proc_cfg = CfgProcess.get_proc_by_id(proc_id)
-
-    target_col_ids = [str(col.id) for col in 
proc_cfg.columns if col.is_serial_no or col.is_get_date] - target_col_ids = ','.join(target_col_ids) +def get_url_to_redirect(request, proc_ids, page): + col_ids = [] + for proc_id in proc_ids: + proc_cfg = CfgProcess.get_proc_by_id(proc_id) + col_ids.extend([str(col.id) for col in proc_cfg.columns if col.is_serial_no or col.is_get_date]) + target_col_ids = ','.join(col_ids) # get start_datetime and end_datetime - trans_data = TransactionData(proc_cfg.id) - with DbProxy(gen_data_source_of_universal_db(proc_id), True, immediate_isolation_level=True) as db_instance: + trans_data = TransactionData(proc_ids[0]) + with DbProxy(gen_data_source_of_universal_db(proc_ids[0]), True, immediate_isolation_level=True) as db_instance: max_datetime = trans_data.get_max_date_time_by_process_id(db_instance) min_datetime = trans_data.get_min_date_time_by_process_id(db_instance) host_url = request.host_url - page = PagePath.CHM.value + month_diff = get_month_diff(min_datetime, max_datetime) + if page in PagePath.FPP.value and month_diff > 1: + min_datetime = add_months(max_datetime, -1) min_datetime = convert_time(min_datetime, format_str=API_DATETIME_FORMAT) max_datetime = convert_time(max_datetime, format_str=API_DATETIME_FORMAT) + end_procs = ','.join([str(_id) for _id in proc_ids]) + # get target page from bookmark - target_url = f'{host_url}{page}?columns={target_col_ids}&start_datetime={min_datetime}&end_datetime={max_datetime}&end_procs=[{proc_id}]&load_gui_from_url=1&page=chm' # noqa + target_url = f'{host_url}{page}?columns={target_col_ids}&start_datetime={min_datetime}&end_datetime={max_datetime}&end_procs=[{end_procs}]&load_gui_from_url=1&page={page.split("/")[1]}' # noqa return target_url + + +# def get_latest_records_core(dic_preview: dict, limit: int = 5): +# cols_with_types = [] +# headers = normalize_list(dic_preview.get('header')) +# headers = [normalize_str(col) for col in headers] +# data_types = dic_preview.get('dataType') +# same_values = dic_preview.get('same_values') +# is_v2_history = dic_preview.get('v2_type') == DBType.V2_HISTORY +# if headers and data_types: +# column_raw_name = dic_preview.get('org_headers') +# cols_with_types = gen_cols_with_types(headers, data_types, same_values, is_v2_history, column_raw_name) +# cols = headers +# +# # get rows +# df_rows = dic_preview.get('content', None) +# previewed_files = dic_preview.get('previewed_files') +# +# # change name if romaji cols is duplicated +# cols_with_types, cols_duplicated = change_duplicated_columns(cols_with_types) +# has_ct_col = True +# dummy_datetime_idx = None +# if DataType.DATETIME.value not in data_types: +# dummy_datetime_idx = 0 +# cols_with_types.insert( +# dummy_datetime_idx, +# { +# 'column_name': DATETIME_DUMMY, +# 'data_type': DataType.DATETIME.name, +# 'romaji': DATETIME_DUMMY, +# 'is_date': True, +# 'check_same_value': {'is_null': False, 'is_same': False}, +# }, +# ) +# cols.insert(dummy_datetime_idx, DATETIME_DUMMY) +# if is_valid_list(df_rows): +# df_rows = gen_dummy_datetime(df_rows) +# +# if DATETIME_DUMMY in cols or DataType.DATETIME.value not in data_types: +# dummy_datetime_idx = 0 +# has_ct_col = False +# +# rows = [] +# if is_valid_list(df_rows): +# data_type_by_cols = {} +# for col_data in cols_with_types: +# data_type_by_cols[col_data['column_name']] = col_data['data_type'] +# # convert to correct dtypes +# for col in df_rows.columns: +# try: +# if data_type_by_cols[col] == DataType.INTEGER.name: +# df_rows[col] = df_rows[col].astype('float64').astype('Int64') +# +# if data_type_by_cols[col] == 
DataType.TEXT.name: +# # fill na to '' for string column +# df_rows[col] = df_rows[col].astype('string').fillna('') +# except Exception: +# continue +# rows = transform_df_to_rows(cols, df_rows, limit) +# +# # Set raw data type base on data_type +# for col_data in cols_with_types: +# if col_data['data_type'] == DataType.DATETIME.name: +# col_data['raw_data_type'] = RawDataTypeDB.DATETIME.value +# if col_data['data_type'] == DataType.DATE.name: +# col_data['raw_data_type'] = RawDataTypeDB.DATE.value +# if col_data['data_type'] == DataType.TIME.name: +# col_data['raw_data_type'] = RawDataTypeDB.TIME.value +# if col_data['data_type'] == DataType.BIG_INT.name: +# col_data['raw_data_type'] = RawDataTypeDB.BIG_INT.value +# elif col_data['data_type'] == DataType.INTEGER.name: +# if col_data['is_big_int']: +# col_data['raw_data_type'] = RawDataTypeDB.BIG_INT.value +# else: +# col_data['raw_data_type'] = RawDataTypeDB.INTEGER.value +# elif col_data['data_type'] == DataType.REAL.name: +# col_data['raw_data_type'] = RawDataTypeDB.REAL.value +# elif col_data['data_type'] == DataType.BOOLEAN.name: +# col_data['raw_data_type'] = RawDataTypeDB.BOOLEAN.value +# elif col_data['data_type'] == DataType.TEXT.name: +# col_data['raw_data_type'] = RawDataTypeDB.TEXT.value +# +# is_rdb = False +# return cols_with_types, rows, cols_duplicated, previewed_files, has_ct_col, dummy_datetime_idx, is_rdb + + +def get_proc_config_infos(dic_preview: dict, limit: int = 5, is_v2=False, process_names=[]) -> dict: + process_configs = [] + if not is_v2: + process_names = [None] + + is_file_path = dic_preview['is_file_path'] + datasource_config = { + 'name': datetime.datetime.utcnow().timestamp().__str__(), + 'type': DBType.V2.name if is_v2 else DBType.CSV.name, + 'master_type': DBType.V2.name if is_v2 else DBType.CSV.name, + 'csv_detail': { + 'directory': dic_preview.get('file_name') if is_file_path else dic_preview.get('directory'), + 'delimiter': 'Auto', + 'csv_columns': None, + 'is_file_path': is_file_path, + }, + } + + for process_name in process_names: + latest_rec = get_latest_records( + None, + None, + file_name=dic_preview.get('file_name'), + directory=dic_preview.get('directory'), + limit=limit, + is_v2_datasource=is_v2, + filtered_process_name=process_name, + ) + if not latest_rec: + return { + 'cols': [], + 'rows': [], + 'cols_duplicated': [], + 'fail_limit': None, + 'has_ct_col': None, + 'dummy_datetime_idx': None, + 'is_rdb': False, + } + + ( + cols_with_types, + rows, + cols_duplicated, + previewed_files, + has_ct_col, + dummy_datetime_idx, + is_rdb, + ) = latest_rec + dic_preview_limit = gen_preview_data_check_dict(rows, previewed_files) + data_group_type = {key: DataColumnType[key].value for key in DataColumnType.get_keys()} + process_config = { + 'origin_name': process_name, + 'name_jp': process_name, + 'name_en': to_romaji(process_name) if process_name else process_name, + 'name_local': '', + 'cols': cols_with_types, + 'rows': rows, + 'cols_duplicated': cols_duplicated, + 'fail_limit': dic_preview_limit, + 'has_ct_col': has_ct_col, + 'dummy_datetime_idx': None if is_rdb else dummy_datetime_idx, + 'data_group_type': data_group_type, + 'is_rdb': is_rdb, + } + process_configs.append(process_config) + + datasource_config['csv_detail']['csv_columns'] = cols_with_types + + return { + 'processConfigs': process_configs, + 'datasourceConfig': datasource_config, + } + + +def proc_config_infos_for_v2(dic_preview: dict) -> dict: + [cols_with_types, _, _, _, _, _, _] = get_latest_records(None, 0) + is_file_path = 
dic_preview['is_file_path'] + data_src_dict = { + 'name': datetime.datetime.utcnow().timestamp().__str__(), + 'type': DBType.CSV.name, + 'master_type': MasterDBType.V2.name, + 'csv_detail': { + 'directory': dic_preview['file_name'] if is_file_path else dic_preview.get('directory'), + 'delimiter': 'Auto', + 'csv_columns': cols_with_types, + 'is_file_path': is_file_path, + }, + } + proc_config_infos = [] + + # with BridgeStationModel.get_db_proxy() as db_instance: # type: PostgreSQL + # # Generate data source + # data_src: CfgDataSource = DataSourceSchema().load(data_src_dict) + # data_source_id = generate_data_source(db_instance, data_src) + # generate_data_source_csv(db_instance, data_source_id, data_src) + # generate_csv_columns(db_instance, data_source_id, data_src) + # + # # Generate data table + # dict_tables = query_database_tables_db_instance(data_source_id, db_instance=db_instance) + # detail_master_types = dict_tables.get('detail_master_types') + # cfg_data_source, cfg_data_tables = generate_data_table( + # db_instance, + # data_source_id, + # detail_master_types=detail_master_types, + # ) + # + # temp_process_ids = [] + # for cfg_data_table in cfg_data_tables: + # # Do scan file + # generate_csv_management(db_instance, cfg_data_table.id) + # + # # Do scan master + # generate_master_data(db_instance, cfg_data_table.id) + # + # # Do scan data type and gen process config + # generate_data_type(db_instance, cfg_data_table.id) + # + # # Do get process config info and data sample + # process_id_with_data_table_ids = MappingFactoryMachine.get_process_id_with_data_table_id( + # db_instance, + # [cfg_data_table.id], + # ) + # process_ids = {x.get('process_id') for x in process_id_with_data_table_ids} + # temp_process_ids.extend(process_ids) + # for process_id in process_ids: + # proc_info_dict = get_process_config_info(process_id, db_instance=db_instance) + # proc_config_infos.append(proc_info_dict) + # + # # Do remove pickle sample file + # for process_id in temp_process_ids: + # delete_preview_data_file_folder(process_id) + # + # db_instance.connection.rollback() + + return { + 'processConfigs': proc_config_infos, + 'datasourceConfig': data_src_dict, + } + + +def get_latest_records_for_register_by_file(file_name: str = None, directory: str = None, limit: int = 5): + delimiter = 'Auto' + skip_head = None + etl_func = '' + + dic_preview = preview_csv_data( + directory, + etl_func, + delimiter, + limit, + return_df=True, + max_records=1000, + file_name=file_name, + skip_head=skip_head, + ) + + dic_preview['is_file_path'] = file_name is not None + column_raw_name = dic_preview.get('org_headers') + master_type = get_specific_v2_type_based_on_column_names(column_raw_name) + if master_type is not None: + # In case of V2 + sorted_files = [file_name] if file_name else get_sorted_files(directory) + v2_process_names = get_preview_processes_v2( + sorted_files, + maximum_files=MAXIMUM_V2_PREVIEW_ZIP_FILES, + ) + data_src_dict = get_proc_config_infos(dic_preview, limit=limit, is_v2=True, process_names=v2_process_names) + data_src_dict['datasourceConfig']['detail_master_type'] = master_type + return data_src_dict + + # In case of OTHER + return get_proc_config_infos(dic_preview, limit=limit) + + +def generate_data_source( + meta_session: scoped_session, + request_data_source: CfgDataSource, +) -> CfgDataSource: + return insert_or_update_config( + meta_session, + request_data_source, + exclude_columns=[CfgDataSource.order.key], + autocommit=False, + ) + + +def generate_data_source_csv( + meta_session: 
scoped_session, + request_data_source: CfgDataSource, + cfg_data_source: CfgDataSource, +) -> CfgDataSourceCSV: + csv_columns = request_data_source.csv_detail.csv_columns + csv_columns = [col for col in csv_columns if not is_empty(col.column_name)] + request_data_source.csv_detail.csv_columns = csv_columns + return insert_or_update_config( + meta_session, + request_data_source.csv_detail, + parent_obj=cfg_data_source, + parent_relation_key=CfgDataSource.csv_detail.key, + parent_relation_type=RelationShip.ONE, + autocommit=False, + ) + + +def generate_csv_columns( + meta_session: scoped_session, + request_data_source: CfgDataSource, + cfg_data_source_csv: CfgDataSourceCSV, +): + crud_config( + meta_session, + request_data_source.csv_detail.csv_columns, + CfgCsvColumn.data_source_id.key, + CfgCsvColumn.column_name.key, + parent_obj=cfg_data_source_csv, + parent_relation_key=CfgDataSourceCSV.csv_columns.key, + parent_relation_type=RelationShip.MANY, + autocommit=False, + ) + + +# def generate_data_table( +# db_instance: PostgreSQL, +# data_source_id: int, +# proc_data: ProcessSchema = None, +# detail_master_types=None, +# ) -> tuple[BSCfgDataSource, list[BSCfgDataTable]]: +# # insert cfg process & cfg process column +# serial_col = None +# datetime_col = None +# for col in (proc_data or {'columns': []}).get('columns'): +# if col.is_get_date: +# datetime_col = col.column_raw_name +# elif col.is_serial_no: +# serial_col = col.column_raw_name +# +# # Gen data table +# cfg_data_source = BSCfgDataSource( +# BSCfgDataSource.get_by_id(db_instance, data_source_id), +# db_instance=db_instance, +# is_cascade=True, +# ) +# cfg_data_source, cfg_data_tables = gen_config_data_db_instance( +# cfg_data_source, +# serial_col, +# datetime_col, +# None, +# None, +# None, +# detail_master_types, +# db_instance=db_instance, +# ) # type: BSCfgDataSource, list[BSCfgDataTable] +# +# for cfg_data_table in cfg_data_tables: +# get_n_save_partition_range_time_from_factory_db_db_instance( +# cfg_data_table, +# is_scan=True, +# db_instance=db_instance, +# ) +# +# return cfg_data_source, cfg_data_tables +# +# +# def generate_csv_management(db_instance: PostgreSQL, data_table_id: int): +# split_cols = BSCfgDataTableColumn.get_split_columns(db_instance, data_table_id) +# columns = BSCfgDataTableColumn.get_column_names_by_data_group_types(db_instance, data_table_id, split_cols) +# +# # Do scan file +# scan_files_generator = scan_files(data_table_id, columns, db_instance=db_instance) +# list(scan_files_generator) +# +# +# def generate_master_data(db_instance: PostgreSQL, data_table_id: int): +# save_scan_master_target_files(db_instance, data_table_id) +# scan_master_generator = scan_master(data_table_id, db_instance=db_instance) +# list(scan_master_generator) +# +# +# def generate_data_type(db_instance: PostgreSQL, data_table_id: int): +# scan_data_type_generator = scan_data_type(data_table_id, db_instance=db_instance) +# list(scan_data_type_generator) +# +# +# def get_all_process_ids(db_instance: PostgreSQL, data_table_id: int) -> set[int]: +# process_id_rows = MappingFactoryMachine.get_process_id_with_data_table_id(db_instance, [data_table_id]) +# return {row.get(MData.Columns.process_id.name) for row in process_id_rows} +# +# +# def update_process_infos(db_instance: PostgreSQL, process_ids: set[int], proc_configs: list[dict]): +# for process_id in process_ids: +# proc = BSCfgProcess.get_by_process_id(db_instance, process_id, is_cascade_column=True) +# for request_proc_config in proc_configs: +# proc_config = 
request_proc_config['proc_config'] +# if proc_config.get('origin_name_en', '') == proc.name_en or proc_config.get('name', '') == proc.name: +# del proc_config['origin_name_en'] +# proc_config = ProcessSchema().load(proc_config) +# unused_columns = (request_proc_config.get('unused_columns') or {}).get('columns', []) +# unused_column_raw_names = [unused_column.get('column_raw_name') for unused_column in unused_columns] +# update_process_info( +# db_instance, +# process_id, +# proc, +# proc_config, +# unused_columns=unused_column_raw_names, +# ) +# break +# +# +# def update_process_info( +# db_instance: PostgreSQL, +# process_id: int, +# existing_process, +# request_process, +# unused_columns: list[str] = None, +# ): +# transaction_data_obj = TransactionData(process_id, db_instance=db_instance) +# transaction_data_obj.cast_data_type_for_columns(db_instance, existing_process, request_process) +# existing_process_columns = existing_process.columns +# +# # Update process names & flag is_show_file_name +# BSCfgProcess.update_by_conditions( +# db_instance, +# { +# BSCfgProcess.Columns.name.name: request_process.get('name'), +# BSCfgProcess.Columns.name_jp.name: request_process.get('name_jp'), +# BSCfgProcess.Columns.name_local.name: request_process.get('name_local'), +# BSCfgProcess.Columns.name_en.name: request_process.get('name_en'), +# BSCfgProcess.Columns.is_show_file_name.name: False, +# }, +# dic_conditions={BSCfgProcess.Columns.id.name: process_id}, +# ) +# +# # Delete column if it was uncheck +# for delete_column_name in {DataGroupType.FileName.name, *unused_columns}: +# target_column = next( +# filter( +# lambda column: column.column_raw_name == delete_column_name, +# existing_process_columns, +# ), +# None, +# ) +# +# dic_conditions = {BSCfgProcessColumn.Columns.id.name: target_column.id} +# BSCfgProcessColumn.delete_by_condition(db_instance, dic_conditions, mode=0) +# MData.update_by_conditions(db_instance, {MData.Columns.is_hide.name: True}, dic_conditions=dic_conditions) +# +# # Update process columns +# for request_process_column in request_process['columns']: +# existing_process_column = next( +# filter( +# lambda column: column.column_raw_name == request_process_column.column_raw_name, +# existing_process_columns, +# ), +# None, +# ) +# +# if existing_process_column is None: +# raise Exception('Missing column -> It maybe be bug relate to database session!') +# +# dic_update_values = {} +# # Update name english +# if existing_process_column.name_en != request_process_column.name_en: +# dic_update_values[BSCfgProcessColumn.Columns.name_en.name] = ( +# request_process_column.name_en if not is_empty(request_process_column.name_en) else EMPTY_STRING +# ) +# +# # Update name japanese +# if existing_process_column.name_jp != request_process_column.name_jp: +# dic_update_values[BSCfgProcessColumn.Columns.name_jp.name] = ( +# request_process_column.name_jp if not is_empty(request_process_column.name_jp) else EMPTY_STRING +# ) +# +# # Update name local +# if existing_process_column.name_local != request_process_column.name_local: +# dic_update_values[BSCfgProcessColumn.Columns.name_local.name] = ( +# request_process_column.name_local if not is_empty(request_process_column.name_local) else EMPTY_STRING +# ) +# +# # Update format +# if existing_process_column.format != request_process_column.format: +# dic_update_values[BSCfgProcessColumn.Columns.format.name] = ( +# request_process_column.format if not is_empty(request_process_column.format) else None +# ) +# +# # Update raw data type and data 
type +# if existing_process_column.raw_data_type != request_process_column.raw_data_type: +# dic_update_values[BSCfgProcessColumn.Columns.raw_data_type.name] = request_process_column.raw_data_type +# dic_update_values[ +# BSCfgProcessColumn.Columns.data_type.name +# ] = RawDataTypeDB.convert_raw_data_type_to_data_type(request_process_column.raw_data_type) +# +# if dic_update_values: +# BSCfgProcessColumn.update_by_conditions( +# db_instance, +# dic_update_values, +# dic_conditions={BSCfgProcessColumn.Columns.id.name: existing_process_column.id}, +# ) +# +# +# def pull_csv_data(db_instance, cfg_data_table): +# job_info = JobInfo() +# job_type = JobType.PULL_CSV_DATA +# job_info.job_type = job_type +# etl_service = ETLController.get_etl_service(cfg_data_table, db_instance=db_instance) +# pull_csv_generator = pull_csv( +# JobType.PULL_CSV_DATA, +# etl_service, +# job_info, +# ignore_add_job=True, +# db_instance=db_instance, +# ) +# list(pull_csv_generator) + + +def generate_process_config(meta_session: scoped_session, proc_config, data_source_id: int) -> CfgProcess: + process_schema = ProcessSchema() + request_process_config = proc_config.get('proc_config') + request_unused_columns = [] # process not register, do need remove unused column + request_process_config['data_source_id'] = data_source_id + del request_process_config['origin_name'] + # New process, set id = None + request_process_config['id'] = None + proc_data = process_schema.load(request_process_config) + for column in proc_data['columns']: + # check is show file name + if column.column_raw_name == FILE_NAME: + proc_data[CfgProcess.is_show_file_name.name] = True + break + + cfg_process = create_or_update_process_cfg(proc_data, request_unused_columns, meta_session=meta_session) + + return cfg_process + + +def handle_importing_by_one_click(request): + register_by_file_request_id = request.get('request_id') + csv_info = request.get('csv_info') + request_proc_configs = request.get('proc_configs') + + # Detect truly detail master type (There are only 2 types: V2 or V2_HISTORY) + detail_master_type = csv_info.get('detail_master_type', MasterDBType.OTHERS.name) + if 'detail_master_type' in csv_info: + if detail_master_type in [MasterDBType.V2_HISTORY.name, MasterDBType.V2_MULTI_HISTORY.name]: + csv_info['csv_detail']['second_directory'] = csv_info['csv_detail']['directory'] + csv_info['csv_detail']['directory'] = None + detail_master_type = MasterDBType.V2_HISTORY.name + elif detail_master_type in [MasterDBType.V2.name, MasterDBType.V2_MULTI.name]: + detail_master_type = MasterDBType.V2.name + del csv_info['detail_master_type'] + + processes = [] + with make_session() as meta_session: + for request_proc_config in request_proc_configs: + # Do generate data source + request_data_source: CfgDataSource = DataSourceSchema().load(csv_info) + if request_data_source.type == MasterDBType.V2.name: + process_name = request_proc_config.get('proc_config').get('origin_name') + request_data_source.name = f'{request_data_source.name}_{process_name}' + request_data_source.csv_detail.process_name = process_name + + cfg_data_source = generate_data_source(meta_session, request_data_source) + cfg_data_source_csv = generate_data_source_csv(meta_session, request_data_source, cfg_data_source) + generate_csv_columns(meta_session, request_data_source, cfg_data_source_csv) + + # Do gen process config + process = generate_process_config(meta_session, request_proc_config, cfg_data_source.id) + processes.append(process) + + 
set_all_cache_expired(CacheType.CONFIG_DATA) + + # Add import data job + for process in processes: # type: CfgProcess + add_import_job( + process, + run_now=True, + is_user_request=True, + register_by_file_request_id=register_by_file_request_id, + ) + + return [proc.id for proc in processes] diff --git a/ap/setting_module/services/trace_config.py b/ap/setting_module/services/trace_config.py index 85dbe4b..ab6a4c7 100644 --- a/ap/setting_module/services/trace_config.py +++ b/ap/setting_module/services/trace_config.py @@ -41,8 +41,8 @@ def gen_cfg_trace(trace): trace_keys = [] for idx, self_col_id in enumerate(self_col_ids): target_col_id = target_col_ids[idx] - delta_time = delta_times[idx] - cut_off = cut_offs[idx] + delta_time = delta_times[idx] if delta_times else None + cut_off = cut_offs[idx] if cut_offs else None self_sub_from, self_sub_to = (None, None) target_sub_from, target_sub_to = (None, None) if self_sub_strs: @@ -56,8 +56,8 @@ def gen_cfg_trace(trace): target_column_id=int(target_col_id), target_column_substr_from=int(target_sub_from) if target_sub_from else 0, target_column_substr_to=int(target_sub_to) if target_sub_to else 0, - delta_time=int(delta_time) if delta_time else None, - cut_off=int(cut_off) if cut_off else None, + delta_time=float(delta_time) if delta_time else None, + cut_off=float(cut_off) if cut_off else None, ) trace_keys.append(trace_key) self_process_id = trace.get('from') diff --git a/ap/static/aggregate_plot/css/aggregate_plot.css b/ap/static/aggregate_plot/css/aggregate_plot.css index 6c9008a..4bac7eb 100644 --- a/ap/static/aggregate_plot/css/aggregate_plot.css +++ b/ap/static/aggregate_plot/css/aggregate_plot.css @@ -13,7 +13,6 @@ width: calc(50% - 15px); text-align: right; padding-right: 2px; - } .cyclic-calender-option-list { @@ -77,4 +76,4 @@ flex: 1; height: 100%; width: 100%; -} \ No newline at end of file +} diff --git a/ap/static/aggregate_plot/js/aggregate_plot.js b/ap/static/aggregate_plot/js/aggregate_plot.js index 8646c95..bd916ac 100644 --- a/ap/static/aggregate_plot/js/aggregate_plot.js +++ b/ap/static/aggregate_plot/js/aggregate_plot.js @@ -1,8 +1,3 @@ -/* eslint-disable no-restricted-syntax,prefer-arrow-callback */ -/* eslint-disable guard-for-in */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ const REQUEST_TIMEOUT = setRequestTimeOut(); const MAX_NUMBER_OF_GRAPH = 18; const MAX_NUMBER_OF_SENSOR = 18; @@ -12,10 +7,10 @@ let currentData = null; const graphStore = new GraphStore(); let scaleOption = scaleOptionConst.AUTO; const AGP_YAXIS_DISPLAY_MODES = { - COUNT: "count", - Y_AXIS_TOTAL: "yAxisTotal", - Y_AXIS_FACET: "yAxisFacet", -} + COUNT: 'count', + Y_AXIS_TOTAL: 'yAxisTotal', + Y_AXIS_FACET: 'yAxisFacet', +}; let yAxisDisplayMode = AGP_YAXIS_DISPLAY_MODES.COUNT; let useDivsArray = []; let useDivFromTo = []; @@ -32,7 +27,6 @@ const i18n = { outlier: $('#i18nOutlierVal').text(), }; - const formElements = { formID: '#traceDataForm', btnAddCondProc: '#btn-add-cond-proc', @@ -62,18 +56,12 @@ const calenderFormat = { 'yyyymmdd', 'yymmdd', 'mmdd', - 'dd' + 'dd', ], - group2: [ - 'yyyymm', - 'yymm', - 'mm', - 'yyyy', - 'yy' - ] + group2: ['yyyymm', 'yymm', 'mm', 'yyyy', 'yy'], }, 2: { - group1: [ + group1: [ 'yyyy-mm-dd_HH', 'yy-mm-dd_HH', 'mm-dd_HH', @@ -89,50 +77,44 @@ const calenderFormat = { 'dd_Fri', 'Fri', ], - group2: [ - 'yyyymm', - 'yymm', - 'mm', - 'yyyy', - 'yy' - ] + group2: ['yyyymm', 'yymm', 'mm', 'yyyy', 'yy'], }, 3: { group1: [ - "ffffh", - "ffffq", - "ffh", - 
"ffq", - "ffff", - "ff", - "h", - "q", - "ffffmm", - "ffmm", - "ww" + 'ffffh', + 'ffffq', + 'ffh', + 'ffq', + 'ffff', + 'ff', + 'h', + 'q', + 'ffffmm', + 'ffmm', + 'ww', ], }, 4: { group1: [ - "FY2022H1", - "FY2022Q1", - "FY22H1", - "FY22Q1", - "FY2022", - "FY22", - "H1", - "Q1", - "FY2022-mm", - "FY22-mm", - "Www", - "Www_mm-dd", - "yyyy_Www", - "yyyy_Www_mm-dd", - "yy_Www", - "yy_Www_mm-dd" - ] - } -} + 'FY2022H1', + 'FY2022Q1', + 'FY22H1', + 'FY22Q1', + 'FY2022', + 'FY22', + 'H1', + 'Q1', + 'FY2022-mm', + 'FY22-mm', + 'Www', + 'Www_mm-dd', + 'yyyy_Www', + 'yyyy_Www_mm-dd', + 'yy_Www', + 'yy_Www_mm-dd', + ], + }, +}; const AGP_EXPORT_URL = { CSV: { @@ -172,9 +154,14 @@ $(() => { onChangeDivInFacet(); }); - // add first condition process - const condProcItem = addCondProc(endProcs.ids, endProcs.names, '', formElements.formID, 'btn-add-cond-proc'); + const condProcItem = addCondProc( + endProcs.ids, + endProcs.names, + '', + formElements.formID, + 'btn-add-cond-proc', + ); condProcItem(); // click even of condition proc add button @@ -194,7 +181,6 @@ $(() => { formElements.divideOption.trigger('change'); renderCyclicCalenderModal(); - // validation initValidation(formElements.formID); @@ -213,7 +199,11 @@ $(() => { }, 2000); // validate and change to default and max value cyclic term - validateInputByNameWithOnchange(CYCLIC_TERM.DIV_NUM, { MAX: 1000, MIN: 1, DEFAULT: 120 }); + validateInputByNameWithOnchange(CYCLIC_TERM.DIV_NUM, { + MAX: 1000, + MIN: 1, + DEFAULT: 120, + }); initializeDateTimeRangePicker(); initializeDateTimePicker(); @@ -222,9 +212,9 @@ $(() => { const setScaleOption = (scaleOption = scaleOptionConst.AUTO) => { formElements.scaleOption.val(scaleOption); - formElements.yAxisPercentOrCount.val(AGP_YAXIS_DISPLAY_MODES.COUNT) + formElements.yAxisPercentOrCount.val(AGP_YAXIS_DISPLAY_MODES.COUNT); yAxisDisplayMode = AGP_YAXIS_DISPLAY_MODES.COUNT; -} +}; const getScaleOption = () => { return formElements.scaleOption.val(); @@ -235,8 +225,8 @@ const onChangeScaleOption = () => { const scale = getScaleOption(); scaleOption = scale; drawAGP(currentData, scaleOption, yAxisDisplayMode); - }) -} + }); +}; const showAgP = (clearOnFlyFilter = true) => { requestStartedAt = performance.now(); @@ -285,15 +275,19 @@ const collectInputAsFormData = (clearOnFlyFilter, autoUpdate = false) => { const compareType = formData.get('compareType'); if (compareType === divideOptions.cyclicCalender) { if (!formData.get(CYCLIC_TERM.DIV_OFFSET)) { - const offsetH = Number(divOffset.split(':')[0]) + Number(divOffset.split(':')[1]) / 60; + const offsetH = + Number(divOffset.split(':')[0]) + + Number(divOffset.split(':')[1]) / 60; formData.set(CYCLIC_TERM.DIV_OFFSET, offsetH.toString()); } // convert divFromTo from local to UTC - const divDates = divFromTo.map(date => toUTCDateTime(date, null, true)); + const divDates = divFromTo.map((date) => + toUTCDateTime(date, null, true), + ); formData.set('divDates', JSON.stringify(divDates)); - formData.set('divFormats', JSON.stringify(divFormats)) + formData.set('divFormats', JSON.stringify(divFormats)); } if (compareType !== divideOptions.cyclicCalender) { @@ -317,14 +311,18 @@ const transformColorsParams = (formData) => { formData.delete('colorVar'); // get colorVar from active GUI const colorVars = {}; - $('select[name=colorVar]').get().forEach(ele => { - const targetID = $(ele).data('target-var-id'); - const colorVal = $(ele).val(); - const isObjectiveVar = $(`input[name^=GET02_VALS_SELECT][value=${targetID}]`).prop('checked'); - if (colorVal && colorVal !== '' 
&& isObjectiveVar) { - colorVars[targetID] = colorVal; - } - }); + $('select[name=colorVar]') + .get() + .forEach((ele) => { + const targetID = $(ele).data('target-var-id'); + const colorVal = $(ele).val(); + const isObjectiveVar = $( + `input[name^=GET02_VALS_SELECT][value=${targetID}]`, + ).prop('checked'); + if (colorVal && colorVal !== '' && isObjectiveVar) { + colorVars[targetID] = colorVal; + } + }); formData.append('aggColorVar', JSON.stringify(colorVars)); return formData; @@ -332,8 +330,6 @@ const transformColorsParams = (formData) => { const queryDataAndShowAGP = (clearOnFlyFilter = false, autoUpdate = false) => { const formData = collectInputAsFormData(clearOnFlyFilter, autoUpdate); - - // validate form const hasDiv = !!formData.get('div'); const isDivideByCat = formData.get('compareType') === CONST.CATEGORY; @@ -344,52 +340,78 @@ const queryDataAndShowAGP = (clearOnFlyFilter = false, autoUpdate = false) => { return; } - showGraphCallApi('/ap/api/agp/plot', formData, REQUEST_TIMEOUT, async (res) => { - afterShowAGP(); - - // sort graphs - if (latestSortColIds && latestSortColIds.length) { - res.ARRAY_FORMVAL = sortGraphs(res.ARRAY_FORMVAL, 'GET02_VALS_SELECT', latestSortColIds); - res.array_plotdata = sortGraphs(res.array_plotdata, 'end_col_id', latestSortColIds); - } - - currentData = res; - graphStore.setTraceData(_.cloneDeep(res)); + showGraphCallApi( + '/ap/api/agp/plot', + formData, + REQUEST_TIMEOUT, + async (res) => { + afterShowAGP(); + + // sort graphs + if (latestSortColIds && latestSortColIds.length) { + res.ARRAY_FORMVAL = sortGraphs( + res.ARRAY_FORMVAL, + 'GET02_VALS_SELECT', + latestSortColIds, + ); + res.array_plotdata = sortGraphs( + res.array_plotdata, + 'end_col_id', + latestSortColIds, + ); + } - const scaleOption = getScaleOption(); + currentData = res; + graphStore.setTraceData(_.cloneDeep(res)); - useDivsArray = [...divArrays]; - useDivFromTo = [...divFromTo]; + const scaleOption = getScaleOption(); - drawAGP(res, scaleOption, yAxisDisplayMode, clearOnFlyFilter); + useDivsArray = [...divArrays]; + useDivFromTo = [...divFromTo]; - // show info table - showInfoTable(res); + drawAGP(res, scaleOption, yAxisDisplayMode, clearOnFlyFilter); - if (!autoUpdate) { - $('html, body').animate({ - scrollTop: $(formElements.agpCard).offset().top, - }, 500); - } + // show info table + showInfoTable(res); - setPollingData(formData, longPollingHandler, []); + if (!autoUpdate) { + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph('#scaleOption'), + }, + 500, + ); + } - }); + setPollingData(formData, longPollingHandler, []); + }, + ); }; const longPollingHandler = () => { $(`input[name=${CYCLIC_TERM.DIV_CALENDER}]:checked`).trigger('change'); queryDataAndShowAGP(false, true); -} +}; -const drawAGP = (orgData, scale = scaleOption, showPercent = yAxisDisplayMode, clearOnFlyFilter=false) => { +const drawAGP = ( + orgData, + scale = scaleOption, + showPercent = yAxisDisplayMode, + clearOnFlyFilter = false, +) => { const data = _.cloneDeep(orgData); // if slow, change if (!data) { return; } // orgData.array_plotdata = array_plotdata; - renderAgPAllChart(orgData.array_plotdata, orgData.COMMON.compareType, scale, showPercent, clearOnFlyFilter) + renderAgPAllChart( + orgData.array_plotdata, + orgData.COMMON.compareType, + scale, + showPercent, + clearOnFlyFilter, + ); // implement order $(formElements.agpCard).sortable({}); @@ -437,7 +459,7 @@ const renderCyclicCalenderModal = () => { 2022040112 `; - let calenderListHtml = '' + let calenderListHtml = ''; let index = 
1; for (const key of Object.keys(calenderFormat)) { const groups = calenderFormat[key]; @@ -448,7 +470,7 @@ const renderCyclicCalenderModal = () => { for (const format of formatList) { const isCheck = index === 1; itemHtml += renderItem(key, format, isCheck); - index ++; + index++; } const html = ` @@ -465,7 +487,7 @@ const renderCyclicCalenderModal = () => { 2: 252, 3: 147, 4: 280, - } + }; calenderListHtml += `
@@ -479,18 +501,23 @@ const renderCyclicCalenderModal = () => { const selectContent = document.getElementById('cyclicCalender-content'); selectContent.addEventListener('open', (e) => { generateCalenderExample(); - }) + }); }; const onChangeDivideFormat = (e) => { - changeFormatAndExample(e) -} + changeFormatAndExample(e); +}; -const renderAgPChartLayout = (chartOption, chartHeight = '40vh', isCTCol = false) => { - const { processName, columnName, facetLevel1, facetLevel2, chartId } = chartOption; - let facet = [facetLevel1, facetLevel2].filter(f => checkTrue(f)); +const renderAgPChartLayout = ( + chartOption, + chartHeight = '40vh', + isCTCol = false, +) => { + const { processName, columnName, facetLevel1, facetLevel2, chartId } = + chartOption; + let facet = [facetLevel1, facetLevel2].filter((f) => checkTrue(f)); const levelTitle = facet.map((el, i) => `${el}`).join(' | '); - const CTLabel = isCTCol ? ` (${DataTypes.DATETIME.short}) [sec]` : '' + const CTLabel = isCTCol ? ` (${DataTypes.DATETIME.short}) [sec]` : ''; const chartLayout = `
@@ -515,7 +542,13 @@ const renderAgPChartLayout = (chartOption, chartHeight = '40vh', isCTCol = false return chartLayout; }; -const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMode = AGP_YAXIS_DISPLAY_MODES.COUNT, clearOnFlyFilter=false) => { +const renderAgPAllChart = ( + plots, + compareType = '', + scaleOption, + yAxisDisplayMode = AGP_YAXIS_DISPLAY_MODES.COUNT, + clearOnFlyFilter = false, +) => { if (!plots) return; $(formElements.agpCard).empty(); let chartHeight = ''; @@ -545,10 +578,14 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod facetLevel1, facetLevel2, chartId: canvasId, - } + }; const isCTCol = isCycleTimeCol(end_proc_id, end_col_id); - const chartHtml = renderAgPChartLayout(chartOption, chartHeight, isCTCol); - const facetKey = `${facetLevel1 || ''}${facetLevel2 || ''}` + const chartHtml = renderAgPChartLayout( + chartOption, + chartHeight, + isCTCol, + ); + const facetKey = `${facetLevel1 || ''}${facetLevel2 || ''}`; $(formElements.agpCard).append(chartHtml); const sumCountByXAxis = (key, n, colId = null) => { @@ -568,9 +605,8 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod const count = n || 0; if (facetKey in countByFacet) { if (key in countByFacet[facetKey]) { - countByFacet[facetKey][key] += count - } - else { + countByFacet[facetKey][key] += count; + } else { countByFacet[facetKey][key] = count; } } else { @@ -583,17 +619,23 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod if (compareType === divideOptions.cyclicTerm) { div = currentData.COMMON.cyclic_terms; - div = div.map((term) => term.map(t => formatDateTime(t)).join(' -
')) + div = div.map((term) => + term.map((t) => formatDateTime(t)).join(' -
'), + ); } if (compareType === divideOptions.directTerm) { - div = currentData.time_conds.map(term => [term['start_dt'], term['end_dt']]); - div = div.map((term) => term.map(t => formatDateTime(t)).join(' -
')) + div = currentData.time_conds.map((term) => [ + term['start_dt'], + term['end_dt'], + ]); + div = div.map((term) => + term.map((t) => formatDateTime(t)).join(' -
'), + ); } - // reduce full div range - let data = plotData.data.map(data => { + let data = plotData.data.map((data) => { let trace = { ...data, hoverinfo: 'none', @@ -604,13 +646,22 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod size: 5, }, colId: end_col_id, - } + }; let { x, y } = trace; - if ([divideOptions.directTerm, divideOptions.cyclicTerm].includes(compareType)) { - x = x.map((term) => term.split(' | ').map(t => formatDateTime(t)).join(' -
')) + if ( + [divideOptions.directTerm, divideOptions.cyclicTerm].includes( + compareType, + ) + ) { + x = x.map((term) => + term + .split(' | ') + .map((t) => formatDateTime(t)) + .join(' -
'), + ); } - const newX = [] - const newY = [] + const newX = []; + const newY = []; for (let i = 0; i < div.length; i += 1) { const currDiv = div[i]; const indexOfCurrDiv = x.indexOf(currDiv); @@ -621,14 +672,18 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod newY.push(null); } } - trace.x = [...newX] - trace.y = [...newY] + trace.x = [...newX]; + trace.y = [...newY]; if (clearOnFlyFilter) { for (let i = 0; i < trace.x.length; i += 1) { const currDiv = trace.x[i]; const indexOfCurrDiv = trace.x.indexOf(currDiv); - sumCountByXAxis(currDiv, trace.y[indexOfCurrDiv], end_col_id); + sumCountByXAxis( + currDiv, + trace.y[indexOfCurrDiv], + end_col_id, + ); } for (let i = 0; i < trace.x.length; i += 1) { const currDiv = trace.x[i]; @@ -637,27 +692,63 @@ const renderAgPAllChart = (plots, compareType = '', scaleOption, yAxisDisplayMod } } - trace.x = trace.x.map(val => `t${val}`); + trace.x = trace.x.map((val) => `t${val}`); return trace; - }) + }); - if ([AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL, AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET].includes(yAxisDisplayMode)) { - data = data.map(trace => { - if (trace.type.toLowerCase() === 'bar' && yAxisDisplayMode === AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL) { + if ( + [ + AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL, + AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET, + ].includes(yAxisDisplayMode) + ) { + data = data.map((trace) => { + if ( + trace.type.toLowerCase() === 'bar' && + yAxisDisplayMode === AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL + ) { return toTotalYAxisPercent(countByXAxis, trace); - } - else if (trace.type.toLowerCase() === 'bar' && yAxisDisplayMode === AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET){ + } else if ( + trace.type.toLowerCase() === 'bar' && + yAxisDisplayMode === AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET + ) { return toFacetYAxisPercent(facetKey, countByFacet, trace); } return trace; - }) + }); } // linechart for real, datetime, int. bar chart for others including int(Cat) - const isNumeric = [DataTypes.REAL.name, DataTypes.DATETIME.name, DataTypes.INTEGER.name].includes(plotData.data_type) && !plotData.is_category; - const yScale = isNumeric ? getScaleInfo(plotData, scaleOption) : null; - drawAgPPlot(data, plotData, countByXAxis, div, isCyclicCalender, `${canvasId}`, yScale, yAxisDisplayMode, currentData.div_from_to); + const isNumeric = + [ + DataTypes.REAL.name, + DataTypes.DATETIME.name, + DataTypes.INTEGER.name, + ].includes(plotData.data_type) && !plotData.is_category; + + let barChartScale = null; + // get scale common for bar chart if scale is common + const isCommonScale = scaleOption === scaleOptionConst.COMMON; + if (!isNumeric && isCommonScale) { + barChartScale = getScaleInfo(plotData, scaleOption); + } + + const yScale = isNumeric + ? 
getScaleInfo(plotData, scaleOption) + : barChartScale; + drawAgPPlot( + data, + plotData, + countByXAxis, + div, + isCyclicCalender, + `${canvasId}`, + yScale, + yAxisDisplayMode, + currentData.div_from_to, + isCommonScale, + ); }); -} +}; const toTotalYAxisPercent = (countByXAxis, traceData) => { let { x, y } = traceData; @@ -667,13 +758,13 @@ const toTotalYAxisPercent = (countByXAxis, traceData) => { const total = countByXAxis[traceData.colId][xVal]; const percent = val / total; return percent; - }) + }); return { ...traceData, x, - y - } + y, + }; }; const toFacetYAxisPercent = (facetKey, countByFacet, traceData) => { @@ -683,40 +774,41 @@ const toFacetYAxisPercent = (facetKey, countByFacet, traceData) => { const xVal = x[i].slice(1); const total = countByFacet[facetKey][xVal]; return val / total; - }) + }); return { ...traceData, x, - y - } -} + y, + }; +}; const changeYAxisMode = (e) => { - yAxisDisplayMode = $(e).val(); - drawAGP(currentData, scaleOption, yAxisDisplayMode); + yAxisDisplayMode = $(e).val(); + drawAGP(currentData, scaleOption, yAxisDisplayMode); }; - const setYAxisOption = () => { const isFacet = isSetFacet(); if (isFacet) { - formElements.yAxisPercentOrCount.find("option[value=yAxisFacet]").show(); + formElements.yAxisPercentOrCount + .find('option[value=yAxisFacet]') + .show(); return; } - formElements.yAxisPercentOrCount.find("option[value=yAxisFacet]").hide(); -} + formElements.yAxisPercentOrCount.find('option[value=yAxisFacet]').hide(); +}; const isSetFacet = () => { const matches = []; $("[name='catExpBox']").map((i, el) => { - const valOption = $(el).find(":selected").val(); - if(valOption) { + const valOption = $(el).find(':selected').val(); + if (valOption) { matches.push(valOption); } - }) + }); return matches.length > 0; -} +}; diff --git a/ap/static/aggregate_plot/js/aggregation_chart.js b/ap/static/aggregate_plot/js/aggregation_chart.js index c49d40c..aa635c2 100644 --- a/ap/static/aggregate_plot/js/aggregation_chart.js +++ b/ap/static/aggregate_plot/js/aggregation_chart.js @@ -1,16 +1,40 @@ - -const COLOR_DEFAULT = ['#1f77b4', '#ff7f0e', '#2ca02c', '#d62728', '#9467bd', '#8c564b', '#e377c2', '#7f7f7f', '#bcbd22'] - -const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvasId, yScale, yAxisDisplayMode, divFromTo) => { - const { agg_function, color_name, unique_color, fmt, shown_name } = plotData; +const COLOR_DEFAULT = [ + '#1f77b4', + '#ff7f0e', + '#2ca02c', + '#d62728', + '#9467bd', + '#8c564b', + '#e377c2', + '#7f7f7f', + '#bcbd22', +]; + +const drawAgPPlot = ( + data, + plotData, + countByXAxis, + div, + isCyclicCalender, + canvasId, + yScale, + yAxisDisplayMode, + divFromTo, + isCommonScale = false, +) => { + const { agg_function, color_name, unique_color, fmt, shown_name } = + plotData; const isLineChart = agg_function && agg_function.toLowerCase() !== 'count'; - const showPercent = [AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL, AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET].includes(yAxisDisplayMode) - && !isLineChart; + const showPercent = + [ + AGP_YAXIS_DISPLAY_MODES.Y_AXIS_TOTAL, + AGP_YAXIS_DISPLAY_MODES.Y_AXIS_FACET, + ].includes(yAxisDisplayMode) && !isLineChart; + let xTitles = data[0] ? [...data[0].x] : []; const tickLen = xTitles.length ? xTitles[0].length : 0; - const tickSize = tickLen > 5 ? 10 : 12; - + const tickSize = tickLen > 5 ? 
10 : 12; data = prepareColorForTrace(data, unique_color); if (isLineChart && yScale) { @@ -18,13 +42,12 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas } let yMin, yMax; - if (isLineChart) { - const offset = (yScale['y-max'] - yScale['y-min']) * 0.018 - yMin = yScale['y-min'] - offset - yMax = yScale['y-max'] + offset + if (isLineChart || (yScale && isCommonScale)) { + const offset = (yScale['y-max'] - yScale['y-min']) * 0.018; + yMin = yScale['y-min'] - offset; + yMax = yScale['y-max'] + offset; } - const layout = { barmode: 'stack', plot_bgcolor: '#222222', @@ -32,7 +55,10 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas autosize: true, xaxis: { tickmode: 'array', - ticktext: reduceTicksArray(xTitles.map(val => val.slice(1)), tickLen), + ticktext: reduceTicksArray( + xTitles.map((val) => val.slice(1)), + tickLen, + ), tickvals: reduceTicksArray(xTitles, tickLen), gridcolor: '#444444', tickfont: { @@ -59,7 +85,7 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas spikecolor: 'rgb(255, 0, 0)', // tickformat: showPercent ? ',.0%' : fmt ? (fmt.includes('e') ? '.1e' : fmt) : '', tickformat: '', - range: showPercent ? [0, 1] : yScale ? [yMin, yMax] : null, + range: showPercent ? [0, 100] : yScale ? [yMin, yMax] : null, autorange: yScale ? false : true, }, showlegend: true, @@ -70,7 +96,7 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas font: { family: 'sans-serif', size: 12, - color: '#ffffff' + color: '#ffffff', }, bgcolor: 'transparent', xanchor: 'right', @@ -82,19 +108,19 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas b: 60, t: 20, r: 10, - } + }, }; if (showPercent) { - layout.yaxis.ticksuffix = '%' - data.forEach(item => { - item.y = item.y.map(i => i*100) - }) + layout.yaxis.ticksuffix = '%'; + data.forEach((item) => { + item.y = item.y.map((i) => i * 100); + }); } if (isLineChart) { layout.xaxis.range = [-0.5, div.length - 0.5]; - layout.legend.traceorder = "reversed"; + layout.legend.traceorder = 'reversed'; } const heatmapIconSettings = genPlotlyIconSettings(); @@ -110,7 +136,8 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas agPPlot.on('plotly_hover', (data) => { const dpIndex = getDataPointIndex(data); - const { x, y, name, type, isOutlier, colorName, outlierVal, colId } = data.points[0].data; + const { x, y, name, type, isOutlier, colorName, outlierVal, colId } = + data.points[0].data; const xVal = x[dpIndex].slice(1); const color = colorName || name; const hasColor = !!color_name; @@ -128,43 +155,73 @@ const drawAgPPlot = (data, plotData, countByXAxis, div, isCyclicCalender, canvas } if (from && to) { - period.push(['Period', `${from}${DATETIME_PICKER_SEPARATOR}${to}`]) + period.push([ + 'Period', + `${from}${DATETIME_PICKER_SEPARATOR}${to}`, + ]); } } if (divFromTo) { // show from, to of Data number division const divIndex = div.indexOf(xVal); - if ( divIndex !== -1 ) { - const fromToOb = divFromTo[divIndex] - fromTo.push(['From', formatDateTime(fromToOb[0])], ['To', formatDateTime(fromToOb[1])]) + if (divIndex !== -1) { + const fromToOb = divFromTo[divIndex]; + fromTo.push( + ['From', formatDateTime(fromToOb[0])], + ['To', formatDateTime(fromToOb[1])], + ); } } if (type.includes('lines') || isOutlier) { - const showVal = [] + const showVal = []; if (isOutlier) { - showVal.push([i18n.outlier, applySignificantDigit(outlierVal[dpIndex])]) + showVal.push([ + i18n.outlier, + 
applySignificantDigit(outlierVal[dpIndex]), + ]); } else { - showVal.push([agg_function, applySignificantDigit(nByXAndColor)]) + showVal.push([ + agg_function, + applySignificantDigit(nByXAndColor), + ]); } - dataTable = genHoverDataTable([['x', xVal], ...period, ['Color', color], ...showVal, ...fromTo]); + dataTable = genHoverDataTable([ + ['x', xVal], + ...period, + ['Color', color], + ...showVal, + ...fromTo, + ]); } else { const nByX = showPercent ? '100%' : countByXAxis[colId][xVal]; - const NByColor = showPercent ? `${applySignificantDigit(nByXAndColor)}%` : applySignificantDigit(nByXAndColor); - - const NByColorHover = hasColor ? [['N by x and Color', NByColor]] : []; - - dataTable = genHoverDataTable([['x', xVal], ...period, ['Color', color], ...NByColorHover, ['N by x', applySignificantDigit(nByX)], ...fromTo]); + const NByColor = showPercent + ? `${applySignificantDigit(nByXAndColor)}%` + : applySignificantDigit(nByXAndColor); + + const NByColorHover = hasColor + ? [['N by x and Color', NByColor]] + : []; + + dataTable = genHoverDataTable([ + ['x', xVal], + ...period, + ['Color', color], + ...NByColorHover, + ['N by x', applySignificantDigit(nByX)], + ...fromTo, + ]); } genDataPointHoverTable( dataTable, { - x: data.event.pageX - 220, y: data.event.pageY, + x: data.event.pageX - 220, + y: data.event.pageY, }, 0, true, canvasId, - 1 + 1, ); }); unHoverHandler(agPPlot); @@ -175,9 +232,10 @@ const reduceTicksArray = (array, tickLen) => { const nTicks = MAX_TICKS; const isReduce = array.length > MAX_TICKS; if (!isReduce) return array; - let nextIndex = array.length / nTicks < 2 ? 2 : Math.round(array.length / nTicks); + let nextIndex = + array.length / nTicks < 2 ? 2 : Math.round(array.length / nTicks); if (nextIndex * nTicks > MAX_TICKS) { - nextIndex += 1 + nextIndex += 1; } const res = []; let i = 0; @@ -194,20 +252,21 @@ const prepareColorForTrace = (data, uniqueColor) => { if (uniqueColor.length > 0) { styles = uniqueColor.map((color, k) => ({ target: color, - color: COLOR_DEFAULT[k] - + color: COLOR_DEFAULT[k], })); } else { styles = data.map((data, k) => ({ target: data.name, - color: COLOR_DEFAULT[k] + color: COLOR_DEFAULT[k], })); } - - return data.map(da =>{ - const colors = styles.filter(st => st.target === da.name); - const color = colors.length > 0 ? styles.filter(st => st.target === da.name)[0].color : ''; + return data.map((da) => { + const colors = styles.filter((st) => st.target === da.name); + const color = + colors.length > 0 + ? 
styles.filter((st) => st.target === da.name)[0].color + : ''; return { ...da, marker: { @@ -216,51 +275,51 @@ const prepareColorForTrace = (data, uniqueColor) => { line: { ...da.line, color: color, - } - } - }) + }, + }; + }); }; const getOutlierTraceData = (datas, yScale, plotData) => { const { lower_outlier_idxs, upper_outlier_idxs } = yScale; const { array_y } = plotData; - const outlierTraceList = [] + const outlierTraceList = []; - datas = datas.map(data => { + datas = datas.map((data) => { let isHasOutlier = false; - const outlierTrace = { + const outlierTrace = { ...data, colorName: data.name, name: i18n.outlier, mode: 'markers', - marker: { + marker: { symbol: '4', - size: 8 - }, + size: 8, + }, isOutlier: true, showlegend: false, - } + }; let cloneY = Array.from(outlierTrace.y).fill(null); - let outlierVal = Array.from(outlierTrace.y).fill(null) + let outlierVal = Array.from(outlierTrace.y).fill(null); for (const i of lower_outlier_idxs) { const lowerOutlier = array_y[i]; const indexList = getAllIndexes(data.y, lowerOutlier); for (const index of indexList) { - isHasOutlier = true + isHasOutlier = true; cloneY[index] = yScale['y-min']; - outlierVal[index] = data.y[index] + outlierVal[index] = data.y[index]; data.y[index] = null; } } for (const i of upper_outlier_idxs) { const upperOutlier = array_y[i]; - const indexList = getAllIndexes(data.y, upperOutlier); + const indexList = getAllIndexes(data.y, upperOutlier); for (const index of indexList) { - isHasOutlier = true + isHasOutlier = true; cloneY[index] = yScale['y-max']; - outlierVal[index] = data.y[index] + outlierVal[index] = data.y[index]; data.y[index] = null; } } @@ -271,18 +330,18 @@ const getOutlierTraceData = (datas, yScale, plotData) => { } return data; - }) - + }); - datas = outlierTraceList.concat(datas) + datas = outlierTraceList.concat(datas); - return datas + return datas; }; function getAllIndexes(arr, val) { - let indexes = [], i = -1; - while ((i = arr.indexOf(val, i+1)) != -1){ + let indexes = [], + i = -1; + while ((i = arr.indexOf(val, i + 1)) != -1) { indexes.push(i); } return indexes; -} \ No newline at end of file +} diff --git a/ap/static/analyze/css/anomaly_detection.css b/ap/static/analyze/css/anomaly_detection.css index 18ecd80..0e447e4 100644 --- a/ap/static/analyze/css/anomaly_detection.css +++ b/ap/static/analyze/css/anomaly_detection.css @@ -21,7 +21,6 @@ border-radius: 4px; } - .plotbox-header h5 { line-height: 35px; margin: 0 0 0 5px; @@ -46,10 +45,11 @@ div#scatter-plot { .plot-tabs .nav-link { padding: 0.5rem 0.5rem !important; - height: 4.1vh; /* 0.1 to make borders ovelapped */ + height: 4.1vh; /* 0.1 to make borders ovelapped */ } -.table-dark th, .table-dark td { +.table-dark th, +.table-dark td { padding: 0.1rem 0.25rem !important; background-color: #222; border-top: none; @@ -67,7 +67,7 @@ div#scatter-plot { border: solid 0.5px #444; } -table.dataTable thead th{ +table.dataTable thead th { border: 0.5px solid #444 !important; } @@ -139,7 +139,7 @@ table.dataTable.cell-border tbody td { text-align: center !important; } -#pcaConditionTbl tr td:nth-child(5){ +#pcaConditionTbl tr td:nth-child(5) { text-align: left !important; font-family: 'Consolas', 'Courier New', 'Courier', 'Monaco', 'monospace'; } @@ -163,7 +163,7 @@ table.dataTable.cell-border tbody td { .tooltip-pca .tooltip-pca-text { visibility: hidden; width: 150px; - background-color: rgba(8, 8, 8, .8); + background-color: rgba(8, 8, 8, 0.8); color: #fff; text-align: left; border-radius: 6px; @@ -178,7 +178,6 @@ 
table.dataTable.cell-border tbody td { z-index: 9999; top: 10%; left: 82%; - } .tooltip-pca:hover .tooltip-pca-text { @@ -186,7 +185,7 @@ table.dataTable.cell-border tbody td { } /* hide search bar */ -.dataTables_filter{ +.dataTables_filter { display: none; } diff --git a/ap/static/analyze/css/graphical_lasso.css b/ap/static/analyze/css/graphical_lasso.css index 9722a22..bd782c0 100644 --- a/ap/static/analyze/css/graphical_lasso.css +++ b/ap/static/analyze/css/graphical_lasso.css @@ -24,12 +24,12 @@ margin-top: -11px; } .tick { - position: relative; - display: flex; - justify-content: center; - width: 1px; - background: white; - height: 5px; + position: relative; + display: flex; + justify-content: center; + width: 1px; + background: white; + height: 5px; } .cmt-2 { margin-top: 2px; @@ -43,4 +43,4 @@ .is-string-col label.custom-control-label.show-ele::after, .is-string-col input.show-label { visibility: visible; -} \ No newline at end of file +} diff --git a/ap/static/analyze/css/toastr.css b/ap/static/analyze/css/toastr.css index 0e33542..3baf06e 100644 --- a/ap/static/analyze/css/toastr.css +++ b/ap/static/analyze/css/toastr.css @@ -1,200 +1,200 @@ .toast-title { - font-weight: bold; + font-weight: bold; } .toast-message { - -ms-word-wrap: break-word; - word-wrap: break-word; + -ms-word-wrap: break-word; + word-wrap: break-word; } .toast-message a, .toast-message label { - color: #ffffff; + color: #ffffff; } .toast-message a:hover { - color: #cccccc; - text-decoration: none; + color: #cccccc; + text-decoration: none; } .toast-close-button { - position: relative; - right: -0.3em; - top: -0.3em; - float: right; - font-size: 20px; - font-weight: bold; - color: #ffffff; - -webkit-text-shadow: 0 1px 0 #ffffff; - text-shadow: 0 1px 0 #ffffff; - opacity: 0.8; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); - filter: alpha(opacity=80); + position: relative; + right: -0.3em; + top: -0.3em; + float: right; + font-size: 20px; + font-weight: bold; + color: #ffffff; + -webkit-text-shadow: 0 1px 0 #ffffff; + text-shadow: 0 1px 0 #ffffff; + opacity: 0.8; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); + filter: alpha(opacity=80); } .toast-close-button:hover, .toast-close-button:focus { - color: #000000; - text-decoration: none; - cursor: pointer; - opacity: 0.4; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); - filter: alpha(opacity=40); + color: #000000; + text-decoration: none; + cursor: pointer; + opacity: 0.4; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); + filter: alpha(opacity=40); } /*Additional properties for button version iOS requires the button element instead of an anchor tag. 
If you want the anchor version, it requires `href="#"`.*/ button.toast-close-button { - padding: 0; - cursor: pointer; - background: transparent; - border: 0; - -webkit-appearance: none; + padding: 0; + cursor: pointer; + background: transparent; + border: 0; + -webkit-appearance: none; } .toast-top-center { - top: 0; - right: 0; - width: 100%; + top: 0; + right: 0; + width: 100%; } .toast-bottom-center { - bottom: 0; - right: 0; - width: 100%; + bottom: 0; + right: 0; + width: 100%; } .toast-top-full-width { - top: 0; - right: 0; - width: 100%; + top: 0; + right: 0; + width: 100%; } .toast-bottom-full-width { - bottom: 0; - right: 0; - width: 100%; + bottom: 0; + right: 0; + width: 100%; } .toast-top-left { - top: 12px; - left: 12px; + top: 12px; + left: 12px; } .toast-top-right { - top: 12px; - right: 12px; + top: 12px; + right: 12px; } .toast-bottom-right { - right: 12px; - bottom: 12px; + right: 12px; + bottom: 12px; } .toast-bottom-left { - bottom: 12px; - left: 12px; + bottom: 12px; + left: 12px; } #toast-container { - position: fixed; - z-index: 999999; - pointer-events: none; - /*overrides*/ + position: fixed; + z-index: 999999; + pointer-events: none; + /*overrides*/ } #toast-container * { - -moz-box-sizing: border-box; - -webkit-box-sizing: border-box; - box-sizing: border-box; + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; + box-sizing: border-box; } #toast-container > div { - position: relative; - pointer-events: auto; - overflow: hidden; - margin: 0 0 6px; - padding: 15px 15px 15px 50px; - width: 300px; - -moz-border-radius: 3px 3px 3px 3px; - -webkit-border-radius: 3px 3px 3px 3px; - border-radius: 3px 3px 3px 3px; - background-position: 15px center; - background-repeat: no-repeat; - -moz-box-shadow: 0 0 12px #999999; - -webkit-box-shadow: 0 0 12px #999999; - box-shadow: 0 0 12px #999999; - color: #ffffff; - opacity: 0.8; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); - filter: alpha(opacity=80); + position: relative; + pointer-events: auto; + overflow: hidden; + margin: 0 0 6px; + padding: 15px 15px 15px 50px; + width: 300px; + -moz-border-radius: 3px 3px 3px 3px; + -webkit-border-radius: 3px 3px 3px 3px; + border-radius: 3px 3px 3px 3px; + background-position: 15px center; + background-repeat: no-repeat; + -moz-box-shadow: 0 0 12px #999999; + -webkit-box-shadow: 0 0 12px #999999; + box-shadow: 0 0 12px #999999; + color: #ffffff; + opacity: 0.8; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); + filter: alpha(opacity=80); } #toast-container > :hover { - -moz-box-shadow: 0 0 12px #000000; - -webkit-box-shadow: 0 0 12px #000000; - box-shadow: 0 0 12px #000000; - opacity: 1; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=100); - filter: alpha(opacity=100); - cursor: pointer; + -moz-box-shadow: 0 0 12px #000000; + -webkit-box-shadow: 0 0 12px #000000; + box-shadow: 0 0 12px #000000; + opacity: 1; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=100); + filter: alpha(opacity=100); + cursor: pointer; } #toast-container > .toast-info { - background-image: 
url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahAbmm6HtUsgtPC19tFdxXZYBOfkbmFJ1VaHA1VAHjd0pp70oTZzvR+EVrx2Ygfdsq6eu55BHYR8hlcki+n+kERUFG8BrA0BwjeAv2M8WLQBtcy+SD6fNsmnB3AlBLrgTtVW1c2QN4bVWLATaIS60J2Du5y1TiJgjSBvFVZgTmwCU+dAZFoPxGEEs8nyHC9Bwe2GvEJv2WXZb0vjdyFT4Cxk3e/kIqlOGoVLwwPevpYHT+00T+hWwXDf4AJAOUqWcDhbwAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahAbmm6HtUsgtPC19tFdxXZYBOfkbmFJ1VaHA1VAHjd0pp70oTZzvR+EVrx2Ygfdsq6eu55BHYR8hlcki+n+kERUFG8BrA0BwjeAv2M8WLQBtcy+SD6fNsmnB3AlBLrgTtVW1c2QN4bVWLATaIS60J2Du5y1TiJgjSBvFVZgTmwCU+dAZFoPxGEEs8nyHC9Bwe2GvEJv2WXZb0vjdyFT4Cxk3e/kIqlOGoVLwwPevpYHT+00T+hWwXDf4AJAOUqWcDhbwAAAAASUVORK5CYII=') !important; } #toast-container > .toast-error { - background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFiacC0rlOCbhNVz4H9FnAYgDBvU3QIioZlJFLJtsoHYRDfiZoUyIxqCtRpVlANq0EU4dApjrtgezPFad5S19Wgjkc0hNVnuF4HjVA6C7QrSIbylB+oZe3aHgBsqlNqKYH48jXyJKMuAbiyVJ8KzaB3eRc0pg9VwQ4niFryI68qiOi3AbjwdsfnAtk0bCjTLJKr6mrD9g8iq/S/B81hguOMlQTnVyG40wAcjnmgsCNESDrjme7wfftP4P7SP4N3CJZdvzoNyGq2c/HWOXJGsvVg+RA/k2MC/wN6I2YA2Pt8GkAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFiacC0rlOCbhNVz4H9FnAYgDBvU3QIioZlJFLJtsoHYRDfiZoUyIxqCtRpVlANq0EU4dApjrtgezPFad5S19Wgjkc0hNVnuF4HjVA6C7QrSIbylB+oZe3aHgBsqlNqKYH48jXyJKMuAbiyVJ8KzaB3eRc0pg9VwQ4niFryI68qiOi3AbjwdsfnAtk0bCjTLJKr6mrD9g8iq/S/B81hguOMlQTnVyG40wAcjnmgsCNESDrjme7wfftP4P7SP4N3CJZdvzoNyGq2c/HWOXJGsvVg+RA/k2MC/wN6I2YA2Pt8GkAAAAASUVORK5CYII=') !important; } #toast-container > .toast-success { - background-image: 
url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAAABJRU5ErkJggg==") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAAABJRU5ErkJggg==') !important; } #toast-container > .toast-warning { - background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzijW1RQdsU7F77He8u68koNZTz8Oz5yGa6J3H3lZ0xYgXBK2QymlWWA+RWnYhskLBv2vmE+hBMCtbA7KX5drWyRT/2JsqZ2IvfB9Y4bWDNMFbJRFmC9E74SoS0CqulwjkC0+5bpcV1CZ8NMej4pjy0U+doDQsGyo1hzVJttIjhQ7GnBtRFN1UarUlH8F3xict+HY07rEzoUGPlWcjRFRr4/gChZgc3ZL2d8oAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzijW1RQdsU7F77He8u68koNZTz8Oz5yGa6J3H3lZ0xYgXBK2QymlWWA+RWnYhskLBv2vmE+hBMCtbA7KX5drWyRT/2JsqZ2IvfB9Y4bWDNMFbJRFmC9E74SoS0CqulwjkC0+5bpcV1CZ8NMej4pjy0U+doDQsGyo1hzVJttIjhQ7GnBtRFN1UarUlH8F3xict+HY07rEzoUGPlWcjRFRr4/gChZgc3ZL2d8oAAAAASUVORK5CYII=') !important; } #toast-container.toast-top-center > div, #toast-container.toast-bottom-center > div { - width: 300px; - margin-left: auto; - margin-right: auto; + width: 300px; + margin-left: auto; + margin-right: auto; } #toast-container.toast-top-full-width > div, #toast-container.toast-bottom-full-width > div { - width: 96%; - margin-left: auto; - margin-right: auto; + width: 96%; + margin-left: auto; + margin-right: auto; } .toast { - background-color: #030303; + background-color: #030303; } .toast-success { - background-color: #51a351; + background-color: #51a351; } .toast-error { - background-color: #bd362f; + background-color: #bd362f; } .toast-info { - background-color: #2f96b4; + background-color: #2f96b4; } .toast-warning { - background-color: #f89406; + background-color: #f89406; } .toast-progress { - position: absolute; - left: 0; - bottom: 0; - height: 4px; - background-color: #000000; - opacity: 0.4; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); - filter: 
alpha(opacity=40); + position: absolute; + left: 0; + bottom: 0; + height: 4px; + background-color: #000000; + opacity: 0.4; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); + filter: alpha(opacity=40); } /*Responsive Design*/ @media all and (max-width: 240px) { - #toast-container > div { - padding: 8px 8px 8px 50px; - width: 11em; - } - #toast-container .toast-close-button { - right: -0.2em; - top: -0.2em; - } + #toast-container > div { + padding: 8px 8px 8px 50px; + width: 11em; + } + #toast-container .toast-close-button { + right: -0.2em; + top: -0.2em; + } } @media all and (min-width: 241px) and (max-width: 480px) { - #toast-container > div { - padding: 8px 8px 8px 50px; - width: 18em; - } - #toast-container .toast-close-button { - right: -0.2em; - top: -0.2em; - } + #toast-container > div { + padding: 8px 8px 8px 50px; + width: 18em; + } + #toast-container .toast-close-button { + right: -0.2em; + top: -0.2em; + } } @media all and (min-width: 481px) and (max-width: 768px) { - #toast-container > div { - padding: 15px 15px 15px 50px; - width: 25em; - } + #toast-container > div { + padding: 15px 15px 15px 50px; + width: 25em; + } } diff --git a/ap/static/analyze/js/generateJson.js b/ap/static/analyze/js/generateJson.js index 14323c1..3e59677 100644 --- a/ap/static/analyze/js/generateJson.js +++ b/ap/static/analyze/js/generateJson.js @@ -4,37 +4,49 @@ const generateCircles = (json) => { // sigma const dataSigmaX = getNode(json, ['circles', 'Sigma', 'x']) || []; const dataSigmaY = getNode(json, ['circles', 'Sigma', 'y']) || []; - const dataSigmaText = dataSigmaX.map( - (ele, idx) => `border: Sigma
<br>xvar: ${ele}<br>yvar: ${dataSigmaY[idx]}`,
-    ) || [];
+    const dataSigmaText =
+        dataSigmaX.map(
+            (ele, idx) =>
+                `border: Sigma<br>xvar: ${ele}<br>yvar: ${dataSigmaY[idx]}`,
+        ) || [];
 
     // 2sigma
     const data2SigmaX = getNode(json, ['circles', '2Sigma', 'x']) || [];
     const data2SigmaY = getNode(json, ['circles', '2Sigma', 'y']) || [];
-    const data2SigmaText = data2SigmaX.map(
-        (ele, idx) => `border: 2Sigma<br>xvar: ${ele}<br>yvar: ${data2SigmaY[idx]}`,
-    ) || [];
+    const data2SigmaText =
+        data2SigmaX.map(
+            (ele, idx) =>
+                `border: 2Sigma<br>xvar: ${ele}<br>yvar: ${data2SigmaY[idx]}`,
+        ) || [];
 
     // 3sigma
     const data3SigmaX = getNode(json, ['circles', '3Sigma', 'x']) || [];
     const data3SigmaY = getNode(json, ['circles', '3Sigma', 'y']) || [];
-    const data3SigmaText = data3SigmaX.map(
-        (ele, idx) => `border: 3Sigma<br>xvar: ${ele}<br>yvar: ${data3SigmaY[idx]}`,
-    ) || [];
+    const data3SigmaText =
+        data3SigmaX.map(
+            (ele, idx) =>
+                `border: 3Sigma<br>xvar: ${ele}<br>yvar: ${data3SigmaY[idx]}`,
+        ) || [];
 
     // Range
     const dataRangeX = getNode(json, ['circles', 'Range', 'x']) || [];
     const dataRangeY = getNode(json, ['circles', 'Range', 'y']) || [];
-    const dataRangeText = dataRangeX.map(
-        (ele, idx) => `border: Range<br>xvar: ${ele}<br>yvar: ${dataRangeY[idx]}`,
-    ) || [];
+    const dataRangeText =
+        dataRangeX.map(
+            (ele, idx) =>
+                `border: Range<br>xvar: ${ele}<br>yvar: ${dataRangeY[idx]}`,
+        ) || [];
 
     // Parcentile 0.85
-    const dataParcentileX = getNode(json, ['circles', 'Percentile85', 'x']) || [];
-    const dataParcentileY = getNode(json, ['circles', 'Percentile85', 'y']) || [];
-    const dataParcentileText = dataParcentileX.map(
-        (ele, idx) => `border: Percentile 0.85<br>xvar: ${ele}<br>yvar: ${dataParcentileY[idx]}`,
-    ) || [];
+    const dataParcentileX =
+        getNode(json, ['circles', 'Percentile85', 'x']) || [];
+    const dataParcentileY =
+        getNode(json, ['circles', 'Percentile85', 'y']) || [];
+    const dataParcentileText =
+        dataParcentileX.map(
+            (ele, idx) =>
+                `border: Percentile 0.85<br>xvar: ${ele}<br>yvar: ${dataParcentileY[idx]}`,
+        ) || [];
 
     // axis label
     const axislab = getNode(json, ['axislab']) || ['PC1', 'PC2'];
@@ -59,14 +71,16 @@ const generateCircles = (json) => {
     };
 };
 
-
 const generateXTrainScatter = (json) => {
     if (!json) return {};
 
     // scatter
     const dataScatterX = getNode(json, ['scatter', 'x']) || [];
     const dataScatterY = getNode(json, ['scatter', 'y']) || [];
-    const dataScatterText = dataScatterX.map((ele, idx) => `'xvar: ${ele}<br>yvar: ${dataScatterY[idx]}'`) || [];
+    const dataScatterText =
+        dataScatterX.map(
+            (ele, idx) => `'xvar: ${ele}<br>
yvar: ${dataScatterY[idx]}'`, + ) || []; const { dataSigmaX, @@ -88,159 +102,167 @@ const generateXTrainScatter = (json) => { } = generateCircles(json); return { - data: [{ - x: dataScatterX, - y: dataScatterY, - text: dataScatterText, - type: 'scatter', - mode: 'markers', - marker: { - autocolorscale: false, - color: 'white', - opacity: 0.5, - size: 3.02362204724409, - symbol: 'circle', + data: [ + { + x: dataScatterX, + y: dataScatterY, + text: dataScatterText, + type: 'scatter', + mode: 'markers', + marker: { + autocolorscale: false, + color: 'white', + opacity: 0.5, + size: 3.02362204724409, + symbol: 'circle', + line: { + width: 1.88976377952756, + color: 'rgba(255,255,255,1)', + }, + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataSigmaX, + y: dataSigmaY, + text: dataSigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(204,229,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Sigma', + legendgroup: 'Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data2SigmaX, + y: data2SigmaY, + text: data2SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(153,204,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '2Sigma', + legendgroup: '2Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data3SigmaX, + y: data3SigmaY, + text: data3SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(102,178,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '3Sigma', + legendgroup: '3Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataRangeX, + y: dataRangeY, + text: dataRangeText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(30,144,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Range', + legendgroup: 'Range', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataParcentileX, + y: dataParcentileY, + text: dataParcentileText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(255,52,179,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Parcentile 0.85', + legendgroup: 'Parcentile 0.85', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [0, 0], + y: [-3.51416158802218, 3.51416158802218], + text: 'xintercept: 0', + type: 'scatter', + mode: 'lines', line: { width: 1.88976377952756, - color: 'rgba(255,255,255,1)', + color: 'rgba(64,64,64,1)', + dash: 'solid', }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [-3.5156396748279, 3.50907246263227], + y: [0, 0], + text: 'yintercept: 0', + type: 'scatter', + mode: 'lines', + line: { + width: 1.88976377952756, + color: 'rgba(64,64,64,1)', + dash: 'solid', + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataSigmaX, - y: dataSigmaY, - text: dataSigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(204,229,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Sigma', - 
legendgroup: 'Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data2SigmaX, - y: data2SigmaY, - text: data2SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(153,204,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '2Sigma', - legendgroup: '2Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data3SigmaX, - y: data3SigmaY, - text: data3SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(102,178,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '3Sigma', - legendgroup: '3Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataRangeX, - y: dataRangeY, - text: dataRangeText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(30,144,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Range', - legendgroup: 'Range', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataParcentileX, - y: dataParcentileY, - text: dataParcentileText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(255,52,179,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Parcentile 0.85', - legendgroup: 'Parcentile 0.85', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [0, 0], - y: [-3.51416158802218, 3.51416158802218], - text: 'xintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [-3.5156396748279, 3.50907246263227], - y: [0, 0], - text: 'yintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, ], layout: { margin: { @@ -341,21 +363,22 @@ const generateXTrainScatter = (json) => { scaleratio: 1, hoverformat: '.2f', }, - shapes: [{ - type: 'rect', - fillcolor: null, - line: { - color: null, - width: 0, - linetype: [], + shapes: [ + { + type: 'rect', + fillcolor: null, + line: { + color: null, + width: 0, + linetype: [], + }, + yref: 'paper', + xref: 'paper', + x0: 0, + x1: 1, + y0: 0, + y1: 1, }, - yref: 'paper', - xref: 'paper', - x0: 0, - x1: 1, - y0: 0, - y1: 1, - }, ], showlegend: true, legend: { @@ -369,25 +392,26 @@ const generateXTrainScatter = (json) => { xanchor: 'right', tracegroupgap: 3, }, - annotations: [{ - text: 'Guide', - x: 1.02, - y: 1, - showarrow: false, - ax: 0, - ay: 0, - font: { - color: 'rgba(255,255,255,1)', - family: '', - size: 13.2835201328352, + annotations: [ + { + text: 'Guide', + x: 1.02, + y: 1, + showarrow: false, + ax: 0, + ay: 0, + font: { + color: 'rgba(255,255,255,1)', + family: '', + size: 13.2835201328352, + }, + xref: 'paper', + yref: 'paper', + textangle: 0, + xanchor: 'left', + yanchor: 'bottom', + legendTitle: true, }, - xref: 'paper', - yref: 'paper', - textangle: 0, - xanchor: 'left', - yanchor: 'bottom', - legendTitle: true, - }, ], hovermode: 'closest', barmode: 'relative', @@ -436,7 +460,19 @@ const generateXTrainScatter = (json) => { }, debounce: 0, }, - shinyEvents: ['plotly_hover', 'plotly_click', 'plotly_selected', 
'plotly_relayout', 'plotly_brushed', 'plotly_brushing', 'plotly_clickannotation', 'plotly_doubleclick', 'plotly_deselect', 'plotly_afterplot', 'plotly_sunburstclick'], + shinyEvents: [ + 'plotly_hover', + 'plotly_click', + 'plotly_selected', + 'plotly_relayout', + 'plotly_brushed', + 'plotly_brushing', + 'plotly_clickannotation', + 'plotly_doubleclick', + 'plotly_deselect', + 'plotly_afterplot', + 'plotly_sunburstclick', + ], base_url: 'https://plot.ly', }; }; @@ -447,7 +483,10 @@ const generateXTestScatter = (json, jsonTrain) => { // scatter const dataScatterX = getNode(json, ['scatter', 'x']) || []; const dataScatterY = getNode(json, ['scatter', 'y']) || []; - const dataScatterText = dataScatterX.map((ele, idx) => `'xvar: ${ele}
<br>yvar: ${dataScatterY[idx]}'`) || [];
+    const dataScatterText =
+        dataScatterX.map(
+            (ele, idx) => `'xvar: ${ele}<br>
yvar: ${dataScatterY[idx]}'`, + ) || []; const { dataSigmaX, @@ -468,161 +507,168 @@ const generateXTestScatter = (json, jsonTrain) => { axislab, } = generateCircles(jsonTrain); - return { - data: [{ - x: dataScatterX, - y: dataScatterY, - text: dataScatterText, - type: 'scatter', - mode: 'markers', - marker: { - autocolorscale: false, - color: 'rgba(255,165,0,1)', - opacity: 0.5, - size: 3.02362204724409, - symbol: 'square', + data: [ + { + x: dataScatterX, + y: dataScatterY, + text: dataScatterText, + type: 'scatter', + mode: 'markers', + marker: { + autocolorscale: false, + color: 'rgba(255,165,0,1)', + opacity: 0.5, + size: 3.02362204724409, + symbol: 'square', + line: { + width: 1.88976377952756, + color: 'rgba(255,165,0,1)', + }, + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataSigmaX, + y: dataSigmaY, + text: dataSigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(204,229,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Sigma', + legendgroup: 'Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data2SigmaX, + y: data2SigmaY, + text: data2SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(153,204,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '2Sigma', + legendgroup: '2Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data3SigmaX, + y: data3SigmaY, + text: data3SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(102,178,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '3Sigma', + legendgroup: '3Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataRangeX, + y: dataRangeY, + text: dataRangeText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(30,144,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Range', + legendgroup: 'Range', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataParcentileX, + y: dataParcentileY, + text: dataParcentileText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(255,52,179,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Parcentile 0.85', + legendgroup: 'Parcentile 0.85', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [0, 0], + y: [-3.94029493306541, 3.94029493306541], + text: 'xintercept: 0', + type: 'scatter', + mode: 'lines', line: { width: 1.88976377952756, - color: 'rgba(255,165,0,1)', + color: 'rgba(64,64,64,1)', + dash: 'solid', }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [-3.94195225524749, 3.9345886914811], + y: [0, 0], + text: 'yintercept: 0', + type: 'scatter', + mode: 'lines', + line: { + width: 1.88976377952756, + color: 'rgba(64,64,64,1)', + dash: 'solid', + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataSigmaX, - y: dataSigmaY, - text: dataSigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(204,229,255,1)', - dash: 'solid', 
- }, - hoveron: 'points', - name: 'Sigma', - legendgroup: 'Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data2SigmaX, - y: data2SigmaY, - text: data2SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(153,204,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '2Sigma', - legendgroup: '2Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data3SigmaX, - y: data3SigmaY, - text: data3SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(102,178,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '3Sigma', - legendgroup: '3Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataRangeX, - y: dataRangeY, - text: dataRangeText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(30,144,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Range', - legendgroup: 'Range', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataParcentileX, - y: dataParcentileY, - text: dataParcentileText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(255,52,179,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Parcentile 0.85', - legendgroup: 'Parcentile 0.85', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [0, 0], - y: [-3.94029493306541, 3.94029493306541], - text: 'xintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [-3.94195225524749, 3.9345886914811], - y: [0, 0], - text: 'yintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, ], layout: { margin: { @@ -722,21 +768,22 @@ const generateXTestScatter = (json, jsonTrain) => { scaleratio: 1, hoverformat: '.2f', }, - shapes: [{ - type: 'rect', - fillcolor: null, - line: { - color: null, - width: 0, - linetype: [], + shapes: [ + { + type: 'rect', + fillcolor: null, + line: { + color: null, + width: 0, + linetype: [], + }, + yref: 'paper', + xref: 'paper', + x0: 0, + x1: 1, + y0: 0, + y1: 1, }, - yref: 'paper', - xref: 'paper', - x0: 0, - x1: 1, - y0: 0, - y1: 1, - }, ], showlegend: false, legend: { @@ -795,9 +842,19 @@ const generateXTestScatter = (json, jsonTrain) => { }, debounce: 0, }, - shinyEvents: ['plotly_hover', 'plotly_click', 'plotly_selected', 'plotly_relayout', 'plotly_brushed', - 'plotly_brushing', 'plotly_clickannotation', 'plotly_doubleclick', 'plotly_deselect', - 'plotly_afterplot', 'plotly_sunburstclick'], + shinyEvents: [ + 'plotly_hover', + 'plotly_click', + 'plotly_selected', + 'plotly_relayout', + 'plotly_brushed', + 'plotly_brushing', + 'plotly_clickannotation', + 'plotly_doubleclick', + 'plotly_deselect', + 'plotly_afterplot', + 'plotly_sunburstclick', + ], base_url: 'https://plot.ly', }; }; @@ -813,9 +870,11 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => { const angle = getNode(json, ['angle']) || []; const hjust = getNode(json, ['hjust']) || []; const varname = getNode(json, 
['varname']) || []; - const sensorHoverText = dataX.map( - (ele, i) => `xvar: ${ele}
<br>yvar: ${dataY[i]}<br>varname: ${varname[i]}<br>angle: ${angle[i]}<br>hjust: ${hjust[i]}`,
-    ) || [];
+    const sensorHoverText =
+        dataX.map(
+            (ele, i) =>
+                `xvar: ${ele}<br>yvar: ${dataY[i]}<br>varname: ${varname[i]}<br>angle: ${angle[i]}<br>hjust: ${hjust[i]}`,
+        ) || [];
 
     const vectorX = [];
     dataX.forEach((x, i) => {
@@ -833,12 +892,19 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => {
         }
         vectorY.push(0);
         vectorY.push(x);
-        vectorText.push(`x: 0<br>y: 0<br>xvar: ${dataX[i]}<br>yvar: ${dataY[i]}`);
-        vectorText.push(`x: 0<br>y: 0<br>xvar: ${dataX[i]}<br>yvar: ${dataY[i]}`);
+        vectorText.push(
+            `x: 0<br>y: 0<br>xvar: ${dataX[i]}<br>yvar: ${dataY[i]}`,
+        );
+        vectorText.push(
+            `x: 0<br>y: 0<br>xvar: ${dataX[i]}<br>
yvar: ${dataY[i]}`, + ); }); // clickedPoint - const clickedPoint = getNode(json, ['clicked_point']) || { x: [null], y: [null] }; + const clickedPoint = getNode(json, ['clicked_point']) || { + x: [null], + y: [null], + }; const { dataSigmaX, @@ -860,193 +926,203 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => { } = generateCircles(jsonTrain); return { - data: [{ - x: dataSigmaX, - y: dataSigmaY, - text: dataSigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(204,229,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Sigma', - legendgroup: 'Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data2SigmaX, - y: data2SigmaY, - text: data2SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(153,204,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '2Sigma', - legendgroup: '2Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: data3SigmaX, - y: data3SigmaY, - text: data3SigmaText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(102,178,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: '3Sigma', - legendgroup: '3Sigma', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataRangeX, - y: dataRangeY, - text: dataRangeText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(30,144,255,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Range', - legendgroup: 'Range', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataParcentileX, - y: dataParcentileY, - text: dataParcentileText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.13385826771654, - color: 'rgba(255,52,179,1)', - dash: 'solid', - }, - hoveron: 'points', - name: 'Parcentile 0.85', - legendgroup: 'Parcentile 0.85', - showlegend: true, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: vectorX, - y: vectorY, - text: vectorText, - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(255,192,203,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: dataX.map(x => x * 2), - y: dataY.map(y => y * 2), - text: varname, - hovertext: sensorHoverText, - textfont: { - size: 11.3385826771654, - color: 'rgba(255,255,255,1)', - }, - type: 'scatter', - mode: 'text', - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: clickedPoint.x, - y: clickedPoint.y, - text: `xvar: ${clickedPoint.x[0]}
yvar: ${clickedPoint.y[0]}`, - type: 'scatter', - mode: 'markers', - marker: { - autocolorscale: false, - color: 'rgba(255,165,0,1)', - opacity: 0.5, - size: 11.3385826771654, - symbol: 'square', + data: [ + { + x: dataSigmaX, + y: dataSigmaY, + text: dataSigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(204,229,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Sigma', + legendgroup: 'Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data2SigmaX, + y: data2SigmaY, + text: data2SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(153,204,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '2Sigma', + legendgroup: '2Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: data3SigmaX, + y: data3SigmaY, + text: data3SigmaText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(102,178,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: '3Sigma', + legendgroup: '3Sigma', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataRangeX, + y: dataRangeY, + text: dataRangeText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(30,144,255,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Range', + legendgroup: 'Range', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataParcentileX, + y: dataParcentileY, + text: dataParcentileText, + type: 'scatter', + mode: 'lines', + line: { + width: 1.13385826771654, + color: 'rgba(255,52,179,1)', + dash: 'solid', + }, + hoveron: 'points', + name: 'Parcentile 0.85', + legendgroup: 'Parcentile 0.85', + showlegend: true, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: vectorX, + y: vectorY, + text: vectorText, + type: 'scatter', + mode: 'lines', line: { width: 1.88976377952756, + color: 'rgba(255,192,203,1)', + dash: 'solid', + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: dataX.map((x) => x * 2), + y: dataY.map((y) => y * 2), + text: varname, + hovertext: sensorHoverText, + textfont: { + size: 11.3385826771654, + color: 'rgba(255,255,255,1)', + }, + type: 'scatter', + mode: 'text', + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: clickedPoint.x, + y: clickedPoint.y, + text: `xvar: ${clickedPoint.x[0]}
yvar: ${clickedPoint.y[0]}`, + type: 'scatter', + mode: 'markers', + marker: { + autocolorscale: false, color: 'rgba(255,165,0,1)', + opacity: 0.5, + size: 11.3385826771654, + symbol: 'square', + line: { + width: 1.88976377952756, + color: 'rgba(255,165,0,1)', + }, }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [0, 0], + y: [-4.07856844526106, 6.84403868917397], + text: 'xintercept: 0', + type: 'scatter', + mode: 'lines', + line: { + width: 1.88976377952756, + color: 'rgba(64,64,64,1)', + dash: 'solid', + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, + }, + { + x: [-5.82714314845274, 5.83842812409785], + y: [0, 0], + text: 'yintercept: 0', + type: 'scatter', + mode: 'lines', + line: { + width: 1.88976377952756, + color: 'rgba(64,64,64,1)', + dash: 'solid', + }, + hoveron: 'points', + showlegend: false, + xaxis: 'x', + yaxis: 'y', + hoverinfo: 'text', + frame: null, }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [0, 0], - y: [-4.07856844526106, 6.84403868917397], - text: 'xintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, { - x: [-5.82714314845274, 5.83842812409785], - y: [0, 0], - text: 'yintercept: 0', - type: 'scatter', - mode: 'lines', - line: { - width: 1.88976377952756, - color: 'rgba(64,64,64,1)', - dash: 'solid', - }, - hoveron: 'points', - showlegend: false, - xaxis: 'x', - yaxis: 'y', - hoverinfo: 'text', - frame: null, - }, ], layout: { margin: { @@ -1156,21 +1232,22 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => { scaleratio: 1, hoverformat: '.2f', }, - shapes: [{ - type: 'rect', - fillcolor: null, - line: { - color: null, - width: 0, - linetype: [], + shapes: [ + { + type: 'rect', + fillcolor: null, + line: { + color: null, + width: 0, + linetype: [], + }, + yref: 'paper', + xref: 'paper', + x0: 0, + x1: 1, + y0: 0, + y1: 1, }, - yref: 'paper', - xref: 'paper', - x0: 0, - x1: 1, - y0: 0, - y1: 1, - }, ], showlegend: true, legend: { @@ -1184,25 +1261,26 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => { }, y: 0.940944881889764, }, - annotations: [{ - text: 'Guide', - x: 1.02, - y: 1, - showarrow: false, - ax: 0, - ay: 0, - font: { - color: 'rgba(255,255,255,1)', - family: '', - size: 13.2835201328352, + annotations: [ + { + text: 'Guide', + x: 1.02, + y: 1, + showarrow: false, + ax: 0, + ay: 0, + font: { + color: 'rgba(255,255,255,1)', + family: '', + size: 13.2835201328352, + }, + xref: 'paper', + yref: 'paper', + textangle: -0, + xanchor: 'left', + yanchor: 'bottom', + legendTitle: true, }, - xref: 'paper', - yref: 'paper', - textangle: -0, - xanchor: 'left', - yanchor: 'bottom', - legendTitle: true, - }, ], hovermode: 'closest', barmode: 'relative', @@ -1264,7 +1342,19 @@ const generateBiplot = (json, jsonTrain, sampleNo = null) => { }, debounce: 0, }, - shinyEvents: ['plotly_hover', 'plotly_click', 'plotly_selected', 'plotly_relayout', 'plotly_brushed', 'plotly_brushing', 'plotly_clickannotation', 'plotly_doubleclick', 'plotly_deselect', 'plotly_afterplot', 'plotly_sunburstclick'], + shinyEvents: [ + 'plotly_hover', + 'plotly_click', + 'plotly_selected', + 'plotly_relayout', + 'plotly_brushed', + 'plotly_brushing', + 
'plotly_clickannotation', + 'plotly_doubleclick', + 'plotly_deselect', + 'plotly_afterplot', + 'plotly_sunburstclick', + ], base_url: 'https://plot.ly', }; }; diff --git a/ap/static/analyze/js/graphical_lasso.js b/ap/static/analyze/js/graphical_lasso.js index 1e47661..1fca392 100644 --- a/ap/static/analyze/js/graphical_lasso.js +++ b/ap/static/analyze/js/graphical_lasso.js @@ -1,8 +1,3 @@ -/* eslint-disable no-restricted-syntax */ -/* eslint-disable guard-for-in */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ const REQUEST_TIMEOUT = setRequestTimeOut(); const MAX_NUMBER_OF_SENSOR = 60; const MIN_NUMBER_OF_SENSOR = 0; @@ -33,9 +28,9 @@ const formElements = { }; const i18n = { - traceResulLimited: $('#i18nTraceResultLimited').text() || '', - SQLLimit: $('#i18nSQLLimit').text(), - allSelection: $('#i18nAllSelection').text(), + traceResulLimited: $('#i18nTraceResultLimited').text() || '', + SQLLimit: $('#i18nSQLLimit').text(), + allSelection: $('#i18nAllSelection').text(), objectiveHoverMsg: $('#i18nGLObjectiveHoverMsg').text(), }; @@ -55,7 +50,6 @@ $(() => { objectiveHoverMsg: i18n.objectiveHoverMsg, showStrColumn: true, hideStrVariable: false, // select Str columns as target sensors - hideCTCol: true, showFilter: true, allowObjectiveForRealOnly: true, disableSerialAsObjective: true, @@ -63,7 +57,13 @@ $(() => { endProcItem(); // add first condition process - const condProcItem = addCondProc(endProcs.ids, endProcs.names, '', formElements.formID, 'btn-add-cond-proc'); + const condProcItem = addCondProc( + endProcs.ids, + endProcs.names, + '', + formElements.formID, + 'btn-add-cond-proc', + ); condProcItem(); // click even of condition proc add button @@ -100,7 +100,7 @@ const collectFromDataGL = (clearOnFlyFilter) => { if (clearOnFlyFilter) { formData = new FormData(traceForm[0]); formData = genDatetimeRange(formData); - lastUsedFormData = formData; + lastUsedFormData = formData; } else { formData = lastUsedFormData; formData = transformCatFilterParams(formData); @@ -109,35 +109,43 @@ const collectFromDataGL = (clearOnFlyFilter) => { return formData; }; - const callToBackEndAPI = (clearOnFlyFilter = true) => { const formData = collectFromDataGL(clearOnFlyFilter); - showGraphCallApi('/ap/api/gl/plot', formData, REQUEST_TIMEOUT, async (res) => { - resData = res; - graphStore.setTraceData(res) - showGraphicalLasso(res); - - $('html, body').animate({ - scrollTop: $(formElements.plotCardId).offset().top, - }, 500); - // show info table - showInfoTable(res); + showGraphCallApi( + '/ap/api/gl/plot', + formData, + REQUEST_TIMEOUT, + async (res) => { + resData = res; + graphStore.setTraceData(res); + showGraphicalLasso(res); - fillDataToFilterModal(res.filter_on_demand, () => { - callToBackEndAPI(false); - }); + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph( + formElements.plotCardId, + ), + }, + 500, + ); + // show info table + showInfoTable(res); - }); + fillDataToFilterModal(res.filter_on_demand, () => { + callToBackEndAPI(false); + }); + }, + ); }; -const setSparsityValue = (alphas, bestAlphas=0, threshold) => { +const setSparsityValue = (alphas, bestAlphas = 0, threshold) => { // set maximum value of ticks and input range $('#sparsity').attr('max', alphas.length - 1); $('.range-ticks').html(''); // gen dynamic ticks of input range - alphas.forEach(alpha => { - $('.range-ticks').append(''); + alphas.forEach((alpha) => { + $('.range-ticks').append(''); }); const markedValue = alphas.indexOf(Number(bestAlphas)); // 
set selected value for inputs (by dummy value) @@ -150,22 +158,22 @@ const setSparsityValue = (alphas, bestAlphas=0, threshold) => { $('#sparsityValue').text(selectedSparsity); $('#thresholdValue').text(threshold); }; -const showGraphicalLasso = (res, changedThreshold = null, alpha = null, inCurrentPosition=false) => { +const showGraphicalLasso = ( + res, + changedThreshold = null, + alpha = null, + inCurrentPosition = false, +) => { formElements.plotCard.show(); $(`#${formElements.graphicalLassoCanvasId}`).empty(); if (!res.array_plotdata) return; - const [ - alphas, - best_alpha, - threshold, - dic_nodes, - dic_edges, - processNames, - ] = res.array_plotdata; + const [alphas, best_alpha, threshold, dic_nodes, dic_edges, processNames] = + res.array_plotdata; if (!dic_nodes) return; - selectedThreshold = changedThreshold !== null ? changedThreshold : threshold; + selectedThreshold = + changedThreshold !== null ? changedThreshold : threshold; selectedSparsity = alpha !== null ? alpha : best_alpha; const availableThresholds = Object.keys(dic_nodes); setRangeValue(res, 'threshold', null, true); @@ -189,13 +197,16 @@ const showGraphicalLasso = (res, changedThreshold = null, alpha = null, inCurren // update node label node.label = label; // update node position - if (inCurrentPosition && Object.keys(nodePositionData).includes(node.id)) { + if ( + inCurrentPosition && + Object.keys(nodePositionData).includes(node.id) + ) { node.x = nodePositionData[node.id].x; node.y = nodePositionData[node.id].y; } }); // convert to json for sigma.js - const graph_data = {nodes: nodes, edges: edges}; + const graph_data = { nodes: nodes, edges: edges }; // set and show default value of sparsity and threshold // use best_alphas @@ -208,7 +219,7 @@ const dfToList = (df) => { const edges = df[keys[0]].map((value, i) => { const res = {}; for (const key of keys) { - res[key] = df[key][i] + res[key] = df[key][i]; } return res; }); @@ -222,7 +233,12 @@ const castToStr = (number) => { } return number; }; -const setRangeValue = (res, rangeId, availableValues = null, isThreshold = false) => { +const setRangeValue = ( + res, + rangeId, + availableValues = null, + isThreshold = false, +) => { const range = $(`#${rangeId}`); const valueEl = $(`#${rangeId}Value`); @@ -239,8 +255,8 @@ const setRangeValue = (res, rangeId, availableValues = null, isThreshold = false } valueEl.text(value); showGraphicalLasso(res, selectedThreshold, selectedSparsity); - }) -} + }); +}; const resetSettings = () => { // reset node position @@ -260,4 +276,4 @@ const dumpData = (type) => { }; const handleExportData = (type) => { showGraphAndDumpData(type, dumpData); -}; \ No newline at end of file +}; diff --git a/ap/static/analyze/js/graphical_lasso_sigma.js b/ap/static/analyze/js/graphical_lasso_sigma.js index 8032c46..a9eb3a0 100644 --- a/ap/static/analyze/js/graphical_lasso_sigma.js +++ b/ap/static/analyze/js/graphical_lasso_sigma.js @@ -22,17 +22,17 @@ const drawGraphicalLassoSigma = (canvasId, data) => { edgeHoverExtremities: true, edgeHoverHighlightNodes: 'circle', sideMargin: 1, - } - }; + }, + }; const s = new sigma(sigmaLayout); const dragListener = sigma.plugins.dragNodes(s, s.renderers[0]); - dragListener.bind('dragend', function(event) { + dragListener.bind('dragend', function (event) { if (nodePositionData) { nodePositionData[event.data.node.id] = { x: event.data.node.x, y: event.data.node.y, - } + }; } }); -} \ No newline at end of file +}; diff --git a/ap/static/analyze/js/hotelling_biplot.js b/ap/static/analyze/js/hotelling_biplot.js 
index 42b81cb..c943e18 100644 --- a/ap/static/analyze/js/hotelling_biplot.js +++ b/ap/static/analyze/js/hotelling_biplot.js @@ -1,16 +1,15 @@ -/* eslint-disable no-undef */ const drawPCABiplotChart = (json, chartConfig = {}, sizeOfData = null) => { if (!json) return; - + const startTime = performance.now(); - + const figure = json; figure.layout.autosize = true; figure.layout.plot_bgcolor = '#222222'; figure.layout.paper_bgcolor = '#222222'; figure.layout.xaxis.gridcolor = '#444444'; figure.layout.yaxis.gridcolor = '#444444'; - + figure.data.forEach((dat) => { if (dat.mode === 'markers') { dat.hoverinfo = 'none'; diff --git a/ap/static/analyze/js/hotelling_common.js b/ap/static/analyze/js/hotelling_common.js index a14a17d..723c4bf 100644 --- a/ap/static/analyze/js/hotelling_common.js +++ b/ap/static/analyze/js/hotelling_common.js @@ -1,6 +1,3 @@ -/* eslint-disable no-restricted-syntax */ -/* eslint-disable guard-for-in */ -/* eslint-disable no-unused-vars */ const graphStore = new GraphStore(); // dont rename. must use formElements because some common functions @@ -57,7 +54,6 @@ const hideLoading = (divElement = null) => { divElement.html(''); }; - // A template to visualize clicked data point const clickedPointTemplate = (xVal, yVal) => ({ x: [xVal], @@ -90,11 +86,14 @@ const addDataPointFromXY = (elementId, orginalDataLen, x, y) => { if (dataLength > orginalDataLen) { Plotly.deleteTraces(elementId, dataLength - 1); } - Plotly.addTraces(elementId, clickedPointTrace, chartElement.data.length); + Plotly.addTraces( + elementId, + clickedPointTrace, + chartElement.data.length, + ); } }; - /* This function is to update clicked data point to timeseries chart: Q & T2 Args: @@ -102,7 +101,11 @@ const addDataPointFromXY = (elementId, orginalDataLen, x, y) => { dataPoint: data of clicked point got from click event isStartingChart: to identify if chart in the elementId is origin of the click */ -const updateTimeSeries = (elementId = null, dataPoint = {}, isStartingChart = true) => { +const updateTimeSeries = ( + elementId = null, + dataPoint = {}, + isStartingChart = true, +) => { // update Chart const tsElement = document.getElementById(elementId); const tsOriginalDataLength = 2; @@ -117,10 +120,16 @@ const updateTimeSeries = (elementId = null, dataPoint = {}, isStartingChart = tr for (let idx = 0; idx < data.length; idx++) { const trace = data[idx]; - if ((trace.name === 'test')) { // clicked point is in test data + if (trace.name === 'test') { + // clicked point is in test data const dataX = trace.x[clickedDataIndex]; const dataY = trace.y[clickedDataIndex]; - addDataPointFromXY(elementId, tsOriginalDataLength, dataX, dataY); + addDataPointFromXY( + elementId, + tsOriginalDataLength, + dataX, + dataY, + ); break; } } @@ -134,7 +143,12 @@ const updateTimeSeries = (elementId = null, dataPoint = {}, isStartingChart = tr dataPoint: data of clicked point got from click event isStartingChart: to identify if chart in the elementId is origin of the click */ -const updateScatter = (elementId = null, dataPoint = {}, isStartingChart = true, jsonDtTest = {}) => { +const updateScatter = ( + elementId = null, + dataPoint = {}, + isStartingChart = true, + jsonDtTest = {}, +) => { // update Chart const scatterElement = document.getElementById(elementId); const scatterOrginalDataLength = 6; @@ -153,7 +167,12 @@ const updateScatter = (elementId = null, dataPoint = {}, isStartingChart = true, if (trace.name === 'clickedPoint') { const dataX = jsonDtTest.data[0].x[clickedDataIndex]; const dataY = 
jsonDtTest.data[0].y[clickedDataIndex]; - addDataPointFromXY(elementId, scatterOrginalDataLength, dataX, dataY); + addDataPointFromXY( + elementId, + scatterOrginalDataLength, + dataX, + dataY, + ); isReplaced = true; break; } @@ -161,7 +180,12 @@ const updateScatter = (elementId = null, dataPoint = {}, isStartingChart = true, if (!isReplaced) { const dataX = jsonDtTest.data[0].x[clickedDataIndex]; const dataY = jsonDtTest.data[0].y[clickedDataIndex]; - addDataPointFromXY(elementId, scatterOrginalDataLength, dataX, dataY); + addDataPointFromXY( + elementId, + scatterOrginalDataLength, + dataX, + dataY, + ); } } }; @@ -178,8 +202,8 @@ const updateRecordInfo = (dataInfos = {}, sampleNo = 0) => { ${i18n.value} `; - const re = /\d+\_[1,2]$/; - const reGetDate = /\d+\_[2]$/; + const re = /\d+_[1,2]$/; + const reGetDate = /\d+_[2]$/; for (const dataInfo of dataInfos) { let [proc, col_name, col_val, col_attr] = dataInfo; let bgColorStyle = ''; @@ -208,20 +232,37 @@ const updateRecordInfo = (dataInfos = {}, sampleNo = 0) => { dataPoint: data which is got from the click event startingChart: id of chart which originates the click event */ -const broadcastClickEvent = (dataPoint, startingChart, jsonPCAScoreTest = {}) => { +const broadcastClickEvent = ( + dataPoint, + startingChart, + jsonPCAScoreTest = {}, +) => { // Update time series - updateTimeSeries(elementId = 'timeSeriesT2', dataPoint, startingChart === 'timeSeriesT2'); - updateTimeSeries(elementId = 'timeSeriesQ', dataPoint, startingChart === 'timeSeriesQ'); + updateTimeSeries( + (elementId = 'timeSeriesT2'), + dataPoint, + startingChart === 'timeSeriesT2', + ); + updateTimeSeries( + (elementId = 'timeSeriesQ'), + dataPoint, + startingChart === 'timeSeriesQ', + ); // Update Xtest scatter - updateScatter(elementId = 'xTest', dataPoint, startingChart === 'xTest', jsonDtTest = jsonPCAScoreTest); + updateScatter( + (elementId = 'xTest'), + dataPoint, + startingChart === 'xTest', + (jsonDtTest = jsonPCAScoreTest), + ); // Call backend to get jsons for Qcont + T2cont + BiPlot + record info const formData = collectInputAsFormData(); const { sampleNo } = dataPoint.points[0]; formData.set('sampleNo', sampleNo); loadingShow(); - getPCAPlotsFromBackend(formData, clickOnChart = true, sampleNo); + getPCAPlotsFromBackend(formData, (clickOnChart = true), sampleNo); // switch to record table $('[href="#table-info"]').tab('show'); @@ -254,16 +295,13 @@ const contributionChartLayout = (objData, type = 't2', sampleNo = null) => { xref: 'paper', }, xaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: false, range: [ 0, - 1.05 * Math.max(...objData.Ratio.map(x => Math.abs(x))), + 1.05 * Math.max(...objData.Ratio.map((x) => Math.abs(x))), ], tickmode: 'array', categoryorder: 'array', @@ -298,10 +336,7 @@ const contributionChartLayout = (objData, type = 't2', sampleNo = null) => { hoverformat: '.2f', }, yaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: true, @@ -430,22 +465,24 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => { const t = colorScale[type].length - 1; const binVolume = (x) => { if (type === 't2') { - return 2 * x / t; + return (2 * x) / t; } if (absX !== absM) { return (absX - absM) / t; } - return 2 * absX / t; - }; - return absM >= absX ? { - min: M, - max: -1 * M, - binVol: binVolume(absM), - } : { - min: type === 't2' ? -1 * X : absM, - max: type === 't2' ? 
X : absX, - binVol: binVolume(absX), + return (2 * absX) / t; }; + return absM >= absX + ? { + min: M, + max: -1 * M, + binVol: binVolume(absM), + } + : { + min: type === 't2' ? -1 * X : absM, + max: type === 't2' ? X : absX, + binVol: binVolume(absX), + }; }; const cr = convertRange(objData.Ratio); const markerColor = (i) => { @@ -456,17 +493,17 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => { const rRanger = () => { const r = [cr.min, cr.max]; const s = (cr.max - cr.min) / 4; - let subRange = [...Array(3).keys()].map(x => cr.min + ((x + 1) * s)); + let subRange = [...Array(3).keys()].map((x) => cr.min + (x + 1) * s); subRange = subRange.concat(r); const tVals = Array.from(new Set(subRange.sort((a, b) => a - b))); const ratioConvertor = (ranger) => { const minR = Math.min(...ranger); const maxR = Math.max(...ranger); const sR = (maxR - minR) / 100; - return ranger.map(i => 0.01 * (i - minR) / sR); + return ranger.map((i) => (0.01 * (i - minR)) / sR); }; return { - ticktext: tVals.map(x => x.toFixed(1)), + ticktext: tVals.map((x) => x.toFixed(1)), tickvals: ratioConvertor(tVals), }; }; @@ -475,7 +512,9 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => { let procName = ''; let colName = ''; if (dpInfo) { - const rowInfo = dpInfo.filter(row => varName.toLowerCase() === row[1]); + const rowInfo = dpInfo.filter( + (row) => varName.toLowerCase() === row[1], + ); if (rowInfo.length) { [[procName, colName]] = rowInfo; distributionName = `${procName}-${colName}
<br>`;
        }
    }
@@ -489,17 +528,11 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => {
        orientation: 'h',
        width: 0.9,
        base: 0,
-        x: [
+        x: [Math.abs(v)],
+        y: [objData.Ratio.length - k],
+        hovertext: `${objData.Var[k]}<br>abs(Ratio): ${applySignificantDigit(
            Math.abs(v),
-        ],
-        y: [
-            objData.Ratio.length - k,
-        ],
-        hovertext: `${objData.Var[k]}<br>abs(Ratio): ${
-            applySignificantDigit(Math.abs(v))
-        }<br>Ratio: ${
-            applySignificantDigit(v)
-        }`,
+            )}<br>
Ratio: ${applySignificantDigit(v)}`, type: 'bar', marker: { autocolorscale: false, @@ -518,12 +551,8 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => { }); const ratioChart = { - x: [ - 1, - ], - y: [ - 0, - ], + x: [1], + y: [0], name: '99_d31a3926dd854cda0f79a49b4456c467', type: 'scatter', mode: 'markers', @@ -531,10 +560,7 @@ const genContributionChartData = (objData, type = 't2', dpInfo = null) => { hoverinfo: 'skip', showlegend: false, marker: { - color: [ - 0, - 1, - ], + color: [0, 1], colorscale: colorScale[type], colorbar: { bgcolor: 'transparent', diff --git a/ap/static/analyze/js/hotelling_q_contribution.js b/ap/static/analyze/js/hotelling_q_contribution.js index 0e6cc69..7db89aa 100644 --- a/ap/static/analyze/js/hotelling_q_contribution.js +++ b/ap/static/analyze/js/hotelling_q_contribution.js @@ -1,4 +1,3 @@ -/* eslint-disable no-undef */ const drawQContributionChart = (json, chartConfig = {}, sizeOfData = null) => { if (!json) return; @@ -10,7 +9,7 @@ const drawQContributionChart = (json, chartConfig = {}, sizeOfData = null) => { figure.layout.paper_bgcolor = '#222222'; figure.layout.xaxis.gridcolor = '#444444'; figure.layout.yaxis.gridcolor = '#444444'; - figure.layout.xaxis.tickangle = COMMON_CONSTANT.TICKS_ANGLE; + figure.layout.xaxis.tickangle = COMMON_CONSTANT.TICKS_ANGLE; figure.layout.legend = { bgcolor: '#222222', }; @@ -31,7 +30,6 @@ const drawQContributionChart = (json, chartConfig = {}, sizeOfData = null) => { style: { width: '100%', height: '100%' }, // responsive histogram }); - // send plotting time event const endTime = performance.now(); gtag('event', 'PCA_et', { @@ -48,21 +46,28 @@ const drawQContributionChart = (json, chartConfig = {}, sizeOfData = null) => { }); }; -const drawQContributionChartFromObj = (objData, sampleNo = null, chartConfig = {}, - sizeOfData = null, dpInfo = null, - shortName = null) => { +const drawQContributionChartFromObj = ( + objData, + sampleNo = null, + chartConfig = {}, + sizeOfData = null, + dpInfo = null, + shortName = null, +) => { if (!objData) return; const startTime = performance.now(); - Plotly.newPlot('qContributionChart', + Plotly.newPlot( + 'qContributionChart', genContributionChartData(objData, 'q', dpInfo), - contributionChartLayout(objData, 'q', sampleNo), { + contributionChartLayout(objData, 'q', sampleNo), + { ...genPlotlyIconSettings(), responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram style: { width: '100%', height: '100%' }, // responsive histogram - }); - + }, + ); // send plotting time event const endTime = performance.now(); diff --git a/ap/static/analyze/js/hotelling_scatters.js b/ap/static/analyze/js/hotelling_scatters.js index 9abac4c..9568bd8 100644 --- a/ap/static/analyze/js/hotelling_scatters.js +++ b/ap/static/analyze/js/hotelling_scatters.js @@ -1,6 +1,3 @@ -/* eslint-disable no-undef */ -/* eslint-disable no-unused-vars */ - const drawXTrainScatter = (json, chartConfig = {}, sizeOfData = null) => { if (!json) return; @@ -26,8 +23,16 @@ const drawXTrainScatter = (json, chartConfig = {}, sizeOfData = null) => { figure.layout.autosize = true; figure.layout.xaxis.gridcolor = '#444444'; figure.layout.yaxis.gridcolor = '#444444'; - figure.layout.xaxis.title.font = {color: 'rgba(255,255,255,1)', family: '', size: 14}; - figure.layout.yaxis.title.font = {color: 'rgba(255,255,255,1)', family: '', size: 14}; + figure.layout.xaxis.title.font = { + color: 'rgba(255,255,255,1)', + family: '', + size: 14, + }; + figure.layout.yaxis.title.font = { + 
color: 'rgba(255,255,255,1)', + family: '', + size: 14, + }; figure.layout.xaxis.tickfont.color = 'rgba(255,255,255,1)'; figure.layout.yaxis.tickfont.color = 'rgba(255,255,255,1)'; figure.layout.xaxis.autorange = true; @@ -49,7 +54,7 @@ const drawXTrainScatter = (json, chartConfig = {}, sizeOfData = null) => { ...genPlotlyIconSettings(), responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram - style: {width: '100%', height: '100%'}, // responsive histogram + style: { width: '100%', height: '100%' }, // responsive histogram }; const pcaXTrainPlot = document.getElementById('xTrain'); @@ -64,7 +69,7 @@ const drawXTrainScatter = (json, chartConfig = {}, sizeOfData = null) => { } }); - unHoverHandler(pcaXTrainPlot) + unHoverHandler(pcaXTrainPlot); // send plotting time const endTime = performance.now(); @@ -82,13 +87,18 @@ const drawXTrainScatter = (json, chartConfig = {}, sizeOfData = null) => { }); }; -const drawXTestScatter = (json, chartConfig = {}, sizeOfData = null, arrayPlotdata = null) => { +const drawXTestScatter = ( + json, + chartConfig = {}, + sizeOfData = null, + arrayPlotdata = null, +) => { if (!json) return; - + const startTime = performance.now(); - + const figure = json; - + // customize jsonDtTest figure.layout.plot_bgcolor = '#222222'; figure.layout.paper_bgcolor = '#222222'; @@ -127,7 +137,7 @@ const drawXTestScatter = (json, chartConfig = {}, sizeOfData = null, arrayPlotda ...genPlotlyIconSettings(), responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram - style: {width: '100%', height: '100%'}, // responsive histogram + style: { width: '100%', height: '100%' }, // responsive histogram }; // assign custome data to plot-view handling on data-point @@ -139,29 +149,35 @@ const drawXTestScatter = (json, chartConfig = {}, sizeOfData = null, arrayPlotda }; } figure.data[0].customdata = scpCustomData; - + // plot xtest data const xTestElement = document.getElementById('xTest'); Plotly.newPlot('xTest', figure.data, figure.layout, plotConfig); // xtest click event - xTestElement.on('plotly_click', (dataPoint) => { - // to spread sampleNo to other charts - const {pointIndex} = dataPoint.points[0]; - dataPoint.points[0].sampleNo = pointIndex + 1; - dataPoint.points[0].clickedDataIndex = pointIndex; - - // to broadcast click event to other charts - broadcastClickEvent(dataPoint, startingChart = 'xTest', jsonPCAScoreTest = json); - }).on('plotly_hover', (data) => { - const dataPoint = data.points[0]; - if (dataPoint.data.mode === 'markers') { - showDataTablePCA(data, 'scatter', 'xTest'); - } else { - $('#dp-info-content').hide(); - } - }); + xTestElement + .on('plotly_click', (dataPoint) => { + // to spread sampleNo to other charts + const { pointIndex } = dataPoint.points[0]; + dataPoint.points[0].sampleNo = pointIndex + 1; + dataPoint.points[0].clickedDataIndex = pointIndex; + + // to broadcast click event to other charts + broadcastClickEvent( + dataPoint, + (startingChart = 'xTest'), + (jsonPCAScoreTest = json), + ); + }) + .on('plotly_hover', (data) => { + const dataPoint = data.points[0]; + if (dataPoint.data.mode === 'markers') { + showDataTablePCA(data, 'scatter', 'xTest'); + } else { + $('#dp-info-content').hide(); + } + }); unHoverHandler(xTestElement); showCustomContextMenu(xTestElement); @@ -204,7 +220,8 @@ const showDataTablePCA = (data, type = 'bar', chartID) => { genDataPointHoverTable( dataTable, { - x: data.event.pageX - 120, y: data.event.pageY, + x: data.event.pageX - 120, + y: data.event.pageY, }, 
130, true, diff --git a/ap/static/analyze/js/hotelling_t2_contribution.js b/ap/static/analyze/js/hotelling_t2_contribution.js index 76f4e80..38db462 100644 --- a/ap/static/analyze/js/hotelling_t2_contribution.js +++ b/ap/static/analyze/js/hotelling_t2_contribution.js @@ -1,4 +1,3 @@ -/* eslint-disable no-undef */ const drawT2ContributionChart = (json, chartConfig = {}, sizeOfData = null) => { if (!json) return; @@ -10,7 +9,7 @@ const drawT2ContributionChart = (json, chartConfig = {}, sizeOfData = null) => { figure.layout.paper_bgcolor = '#222222'; figure.layout.xaxis.gridcolor = '#444444'; figure.layout.yaxis.gridcolor = '#444444'; - figure.layout.xaxis.tickangle = COMMON_CONSTANT.TICKS_ANGLE; + figure.layout.xaxis.tickangle = COMMON_CONSTANT.TICKS_ANGLE; figure.layout.autosize = true; figure.layout.legend = { bgcolor: '#222222', @@ -30,7 +29,6 @@ const drawT2ContributionChart = (json, chartConfig = {}, sizeOfData = null) => { style: { width: '100%', height: '100%' }, // responsive histogram }); - // send plotting time event const endTime = performance.now(); gtag('event', 'PCA_et', { @@ -47,19 +45,32 @@ const drawT2ContributionChart = (json, chartConfig = {}, sizeOfData = null) => { }); }; -const drawT2ContributionChartFromObj = (objData, sampleNo = null, chartConfig = {}, - sizeOfData = null, dpInfo=null, - shortName = null) => { +const drawT2ContributionChartFromObj = ( + objData, + sampleNo = null, + chartConfig = {}, + sizeOfData = null, + dpInfo = null, + shortName = null, +) => { if (!objData) return; const startTime = performance.now(); - Plotly.newPlot('t2ContributionChart', + Plotly.newPlot( + 't2ContributionChart', genContributionChartData(objData, 't2', dpInfo), - contributionChartLayout(objData, 't2', sampleNo, chartConfig, shortName), { + contributionChartLayout( + objData, + 't2', + sampleNo, + chartConfig, + shortName, + ), + { responsive: true, ...genPlotlyIconSettings(), - }); - + }, + ); // send plotting time event const endTime = performance.now(); diff --git a/ap/static/analyze/js/hotelling_timeseries.js b/ap/static/analyze/js/hotelling_timeseries.js index 106692f..cffd713 100644 --- a/ap/static/analyze/js/hotelling_timeseries.js +++ b/ap/static/analyze/js/hotelling_timeseries.js @@ -1,6 +1,3 @@ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ - const drawShapes = (x = null, display = true) => { const plots = document.querySelectorAll('#timeSeriesT2, #timeSeriesQ'); const layoutUpdates = {}; @@ -29,7 +26,13 @@ const drawShapes = (x = null, display = true) => { }); }; -const drawTimeSeriesT2Chart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfData = null, arrayPlotdata=null) => { +const drawTimeSeriesT2Chart = ( + json, + jsonDtTest = {}, + chartConfig = {}, + sizeOfData = null, + arrayPlotdata = null, +) => { if (!json) return; const startTime = performance.now(); @@ -70,7 +73,7 @@ const drawTimeSeriesT2Chart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfDa scpCustomData = { proc_id_x: arrayPlotdata[0].end_proc_id, sensor_id_x: arrayPlotdata[0].end_col_id, - } + }; } figure.data[0].customdata = scpCustomData; @@ -93,17 +96,23 @@ const drawTimeSeriesT2Chart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfDa } // to broadcast click event to other charts - broadcastClickEvent(dataPoint, startingChart = 'timeSeriesT2', jsonPCAScoreTest = jsonDtTest); + broadcastClickEvent( + dataPoint, + (startingChart = 'timeSeriesT2'), + (jsonPCAScoreTest = jsonDtTest), + ); }); // Add hover event for t2 & q plots - timeSeriesT2Element.on('plotly_hover', 
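    // hovering either time-series pane calls drawShapes, which draws a synced
    // vertical marker shape on both #timeSeriesT2 and #timeSeriesQ (see
    // drawShapes above); plotly_unhover hides the marker again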
(data) => { - drawShapes(data.points[0].x); - }).on('plotly_unhover', (data) => { - drawShapes(null, false); - clearHoverTimeOut(); - }); - + timeSeriesT2Element + .on('plotly_hover', (data) => { + drawShapes(data.points[0].x); + }) + .on('plotly_unhover', (data) => { + drawShapes(null, false); + clearHoverTimeOut(); + }); + showCustomContextMenu(timeSeriesT2Element, true); // send plotting time const endTime = performance.now(); @@ -121,7 +130,13 @@ const drawTimeSeriesT2Chart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfDa }); }; -const drawTimeSeriesQChart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfData = null, arrayPlotdata = null) => { +const drawTimeSeriesQChart = ( + json, + jsonDtTest = {}, + chartConfig = {}, + sizeOfData = null, + arrayPlotdata = null, +) => { if (!json) return; const startTime = performance.now(); @@ -163,10 +178,10 @@ const drawTimeSeriesQChart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfDat scpCustomData = { proc_id_x: arrayPlotdata[0].end_proc_id, sensor_id_x: arrayPlotdata[0].end_col_id, - } + }; } figure.data[0].customdata = scpCustomData; - + const timeSeriesQElement = document.getElementById('timeSeriesQ'); Plotly.newPlot('timeSeriesQ', figure.data, figure.layout, plotConfig); @@ -186,16 +201,22 @@ const drawTimeSeriesQChart = (json, jsonDtTest = {}, chartConfig = {}, sizeOfDat } // to broadcast click event to other charts - broadcastClickEvent(dataPoint, startingChart = 'timeSeriesQ', jsonPCAScoreTest = jsonDtTest); + broadcastClickEvent( + dataPoint, + (startingChart = 'timeSeriesQ'), + (jsonPCAScoreTest = jsonDtTest), + ); }); // Add hover event for t2 & q plots - timeSeriesQElement.on('plotly_hover', (data) => { - drawShapes(data.points[0].x); - }).on('plotly_unhover', (data) => { - drawShapes(null, false); - clearHoverTimeOut(); - }); - + timeSeriesQElement + .on('plotly_hover', (data) => { + drawShapes(data.points[0].x); + }) + .on('plotly_unhover', (data) => { + drawShapes(null, false); + clearHoverTimeOut(); + }); + showCustomContextMenu(timeSeriesQElement, true); // send plotting time const endTime = performance.now(); @@ -235,8 +256,9 @@ const genTimeSeriesData = (data, type = 'test', label = 'T2_statics') => { frame: null, }; retData.text = data.map((v, k) => { - const n = (type === 'test') ? (k + 1) : (k + 1 - data.length); - retData.line.color = (type === 'test') ? 'rgba(255,165,0,1)' : 'rgba(255,255,255,1)'; + const n = type === 'test' ? k + 1 : k + 1 - data.length; + retData.line.color = + type === 'test' ? 'rgba(255,165,0,1)' : 'rgba(255,255,255,1)'; retData.x.push(n); retData.y.push(v); return `sample_no: ${n}
${label}: ${v}`; @@ -244,7 +266,13 @@ const genTimeSeriesData = (data, type = 'test', label = 'T2_statics') => { return retData; }; -const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, sizeOfData = null, arrayPlotdata=null) => { +const drawTimeSeriesT2ChartFromObj = ( + objData, + jsonDtTest = {}, + chartConfig = {}, + sizeOfData = null, + arrayPlotdata = null, +) => { if (!objData) return; // const chartConfig.width = $('#xTest').width(); @@ -268,10 +296,7 @@ const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {} size: 13.2835201328352, }, xaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: true, @@ -305,10 +330,7 @@ const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {} hoverformat: '.2f', }, yaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: true, @@ -384,24 +406,29 @@ const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {} useResizeHandler: true, // responsive histogram style: { width: '100%', height: '100%' }, // responsive histogram }; - + // assign custome data to plot-view handling on data-point // apply for first/ full data points only if (arrayPlotdata) { scpCustomData = { proc_id_x: arrayPlotdata[0].end_proc_id, sensor_id_x: arrayPlotdata[0].end_col_id, - } + }; } t2TimeSeriesData[1].customdata = scpCustomData; - t2TimeSeriesData.forEach(dat => { + t2TimeSeriesData.forEach((dat) => { dat.hoverinfo = 'none'; }); - + const timeSeriesT2Element = document.getElementById('timeSeriesT2'); - - Plotly.newPlot(timeSeriesT2Element, t2TimeSeriesData, t2TimeSeriesLayout, plotConfig); - + + Plotly.newPlot( + timeSeriesT2Element, + t2TimeSeriesData, + t2TimeSeriesLayout, + plotConfig, + ); + // xtest click event timeSeriesT2Element.on('plotly_click', (dataPoint) => { // to spread sampleNo and clickedDataIndexto other charts @@ -417,20 +444,26 @@ const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {} } // to broadcast click event to other charts - broadcastClickEvent(dataPoint, startingChart = 'timeSeriesT2', jsonPCAScoreTest = jsonDtTest); + broadcastClickEvent( + dataPoint, + (startingChart = 'timeSeriesT2'), + (jsonPCAScoreTest = jsonDtTest), + ); }); // Add hover event for t2 & q plots - timeSeriesT2Element.on('plotly_hover', (data) => { - drawShapes(data.points[0].x); - showDataTablePCA(data, 'timeseries', 'timeSeriesT2'); - }).on('plotly_unhover', (data) => { - drawShapes(null, false); - clearHoverTimeOut(); - }); - + timeSeriesT2Element + .on('plotly_hover', (data) => { + drawShapes(data.points[0].x); + showDataTablePCA(data, 'timeseries', 'timeSeriesT2'); + }) + .on('plotly_unhover', (data) => { + drawShapes(null, false); + clearHoverTimeOut(); + }); + showCustomContextMenu(timeSeriesT2Element, true); - + // send plotting time const endTime = performance.now(); gtag('event', 'PCA_et', { @@ -447,13 +480,27 @@ const drawTimeSeriesT2ChartFromObj = (objData, jsonDtTest = {}, chartConfig = {} }); }; -const drawTimeSeriesQChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, sizeOfData = null, arrayPlotdata = null) => { +const drawTimeSeriesQChartFromObj = ( + objData, + jsonDtTest = {}, + chartConfig = {}, + sizeOfData = null, + arrayPlotdata = null, +) => { if (!objData) return; const startTime = performance.now(); - const qTrainTimeSeries = genTimeSeriesData(objData.SPE, 'train', 'Q_statics'); - const qTestTimeSeries = genTimeSeriesData(objData.test, 'test', 
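    // genTimeSeriesData numbers training samples negatively (k + 1 - length)
    // and target samples from 1 upward, coloring them white vs. orange, so the
    // train and test traces can share a single x-axis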
'Q_statics'); + const qTrainTimeSeries = genTimeSeriesData( + objData.SPE, + 'train', + 'Q_statics', + ); + const qTestTimeSeries = genTimeSeriesData( + objData.test, + 'test', + 'Q_statics', + ); const qTimeSeriesData = [qTrainTimeSeries, qTestTimeSeries]; const qTimeSeriesLayout = { margin: { @@ -470,10 +517,7 @@ const drawTimeSeriesQChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, size: 13.2835201328352, }, xaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: true, @@ -507,10 +551,7 @@ const drawTimeSeriesQChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, hoverformat: '.2f', }, yaxis: { - domain: [ - 0, - 1, - ], + domain: [0, 1], automargin: true, type: 'linear', autorange: true, @@ -586,24 +627,29 @@ const drawTimeSeriesQChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, useResizeHandler: true, // responsive histogram style: { width: '100%', height: '100%' }, // responsive histogram }; - + // assign custome data to plot-view handling on data-point // apply for first/ full data points only if (arrayPlotdata) { scpCustomData = { proc_id_x: arrayPlotdata[0].end_proc_id, sensor_id_x: arrayPlotdata[0].end_col_id, - } + }; } // 1 is for Target data qTimeSeriesData[1].customdata = scpCustomData; - qTimeSeriesData.forEach(dat => { + qTimeSeriesData.forEach((dat) => { dat.hoverinfo = 'none'; }); const timeSeriesQElement = document.getElementById('timeSeriesQ'); - - Plotly.newPlot(timeSeriesQElement, qTimeSeriesData, qTimeSeriesLayout, plotConfig); - + + Plotly.newPlot( + timeSeriesQElement, + qTimeSeriesData, + qTimeSeriesLayout, + plotConfig, + ); + // xtest click event timeSeriesQElement.on('plotly_click', (dataPoint) => { // to spread sampleNo and clickedDataIndexto other charts @@ -619,20 +665,26 @@ const drawTimeSeriesQChartFromObj = (objData, jsonDtTest = {}, chartConfig = {}, } // to broadcast click event to other charts - broadcastClickEvent(dataPoint, startingChart = 'timeSeriesQ', jsonPCAScoreTest = jsonDtTest); + broadcastClickEvent( + dataPoint, + (startingChart = 'timeSeriesQ'), + (jsonPCAScoreTest = jsonDtTest), + ); }); // Add hover event for t2 & q plots - timeSeriesQElement.on('plotly_hover', (data) => { - drawShapes(data.points[0].x); - showDataTablePCA(data, 'timeseries', 'timeSeriesQ'); - }).on('plotly_unhover', (data) => { - drawShapes(null, false); - clearHoverTimeOut(); - }); - + timeSeriesQElement + .on('plotly_hover', (data) => { + drawShapes(data.points[0].x); + showDataTablePCA(data, 'timeseries', 'timeSeriesQ'); + }) + .on('plotly_unhover', (data) => { + drawShapes(null, false); + clearHoverTimeOut(); + }); + showCustomContextMenu(timeSeriesQElement, true); - + // send plotting time const endTime = performance.now(); gtag('event', 'PCA_et', { diff --git a/ap/static/analyze/js/pca.js b/ap/static/analyze/js/pca.js index 88548ef..a702bce 100644 --- a/ap/static/analyze/js/pca.js +++ b/ap/static/analyze/js/pca.js @@ -1,8 +1,3 @@ -/* eslint-disable no-restricted-syntax */ -/* eslint-disable no-use-before-define */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-param-reassign */ const REQUEST_TIMEOUT = setRequestTimeOut(); const i18n = { allSelection: $('#i18nAllSelection').text(), @@ -25,7 +20,7 @@ const i18n = { const MAX_NUMBER_OF_SENSOR = 60; const MIN_NUMBER_OF_SENSOR = 0; -const INDEX_AXIS_LABEL = "Index" +const INDEX_AXIS_LABEL = 'Index'; const MSG_MAPPING = { E_ALL_NA: $('#i18nE01AllNA').text(), @@ -50,30 +45,59 @@ const drawPCAPlotJSON = (res, 
clickOnChart) => { // draw graphs if (!clickOnChart) { if (jsonPCAScoreTrain) { - drawXTrainScatter(jsonPCAScoreTrain, chartConfig, sizeOfData = res.dtsize_pca_score_train); + drawXTrainScatter( + jsonPCAScoreTrain, + chartConfig, + (sizeOfData = res.dtsize_pca_score_train), + ); } if (jsonPCAScoreTest) { - drawXTestScatter(jsonPCAScoreTest, chartConfig, sizeOfData = res.dtsize_pca_score_test, res.array_plotdata); + drawXTestScatter( + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_pca_score_test), + res.array_plotdata, + ); if (jsonT2TimeSeries) { - drawTimeSeriesT2Chart(jsonT2TimeSeries, jsonPCAScoreTest, chartConfig, - sizeOfData = res.dtsize_t2_time_series, res.array_plotdata); + drawTimeSeriesT2Chart( + jsonT2TimeSeries, + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_t2_time_series), + res.array_plotdata, + ); } if (jsonQTimeSeries) { - drawTimeSeriesQChart(jsonQTimeSeries, jsonPCAScoreTest, chartConfig, - sizeOfData = res.dtsize_q_time_series, res.array_plotdata); + drawTimeSeriesQChart( + jsonQTimeSeries, + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_q_time_series), + res.array_plotdata, + ); } } } if (jsonQContribution) { - drawQContributionChart(jsonQContribution, chartConfig, sizeOfData = res.dtsize_q_contribution); + drawQContributionChart( + jsonQContribution, + chartConfig, + (sizeOfData = res.dtsize_q_contribution), + ); } if (jsonT2Contribution) { - drawT2ContributionChart(jsonT2Contribution, chartConfig, - sizeOfData = res.dtsize_t2_contribution); + drawT2ContributionChart( + jsonT2Contribution, + chartConfig, + (sizeOfData = res.dtsize_t2_contribution), + ); } if (jsonPCABiplot) { - drawPCABiplotChart(jsonPCABiplot, chartConfig, - sizeOfData = res.dtsize_pca_biplot); + drawPCABiplotChart( + jsonPCABiplot, + chartConfig, + (sizeOfData = res.dtsize_pca_biplot), + ); } }; @@ -84,16 +108,38 @@ const drawPCAPlotList = (res, clickOnChart, sampleNo = null) => { const jsonPCAScoreTrain = generateXTrainScatter(res.json_pca_score_train); if (jsonPCAScoreTrain && !clickOnChart) { - drawXTrainScatter(jsonPCAScoreTrain, chartConfig, sizeOfData = res.dtsize_pca_score_train); + drawXTrainScatter( + jsonPCAScoreTrain, + chartConfig, + (sizeOfData = res.dtsize_pca_score_train), + ); } - const jsonPCAScoreTest = generateXTestScatter(res.json_pca_score_test, res.json_pca_score_train); + const jsonPCAScoreTest = generateXTestScatter( + res.json_pca_score_test, + res.json_pca_score_train, + ); if (jsonPCAScoreTest && !clickOnChart) { - drawXTestScatter(jsonPCAScoreTest, chartConfig, sizeOfData = res.dtsize_pca_score_test, res.array_plotdata); + drawXTestScatter( + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_pca_score_test), + res.array_plotdata, + ); } - const jsonPCABiplot = generateBiplot(res.json_pca_biplot, res.json_pca_score_train, sampleNo); - if (jsonPCABiplot) { drawPCABiplotChart(jsonPCABiplot, chartConfig, sizeOfData = res.dtsize_pca_biplot); } + const jsonPCABiplot = generateBiplot( + res.json_pca_biplot, + res.json_pca_score_train, + sampleNo, + ); + if (jsonPCABiplot) { + drawPCABiplotChart( + jsonPCABiplot, + chartConfig, + (sizeOfData = res.dtsize_pca_biplot), + ); + } const jsonT2TimeSeries = res.json_t2_time_series; const jsonQTimeSeries = res.json_q_time_series; @@ -101,31 +147,53 @@ const drawPCAPlotList = (res, clickOnChart, sampleNo = null) => { const jsonT2Contribution = res.json_t2_contribution; if (jsonT2TimeSeries && !clickOnChart) { - drawTimeSeriesT2ChartFromObj(jsonT2TimeSeries, jsonPCAScoreTest, 
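        // note: arguments such as `sizeOfData = res.dtsize_t2_time_series` are
        // not keyword arguments (JavaScript has none); each one assigns an
        // implicit global and passes the resulting value positionally, which is
        // why Prettier wraps them as `(sizeOfData = ...)` in the new code, e.g.
        //   drawTimeSeriesT2Chart(data, test, cfg, (sizeOfData = n), plots);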
chartConfig, - sizeOfData = res.dtsize_t2_time_series, res.array_plotdata); + drawTimeSeriesT2ChartFromObj( + jsonT2TimeSeries, + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_t2_time_series), + res.array_plotdata, + ); } if (jsonQTimeSeries && !clickOnChart) { - drawTimeSeriesQChartFromObj(jsonQTimeSeries, jsonPCAScoreTest, chartConfig, - sizeOfData = res.dtsize_q_time_series, res.array_plotdata); + drawTimeSeriesQChartFromObj( + jsonQTimeSeries, + jsonPCAScoreTest, + chartConfig, + (sizeOfData = res.dtsize_q_time_series), + res.array_plotdata, + ); } if (jsonQContribution) { - drawQContributionChartFromObj(jsonQContribution, sampleNo, chartConfig, - sizeOfData = res.dtsize_q_contribution, dpInfo = res.data_point_info, - shortName = res.short_names); + drawQContributionChartFromObj( + jsonQContribution, + sampleNo, + chartConfig, + (sizeOfData = res.dtsize_q_contribution), + (dpInfo = res.data_point_info), + (shortName = res.short_names), + ); } if (jsonT2Contribution) { - drawT2ContributionChartFromObj(jsonT2Contribution, sampleNo, chartConfig, - sizeOfData = res.dtsize_t2_contribution, dpInfo = res.data_point_info, - shortName = res.short_names); + drawT2ContributionChartFromObj( + jsonT2Contribution, + sampleNo, + chartConfig, + (sizeOfData = res.dtsize_t2_contribution), + (dpInfo = res.data_point_info), + (shortName = res.short_names), + ); } }; -const reselectPCAData = (fromShowGraphBtn=false, reselectBtn=true) => { +const reselectPCAData = (fromShowGraphBtn = false, reselectBtn = true) => { const formData = collectInputAsFormData(); // warning about integer column has_integer_col if (formData.get('has_integer_col') === 'true') { - $(eles.msgContent).text(`${MSG_MAPPING.W_PCA_INTEGER}\n${i18n.confirmQuestion}`); + $(eles.msgContent).text( + `${MSG_MAPPING.W_PCA_INTEGER}\n${i18n.confirmQuestion}`, + ); $(eles.msgModal).modal('show'); } else { beforeShowGraphCommon(); @@ -133,8 +201,13 @@ const reselectPCAData = (fromShowGraphBtn=false, reselectBtn=true) => { } }; -const getPCAPlotsFromBackend = (formData, clickOnChart = false, sampleNo = null, autoUpdate = false, reselect=false) => { - +const getPCAPlotsFromBackend = ( + formData, + clickOnChart = false, + sampleNo = null, + autoUpdate = false, + reselect = false, +) => { const eleQCont = $(eles.qContributionChart); const eleT2Cont = $(eles.t2ContributionChart); const eleBiplot = $(eles.pcaBiplotChart); @@ -153,83 +226,102 @@ const getPCAPlotsFromBackend = (formData, clickOnChart = false, sampleNo = null, lastUsedFormData = formData; - showGraphCallApi('/ap/api/analyze/pca', formData, REQUEST_TIMEOUT, async (res) => { - if (clickOnChart) { - hideLoading(eleQCont); - hideLoading(eleT2Cont); - hideLoading(eleBiplot); - hideLoading(eleRecordInfoTbl); - } else { - $('#plot-cards').show(); - } - // save global - graphStore.setTraceData(_.cloneDeep(res)); - // share global var to base.js - formDataQueried = lastUsedFormData; - const json = false; - if (json) { - drawPCAPlotJSON(res, clickOnChart); - } else { - drawPCAPlotList(res, clickOnChart, sampleNo); - } - - showInfoTable(res[CONST.COMMON]); - - fillDataToFilterModal(res.filter_on_demand, () => { - handleFilterOndemand(); - }); - - // show delete number of NAN records - // const numSensors = countSelectedSensors(formData) || 1; - // if (res.removed_outlier_nan_train) { - // showToastrDeleteNA( - // i18n.trainingData, - // numSensors * res.actual_record_number_train, - // res.removed_outlier_nan_train, - // ); - // } - // if (res.removed_outlier_nan_test) { - // 
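        // (the per-dataset toastr calls in the commented-out block above are
        // kept for reference only; the live path is the consolidated
        // showAllDeleteNAToastrMsgs(res, formData) call below, defined in
        // pca_toastr.js later in this patch)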
showToastrDeleteNA(i18n.testingData, - // numSensors * res.actual_record_number_test, - // res.removed_outlier_nan_test); - // } - if (!sampleNo) { - showAllDeleteNAToastrMsgs(res, formData); - } + showGraphCallApi( + '/ap/api/analyze/pca', + formData, + REQUEST_TIMEOUT, + async (res) => { + if (clickOnChart) { + hideLoading(eleQCont); + hideLoading(eleT2Cont); + hideLoading(eleBiplot); + hideLoading(eleRecordInfoTbl); + } else { + $('#plot-card-container').show(); + } + // save global + graphStore.setTraceData(_.cloneDeep(res)); + // share global var to base.js + formDataQueried = lastUsedFormData; + const json = false; + if (json) { + drawPCAPlotJSON(res, clickOnChart); + } else { + drawPCAPlotList(res, clickOnChart, sampleNo); + } - if (res.is_send_ga_off) { - showGAToastr(true); - } + showInfoTable(res[CONST.COMMON]); + + fillDataToFilterModal(res.filter_on_demand, () => { + handleFilterOndemand(); + }); + + // show delete number of NAN records + // const numSensors = countSelectedSensors(formData) || 1; + // if (res.removed_outlier_nan_train) { + // showToastrDeleteNA( + // i18n.trainingData, + // numSensors * res.actual_record_number_train, + // res.removed_outlier_nan_train, + // ); + // } + // if (res.removed_outlier_nan_test) { + // showToastrDeleteNA(i18n.testingData, + // numSensors * res.actual_record_number_test, + // res.removed_outlier_nan_test); + // } + if (!sampleNo) { + showAllDeleteNAToastrMsgs(res, formData); + } - if (res.actual_record_number_train > SQL_LIMIT || res.actual_record_number_test > SQL_LIMIT) { - showToastrMsg(i18n.SQLLimit); - } + if (res.is_send_ga_off) { + showGAToastr(true); + } - // show toastr to inform result was truncated upto 5000 - if (res.is_res_limited_train || res.is_res_limited_test) { - showToastrMsg(i18n.traceResulLimited.split('BREAK_LINE').join('
')); - } + if ( + res.actual_record_number_train > SQL_LIMIT || + res.actual_record_number_test > SQL_LIMIT + ) { + showToastrMsg(i18n.SQLLimit); + } - // update record table info - const jsonDataPointInfo = res.data_point_info; - if (jsonDataPointInfo) { - updateRecordInfo(dataInfos = jsonDataPointInfo, sampleNo = formData.get('sample_no')); - } + // show toastr to inform result was truncated upto 5000 + if (res.is_res_limited_train || res.is_res_limited_test) { + showToastrMsg( + i18n.traceResulLimited.split('BREAK_LINE').join('
'), + ); + } - // if (checkResultExist(res)) { - // saveInvalidFilterCaller(true); - // } else { - // saveInvalidFilterCaller(); - // } + // update record table info + const jsonDataPointInfo = res.data_point_info; + if (jsonDataPointInfo) { + updateRecordInfo( + (dataInfos = jsonDataPointInfo), + (sampleNo = formData.get('sample_no')), + ); + } - if (!autoUpdate) { - $('html, body').animate({ - scrollTop: $('#plot-cards').offset().top, - }, 1000); - } + // if (checkResultExist(res)) { + // saveInvalidFilterCaller(true); + // } else { + // saveInvalidFilterCaller(); + // } + + if (!autoUpdate) { + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph( + '#plot-card-container', + ), + }, + 1000, + ); + } - setPollingData(formData, longPollingHandler, []); - }, { page: 'pca', clickOnChart, reselect }); + setPollingData(formData, longPollingHandler, []); + }, + { page: 'pca', clickOnChart, reselect }, + ); }; const longPollingHandler = () => { @@ -250,7 +342,9 @@ const isIntegerDatatype = (type) => { // convert to lower case before compare const lowerType = type.toLowerCase(); for (let i = 0; i < NUMERIC_TYPE.length; i++) { - if (lowerType.includes(NUMERIC_TYPE[i])) { return true; } + if (lowerType.includes(NUMERIC_TYPE[i])) { + return true; + } } return false; }; @@ -314,7 +408,7 @@ const collectInputAsFormData = () => { formData.set('has_integer_col', isIntegerColChecked); formData.set('checked_val_count', valCount); - + // delete empty conditional procs [...formData.keys()].forEach((key) => { if (key.startsWith('cond_proc') && isEmpty(formData.get(key))) { @@ -331,12 +425,13 @@ const getPCAPlots = () => { updateStyleOfInvalidElements(); if (isValid) { - const formData = collectInputAsFormData(); // warning about integer column has_integer_col if (formData.get('has_integer_col') === 'true') { - $(eles.msgContent).text(`${MSG_MAPPING.W_PCA_INTEGER}\n${i18n.confirmQuestion}`); + $(eles.msgContent).text( + `${MSG_MAPPING.W_PCA_INTEGER}\n${i18n.confirmQuestion}`, + ); $(eles.msgModal).modal('show'); } else { beforeShowGraphCommon(); @@ -353,7 +448,6 @@ const confirmWarningAndGetPCA = () => { getPCAPlotsFromBackend(formData); }; - const bindCheckEvents = () => { // check all event $(eles.selectAll).change(() => { @@ -401,7 +495,11 @@ const loadUserInputAgain = (parent) => { setTimeout(() => { inputForms.each((i, form) => { try { - const userInput = saveLoadUserInput(`#${form.id}`, window.location.pathname, parent); + const userInput = saveLoadUserInput( + `#${form.id}`, + window.location.pathname, + parent, + ); userInput(); } catch (e) { console.log(e); @@ -416,7 +514,9 @@ const addClickEventAllRows = () => { const that = $(element); that.click((e) => { const currentRow = $(e.currentTarget); - const currentCheckbox = $(currentRow.find('input[type="checkbox"]')); + const currentCheckbox = $( + currentRow.find('input[type="checkbox"]'), + ); currentCheckbox.prop('checked', !currentCheckbox.is(':checked')); }); }); @@ -452,8 +552,8 @@ const loadSensors = () => { headers: myHeaders, }; fetch('/ap/api/analyze/sensor', requestOptions) - .then(response => response.text()) - .catch(error => console.log('error', error)); + .then((response) => response.text()) + .catch((error) => console.log('error', error)); setTimeout(() => { $(eles.spinner).removeClass('spinner-grow'); }, 20000); @@ -471,7 +571,6 @@ $(() => { showFilter: true, showStrColumn: true, hideStrVariable: true, - hideCTCol: true, disableSerialAsObjective: true, }); endProcItem(); @@ -484,7 +583,13 @@ $(() => { // add first 
condition process const condProcs = genProcessDropdownData(procConfigs); - const condProcItem = addCondProc(condProcs.ids, condProcs.names, '', eles.formID, 'btn-add-cond-proc'); + const condProcItem = addCondProc( + condProcs.ids, + condProcs.names, + '', + eles.formID, + 'btn-add-cond-proc', + ); condProcItem(); // click even of condition proc add button @@ -510,18 +615,24 @@ $(() => { }); const extractAndConvertDT = (datetimeStr) => { - const splitDT = splitDateTimeRange(datetimeStr); + const splitDT = splitDateTimeRange(datetimeStr); // to uct const start = toUTCDateTime(splitDT.startDate, splitDT.startTime); const end = toUTCDateTime(splitDT.endDate, splitDT.endTime); - return { start, end } + return { start, end }; }; const dumpData = (type) => { const formData = collectInputAsFormData(); - [CONST.STARTDATE, CONST.STARTTIME, CONST.ENDDATE, CONST.ENDTIME].map(el => formData.delete(el)); - const trainDateTime = $('#for-default-train').find('[name=DATETIME_RANGE_PICKER]').val(); - const testDateTime = $('#for-default-test').find('[name=DATETIME_RANGE_PICKER]').val(); + [CONST.STARTDATE, CONST.STARTTIME, CONST.ENDDATE, CONST.ENDTIME].map((el) => + formData.delete(el), + ); + const trainDateTime = $('#for-default-train') + .find('[name=DATETIME_RANGE_PICKER]') + .val(); + const testDateTime = $('#for-default-test') + .find('[name=DATETIME_RANGE_PICKER]') + .val(); const trainDT = extractAndConvertDT(trainDateTime); const testDT = extractAndConvertDT(testDateTime); formData.set(CONST.STARTDATE, trainDT.start.date); diff --git a/ap/static/analyze/js/pca_toastr.js b/ap/static/analyze/js/pca_toastr.js index 3481e06..205e401 100644 --- a/ap/static/analyze/js/pca_toastr.js +++ b/ap/static/analyze/js/pca_toastr.js @@ -1,4 +1,3 @@ - const showToastr = (errors) => { if (!errors) { return; @@ -11,13 +10,21 @@ const showToastr = (errors) => { } else if (errors instanceof Object) { const trainDataErr = { is_err: errors.train_data.error, - is_all_na: errors.train_data.errors && errors.train_data.errors.includes('E_ALL_NA'), - is_zero_var: errors.train_data.errors && errors.train_data.errors.includes('E_ZERO_VARIANCE'), + is_all_na: + errors.train_data.errors && + errors.train_data.errors.includes('E_ALL_NA'), + is_zero_var: + errors.train_data.errors && + errors.train_data.errors.includes('E_ZERO_VARIANCE'), }; const targetDataErr = { is_err: errors.target_data.error, - is_all_na: errors.target_data.errors && errors.target_data.errors.includes('E_ALL_NA'), - is_zero_var: errors.target_data.errors && errors.target_data.errors.includes('E_ZERO_VARIANCE'), + is_all_na: + errors.target_data.errors && + errors.target_data.errors.includes('E_ALL_NA'), + is_zero_var: + errors.target_data.errors && + errors.target_data.errors.includes('E_ZERO_VARIANCE'), }; let msgContent = ''; if (trainDataErr.is_all_na || targetDataErr.is_all_na) { @@ -60,8 +67,10 @@ const showAllDeleteNAToastrMsgs = (res, formData) => { ); } if (res.removed_outlier_nan_test) { - showToastrDeleteNA(i18n.testingData, + showToastrDeleteNA( + i18n.testingData, numSensors * res.actual_record_number_test, - res.removed_outlier_nan_test); + res.removed_outlier_nan_test, + ); } }; diff --git a/ap/static/calendar_heatmap/css/calendar_heatmap.css b/ap/static/calendar_heatmap/css/calendar_heatmap.css index 2b70a99..74d6613 100644 --- a/ap/static/calendar_heatmap/css/calendar_heatmap.css +++ b/ap/static/calendar_heatmap/css/calendar_heatmap.css @@ -45,7 +45,7 @@ canvas { /* Clear floats after the columns */ .chart-row:after { - content: ""; + content: 
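    /* standard clearfix pseudo-element for the floated chart columns; this
       hunk only normalizes the empty string from double to single quotes */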
''; display: table; clear: both; } @@ -95,7 +95,7 @@ canvas { .chm-col { height: 32vh; padding: 2px !important; - border-radius:3px; + border-radius: 3px; width: 100%; } @@ -109,7 +109,8 @@ canvas { border-radius: 3px; } -.chm-plot .plot-container.plotly, .chm-plot .plot-container.plotly .modebar-container { +.chm-plot .plot-container.plotly, +.chm-plot .plot-container.plotly .modebar-container { height: 100%; width: 100%; } @@ -159,4 +160,4 @@ canvas { .chm-card-title span { white-space: pre; line-height: 1; -} \ No newline at end of file +} diff --git a/ap/static/calendar_heatmap/js/calendar_heatmap.js b/ap/static/calendar_heatmap/js/calendar_heatmap.js index 88eef1f..b85c1ae 100644 --- a/ap/static/calendar_heatmap/js/calendar_heatmap.js +++ b/ap/static/calendar_heatmap/js/calendar_heatmap.js @@ -1,8 +1,3 @@ -/* eslint-disable no-restricted-syntax,prefer-arrow-callback */ -/* eslint-disable guard-for-in */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ const REQUEST_TIMEOUT = setRequestTimeOut(); const MAX_NUMBER_OF_GRAPH = 18; const MAX_NUMBER_OF_SENSOR = 18; @@ -71,7 +66,6 @@ const i18n = { canChangeScale: $('#i18nCanChangeScale').text(), }; - const formElements = { formID: '#traceDataForm', scatterBtn: '#showGraphBtn', @@ -103,9 +97,18 @@ const formElements = { }; // maximum number of processes is 10. -const procColorPalettes = ['#6495ab', '#9d9a53', '#ae6e54', '#603567', - '#00af91', '#d7cece', '#470f0f', '#0f1451', - '#a4b790', '#000000']; +const procColorPalettes = [ + '#6495ab', + '#9d9a53', + '#ae6e54', + '#603567', + '#00af91', + '#d7cece', + '#470f0f', + '#0f1451', + '#a4b790', + '#000000', +]; const showLimitResultToastr = () => { const msgContent = `

    ${i18n.limitedHeatMap.replace('BREAK_LINE', '<br>')}

`; @@ -149,9 +152,14 @@ $(() => { }); endProcItem(endProcOnChange, checkAndHideStratifiedVar); - // add first condition process - const condProcItem = addCondProc(endProcs.ids, endProcs.names, '', formElements.formID, 'btn-add-cond-proc'); + const condProcItem = addCondProc( + endProcs.ids, + endProcs.names, + '', + formElements.formID, + 'btn-add-cond-proc', + ); condProcItem(); // click even of condition proc add button @@ -168,16 +176,16 @@ $(() => { // set default states setDefaultHeatmapCycle(); - + // Load userBookmarkBar $('#userBookmarkBar').show(); - + // bind events for changing scales bindScaleEvent(); - + // validation initValidation(formElements.formID); - + initializeDateTimeRangePicker(); }); @@ -258,22 +266,28 @@ const updateSVBox = async () => { } const svOptionParentId = `${eles.condProcPartno}${idx}`; - if (svSelected !== '') { // TODO check + if (svSelected !== '') { + // TODO check addGroupListCheckboxWithSearch( parentID, svOptionParentId, '', - categoryValues, categoryNames, currentCheckedIds, - `${eles.categoryValueMulti}${idx}`, noFilter = true, + categoryValues, + categoryNames, + currentCheckedIds, + `${eles.categoryValueMulti}${idx}`, + (noFilter = true), ); } }; -const countNumSensorSelected = (id = eles.endProcRow) => $(`#${id}`).find('input[type="checkbox"][value!=All]:checked').length || 0; +const countNumSensorSelected = (id = eles.endProcRow) => + $(`#${id}`).find('input[type="checkbox"][value!=All]:checked').length || 0; const checkAndHideStratifiedVar = () => { const card = $('#end-proc-row'); - const moreThanOneProcSelected = card && card.parent().find('.card').length > 1; + const moreThanOneProcSelected = + card && card.parent().find('.card').length > 1; // const numCheckedSensors = countNumSensorSelected(); if (moreThanOneProcSelected) { formElements.stratifiedVar.css('display', 'none'); @@ -330,7 +344,6 @@ const afterShowCHM = (abnormal = false) => { loadingHide(); }; - const countNumberProc = (formData) => { let numProc = 0; for (const pair of formData.entries()) { @@ -371,29 +384,35 @@ const collectInputAsFormData = (clearOnFlyFilter, autoUpdate = false) => { return formData; }; -const queryDataAndShowHeatMap = (clearOnFlyFilter = true, autoUpdate = false) => { - +const queryDataAndShowHeatMap = ( + clearOnFlyFilter = true, + autoUpdate = false, +) => { let formData = collectInputAsFormData(clearOnFlyFilter, autoUpdate); - showGraphCallApi('/ap/api/chm/plot', formData, REQUEST_TIMEOUT, async (res) => { - afterShowCHM(); - + showGraphCallApi( + '/ap/api/chm/plot', + formData, + REQUEST_TIMEOUT, + async (res) => { + afterShowCHM(); - res = sortArrayFormVal(res); - currentData = res; - graphStore.setTraceData(_.cloneDeep(res)); + res = sortArrayFormVal(res); + currentData = res; + graphStore.setTraceData(_.cloneDeep(res)); - const scale = formElements.heatmapScale.val(); + const scale = formElements.heatmapScale.val(); - drawHeatMap(res, scale, autoUpdate); + drawHeatMap(res, scale, autoUpdate); - // show info table - showInfoTable(res); + // show info table + showInfoTable(res); - checkAndShowToastr(res, clearOnFlyFilter); + checkAndShowToastr(res, clearOnFlyFilter); - setPollingData(formData, queryDataAndShowHeatMap, [false, true]); - }); + setPollingData(formData, queryDataAndShowHeatMap, [false, true]); + }, + ); }; const sortArrayFormVal = (res) => { @@ -404,16 +423,24 @@ const sortArrayFormVal = (res) => { res.array_plotdata = []; res.procs = Object.keys(arrayPlotData); for (const procId of Object.keys(arrayPlotData)) { - const plotList = 
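        // tag every plot with its owning process id before flattening the
        // per-process map into the res.array_plotdata list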
arrayPlotData[procId].map(plot => { + const plotList = arrayPlotData[procId].map((plot) => { plot.proc_id = procId; return plot; - }) + }); res.array_plotdata.push(...plotList); } // sort graphs if (latestSortColIds && latestSortColIds.length) { - res.ARRAY_FORMVAL = sortGraphs(res.ARRAY_FORMVAL, 'GET02_VALS_SELECT', latestSortColIds); - res.array_plotdata = sortGraphs(res.array_plotdata, 'end_col', latestSortColIds); + res.ARRAY_FORMVAL = sortGraphs( + res.ARRAY_FORMVAL, + 'GET02_VALS_SELECT', + latestSortColIds, + ); + res.array_plotdata = sortGraphs( + res.array_plotdata, + 'end_col', + latestSortColIds, + ); } // if has facet and facet > 1 and sensor > 1 break row @@ -421,10 +448,10 @@ const sortArrayFormVal = (res) => { let unitFacet = []; for (const plotdata of res.array_plotdata) { if (!sensors.includes(plotdata.end_col)) { - sensors.push(plotdata.end_col) + sensors.push(plotdata.end_col); } if (plotdata.cate_value && !unitFacet.includes(plotdata.cate_value)) { - unitFacet.push(plotdata.cate_value) + unitFacet.push(plotdata.cate_value); } } res.row = 1; // default 1 row @@ -434,8 +461,7 @@ const sortArrayFormVal = (res) => { } return res; -} - +}; const checkAndShowToastr = (data, clearOnFlyFilter) => { if (clearOnFlyFilter) { @@ -453,7 +479,6 @@ const checkAndShowToastr = (data, clearOnFlyFilter) => { return; } - for (const idx in arrayPlotData) { const plotData = arrayPlotData[idx]; const zMax = plotData.z_max; @@ -467,14 +492,25 @@ const checkAndShowToastr = (data, clearOnFlyFilter) => { // show limit graphs displayed message if (data.isGraphLimited) { - showToastrMsg(i18nCommon.limitDisplayedGraphs.replace('NUMBER', MAX_NUMBER_OF_GRAPH)); + showToastrMsg( + i18nCommon.limitDisplayedGraphs.replace( + 'NUMBER', + MAX_NUMBER_OF_GRAPH, + ), + ); } }; - const drawHeatMapFromPlotData = (canvasId, plotData) => { - const numericDataTypes = [DataTypes.REAL.name, DataTypes.INTEGER.name, DataTypes.DATETIME.name] - const colorSelectDOM = numericDataTypes.includes(plotData.data_type) ? eles.colorReal : eles.colorCat; + const numericDataTypes = [ + DataTypes.REAL.name, + DataTypes.INTEGER.name, + DataTypes.DATETIME.name, + ]; + const colorSelectDOM = + !plotData.is_serial_no && numericDataTypes.includes(plotData.data_type) + ? eles.colorReal + : eles.colorCat; const colorOption = colorSelectDOM.val(); const prop = { canvasId, @@ -495,7 +531,7 @@ const drawHeatMapFromPlotData = (canvasId, plotData) => { colorOption, dataType: plotData.data_type, zFmt: plotData.z_fmt || '', - colorScaleCommon: plotData.color_scale_common || false + colorScaleCommon: plotData.color_scale_common || false, }; createHeatMap(prop); @@ -514,7 +550,7 @@ const createRowHTML = (rowIdx, length) => { }; const createCardHTML = (rowCardId, graphId, title, facet, isCTCol) => { - const CTLabel = isCTCol ? ` (${DataTypes.DATETIME.short}) [sec]` : '' + const CTLabel = isCTCol ? ` (${DataTypes.DATETIME.short}) [sec]` : ''; $(`#${rowCardId}`).append(`
@@ -546,10 +582,10 @@ const getCommonScale = (data) => { for (const plotData of procPlotDatas) { const minZ = plotData.z_min; const maxZ = plotData.z_max; - if (isEmpty(minScale) || !isEmpty(minZ) && minZ < minScale) { + if (isEmpty(minScale) || (!isEmpty(minZ) && minZ < minScale)) { minScale = minZ; } - if (isEmpty(maxScale) || !isEmpty(maxZ) && maxScale < maxZ) { + if (isEmpty(maxScale) || (!isEmpty(maxZ) && maxScale < maxZ)) { maxScale = maxZ; } } @@ -599,7 +635,10 @@ const drawHeatMap = (orgData, scaleOption = 'auto', autoUpdate = false) => { if (!isEmpty(cateValue)) { facetTitle = cateValue; if (Array.isArray(cateValue)) { - facetTitle = cateValue.length > 1 ? `${cateValue[0]} | ${cateValue[1]}` : cateValue[0]; + facetTitle = + cateValue.length > 1 + ? `${cateValue[0]} | ${cateValue[1]}` + : cateValue[0]; } title = `${end_proc_name}-${sensorName}`; } @@ -614,18 +653,27 @@ const drawHeatMap = (orgData, scaleOption = 'auto', autoUpdate = false) => { for (let rowIdx = 0; rowIdx < orgData.row; rowIdx++) { const rowCardId = createRowHTML(rowIdx, arrayPlotData.length); for (const plotIdx in arrayPlotData) { - - const plotData = arrayPlotData[plotIdx] - if (orgData.row > 1 && orgData.sensors[rowIdx] !== plotData.end_col) { + const plotData = arrayPlotData[plotIdx]; + if ( + orgData.row > 1 && + orgData.sensors[rowIdx] !== plotData.end_col + ) { continue; } const procId = plotData.proc_id; - const [title, sensorName, cardValue, facet, isCTCol] = buildGraphTitle(plotData, procId); + const [title, sensorName, cardValue, facet, isCTCol] = + buildGraphTitle(plotData, procId); plotData.sensorName = sensorName; plotData.title = title; plotData.cardValue = cardValue; - createCardHTML(rowCardId, `${rowIdx}_${plotIdx}`, title, facet, isCTCol); + createCardHTML( + rowCardId, + `${rowIdx}_${plotIdx}`, + title, + facet, + isCTCol, + ); // draw heat map const plotContainerId = `chm_${rowIdx}_${plotIdx}`; @@ -637,9 +685,12 @@ const drawHeatMap = (orgData, scaleOption = 'auto', autoUpdate = false) => { } if (!autoUpdate) { - $('html, body').animate({ - scrollTop: formElements.plotCard.offset().top, - }, 500); + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph('#scaleOption'), + }, + 500, + ); } // init filter modal @@ -649,7 +700,6 @@ const drawHeatMap = (orgData, scaleOption = 'auto', autoUpdate = false) => { }); }; - const selectHeatmapMenu = (scaleOption) => { drawHeatMap(currentData, scaleOption); @@ -657,8 +707,7 @@ const selectHeatmapMenu = (scaleOption) => { hideContextMenu(); }; - -const endProcOnChange = (async (event) => { +const endProcOnChange = async (event) => { const curTarget = event.target; const idx = parseProcGUIIndex(curTarget.id); const selectedEndProc = curTarget.value; @@ -702,21 +751,24 @@ const endProcOnChange = (async (event) => { addGroupListCheckboxWithSearch( parentId, `${eles.endProcVal}${idx}`, - '', ids, vals, checkedIds, + '', + ids, + vals, + checkedIds, `GET02_VALS_SELECT${idx}`, false, names, ); } - + updateSelectedItems(false, $(formElements.endProcSelectedItem)); - + getStratifiedVars(selectedEndProc).then(() => { $(eles.sVColumns).unbind('change'); $(eles.sVColumns).on('change', updateSVBox); $(eles.sVColumns).trigger('change'); }); -}); +}; const dumpData = (exportType, dataSrc) => { const formData = lastUsedFormData || collectInputAsFormData(true); diff --git a/ap/static/calendar_heatmap/js/calendar_heatmap_plotly.js b/ap/static/calendar_heatmap/js/calendar_heatmap_plotly.js index 9d0cbd9..829eb93 100644 --- 
a/ap/static/calendar_heatmap/js/calendar_heatmap_plotly.js +++ b/ap/static/calendar_heatmap/js/calendar_heatmap_plotly.js @@ -1,4 +1,3 @@ -/* eslint-disable max-len */ const funcTitle = { 'Cycle Time [s]': 'CT [s]', 'Range (max-min)': 'Range
(max-min)', @@ -7,19 +6,26 @@ const funcTitle = { 'Count/Min': 'Count
(Min)', }; -const getHalfOfScale = (colorScale, firstHalf=false) => { +const getHalfOfScale = (colorScale, firstHalf = false) => { const centerIdx = colorScale.length / 2; - colorScale = colorScale.filter((color, idx) => firstHalf ? idx < centerIdx : idx >= centerIdx - 1); - return colorScale.map((color, idx) => [String(idx / (colorScale.length - 1)), color[1]]); + colorScale = colorScale.filter((color, idx) => + firstHalf ? idx < centerIdx : idx >= centerIdx - 1, + ); + return colorScale.map((color, idx) => [ + String(idx / (colorScale.length - 1)), + color[1], + ]); }; const genColorScale = (data, colorOption, commonRange = null) => { if (commonRange) { - data = [commonRange.zmin, commonRange.zmax] + data = [commonRange.zmin, commonRange.zmax]; } - const minVal = Math.min(...data.filter(i => i !== null)); - const maxVal = Math.max(...data.filter(i => i !== null)); - const maxAbsVal = Math.max(...data.filter(i => i !== null).map(i => Math.abs(i))); + const minVal = Math.min(...data.filter((i) => i !== null)); + const maxVal = Math.max(...data.filter((i) => i !== null)); + const maxAbsVal = Math.max( + ...data.filter((i) => i !== null).map((i) => Math.abs(i)), + ); let colorScale = colorPallets[colorOption].scale; // for blue and blue rev @@ -33,9 +39,9 @@ const genColorScale = (data, colorOption, commonRange = null) => { let zmin = -maxAbsVal; let zmax = maxAbsVal; - colorScale = colorPallets[colorOption].isRev ? - reverseScale(dnJETColorScale) : - dnJETColorScale; + colorScale = colorPallets[colorOption].isRev + ? reverseScale(dnJETColorScale) + : dnJETColorScale; if (minVal >= 0) { zmin = 0; zmax = maxAbsVal; @@ -53,7 +59,11 @@ const genColorScale = (data, colorOption, commonRange = null) => { }; }; const createHeatMap = (prop) => { - const colorScale = genColorScale(prop.z, prop.colorOption, prop.colorScaleCommon); + const colorScale = genColorScale( + prop.z, + prop.colorOption, + prop.colorScaleCommon, + ); const common = { family: 'Calibri Light', tickSize: 10, @@ -63,8 +73,11 @@ const createHeatMap = (prop) => { primaryColor: '#65c5f1', }; - const customFuncTitle = Object.keys(funcTitle).includes(prop.aggFunction) ? funcTitle[prop.aggFunction] : prop.aggFunction; - const isChangeSize = prop.zFmt.includes('e') || Math.round(prop.zmax) > 1000; + const customFuncTitle = Object.keys(funcTitle).includes(prop.aggFunction) + ? 
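          // shorten known aggregate-function labels via the funcTitle map,
          // falling back to the raw label for anything unmapped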
funcTitle[prop.aggFunction] + : prop.aggFunction; + const isChangeSize = + prop.zFmt.includes('e') || Math.round(prop.zmax) > 1000; const heatmapTrace = { // name: [], @@ -127,7 +140,7 @@ const createHeatMap = (prop) => { showline: true, tickmode: 'array', ticktext: prop.yTicktext, - tickvals: prop.yTickvals.map(y => y + 0.5), + tickvals: prop.yTickvals.map((y) => y + 0.5), tickfont: { size: common.tickSize + 1, family: common.family, @@ -165,9 +178,9 @@ const createHeatMap = (prop) => { ...heatmapIconSettings, responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram - style: {width: '100%', height: '100%'}, // responsive histogram + style: { width: '100%', height: '100%' }, // responsive histogram }; - + Plotly.react(prop.canvasId, { data, layout, @@ -194,12 +207,13 @@ const createHeatMap = (prop) => { genDataPointHoverTable( dataTable, { - x: data.event.pageX - 120, y: data.event.pageY, + x: data.event.pageX - 120, + y: data.event.pageY, }, 0, true, prop.canvasId, - 1 + 1, ); } }); diff --git a/ap/static/categorical_plot/css/categorical_plot.css b/ap/static/categorical_plot/css/categorical_plot.css index e05137e..43ee731 100644 --- a/ap/static/categorical_plot/css/categorical_plot.css +++ b/ap/static/categorical_plot/css/categorical_plot.css @@ -19,14 +19,16 @@ canvas { min-width: 400px !important; } -@keyframes rotate{ - 0% {} +@keyframes rotate { + 0% { + } 100% { transform: rotate(-360deg); } } -@-webkit-keyframes rotate{ - 0% {} +@-webkit-keyframes rotate { + 0% { + } 100% { -webkit-transform: rotate(-360deg); } @@ -42,12 +44,12 @@ canvas { right: 0; bottom: 0; left: 0; - background: rgba(0, 0, 0, .5); + background: rgba(0, 0, 0, 0.5); z-index: 999; } .loading::before { - content: ""; + content: ''; display: block; position: fixed; left: 50%; @@ -61,7 +63,7 @@ canvas { } .loading::after { - content: ""; + content: ''; display: block; position: fixed; left: 50%; @@ -71,12 +73,11 @@ canvas { border-radius: 40px; margin-top: -10px; margin-left: -10px; - border: 4px solid #60ABB9; + border: 4px solid #60abb9; border-right: 4px solid white; animation: rotate 1s infinite linear; } - .cate-plot-cards { display: inline-block; width: 100%; @@ -85,7 +86,7 @@ canvas { .plotInfo { position: absolute; - background-color: rgba(27, 27, 27, .4); + background-color: rgba(27, 27, 27, 0.4); border: 1px solid #65c5f1; border-radius: 3px; display: none; @@ -171,22 +172,22 @@ canvas { } #compareType .nav-pills { - border-radius: 4px 4px 0 0 ; + border-radius: 4px 4px 0 0; } -#compareType .nav-item .active{ +#compareType .nav-item .active { background-color: #303030; } -#compareType{ +#compareType { border-bottom: 0px; } #compareType .nav-item { - border-top: .5px solid #444; - border-left: .5px solid #444; - border-right: .5px solid #444; - border-radius: 4px 4px 0 0 ; + border-top: 0.5px solid #444; + border-left: 0.5px solid #444; + border-right: 0.5px solid #444; + border-radius: 4px 4px 0 0; margin-left: 1px; width: 225px; text-align: center; @@ -202,11 +203,12 @@ canvas { border-top: 0px; } -.summary-col{ +.summary-col { display: none; } -.hist-summary, .hist-summary-detail { +.hist-summary, +.hist-summary-detail { font-size: 0.8vw; display: none; } @@ -239,7 +241,7 @@ canvas { position: absolute; } -.item-name{ +.item-name { color: #65c5f1; text-align: center; vertical-align: middle; @@ -278,8 +280,8 @@ canvas { position: relative; } -.histogram-tab-content.tab-content>.tab-pane { - display: block!important; +.histogram-tab-content.tab-content > .tab-pane { + 
display: block !important; width: 100%; height: 100%; visibility: hidden; @@ -287,7 +289,7 @@ canvas { z-index: -1; } -.histogram-tab-content.tab-content>.tab-pane:not(:first-child) { +.histogram-tab-content.tab-content > .tab-pane:not(:first-child) { position: absolute; top: -10px; left: 0; @@ -334,4 +336,4 @@ canvas { text-overflow: ellipsis; max-width: calc(100% / 8); } -} \ No newline at end of file +} diff --git a/ap/static/categorical_plot/css/toastr.css b/ap/static/categorical_plot/css/toastr.css index 0e33542..3baf06e 100644 --- a/ap/static/categorical_plot/css/toastr.css +++ b/ap/static/categorical_plot/css/toastr.css @@ -1,200 +1,200 @@ .toast-title { - font-weight: bold; + font-weight: bold; } .toast-message { - -ms-word-wrap: break-word; - word-wrap: break-word; + -ms-word-wrap: break-word; + word-wrap: break-word; } .toast-message a, .toast-message label { - color: #ffffff; + color: #ffffff; } .toast-message a:hover { - color: #cccccc; - text-decoration: none; + color: #cccccc; + text-decoration: none; } .toast-close-button { - position: relative; - right: -0.3em; - top: -0.3em; - float: right; - font-size: 20px; - font-weight: bold; - color: #ffffff; - -webkit-text-shadow: 0 1px 0 #ffffff; - text-shadow: 0 1px 0 #ffffff; - opacity: 0.8; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); - filter: alpha(opacity=80); + position: relative; + right: -0.3em; + top: -0.3em; + float: right; + font-size: 20px; + font-weight: bold; + color: #ffffff; + -webkit-text-shadow: 0 1px 0 #ffffff; + text-shadow: 0 1px 0 #ffffff; + opacity: 0.8; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); + filter: alpha(opacity=80); } .toast-close-button:hover, .toast-close-button:focus { - color: #000000; - text-decoration: none; - cursor: pointer; - opacity: 0.4; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); - filter: alpha(opacity=40); + color: #000000; + text-decoration: none; + cursor: pointer; + opacity: 0.4; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); + filter: alpha(opacity=40); } /*Additional properties for button version iOS requires the button element instead of an anchor tag. 
If you want the anchor version, it requires `href="#"`.*/ button.toast-close-button { - padding: 0; - cursor: pointer; - background: transparent; - border: 0; - -webkit-appearance: none; + padding: 0; + cursor: pointer; + background: transparent; + border: 0; + -webkit-appearance: none; } .toast-top-center { - top: 0; - right: 0; - width: 100%; + top: 0; + right: 0; + width: 100%; } .toast-bottom-center { - bottom: 0; - right: 0; - width: 100%; + bottom: 0; + right: 0; + width: 100%; } .toast-top-full-width { - top: 0; - right: 0; - width: 100%; + top: 0; + right: 0; + width: 100%; } .toast-bottom-full-width { - bottom: 0; - right: 0; - width: 100%; + bottom: 0; + right: 0; + width: 100%; } .toast-top-left { - top: 12px; - left: 12px; + top: 12px; + left: 12px; } .toast-top-right { - top: 12px; - right: 12px; + top: 12px; + right: 12px; } .toast-bottom-right { - right: 12px; - bottom: 12px; + right: 12px; + bottom: 12px; } .toast-bottom-left { - bottom: 12px; - left: 12px; + bottom: 12px; + left: 12px; } #toast-container { - position: fixed; - z-index: 999999; - pointer-events: none; - /*overrides*/ + position: fixed; + z-index: 999999; + pointer-events: none; + /*overrides*/ } #toast-container * { - -moz-box-sizing: border-box; - -webkit-box-sizing: border-box; - box-sizing: border-box; + -moz-box-sizing: border-box; + -webkit-box-sizing: border-box; + box-sizing: border-box; } #toast-container > div { - position: relative; - pointer-events: auto; - overflow: hidden; - margin: 0 0 6px; - padding: 15px 15px 15px 50px; - width: 300px; - -moz-border-radius: 3px 3px 3px 3px; - -webkit-border-radius: 3px 3px 3px 3px; - border-radius: 3px 3px 3px 3px; - background-position: 15px center; - background-repeat: no-repeat; - -moz-box-shadow: 0 0 12px #999999; - -webkit-box-shadow: 0 0 12px #999999; - box-shadow: 0 0 12px #999999; - color: #ffffff; - opacity: 0.8; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); - filter: alpha(opacity=80); + position: relative; + pointer-events: auto; + overflow: hidden; + margin: 0 0 6px; + padding: 15px 15px 15px 50px; + width: 300px; + -moz-border-radius: 3px 3px 3px 3px; + -webkit-border-radius: 3px 3px 3px 3px; + border-radius: 3px 3px 3px 3px; + background-position: 15px center; + background-repeat: no-repeat; + -moz-box-shadow: 0 0 12px #999999; + -webkit-box-shadow: 0 0 12px #999999; + box-shadow: 0 0 12px #999999; + color: #ffffff; + opacity: 0.8; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=80); + filter: alpha(opacity=80); } #toast-container > :hover { - -moz-box-shadow: 0 0 12px #000000; - -webkit-box-shadow: 0 0 12px #000000; - box-shadow: 0 0 12px #000000; - opacity: 1; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=100); - filter: alpha(opacity=100); - cursor: pointer; + -moz-box-shadow: 0 0 12px #000000; + -webkit-box-shadow: 0 0 12px #000000; + box-shadow: 0 0 12px #000000; + opacity: 1; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=100); + filter: alpha(opacity=100); + cursor: pointer; } #toast-container > .toast-info { - background-image: 
url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahAbmm6HtUsgtPC19tFdxXZYBOfkbmFJ1VaHA1VAHjd0pp70oTZzvR+EVrx2Ygfdsq6eu55BHYR8hlcki+n+kERUFG8BrA0BwjeAv2M8WLQBtcy+SD6fNsmnB3AlBLrgTtVW1c2QN4bVWLATaIS60J2Du5y1TiJgjSBvFVZgTmwCU+dAZFoPxGEEs8nyHC9Bwe2GvEJv2WXZb0vjdyFT4Cxk3e/kIqlOGoVLwwPevpYHT+00T+hWwXDf4AJAOUqWcDhbwAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGwSURBVEhLtZa9SgNBEMc9sUxxRcoUKSzSWIhXpFMhhYWFhaBg4yPYiWCXZxBLERsLRS3EQkEfwCKdjWJAwSKCgoKCcudv4O5YLrt7EzgXhiU3/4+b2ckmwVjJSpKkQ6wAi4gwhT+z3wRBcEz0yjSseUTrcRyfsHsXmD0AmbHOC9Ii8VImnuXBPglHpQ5wwSVM7sNnTG7Za4JwDdCjxyAiH3nyA2mtaTJufiDZ5dCaqlItILh1NHatfN5skvjx9Z38m69CgzuXmZgVrPIGE763Jx9qKsRozWYw6xOHdER+nn2KkO+Bb+UV5CBN6WC6QtBgbRVozrahAbmm6HtUsgtPC19tFdxXZYBOfkbmFJ1VaHA1VAHjd0pp70oTZzvR+EVrx2Ygfdsq6eu55BHYR8hlcki+n+kERUFG8BrA0BwjeAv2M8WLQBtcy+SD6fNsmnB3AlBLrgTtVW1c2QN4bVWLATaIS60J2Du5y1TiJgjSBvFVZgTmwCU+dAZFoPxGEEs8nyHC9Bwe2GvEJv2WXZb0vjdyFT4Cxk3e/kIqlOGoVLwwPevpYHT+00T+hWwXDf4AJAOUqWcDhbwAAAAASUVORK5CYII=') !important; } #toast-container > .toast-error { - background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFiacC0rlOCbhNVz4H9FnAYgDBvU3QIioZlJFLJtsoHYRDfiZoUyIxqCtRpVlANq0EU4dApjrtgezPFad5S19Wgjkc0hNVnuF4HjVA6C7QrSIbylB+oZe3aHgBsqlNqKYH48jXyJKMuAbiyVJ8KzaB3eRc0pg9VwQ4niFryI68qiOi3AbjwdsfnAtk0bCjTLJKr6mrD9g8iq/S/B81hguOMlQTnVyG40wAcjnmgsCNESDrjme7wfftP4P7SP4N3CJZdvzoNyGq2c/HWOXJGsvVg+RA/k2MC/wN6I2YA2Pt8GkAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAHOSURBVEhLrZa/SgNBEMZzh0WKCClSCKaIYOED+AAKeQQLG8HWztLCImBrYadgIdY+gIKNYkBFSwu7CAoqCgkkoGBI/E28PdbLZmeDLgzZzcx83/zZ2SSXC1j9fr+I1Hq93g2yxH4iwM1vkoBWAdxCmpzTxfkN2RcyZNaHFIkSo10+8kgxkXIURV5HGxTmFuc75B2RfQkpxHG8aAgaAFa0tAHqYFfQ7Iwe2yhODk8+J4C7yAoRTWI3w/4klGRgR4lO7Rpn9+gvMyWp+uxFh8+H+ARlgN1nJuJuQAYvNkEnwGFck18Er4q3egEc/oO+mhLdKgRyhdNFiacC0rlOCbhNVz4H9FnAYgDBvU3QIioZlJFLJtsoHYRDfiZoUyIxqCtRpVlANq0EU4dApjrtgezPFad5S19Wgjkc0hNVnuF4HjVA6C7QrSIbylB+oZe3aHgBsqlNqKYH48jXyJKMuAbiyVJ8KzaB3eRc0pg9VwQ4niFryI68qiOi3AbjwdsfnAtk0bCjTLJKr6mrD9g8iq/S/B81hguOMlQTnVyG40wAcjnmgsCNESDrjme7wfftP4P7SP4N3CJZdvzoNyGq2c/HWOXJGsvVg+RA/k2MC/wN6I2YA2Pt8GkAAAAASUVORK5CYII=') !important; } #toast-container > .toast-success { - background-image: 
url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAAABJRU5ErkJggg==") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAADsSURBVEhLY2AYBfQMgf///3P8+/evAIgvA/FsIF+BavYDDWMBGroaSMMBiE8VC7AZDrIFaMFnii3AZTjUgsUUWUDA8OdAH6iQbQEhw4HyGsPEcKBXBIC4ARhex4G4BsjmweU1soIFaGg/WtoFZRIZdEvIMhxkCCjXIVsATV6gFGACs4Rsw0EGgIIH3QJYJgHSARQZDrWAB+jawzgs+Q2UO49D7jnRSRGoEFRILcdmEMWGI0cm0JJ2QpYA1RDvcmzJEWhABhD/pqrL0S0CWuABKgnRki9lLseS7g2AlqwHWQSKH4oKLrILpRGhEQCw2LiRUIa4lwAAAABJRU5ErkJggg==') !important; } #toast-container > .toast-warning { - background-image: url("data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzijW1RQdsU7F77He8u68koNZTz8Oz5yGa6J3H3lZ0xYgXBK2QymlWWA+RWnYhskLBv2vmE+hBMCtbA7KX5drWyRT/2JsqZ2IvfB9Y4bWDNMFbJRFmC9E74SoS0CqulwjkC0+5bpcV1CZ8NMej4pjy0U+doDQsGyo1hzVJttIjhQ7GnBtRFN1UarUlH8F3xict+HY07rEzoUGPlWcjRFRr4/gChZgc3ZL2d8oAAAAASUVORK5CYII=") !important; + background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYAAADgdz34AAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAGYSURBVEhL5ZSvTsNQFMbXZGICMYGYmJhAQIJAICYQPAACiSDB8AiICQQJT4CqQEwgJvYASAQCiZiYmJhAIBATCARJy+9rTsldd8sKu1M0+dLb057v6/lbq/2rK0mS/TRNj9cWNAKPYIJII7gIxCcQ51cvqID+GIEX8ASG4B1bK5gIZFeQfoJdEXOfgX4QAQg7kH2A65yQ87lyxb27sggkAzAuFhbbg1K2kgCkB1bVwyIR9m2L7PRPIhDUIXgGtyKw575yz3lTNs6X4JXnjV+LKM/m3MydnTbtOKIjtz6VhCBq4vSm3ncdrD2lk0VgUXSVKjVDJXJzijW1RQdsU7F77He8u68koNZTz8Oz5yGa6J3H3lZ0xYgXBK2QymlWWA+RWnYhskLBv2vmE+hBMCtbA7KX5drWyRT/2JsqZ2IvfB9Y4bWDNMFbJRFmC9E74SoS0CqulwjkC0+5bpcV1CZ8NMej4pjy0U+doDQsGyo1hzVJttIjhQ7GnBtRFN1UarUlH8F3xict+HY07rEzoUGPlWcjRFRr4/gChZgc3ZL2d8oAAAAASUVORK5CYII=') !important; } #toast-container.toast-top-center > div, #toast-container.toast-bottom-center > div { - width: 300px; - margin-left: auto; - margin-right: auto; + width: 300px; + margin-left: auto; + margin-right: auto; } #toast-container.toast-top-full-width > div, #toast-container.toast-bottom-full-width > div { - width: 96%; - margin-left: auto; - margin-right: auto; + width: 96%; + margin-left: auto; + margin-right: auto; } .toast { - background-color: #030303; + background-color: #030303; } .toast-success { - background-color: #51a351; + background-color: #51a351; } .toast-error { - background-color: #bd362f; + background-color: #bd362f; } .toast-info { - background-color: #2f96b4; + background-color: #2f96b4; } .toast-warning { - background-color: #f89406; + background-color: #f89406; } .toast-progress { - position: absolute; - left: 0; - bottom: 0; - height: 4px; - background-color: #000000; - opacity: 0.4; - -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); - filter: 
alpha(opacity=40); + position: absolute; + left: 0; + bottom: 0; + height: 4px; + background-color: #000000; + opacity: 0.4; + -ms-filter: progid:DXImageTransform.Microsoft.Alpha(Opacity=40); + filter: alpha(opacity=40); } /*Responsive Design*/ @media all and (max-width: 240px) { - #toast-container > div { - padding: 8px 8px 8px 50px; - width: 11em; - } - #toast-container .toast-close-button { - right: -0.2em; - top: -0.2em; - } + #toast-container > div { + padding: 8px 8px 8px 50px; + width: 11em; + } + #toast-container .toast-close-button { + right: -0.2em; + top: -0.2em; + } } @media all and (min-width: 241px) and (max-width: 480px) { - #toast-container > div { - padding: 8px 8px 8px 50px; - width: 18em; - } - #toast-container .toast-close-button { - right: -0.2em; - top: -0.2em; - } + #toast-container > div { + padding: 8px 8px 8px 50px; + width: 18em; + } + #toast-container .toast-close-button { + right: -0.2em; + top: -0.2em; + } } @media all and (min-width: 481px) and (max-width: 768px) { - #toast-container > div { - padding: 15px 15px 15px 50px; - width: 25em; - } + #toast-container > div { + padding: 15px 15px 15px 50px; + width: 25em; + } } diff --git a/ap/static/categorical_plot/js/categorical_histogram_with_density_curve.js b/ap/static/categorical_plot/js/categorical_histogram_with_density_curve.js index 0c1b020..32ef865 100644 --- a/ap/static/categorical_plot/js/categorical_histogram_with_density_curve.js +++ b/ap/static/categorical_plot/js/categorical_histogram_with_density_curve.js @@ -1,5 +1,3 @@ -/* eslint-disable no-restricted-syntax */ -// eslint-disable-next-line no-unused-vars const HistogramWithDensityCurve = ($, paramObj) => { // ////////////// プライベート関数の定義 //////////////////// const setParam = (key, defaultValue) => { @@ -9,7 +7,6 @@ const HistogramWithDensityCurve = ($, paramObj) => { return defaultValue; }; - const canvasId = setParam('canvasId', ''); const yLabelFreq = setParam('yLabelFreq', '度数(カウント)'); const yTitle = setParam('yTitle', ''); @@ -23,7 +20,7 @@ const HistogramWithDensityCurve = ($, paramObj) => { const plotData = setParam('plotData', []); const isCatLimited = setParam('isCatLimited', false); const allGroupNames = setParam('allGroupNames', []); - const labelFmt = setParam('labelFmt', ''); + const labelFmt = setParam('labelFmt', ''); let customBinSize = 1; if (kdeData && kdeData.hist_labels.length > 1) { @@ -31,7 +28,7 @@ const HistogramWithDensityCurve = ($, paramObj) => { } const maxKDE = Math.max(...kdeData.kde); const maxHist = Math.max(...kdeData.hist_counts); - const transKDE = kdeData.kde.map(i => maxHist * i / maxKDE); + const transKDE = kdeData.kde.map((i) => (maxHist * i) / maxKDE); const kdeDensity = { y: kdeData.hist_labels, x: transKDE, @@ -96,7 +93,8 @@ const HistogramWithDensityCurve = ($, paramObj) => { categoryLabels.reverse(); categoryIds.sort().reverse(); // 4321 categoryLabels.forEach((catName) => { - const categoryCount = plotData.category_distributed[catName].counts_org; + const categoryCount = + plotData.category_distributed[catName].counts_org; stepChartDat.push(categoryCount); }); } @@ -205,12 +203,12 @@ const HistogramWithDensityCurve = ($, paramObj) => { // layout.yaxis.tickangle = 45; // layout.yaxis.tickmode = 'array'; layout.yaxis.tickvals = allGroupNames.id; - layout.yaxis.ticktext = allGroupNames.id.map(cat => ''); + layout.yaxis.ticktext = allGroupNames.id.map((cat) => ''); const minYVal = Math.min(...allGroupNames.id); const maxYVal = Math.max(...allGroupNames.id); layout.yaxis.range = [minYVal - 1, maxYVal + 1]; 
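// A quick aside on the transKDE mapping above: it rescales kernel-density
// values so the curve's peak lines up with the tallest histogram bar, letting
// density and counts share one axis. A minimal standalone sketch
// (scaleKdeToHistogram is an illustrative name, not part of this patch):
const scaleKdeToHistogram = (kde, histCounts) => {
    const maxKDE = Math.max(...kde);
    const maxHist = Math.max(...histCounts);
    // Guard a flat density so we never divide by zero.
    return maxKDE === 0 ? kde.map(() => 0) : kde.map((v) => (maxHist * v) / maxKDE);
};
// scaleKdeToHistogram([0.1, 0.4, 0.2], [5, 20, 10]) -> [5, 20, 10]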
layout.yaxis.autorange = false; - + // add label to barchart allGroupNames.id.forEach((catId, k) => { @@ -238,13 +236,12 @@ const HistogramWithDensityCurve = ($, paramObj) => { layout.xaxis.autorange = false; } - try { Plotly.newPlot(canvasId, data, layout, { displayModeBar: false, responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram - style: {width: '100%', height: '100%'}, // responsive histogram + style: { width: '100%', height: '100%' }, // responsive histogram }); } catch (e) { console.log(canvasId, data, layout); @@ -286,12 +283,13 @@ const HistogramWithDensityCurve = ($, paramObj) => { }; const hdPlot = document.getElementById(canvasId); - hdPlot.on('plotly_hover', (data) => { - drawShapes(data.points[0].x, data.points[0].y); - if (data.points) { - showInforTbl(data, true, canvasId); - } - }) + hdPlot + .on('plotly_hover', (data) => { + drawShapes(data.points[0].x, data.points[0].y); + if (data.points) { + showInforTbl(data, true, canvasId); + } + }) .on('plotly_unhover', (data) => { drawShapes(null, null, false); }); @@ -315,7 +313,10 @@ const drawEmptyHistogram = ($, paramObj) => { orientation: 'h', }, ]; - const catLimitMsgs = $('#i18nCatLimitedMsg').text().split('BREAK_LINE').join('
<br>');
+    const catLimitMsgs = $('#i18nCatLimitedMsg')
+        .text()
+        .split('BREAK_LINE')
+        .join('<br>
'); const layout = { showlegend: false, xaxis: { @@ -364,20 +365,22 @@ const drawEmptyHistogram = ($, paramObj) => { pad: 5, }, shapes: [], - annotations: [{ - xref: 'x', - yref: 'y', - text: catLimitMsgs, - showarrow: false, - font: { - color: '#65c5f1', + annotations: [ + { + xref: 'x', + yref: 'y', + text: catLimitMsgs, + showarrow: false, + font: { + color: '#65c5f1', + }, }, - }], + ], }; Plotly.newPlot(canvasId, data, layout, { displayModeBar: false, responsive: true, // responsive histogram useResizeHandler: true, // responsive histogram - style: {width: '100%', height: '100%'}, // responsive histogram + style: { width: '100%', height: '100%' }, // responsive histogram }); }; diff --git a/ap/static/categorical_plot/js/categorical_plot.js b/ap/static/categorical_plot/js/categorical_plot.js index 8bf32f2..b2ce729 100644 --- a/ap/static/categorical_plot/js/categorical_plot.js +++ b/ap/static/categorical_plot/js/categorical_plot.js @@ -1,13 +1,13 @@ -/* eslint-disable no-restricted-syntax */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ const REQUEST_TIMEOUT = setRequestTimeOut(); const MAX_NUMBER_OF_GRAPH = 32; const MAX_NUMBER_OF_SENSOR = 8; const MIN_NUMBER_OF_SENSOR = 0; -// eslint-disable-next-line prefer-const -const dicTabs = {'#byVarCompare': 'var', '#byTermCompare': 'term', '#byCyclicTerm': 'cyclicTerm'}; + +const dicTabs = { + '#byVarCompare': 'var', + '#byTermCompare': 'term', + '#byCyclicTerm': 'cyclicTerm', +}; let currentTraceDataVar; let currentTraceDataTerm; let currentTraceDataCyclicTerm; @@ -77,7 +77,6 @@ const eles = { stratifiedTabs: '.stratifiedTabs', }; - const formElements = { NO_FILTER: 'NO_FILTER', BY_VAR: 'var', @@ -88,7 +87,6 @@ const formElements = { condProcSelectedItem: '#varcond-proc-row select', }; - const i18n = { yLabelKDE: $('#i18nKDE').text(), yLabelFreq: $('#i18nFrequency').text(), @@ -152,7 +150,9 @@ $(() => { // add first condition process const varCondProcItem = addCondProc( - endProcs.ids, endProcs.names, eles.varTabPrefix, + endProcs.ids, + endProcs.names, + eles.varTabPrefix, eles.mainFormId.replace('#', ''), 'varbtn-add-cond-proc', ); @@ -200,64 +200,85 @@ const showScatterPlotImage = (fileNames) => { fileNames.forEach((e) => { imgs += ``; }); - scatterPlotCard.html(`
${imgs}
`); + scatterPlotCard.html( + `
${imgs}
`, + ); }; - const onChangeHistSummaryEventHandler = (eleIdPrefix = '') => { $(`input[name=${eleIdPrefix}${eles.summaryOption}]`).unbind('change'); - $(`input[name=${eleIdPrefix}${eles.summaryOption}]`).on('change', function f() { - let summaryHeight = null; - const summaryClass = $(this).val(); - const previousOption = $(`input[name=${eleIdPrefix}${eles.summaryOption}][data-checked=true]`); - if (summaryClass === 'none') { - $(`.${eleIdPrefix}.hist-summary`).each(function showHideSummary() { - $(this).css('display', 'none'); - }); - // if (previousOption.val() && previousOption.val() !== 'none') { - // // rescale histogram - // $(`.${eleIdPrefix}.his .hd-plot`).each(function reScaleHistogram() { - // const histogramId = $(this).attr('id'); - // $(`#${histogramId}`).css('height', GRAPH_CONST.histHeight); - // Plotly.relayout(histogramId, {}); - // }); - // } - $(`.${eleIdPrefix}.his .hd-plot`).each(function reScaleHistogram() { - const histogramId = $(this).attr('id'); - $(`#${histogramId}`).css('height', GRAPH_CONST.histHeight); - Plotly.relayout(histogramId, {}); - }); - - // mark this option as checked and remove others - $(this).attr('data-checked', 'true'); - $(`input[name=${eleIdPrefix}${eles.summaryOption}]:not(:checked)`).removeAttr('data-checked'); - } else { - $(`.${eleIdPrefix}.hist-summary`).each(function showHideSummary() { - $(this).css('display', 'flex'); - $(this).css('justify-content', 'center'); // to unify with FPP - }); - - $('.hist-summary-detail').each(function showUponOption() { - $(this).css('display', 'none'); - if ($(this).hasClass(summaryClass)) { - $(this).css('display', 'block'); - const h = $(this).height(); - summaryHeight = h < summaryHeight ? summaryHeight : h; - } - }); - - $(`.${eleIdPrefix}.his .hd-plot`).each(function reScaleHistogram() { - const histogramId = $(this).attr('id'); - const chartHeight = `calc(${GRAPH_CONST.histHeight} - ${summaryHeight + 6}px)`; - $(`#${histogramId}`).css('height', chartHeight); - Plotly.relayout(histogramId, {}); - }); - - // mark this option as checked and remove others - $(this).attr('data-checked', 'true'); - $(`input[name=${eleIdPrefix}${eles.summaryOption}]:not(:checked)`).removeAttr('data-checked'); - } - }); + $(`input[name=${eleIdPrefix}${eles.summaryOption}]`).on( + 'change', + function f() { + let summaryHeight = null; + const summaryClass = $(this).val(); + const previousOption = $( + `input[name=${eleIdPrefix}${eles.summaryOption}][data-checked=true]`, + ); + if (summaryClass === 'none') { + $(`.${eleIdPrefix}.hist-summary`).each( + function showHideSummary() { + $(this).css('display', 'none'); + }, + ); + // if (previousOption.val() && previousOption.val() !== 'none') { + // // rescale histogram + // $(`.${eleIdPrefix}.his .hd-plot`).each(function reScaleHistogram() { + // const histogramId = $(this).attr('id'); + // $(`#${histogramId}`).css('height', GRAPH_CONST.histHeight); + // Plotly.relayout(histogramId, {}); + // }); + // } + $(`.${eleIdPrefix}.his .hd-plot`).each( + function reScaleHistogram() { + const histogramId = $(this).attr('id'); + $(`#${histogramId}`).css( + 'height', + GRAPH_CONST.histHeight, + ); + Plotly.relayout(histogramId, {}); + }, + ); + + // mark this option as checked and remove others + $(this).attr('data-checked', 'true'); + $( + `input[name=${eleIdPrefix}${eles.summaryOption}]:not(:checked)`, + ).removeAttr('data-checked'); + } else { + $(`.${eleIdPrefix}.hist-summary`).each( + function showHideSummary() { + $(this).css('display', 'flex'); + $(this).css('justify-content', 
'center'); // to unify with FPP + }, + ); + + $('.hist-summary-detail').each(function showUponOption() { + $(this).css('display', 'none'); + if ($(this).hasClass(summaryClass)) { + $(this).css('display', 'block'); + const h = $(this).height(); + summaryHeight = h < summaryHeight ? summaryHeight : h; + } + }); + + $(`.${eleIdPrefix}.his .hd-plot`).each( + function reScaleHistogram() { + const histogramId = $(this).attr('id'); + const chartHeight = `calc(${GRAPH_CONST.histHeight} - ${summaryHeight + 6}px)`; + $(`#${histogramId}`).css('height', chartHeight); + Plotly.relayout(histogramId, {}); + }, + ); + + // mark this option as checked and remove others + $(this).attr('data-checked', 'true'); + $( + `input[name=${eleIdPrefix}${eles.summaryOption}]:not(:checked)`, + ).removeAttr('data-checked'); + } + }, + ); }; const onChangeHistScale = (prefix) => { @@ -290,16 +311,20 @@ const rerenderHistogram = (prefix) => { }; const showTabsAndCharts = ( - eleIdPrefix, data, + eleIdPrefix, + data, genTab = true, onlySensorId = null, ) => { if (data == null) return; let sensors = []; - data.ARRAY_FORMVAL.forEach(arrayFormval => { - sensors = [...sensors, ...arrayFormval.GET02_VALS_SELECT.map(val => Number(val))] - }) - if (typeof (sensors) === 'string') { + data.ARRAY_FORMVAL.forEach((arrayFormval) => { + sensors = [ + ...sensors, + ...arrayFormval.GET02_VALS_SELECT.map((val) => Number(val)), + ]; + }); + if (typeof sensors === 'string') { sensors = [sensors]; } @@ -320,7 +345,10 @@ const showTabsAndCharts = ( // /////////////// each sensor //////////////////// const startProc = data.COMMON.start_proc; for (let sensorIdx = 0; sensorIdx < numSensors; sensorIdx++) { - if (onlySensorId !== null && onlySensorId !== Number(sensors[sensorIdx])) { + if ( + onlySensorId !== null && + onlySensorId !== Number(sensors[sensorIdx]) + ) { continue; } const tabId = `#${eleIdPrefix}${eles.categoryPlotCards}-${sensorIdx}`; @@ -328,10 +356,13 @@ const showTabsAndCharts = ( tabElement.empty(); tabElement.css('display', 'block'); - const sensor = sensors[sensorIdx]; - const sensorPlotDatas = eleIdPrefix !== 'directTerm' ? data.array_plotdata[sensor] - : data.array_plotdata.filter(plot => plot.end_col === Number(sensor)); + const sensorPlotDatas = + eleIdPrefix !== 'directTerm' + ? data.array_plotdata[sensor] + : data.array_plotdata.filter( + (plot) => plot.end_col === Number(sensor), + ); if (!sensorPlotDatas) { continue; } @@ -339,16 +370,24 @@ const showTabsAndCharts = ( // カラム名を取得する。 const displayColName = sensorPlotDatas[0].end_col_name; const endProcName = sensorPlotDatas[0].end_proc_name; - const isCategory = sensorPlotDatas[0] ? sensorPlotDatas[0].is_category : false; + const isCategory = sensorPlotDatas[0] + ? sensorPlotDatas[0].is_category + : false; const allGroupNames = isCategory - ? getAllGroupOfSensor(sensorPlotDatas) : []; + ? getAllGroupOfSensor(sensorPlotDatas) + : []; const generalInfo = { - startProc, endProcName: endProcName, + startProc, + endProcName: endProcName, }; - const isCatLimited = sensorPlotDatas[0] ? sensorPlotDatas[0].is_cat_limited : false; + const isCatLimited = sensorPlotDatas[0] + ? 
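// The summary-option handler above resizes charts by shrinking the container
// with a CSS calc() and then issuing an empty Plotly.relayout so the plot
// re-measures itself. Sketched standalone (shrinkHistogram is an illustrative
// name; GRAPH_CONST.histHeight is the base height used throughout this file):
const shrinkHistogram = (histogramId, summaryHeight) => {
    const chartHeight = `calc(${GRAPH_CONST.histHeight} - ${summaryHeight + 6}px)`;
    $(`#${histogramId}`).css('height', chartHeight);
    Plotly.relayout(histogramId, {}); // empty update: only forces a re-measure
};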
sensorPlotDatas[0].is_cat_limited + : false; if (isCatLimited) { - tabElement.closest('.tab-pane') - .find('.overlay-card').css('display', 'grid'); + tabElement + .closest('.tab-pane') + .find('.overlay-card') + .css('display', 'grid'); } // /////////////// each histogram //////////////////// for (let i = 0; i < numGraphs; i++) { @@ -356,38 +395,78 @@ const showTabsAndCharts = ( const termIdx = sensorPlotDatas[i].term_id || 0; const beforeRankValues = sensorPlotDatas[i].before_rank_values; const stepChartSummary = sensorPlotDatas[i].cat_summary || null; - const catExpBoxCols = [data.COMMON.catExpBox1, data.COMMON.catExpBox2].filter(c => c); - const filterCond = catExpBoxCols.length > 0 - ? catExpValue.toString().split(' | ') : null; + const catExpBoxCols = [ + data.COMMON.catExpBox1, + data.COMMON.catExpBox2, + ].filter((c) => c); + const filterCond = + catExpBoxCols.length > 0 + ? catExpValue.toString().split(' | ') + : null; // get latest thresholds -> show thresholds in scatter, histogram, summary - const [chartInfos, chartInfosOrg] = getChartInfo(sensorPlotDatas[i], 'TIME', filterCond); - const [latestChartInfo, latestChartInfoIdx] = chooseLatestThresholds(chartInfos, chartInfosOrg); + const [chartInfos, chartInfosOrg] = getChartInfo( + sensorPlotDatas[i], + 'TIME', + filterCond, + ); + const [latestChartInfo, latestChartInfoIdx] = + chooseLatestThresholds(chartInfos, chartInfosOrg); const scaleInfo = getScaleInfo(sensorPlotDatas[i], scaleOption); // y_min/max are defined in backend -> get only const kdeData = scaleInfo.kde_data; - const [minY, maxY] = calMinMaxYScale(scaleInfo['y-min'], scaleInfo['y-max'], scaleOption); - const maxX = frequencyOption === frequencyOptions.COMMON ? scaleInfo['x-max'] : null; - const minX = frequencyOption === frequencyOptions.COMMON ? scaleInfo['x-min'] : null; + const [minY, maxY] = calMinMaxYScale( + scaleInfo['y-min'], + scaleInfo['y-max'], + scaleOption, + ); + const maxX = + frequencyOption === frequencyOptions.COMMON + ? scaleInfo['x-max'] + : null; + const minX = + frequencyOption === frequencyOptions.COMMON + ? scaleInfo['x-min'] + : null; // produce summary data - const {summaries, end_col, end_proc_id} = sensorPlotDatas[i]; - const isHideNonePoint = isHideNoneDataPoint(end_proc_id, end_col, data.COMMON.remove_outlier); - const summaryData = calculateSummaryData(summaries, latestChartInfoIdx, isHideNonePoint); + const { summaries, end_col, end_proc_id } = sensorPlotDatas[i]; + const isHideNonePoint = isHideNoneDataPoint( + end_proc_id, + end_col, + data.COMMON.remove_outlier, + ); + const summaryData = calculateSummaryData( + summaries, + latestChartInfoIdx, + isHideNonePoint, + ); const isShowDate = eleIdPrefix !== eles.varTabPrefix; const timeCond = data.time_conds[termIdx]; const chartTitle = buildSummaryChartTitle( - catExpValue, catExpBoxCols, sensorPlotDatas[i].catExpBoxName, - isShowDate, timeCond, true, + catExpValue, + catExpBoxCols, + sensorPlotDatas[i].catExpBoxName, + isShowDate, + timeCond, + true, ); // create summaries HTMLs - const summariesHTML = buildSummaryResultsHTML(summaryData, i + 1, generalInfo, beforeRankValues, stepChartSummary,); + const summariesHTML = buildSummaryResultsHTML( + summaryData, + i + 1, + generalInfo, + beforeRankValues, + stepChartSummary, + ); const histogramId = `${eleIdPrefix}-${sensor}-${eles.histograms}${i + 1}`; - const fromStartPrcClass = String(sensorPlotDatas[i].end_proc_id) === String(startProc) - ? 
' card-active' : ''; + const fromStartPrcClass = + String(sensorPlotDatas[i].end_proc_id) === String(startProc) + ? ' card-active' + : ''; const cardHtml = `
${chartTitle} @@ -433,10 +512,14 @@ const showTabsAndCharts = ( } // //////////////////////////////////// // report progress - loadingUpdate(loadingProgressBackend + sensorIdx * ((100 - loadingProgressBackend) / (numSensors || 1))); + loadingUpdate( + loadingProgressBackend + + sensorIdx * + ((100 - loadingProgressBackend) / (numSensors || 1)), + ); } - checkSummaryOption(`${eleIdPrefix}${eles.summaryOption}`); + checkSummaryOption(`${eleIdPrefix}${eles.summaryOption}`); // Init filter modal fillDataToFilterModal(data.filter_on_demand, () => { @@ -464,7 +547,9 @@ const setNameWithPrefix = (prefix) => { const resetSetting = (eleIdPrefix) => { resetSummaryOption(`${eleIdPrefix}${eles.summaryOption}`); - $(`select[name=${eleIdPrefix}${eles.frequencyScale}]`).val(frequencyOptions.COMMON); + $(`select[name=${eleIdPrefix}${eles.frequencyScale}]`).val( + frequencyOptions.COMMON, + ); $(`select[name=${eleIdPrefix}HistScale]`).val(scaleOptionConst.COMMON); }; @@ -480,7 +565,7 @@ const collectFormDataFromGUI = (clearOnFlyFilter, autoUpdate = false) => { } const traceForm = $(eles.mainFormId); let formData = new FormData(traceForm[0]); - + const eleIdPrefix = $('select[name=compareType]').val(); // collect form data if (clearOnFlyFilter) { @@ -489,7 +574,7 @@ const collectFormDataFromGUI = (clearOnFlyFilter, autoUpdate = false) => { formData = reformatFormData(eleIdPrefix, formData); formData = genDatetimeRange(formData); lastUsedFormData = formData; - + resetCheckedCats(); } else { formData = lastUsedFormData; @@ -501,78 +586,98 @@ const collectFormDataFromGUI = (clearOnFlyFilter, autoUpdate = false) => { const showGraph = (clearOnFlyFilter = true, autoUpdate = false) => { requestStartedAt = performance.now(); const eleIdPrefix = $('select[name=compareType]').val(); - - const isValid = checkValidations({max: MAX_NUMBER_OF_SENSOR}); + + const isValid = checkValidations({ max: MAX_NUMBER_OF_SENSOR }); updateStyleOfInvalidElements(); if (!isValid) return; - + // close sidebar beforeShowGraphCommon(clearOnFlyFilter); if (clearOnFlyFilter) { // reset sumary option resetSetting(eleIdPrefix); - }; + } const formData = collectFormDataFromGUI(clearOnFlyFilter, autoUpdate); - showGraphCallApi('/ap/api/stp/index', formData, REQUEST_TIMEOUT, async (res) => { - // set summary bar for prefix - setNameWithPrefix(eleIdPrefix); - - // show result section - $('#categoricalPlotArea').show(); - - if (!_.isEmpty(res.array_plotdata)) { - graphStore.setTraceData(_.cloneDeep(res)); - if (eleIdPrefix === eles.varTabPrefix) { - currentTraceDataVar = res; - showMessageIfFacetNotSelected(res); - } else if (eleIdPrefix === eles.termTabPrefix) { - currentTraceDataTerm = res; - } else { - currentTraceDataCyclicTerm = res; + showGraphCallApi( + '/ap/api/stp/index', + formData, + REQUEST_TIMEOUT, + async (res) => { + // set summary bar for prefix + setNameWithPrefix(eleIdPrefix); + + // show result section + $('#categoricalPlotArea').show(); + + if (!_.isEmpty(res.array_plotdata)) { + graphStore.setTraceData(_.cloneDeep(res)); + if (eleIdPrefix === eles.varTabPrefix) { + currentTraceDataVar = res; + showMessageIfFacetNotSelected(res); + } else if (eleIdPrefix === eles.termTabPrefix) { + currentTraceDataTerm = res; + } else { + currentTraceDataCyclicTerm = res; + } } - } - setScaleOption(eleIdPrefix); - - // show graphs - // if (eleIdPrefix === eles.varTabPrefix || eleIdPrefix === eles.cyclicTermTabPrefix) { - showTabsAndCharts(eleIdPrefix, res); - - // show info table - showInfoTable(res); - - // Move screen to graph after pushing 
グラフ表示 button - if (!autoUpdate) { - $('html, body').animate({ - scrollTop: $(`#${eles.categoryPlotCards}`).offset().top, - }, 500); - } + setScaleOption(eleIdPrefix); + + // show graphs + // if (eleIdPrefix === eles.varTabPrefix || eleIdPrefix === eles.cyclicTermTabPrefix) { + showTabsAndCharts(eleIdPrefix, res); + + // show info table + showInfoTable(res); + + // Move screen to graph after pushing グラフ表示 button + if (!autoUpdate) { + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph( + `#${eles.categoryPlotCards}`, + ), + }, + 500, + ); + } - // check result and show toastr msg - if (isEmpty(res.array_plotdata) || isEmpty(Object.values(res.array_plotdata)[0])) { - showToastrAnomalGraph(); - } + // check result and show toastr msg + if ( + isEmpty(res.array_plotdata) || + isEmpty(Object.values(res.array_plotdata)[0]) + ) { + showToastrAnomalGraph(); + } - // show limit graphs displayed in one tab message - if (res.isGraphLimited) { - showToastrMsg(i18nCommon.limitDisplayedGraphsInOneTab.replace('NUMBER', MAX_NUMBER_OF_GRAPH)); - } + // show limit graphs displayed in one tab message + if (res.isGraphLimited) { + showToastrMsg( + i18nCommon.limitDisplayedGraphsInOneTab.replace( + 'NUMBER', + MAX_NUMBER_OF_GRAPH, + ), + ); + } - // show scatter plot tab - const imgFile = res.images; - if (imgFile) { - showScatterPlotImage(imgFile); - } - setPollingData(formData, showGraph, [false, true]); + // show scatter plot tab + const imgFile = res.images; + if (imgFile) { + showScatterPlotImage(imgFile); + } + setPollingData(formData, showGraph, [false, true]); - // drag & drop for tables - $('.ui-sortable').sortable(); - }); + // drag & drop for tables + $('.ui-sortable').sortable(); + }, + ); }; const setScaleOption = (prefix) => { stpScaleOption.yAxis = $(`select[name=${prefix}${eles.histScale}]`).val(); - stpScaleOption.xAxis = $(`select[name=${prefix}${eles.frequencyScale}]`).val(); + stpScaleOption.xAxis = $( + `select[name=${prefix}${eles.frequencyScale}]`, + ).val(); }; diff --git a/ap/static/categorical_plot/js/categorical_plot_utils.js b/ap/static/categorical_plot/js/categorical_plot_utils.js index 05958e4..06864f1 100644 --- a/ap/static/categorical_plot/js/categorical_plot_utils.js +++ b/ap/static/categorical_plot/js/categorical_plot_utils.js @@ -1,5 +1,3 @@ -/* eslint-disable guard-for-in */ -/* eslint-disable no-restricted-syntax */ const cyclicEles = { datetimeFrom: $('#i18nDatetimeFrom').text(), datetimeTo: $('#i18nDatetimeTo').text(), @@ -43,16 +41,27 @@ const addLimitNumSensors = () => { }); }; - // generate HTML for tabs const generateTabHTML = (eleIdPrefix, data, sensors, showViewer = false) => { - const genNavItemHTML = (tabId, sensorMasterName, status = '', sensorID = null) => ``; - const catLimitMsgs = $('#i18nCatLimitedMsg').text().split('BREAK_LINE').join('
<br>');
-    const genTabContentHTML = (tabId, plotCardId, status = '') => `
+ const catLimitMsgs = $('#i18nCatLimitedMsg') + .text() + .split('BREAK_LINE') + .join('
<br>');
+    const genTabContentHTML = (
+        tabId,
+        plotCardId,
+        status = '',
+    ) => `
${catLimitMsgs} @@ -61,19 +70,36 @@ const generateTabHTML = (eleIdPrefix, data, sensors, showViewer = false) => { const navItemHTMLs = []; const tabContentHTMLs = []; const existPlotData = !_.isEmpty(data.array_plotdata); - for (let sensorIdx = 0; existPlotData && sensorIdx < sensors.length; sensorIdx++) { - const sensorPlotDatas = eleIdPrefix !== 'directTerm' ? data.array_plotdata[sensors[sensorIdx]] - : data.array_plotdata.filter(plot => plot.end_col === Number(sensors[sensorIdx])); + const procNamesSet = new Set(); + for ( + let sensorIdx = 0; + existPlotData && sensorIdx < sensors.length; + sensorIdx++ + ) { + const sensorPlotDatas = + eleIdPrefix !== 'directTerm' + ? data.array_plotdata[sensors[sensorIdx]] + : data.array_plotdata.filter( + (plot) => plot.end_col === Number(sensors[sensorIdx]), + ); // カラム名を取得する。 - const sensorMasterName = sensorPlotDatas[0].end_col_name; - + const { end_proc_name, end_col_name } = sensorPlotDatas[0]; + const sensorMasterName = procNamesSet.has(end_proc_name) + ? `${end_col_name}` + : `${end_proc_name}|${end_col_name}`; + procNamesSet.add(end_proc_name); let status = ''; if (sensorIdx === 0) { status = 'active'; } const tabId = `${eleIdPrefix}HistogramsTab-${sensorIdx}`; const sensorID = sensors[sensorIdx]; - const navItemHTML = genNavItemHTML(tabId, sensorMasterName, status, sensorID); + const navItemHTML = genNavItemHTML( + tabId, + sensorMasterName, + status, + sensorID, + ); navItemHTMLs.push(navItemHTML); const plotCardId = `${eleIdPrefix}CatePlotCards-${sensorIdx}`; const tabContentHTML = genTabContentHTML(tabId, plotCardId, status); @@ -82,8 +108,14 @@ const generateTabHTML = (eleIdPrefix, data, sensors, showViewer = false) => { let viewerNavHTML = ''; let viewerContentHTML = ''; if (showViewer) { - viewerNavHTML = genNavItemHTML(tabId = 'scattersTab', sensorMasterName = i18n.viewerTabName); - viewerContentHTML = genTabContentHTML(tabId = 'scattersTab', plotCardId = 'varScatterPlotCards'); + viewerNavHTML = genNavItemHTML( + (tabId = 'scattersTab'), + (sensorMasterName = i18n.viewerTabName), + ); + viewerContentHTML = genTabContentHTML( + (tabId = 'scattersTab'), + (plotCardId = 'varScatterPlotCards'), + ); } const stratifiedVarTabHTML = `
+    [Template markup lost in extraction: the body of the stratifiedVarTabHTML
+    template above, and the opening of the added process-config section
+    template that closes at `;` below. The surviving fragments of the latter
+    are its table headers — a required mark (*), ${i18n.columnRawName},
+    ${i18n.dataType}, System, ${i18n.japaneseName}, ${i18n.localName},
+    ${i18n.unit} — followed by a ${i18n.sampleData} block.]
+`; + } + + /** + * Find all columns and determine what column is main::Serial or Serial + * @description ONLY EDGE SERVER DO THIS LOGIC + * @param {ProcessColumnConfig[]} processColumnConfigs - a list of process column config + */ + static #determineSerialColumns(processColumnConfigs) { + let hasMainSerialCol = false; + processColumnConfigs.forEach((processColumnConfig) => { + // if v2 col_name is シリアルNo -> auto check + const isSerial = + /^.*シリアル|serial.*$/.test( + processColumnConfig.column_name.toString().toLowerCase(), + ) && + [DataTypes.STRING.name, DataTypes.INTEGER.name].includes( + processColumnConfig.data_type, + ); + if (isSerial) { + if (hasMainSerialCol) { + processColumnConfig.is_serial_no = true; + } else { + processColumnConfig.is_main_serial_no = true; + hasMainSerialCol = true; + } + } + }); + } + + /** + * Generate Process Config Body HTML string + * @param {ProcessColumnConfig[]} processColumnConfigs - a list of process column config + * @param {{}[]} sampleDataRows - a list of sample data + * @return {[string[], string[]]} - a tuple of rowSampleDataHTMLs & rowProcessColumnConfigHTMLs + */ + static generateProcessConfigBodyHTML(processColumnConfigs, sampleDataRows) { + const rowSampleDataHTMLs = []; + const rowProcessColumnConfigHTMLs = []; + processColumnConfigs.forEach((processColumnConfig, index) => { + if (processColumnConfig.id == null) { + // Temporary set id if it is null to can generate HTML completely + processColumnConfig.id = index; + } + + rowSampleDataHTMLs.push( + this.#generateOneRowOfSampleDataHTML( + sampleDataRows, + processColumnConfig, + ), + ); + rowProcessColumnConfigHTMLs.push( + this.#generateOneRowOfProcessColumnConfigHTML( + processColumnConfig, + index, + ), + ); + }); + + return [rowProcessColumnConfigHTMLs, rowSampleDataHTMLs]; + } + + /** + * Generate One Row Of Sample Data HTML string + * @public + * @param {{}[]} sampleDataRows - a list of sample data + * @param {ProcessColumnConfig} processColumnConfig + * @param {?string} extendClass + * @return {string} - an HTML string + */ + static generateOneRowOfSampleDataHTML( + sampleDataRows, + processColumnConfig, + extendClass = '', + ) { + return this.#generateOneRowOfSampleDataHTML( + sampleDataRows, + processColumnConfig, + extendClass, + ); + } + + /** + * Generate One Row Of Sample Data HTML string + * @private + * @param {{}[]} sampleDataRows - a list of sample data + * @param {ProcessColumnConfig} processColumnConfig + * @param {?string} extendClass + * @return {string} - an HTML string + */ + static #generateOneRowOfSampleDataHTML( + sampleDataRows, + processColumnConfig, + extendClass = '', + ) { + const tdHTMLs = []; + const $checkboxDatetimeFormat = + procModalElements.procDateTimeFormatCheckbox; + const formatIsChecked = + $checkboxDatetimeFormat.length > 0 + ? 
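// Note on #determineSerialColumns above: the alternation in its regex binds
// loosely, i.e. /^.*シリアル|serial.*$/ is (^.*シリアル)|(serial.*$), so any
// lower-cased name containing シリアル or serial qualifies, and only the first
// hit becomes main::Serial. A standalone illustration (looksLikeSerial is an
// illustrative name, not part of this patch):
const looksLikeSerial = (columnName) =>
    /^.*シリアル|serial.*$/.test(columnName.toString().toLowerCase());
// looksLikeSerial('シリアルNo') -> true
// looksLikeSerial('Serial_No')  -> true (lower-cased before testing)
// looksLikeSerial('LotNo')      -> false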
// In case of Process config modal in Config page, base on status of checkbox + $checkboxDatetimeFormat.is(':checked') + : // In case of Register by file page, always do format + true; + sampleDataRows.forEach((row) => { + const key = processColumnConfig.column_name; //col.column_name || + const originValue = row[key]; + let value; + if (formatIsChecked) { + switch (processColumnConfig.data_type) { + case DataTypes.DATETIME.name: + value = parseDatetimeStr(row[key]); + break; + case DataTypes.DATE.name: + value = parseDatetimeStr(row[key], true); + break; + case DataTypes.TIME.name: + value = parseTimeStr(row[key]); + break; + case DataTypes.INTEGER.name: + value = row[key]; + value = parseIntData(value); + break; + case DataTypes.REAL.name: + value = row[key]; + value = parseFloatData(value); + break; + default: + value = row[key]; + } + } else { + value = row[key]; + } + + tdHTMLs.push( + this.#generateOneColumnOfSampleDataHTML( + processColumnConfig, + value, + originValue, + extendClass, + ), + ); + }); + + return ( + `` + + tdHTMLs.join('') + + '' + ); + } + + /** + * Generate Row Of Sample Data HTML + * @public + * @param {ProcessColumnConfig} processColumnConfig + * @param {string} value + * @param {string} originValue + * @param {?string} extendClass + * @return {string} - an HTML string of row + */ + static #generateOneColumnOfSampleDataHTML( + processColumnConfig, + value, + originValue, + extendClass = '', + ) { + const isKSep = [ + DataTypes.REAL_SEP.name, + DataTypes.EU_REAL_SEP.name, + ].includes(processColumnConfig.data_type); + const checkedAtr = processColumnConfig.is_checked + ? 'checked=checked' + : ''; + return ( + '' + + `${!isEmpty(value) ? value : ''}` + + '' + ); + } + + /** + * Generate One Row Of Process Column Config HTML string + * @private + * @param {ProcessColumnConfig} processColumnConfig + * @param {number} index - a number index of row + * @return {string} - an HTML string + */ + static #generateOneRowOfProcessColumnConfigHTML( + processColumnConfig, + index, + ) { + // convert column_type to attr key + const col = { + ...processColumnConfig, + ...DataTypeDropdown_Controller.convertColumnTypeToAttrKey( + processColumnConfig.column_type, + ), + }; + col.is_show = true; + const checkedAttr = col.is_checked ? 'checked=checked' : ''; + const isRegisterProc = false; + + const dataTypeObject = /** @type DataTypeObject */ { + ...col, + value: col.data_type, + checked: checkedAttr, + isRegisteredCol: false, + isRegisterProc: isRegisterProc, + }; + let getKey = ''; + for (const attr of DataTypeAttrs) { + if (dataTypeObject[attr]) { + getKey = attr; + break; + } + } + const idSuffix = `_processId_${processColumnConfig.id}`; + const isDummyDatetime = col.dummyDatetimeIdx === index; + + return this.generateOneRowOfProcessColumnConfigHTML( + index, + col, + getKey, + dataTypeObject, + isRegisterProc, + isDummyDatetime, + idSuffix, + ); + } + + /** + * Generate Row Of Process Column HTML + * @public + * @param {number} index + * @param {ProcessColumnConfig} col + * @param {boolean} isChecked + * @param {string} getKey + * @param {DataTypeObject} dataTypeObject + * @param {boolean} isRegisteredProcess + * @param {boolean} isDummyDatetime + * @param {string} idSuffix + * @return {string} - an HTML string of row + */ + static generateOneRowOfProcessColumnConfigHTML( + index, + col, + getKey, + dataTypeObject, + isRegisteredProcess, + isDummyDatetime = false, + idSuffix = '', + ) { + const checkedAttr = col.is_checked ? 
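// The switch above formats each sample cell according to the column's declared
// data type. Condensed to a standalone sketch, assuming the parse helpers
// (parseDatetimeStr, parseTimeStr, parseIntData, parseFloatData) and the
// DataTypes constants defined elsewhere in this codebase:
const formatSampleValue = (rawValue, dataTypeName) => {
    switch (dataTypeName) {
        case DataTypes.DATETIME.name:
            return parseDatetimeStr(rawValue);
        case DataTypes.DATE.name:
            return parseDatetimeStr(rawValue, true); // date part only
        case DataTypes.TIME.name:
            return parseTimeStr(rawValue);
        case DataTypes.INTEGER.name:
            return parseIntData(rawValue);
        case DataTypes.REAL.name:
            return parseFloatData(rawValue);
        default:
            return rawValue; // strings pass through unchanged
    }
};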
'checked=checked' : ''; + const isFixedName = fixedNameAttrs.includes(getKey); + const isRegisteredMainDatetimeColumn = + // ONLY FOR EDGE SERVER + (col.column_type === + DataTypeDropdown_Controller.DataGroupType.MAIN_DATE || + col.column_type === + DataTypeDropdown_Controller.DataGroupType.MAIN_TIME || + col.is_get_date) && + isRegisteredProcess; + return ` + + + ${index + 1} + + +
+ + + + +
+ + + ${DataTypeDropdown_Controller.generateHtml(index, dataTypeObject, getKey, isRegisteredMainDatetimeColumn)} + + + + + + + + + + + + + + +`; + } + + /** + * Get sample data based on column ids + * @param {number} columnId + * @param {?jQuery} $processColumnsTableBody + * @param {?jQuery} $processColumnsSampleDataTableBody + * @return {string[]} - a list that contains sample data + */ + static collectSampleData( + columnId, + $processColumnsTableBody = null, + $processColumnsSampleDataTableBody = null, + ) { + const colIdx = ( + $processColumnsTableBody ?? + procModalElements.processColumnsTableBody + ) + .find(`td[title="index"][data-column-id="${columnId}"]`) + .attr('data-col-idx'); + return this.#collectSampleData( + colIdx, + $processColumnsSampleDataTableBody, + ); + } + + /** + * Get sample data based on column ids + * @param {number | string} colIdx + * @param {?jQuery} $processColumnsSampleDataTableBody + * @param {?boolean} isGetOriginal - true: get value from attribute data-original, otherwise. + * @return {string[]} - a list that contains sample data + * @private + */ + static #collectSampleData( + colIdx, + $processColumnsSampleDataTableBody = null, + isGetOriginal = false, + ) { + return ( + $processColumnsSampleDataTableBody ?? + procModalElements.processColumnsSampleDataTableBody + ) + .find(`tr:eq(${colIdx}) .sample-data`) + .toArray() + .map((el) => + isGetOriginal + ? el.dataset.original.trim() + : el.textContent.trim(), + ); + } + + /** + * Collect Generated Datetime Sample Data + * @param {jQuery} $processColumnsTableBody + * @param {jQuery} $processColumnsSampleDataTableBody + * @param {string} columnName + * @return {{}[]} - a list of sample data + */ + static collectGeneratedDatetimeSampleData( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + columnName, + ) { + const mainDateColId = $processColumnsTableBody + .find('span[name=dataType][checked][is_main_date=true]') + .closest('tr') + .index(); + const mainDateSampleDatas = this.#collectSampleData( + mainDateColId, + $processColumnsSampleDataTableBody, + true, + ); + + const mainTimeColId = $processColumnsTableBody + .find('span[name=dataType][checked][is_main_time=true]') + .closest('tr') + .index(); + const mainTimeSampleDatas = this.#collectSampleData( + mainTimeColId, + $processColumnsSampleDataTableBody, + true, + ); + + const result = []; + mainDateSampleDatas.forEach((dateData, index) => { + const temp = {}; + temp[columnName] = dateData + ' ' + mainTimeSampleDatas[index]; + result.push(temp); + }); + + return result; + } + + /** + * Generate a new Datetime Column + * @param {jQuery} $processColumnsTableBody + * @param {jQuery} $processColumnsSampleDataTableBody + * @param {ProcessColumnConfig} col + * @param {string} getKey + * @param {DataTypeObject} dataTypeObject + * @param {boolean} isRegisterProc + */ + static generateDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + col, + getKey, + dataTypeObject, + isRegisterProc, + ) { + // Generate a new row in process config table + const index = $processColumnsTableBody.find('tr').length; + const rowHtml = this.generateOneRowOfProcessColumnConfigHTML( + index, + col, + getKey, + dataTypeObject, + isRegisterProc, + true, + undefined, + ); + const $rowHtmlObject = $(rowHtml); + $processColumnsTableBody.append(rowHtml); + // Add event for data type dropdown control + $rowHtmlObject + .find('div.config-data-type-dropdown') + .each((idx, dataTypeDropdownElement) => + 
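// collectGeneratedDatetimeSampleData above boils down to zipping the
// main::Date and main::Time sample columns into 'date time' strings keyed by
// the generated column name. Its core, as a standalone sketch
// (zipDateAndTimeSamples is an illustrative name):
const zipDateAndTimeSamples = (dateSamples, timeSamples, columnName) =>
    dateSamples.map((dateData, index) => ({
        [columnName]: `${dateData} ${timeSamples[index]}`,
    }));
// zipDateAndTimeSamples(['2024-09-18'], ['18:02:14'], 'DatetimeGenerated')
// -> [{ DatetimeGenerated: '2024-09-18 18:02:14' }]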
DataTypeDropdown_Controller.addEvents(dataTypeDropdownElement), + ); + + // Generate a new row in sample data table + const sampleDatas = this.collectGeneratedDatetimeSampleData( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + col.column_name, + ); + const columnColorClass = 'dummy_datetime_col'; + const sampleRowHTML = this.generateOneRowOfSampleDataHTML( + sampleDatas, + col, + columnColorClass, + ); + $processColumnsSampleDataTableBody.append(sampleRowHTML); + } + + /** + * Remove generated main::Datetime column + * @param {jQuery} $processColumnsTableBody + * @param {jQuery} $processColumnsSampleDataTableBody + * @private + */ + static #removeGeneratedMainDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + ) { + // remove generated datetime column + const dummyDatetimeColRow = + $processColumnsTableBody.find('.dummyDatetimeCol'); + if (dummyDatetimeColRow.length === 0) return; + + const dummyDatetimeColIndex = dummyDatetimeColRow.index(); + dummyDatetimeColRow.remove(); + $processColumnsSampleDataTableBody + .find(`tr:eq(${dummyDatetimeColIndex})`) + .remove(); + reCalculateCheckedColumn(-1); + } + + /** + * Handle main::Date column and main::Time column are select/unselect + * @param {HTMLDivElement} dataTypeDropdownElement + * @param {string} attrKey + * @param {string} beforeAttrKey + */ + static handleMainDateAndMainTime( + dataTypeDropdownElement, + attrKey, + beforeAttrKey = '', + ) { + if (this.handleMainDateAndMainTime.disable) return; + + const $processColumnsTableBody = $( + dataTypeDropdownElement.closest('tbody'), + ); + const $processColumnsSampleDataTableBody = $processColumnsTableBody + .closest('div.proc-config-content') + .find('table[name="processColumnsTableSampleData"] tbody'); + const isMainDateTimeDummyColumnExist = + $processColumnsTableBody.find('.dummyDatetimeCol').length > 0; + + const $spanMainTime = $processColumnsTableBody.find( + 'span[name=dataType][checked][is_main_time=true]', + ); + const isMainTimeColumnChecked = $spanMainTime.length > 0; + const $spanMainDate = $processColumnsTableBody.find( + 'span[name=dataType][checked][is_main_date=true]', + ); + const isMainDateColumnChecked = $spanMainDate.length > 0; + + if ( + ['is_get_date'].includes(attrKey) && + isMainTimeColumnChecked && + isMainDateColumnChecked && + isMainDateTimeDummyColumnExist + ) { + // In case there are generated main::Datetime, main::Date, main::Time columns and another column is + // selected as main::Datetime -> change main::Date, main::Time column to normal type and remove generated + // main::Datetime column + this.#removeGeneratedMainDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + ); + + try { + this.handleMainDateAndMainTime.disable = true; // lock to avoid running this function + [$spanMainTime, $spanMainDate].forEach((el) => + DataTypeDropdown_Controller.changeToNormalDataType(el[0]), + ); + } finally { + this.handleMainDateAndMainTime.disable = undefined; // release lock + } + } else if ( + (isMainTimeColumnChecked && !isMainDateColumnChecked) || + (!isMainTimeColumnChecked && isMainDateColumnChecked) + ) { + // In case there is only main::Date or main::Time column + // when there is datetime column already generated, remove it + if (isMainDateTimeDummyColumnExist) { + this.#removeGeneratedMainDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + ); + } + if ( + beforeAttrKey.length || + ['is_main_date', 'is_main_time'].includes(attrKey) + ) { + 
$(procModalElements.msgContent).text( + $(procModalElements.msgSelectDateAndTime).text(), + ); + $(procModalElements.msgModal).modal('show'); + } + + // Change main::Datetime to normal Datetime data type + const mainDatetimeCol = + /** @type HTMLSpanElement */ $processColumnsTableBody.find( + 'span[name=dataType][checked][is_get_date=true]', + ); + if (mainDatetimeCol.length > 0) { + DataTypeDropdown_Controller.changeToNormalDataType( + mainDatetimeCol[0], + ); + } + } else if (isMainTimeColumnChecked && isMainDateColumnChecked) { + if (!isMainDateTimeDummyColumnExist) { + $(procModalElements.msgContent).text( + $(procModalElements.msgGenDateTime).text(), + ); + $(procModalElements.msgModal).modal('show'); + } else { + this.#removeGeneratedMainDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + ); + } + + const generatedDateTimeColName = 'DatetimeGenerated'; + const generatedDateTimeCol = { + is_linking_column: false, + coef: null, + is_int_category: false, + data_type: DataTypes.DATETIME.name, + name_local: null, + is_auto_increment: null, + order: 0, + format: null, + process_id: currentProcessId, + column_raw_name: generatedDateTimeColName, + shown_name: generatedDateTimeColName, + column_name: generatedDateTimeColName, + function_details: [], + column_type: masterDataGroup.GENERATED, + is_get_date: true, + is_category: false, + is_dummy_datetime: true, + operator: null, + is_serial_no: false, + raw_data_type: DataTypes.DATETIME.name, + name_jp: generatedDateTimeColName, + name_en: generatedDateTimeColName, + is_show: true, + is_master_col: false, + bridge_column_name: null, + master_type: null, + is_checked: true, + is_big_int: false, + id: '-1', + }; + const dataTypeObject = { + ...generatedDateTimeCol, + value: generatedDateTimeCol.data_type, + checked: 'checked=checked', + isRegisteredCol: true, + isRegisterProc: false, + }; + this.generateDatetimeColumn( + $processColumnsTableBody, + $processColumnsSampleDataTableBody, + generatedDateTimeCol, + 'is_get_date', + dataTypeObject, + false, + ); + reCalculateCheckedColumn(1); + showProcDatetimeFormatSampleData(); + } + } + + /** + * Sort columns in Process Config + * + * Order of columns: + * 1. main::Date + * 2. main::Time + * 3. main::Datetime + * 4. 
another columns + * + * @param {jQuery} $processConfigTableBody + * @param {jQuery} $processColumnsSampleDataTableBody + */ + static sortProcessColumns( + $processConfigTableBody, + $processColumnsSampleDataTableBody, + ) { + const ColumnTypeAttribute = { + MainDatetime: 'is_get_date', + MainDate: 'is_main_date', + MainTime: 'is_main_time', + }; + const inner = function (columnTypeAttribute) { + const targetRow = $processConfigTableBody + .find( + `td.column-date-type div.config-data-type-dropdown button span[${columnTypeAttribute}="true"]`, + ) + .closest('tr'); + if (targetRow.length > 0) { + const targetSampleDataRow = + $processColumnsSampleDataTableBody.find( + `tr:eq(${targetRow.index()})`, + ); + + // Move row to top of table + $processConfigTableBody.prepend(targetRow); + $processColumnsSampleDataTableBody.prepend(targetSampleDataRow); + + // Re-index order number + $processConfigTableBody + .find('td.column-number') + .toArray() + .forEach((ele, index) => { + ele.textContent = index + 1; + }); + } + }; + + inner(ColumnTypeAttribute.MainDatetime); + inner(ColumnTypeAttribute.MainTime); + inner(ColumnTypeAttribute.MainDate); + } +} diff --git a/ap/static/setting_module/js/register_from_file.js b/ap/static/setting_module/js/register_from_file.js index 1dc5f59..73d03b0 100644 --- a/ap/static/setting_module/js/register_from_file.js +++ b/ap/static/setting_module/js/register_from_file.js @@ -1,13 +1,20 @@ +/** + * @file Contains all constant that serve for data type dropdown. + * @author Tran Thi Kim Tuyen + * @author Tran Nguyen Duc Huy + * @author Tran Ngoc Tinh + * @author Pham Minh Hoang + */ + const registerFromFileEles = { folderBrowser: '#folderBrowser', fileBrowser: '#fileBrowser', directoryRadios: 'input[name=directorySelector]', - checkedDirectoryRadio: 'input[name=directorySelector]:checked', - folderRadio: '#directoryTypeFolder', - fileRadio: '#directoryTypeFile', fileBrowseButton: '#browseFileBtn', - folderBrowseButton: '#browseFolderBtn', - fileUrl: $('input[name=fileUrl]'), + containerReferenceFile: '#containerReferenceFile', + confirmRegisterByFile: '#confirmRegisterByFile', + registerAllFilesButton: '#register-all-file', + registerOneFileButton: '#register-one-file', folderUrl: $('input[name=folderUrl]'), refFileUrl: $('input[name=fileName]'), progressDisplay: '#progressDisplay', @@ -16,20 +23,11 @@ const registerFromFileEles = { processEnName: $('input[name=processName]'), processJapaneseName: $('input[name=processJapaneseName]'), processLocalName: $('input[name=processLocalName]'), + processOriginName: $('input[name=processOriginName]'), }; -const inputDOMByPath = { - directory: registerFromFileEles.folderUrl, - file: registerFromFileEles.fileUrl, - reference_file: registerFromFileEles.refFileUrl, -} - -let processInfo = null; -let registeredProcessId = null; - // override from db_config.js to use generateProcessList function from proc_config_modal.js todo move to common js file let dicProcessCols = {}; -let dicOriginDataType = {}; const i18n = { statusDone: $('#i18nStatusDone').text(), @@ -40,591 +38,1126 @@ const i18n = { reachFailLimit: $('#i18nReachFailLimit').text(), noCTCol: $('#i18nNoCTCol').text(), noCTColProc: $('#i18nNoCTColPrc').text(), -} + processName: document.getElementById('i18nProcessName_').textContent, + dataType: document.getElementById('i18ni18nDataType').textContent, + systemNameHoverMsg: document.getElementById('i18nSystemNameHoverMsg') + .textContent, + japaneseName: document.getElementById('i18nJapaneseName').textContent, + localName: 
document.getElementById('i18nLocalName').textContent, + unit: $('#i18nUnit').text(), + alreadyRegistered: document.getElementById('i18nAlreadyRegistered') + .textContent, + columnRawName: document.getElementById('i18nColumnRawName').textContent, + format: document.getElementById('i18nFormat').textContent, + sampleData: document.getElementById('i18nSampleData').textContent, + operatorHover: document.getElementById('i18nOperatorHover').textContent, + operator: document.getElementById('i18nOperator').textContent, + coefHover: document.getElementById('i18nCoefHover').textContent, + coef: document.getElementById('i18nCoef').textContent, +}; const registerI18n = { i18nFileIsSelected: $('#i18nFileIsSelected').text(), i18nProgressSourceSelected: $('#i18nProgressSourceSelected').text(), i18nDbSourceExist: $('#i18nDbSourceExist').text(), - i18nProcessNameIsAlreadyRegistered: $('#i18nProcessNameIsAlreadyRegistered').text(), - i18nDataSourceNameIsAlreadyRegistered: $('#i18nDataSourceNameIsAlreadyRegistered').text(), + i18nProcessNameIsAlreadyRegistered: $( + '#i18nProcessNameIsAlreadyRegistered', + ).text(), + i18nDataSourceNameIsAlreadyRegistered: $( + '#i18nDataSourceNameIsAlreadyRegistered', + ).text(), i18nProcessRegisterStart: $('#i18nProcessRegisterStart').text(), i18nProgressFolderCheck: $('#i18nProgressFolderCheck').text(), i18nProgressImportingData: $('#i18nProgressImportingData').text(), i18nProgressFinished: $('#i18nProgressFinished').text(), + i18nDataSourceNameIsEmpty: $('#i18nDataSourceNameIsEmpty').text(), + i18nProcessNameIsEmpty: $('#i18nProcessNameIsEmpty').text(), + i18nProgressGenDataTable: 'Generate Data Table', + i18nProgressScanFile: 'Scan File', + i18nProgressScanMaster: 'Scan Master', + i18nProgressScanDataType: 'Scan Data Type', + i18nProgressPullData: 'Pull CSV Data', + i18nErrorNoGetdate: $('#i18nErrorNoGetdate').text(), + i18nScanning: 'Scanning', }; const registerSteps = { - IMPORTING: "importing" -} - -const ICON_STATUS = { - SUCCESS: "success", - PROCESSING: "processing", - WARNING: "warning", -} - -const REGISTER_JOB_STATUS = { - DONE: "DONE", - FAILED: "FAILED", - PROCESSING: "PROCESSING", -} - -const parseIntData = (v) => { - let val = trimBoth(String(v)); - if (isEmpty(val)) { - val = ''; - } else { - val = parseInt(Number(val)); - if (isNaN(val)) { - val = ''; - } - } - return val; + IMPORTING: 'importing', + SCANNING: 'SCANNING', }; -const changeBackgroundColor = (ele) => { - if ([DataTypes.STRING.name, DataTypes.REAL_SEP.name, DataTypes.EU_REAL_SEP.name].includes(ele.getAttribute('value'))) { - $(ele).closest('.config-data-type-dropdown').find('[name=dataType]') - .css('color', 'orange'); - } else { - $(ele).closest('.config-data-type-dropdown').find('[name=dataType]') - .css('color', 'white'); - } -}; - -const parseDataType = (ele, idx) => { - // change background color - changeBackgroundColor(ele); - - const vals = [...procModalElements.processColumnsSampleDataTableBody.find(`tr:eq(${idx}) .sample-data`)].map(el => $(el)); - - const attrName = 'data-original'; - - switch (ele.getAttribute('value')) { - case DataTypes.INTEGER.name: - for (const e of vals) { - let val = e.attr(attrName); - const isBigInt = Boolean(e.attr('is-big-int')); - if (!isBigInt) { - val = parseIntData(val); - } - e.html(val); - } - break; - case DataTypes.REAL.name: - for (const e of vals) { - let val = e.attr(attrName); - val = parseFloatData(val); - e.html(val); - } - break; - case DataTypes.DATETIME.name: - for (const e of vals) { - let val = e.attr(attrName); - val = 
parseDatetimeStr(val); - e.html(val); - } - break; - case DataTypes.REAL_SEP.name: - for (const e of vals) { - let val = e.attr(attrName); - val = val.replaceAll(',', ''); - val = parseFloatData(val); - e.html(val); - } - break; - case DataTypes.INTEGER_SEP.name: - for (const e of vals) { - let val = e.attr(attrName); - val = val.replaceAll(',', ''); - val = parseIntData(val); - e.html(val); - } - break; - case DataTypes.EU_REAL_SEP.name: - for (const e of vals) { - let val = e.attr(attrName); - val = val.replaceAll('.', ''); - val = val.replaceAll(',', '.'); - val = parseFloatData(val); - e.html(val); - } - break; - case DataTypes.EU_INTEGER_SEP.name: - for (const e of vals) { - let val = e.attr(attrName); - val = val.replaceAll('.', ''); - val = val.replaceAll(',', '.'); - val = parseIntData(val); - e.html(val); - } - break; - default: - for (const e of vals) { - let val = e.attr(attrName); - val = trimBoth(String(val)); - e.html(val); - } - break; - } +const ICON_STATUS = { + SUCCESS: 'success', + PROCESSING: 'processing', + WARNING: 'warning', }; - -const parseFloatData = (v) => { - let val = trimBoth(String(v)); - if (isEmpty(val)) { - val = ''; - } else if (val.toLowerCase() === COMMON_CONSTANT.INF.toLowerCase()) { - val = COMMON_CONSTANT.INF.toLowerCase(); - } else if (val.toLowerCase() === COMMON_CONSTANT.MINF.toLowerCase()) { - val = COMMON_CONSTANT.MINF.toLowerCase(); - } else { - // TODO why do we need to re-parse? - val = parseFloat(Number(val)); - if (isNaN(val)) { - val = ''; - } - } - return val; +const REGISTER_JOB_STATUS = { + DONE: 'DONE', + FAILED: 'FAILED', + PROCESSING: 'PROCESSING', }; const isAddNewMode = () => true; // override end -$(registerFromFileEles.directoryRadios).on("change", (e) => { - const self = $(e.currentTarget) - const value = self[0].value - if(value === "folder"){ - $(registerFromFileEles.fileBrowser).hide() - $(registerFromFileEles.folderBrowser).show() - } else if (value === "file"){ - $(registerFromFileEles.folderBrowser).hide() - $(registerFromFileEles.fileBrowser).show() +$(registerFromFileEles.directoryRadios).on('change', (e) => { + const self = $(e.currentTarget); + const value = self[0].value; + if (value === 'folder') { + $(registerFromFileEles.fileBrowser).hide(); + $(registerFromFileEles.folderBrowser).show(); + } else if (value === 'file') { + $(registerFromFileEles.folderBrowser).hide(); + $(registerFromFileEles.fileBrowser).show(); } -}) - -const resourceSelection = (resource_type) => { - fetch(`/ap/api/setting/browser/${resource_type}`, {cache: 'no-store'}) - .then(res => res.json()) - .then(res => { - if (res.path) { - $(inputDOMByPath[res.kind]).val(res.path).change(); - } - }); -} - -// fileHandle is a FileSystemFileHandle -// withWrite is a boolean set to true if write - -async function verifyPermission(fileHandle, withWrite) { - const opts = {}; - if (withWrite) { - opts.mode = "readwrite"; - } - - const opfsRoot = await navigator.storage.getDirectory(); - // A FileSystemDirectoryHandle whose type is "directory" - // and whose name is "". - console.log(opfsRoot); - - // Check if we already have permission, if so, return true. - if ((await fileHandle.queryPermission(opts)) === "granted") { - return true; - } - - // Request permission to the file, if the user grants permission, return true. - if ((await fileHandle.requestPermission(opts)) === "granted") { - return true; - } +}); - // The user did not grant permission, return false. 
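// The EU_REAL_SEP branch above normalizes European number formatting: the
// thousands dot is dropped, then the decimal comma becomes a dot. Standalone
// sketch (parseEuReal is an illustrative name; like parseFloatData it yields
// '' for unparseable input):
const parseEuReal = (v) => {
    const normalized = String(v).replaceAll('.', '').replaceAll(',', '.');
    const num = Number(normalized);
    return Number.isNaN(num) ? '' : num;
};
// parseEuReal('1.234,56') -> 1234.56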
- return false; -} - -const handleOnChangeFolderAndFileUrl = async () => { +/** + * Validate input data source URL and file + * @return {Promise} - true: valid, false: invalid + */ +async function validateInputUrlAndFile() { // remove add red border - [registerFromFileEles.fileUrl, registerFromFileEles.folderUrl, registerFromFileEles.refFileUrl].forEach(el => { - removeBorderFromInvalidInput($(el)); - removeQuotesfromInputAndUpdate($(el)); - }) - const isFile = $(registerFromFileEles.checkedDirectoryRadio).val() !== 'folder'; - let url = ''; - let fileUrl = ''; - if (isFile) { - url = $(registerFromFileEles.fileUrl).val().trim(); - // clear folder input - $(registerFromFileEles.folderUrl).val(''); - $(registerFromFileEles.refFileUrl).val(''); - } else { - url = $(registerFromFileEles.folderUrl).val().trim(); - fileUrl = $(registerFromFileEles.refFileUrl).val().trim(); - // clear file input - $(registerFromFileEles.fileUrl).val(''); - hiddenPreviewContentData(); - } + [registerFromFileEles.folderUrl, registerFromFileEles.refFileUrl].forEach( + (el) => { + removeBorderFromInvalidInput(el); + }, + ); + + const urlInfo = await getUrlInfo(); resetProgressBar(); - if (!url) { + if (!urlInfo.url) { disableRegisterDataFileBtn(); resetPreviewTableContent(); - return; - }; - enableRegisterDataFileBtn(); - displayPreviewContentData(); + clearCacheDatasourceConfig(); + showHideRefFile(false); + return false; + } - const checkResult = await checkFolder(url, isFile) + const checkResult = await checkFolder( + urlInfo.isFile ? urlInfo.fileUrl : urlInfo.url, + urlInfo.isFile, + ); if (!checkResult.is_valid || !checkResult.is_valid_file) { // show error msg to the right side addMessengerToProgressBar(checkResult.err_msg, ICON_STATUS.WARNING); - if (isFile) { - addBorderToInvalidInput($(registerFromFileEles.fileUrl)); - disableRegisterDataFileBtn(); - } else { - addBorderToInvalidInput($(registerFromFileEles.folderUrl)); - disableRegisterDataFileBtn(); - } + addBorderToInvalidInput($(registerFromFileEles.folderUrl)); + disableRegisterDataFileBtn(); resetPreviewTableContent(); - return; + return false; } - if (!isFile && fileUrl) { + if (!urlInfo.isFile && urlInfo.fileUrl) { // check selected filename - const checkFileName = await checkFolder(fileUrl, true) + const checkFileName = await checkFolder(urlInfo.fileUrl, true); if (!checkFileName.is_valid || !checkFileName.is_valid_file) { // show error msg to the right side - addMessengerToProgressBar(checkFileName.err_msg, ICON_STATUS.WARNING); + addMessengerToProgressBar( + checkFileName.err_msg, + ICON_STATUS.WARNING, + ); addBorderToInvalidInput($(registerFromFileEles.refFileUrl)); disableRegisterDataFileBtn(); resetPreviewTableContent(); + return false; + } + } + + return true; +} + +/** + * Handle Url | File Url changes + * @param {boolean} isVerifyUrl - true: will do check url is file or not, false: do not check + * @return {Promise} + */ +const handleOnChangeFolderAndFileUrl = async (isVerifyUrl) => { + // remove add red border + [registerFromFileEles.folderUrl, registerFromFileEles.refFileUrl].forEach( + (el) => { + removeQuotesfromInputAndUpdate(el); + removeLastBackslashFromInputAndUpdate(el); + }, + ); + hiddenPreviewContentData(); + removeExtendSections(); + + if (isVerifyUrl) { + const url = $(registerFromFileEles.folderUrl).val().trim(); + const folderOrFileInfo = await checkFolderOrFile(url); + // Show modal confirm import one data file of all files in the same folder + if (folderOrFileInfo.isFile) { + 
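// Before any validation, the handler above normalizes pasted paths: wrapping
// quotes (e.g. from Windows' "Copy as path") and a trailing backslash are
// stripped via removeQuotesfromInputAndUpdate and
// removeLastBackslashFromInputAndUpdate. Their combined string-level effect,
// sketched under those assumptions (normalizePastedPath is an illustrative
// name, not the actual helper):
const normalizePastedPath = (raw) =>
    raw.trim()
        .replace(/^"(.*)"$/, '$1') // drop wrapping double quotes
        .replace(/\\+$/, ''); // drop a trailing backslash
// normalizePastedPath('"C:\\data\\proc01\\"') -> 'C:\\data\\proc01'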
$(registerFromFileEles.registerAllFilesButton).data('url', url); + $(registerFromFileEles.confirmRegisterByFile).modal('show'); return; + } else { + showHideRefFile(true); } } + const isValid = await validateInputUrlAndFile(); + if (!isValid) return; + // show ✔ ファイルが選択されました msg addMessengerToProgressBar(registerI18n.i18nProgressSourceSelected); - fillDsNameAndProcessName(url, isFile); - - // show latest record - const formData = new FormData(); - if (isFile) { - formData.set('fileName', url); - } else { - formData.set('folder', url); - formData.set('fileName', fileUrl); - } + // call api to collect latest records' + const urlInfo = await getUrlInfo(); + const formData = new FormData(); + if (urlInfo.isFile) { + formData.set('fileName', urlInfo.fileUrl); + } else { + formData.set('folder', urlInfo.url); + formData.set('fileName', urlInfo.fileUrl); + } + const request = getLatestRecord(formData); - await getLatestRecord(formData); + await handleResponseData(request); + enableRegisterDataFileBtn(); }; - -const fillDsNameAndProcessName = (url, isFile) => { +const fillDatasourceName = (url, isFile) => { checkOnFocus = false; const folderName = getDbSourceAndProcessNameFromUrl(url, isFile); // loading from external api const params = getRequestParams(); if (params.loadGUIFromUrl) { // fill datasource and proc name fields if not provided - if(!params.dataSourceName) { - const folderName = getDbSourceAndProcessNameFromUrl(url, isFile); + if (!params.dataSourceName) { registerFromFileEles.databaseName.val(folderName); + registerFromFileEles.databaseName[0].dataset.originalValue = + folderName; } + return; + } + + registerFromFileEles.databaseName.val(folderName); + registerFromFileEles.databaseName[0].dataset.originalValue = folderName; +}; + +const fillProcessName = (url, isFile) => { + checkOnFocus = false; + const folderName = getDbSourceAndProcessNameFromUrl(url, isFile); + registerFromFileEles.processOriginName.val(folderName); + registerFromFileEles.processEnName[0].dataset.originalValue = folderName; + registerFromFileEles.processEnName.val(folderName).trigger('change'); + + // loading from external api + const params = getRequestParams(); + if (params.loadGUIFromUrl) { + // fill proc name fields if not provided if (isJPLocale && !params.procesNameJp) { - registerFromFileEles.processJapaneseName.val(folderName).trigger('change'); + registerFromFileEles.processJapaneseName[0].dataset.originalValue = + folderName; + registerFromFileEles.processJapaneseName + .val(folderName) + .trigger('change'); } else if (!isJPLocale && !params.processNameLocal) { - registerFromFileEles.processLocalName.val(folderName).trigger('change'); + registerFromFileEles.processLocalName[0].dataset.originalValue = + folderName; + registerFromFileEles.processLocalName + .val(folderName) + .trigger('change'); } return; } + // fill data source name - registerFromFileEles.databaseName.val(folderName); if (isJPLocale) { - registerFromFileEles.processJapaneseName.val(folderName).trigger('change'); + registerFromFileEles.processJapaneseName[0].dataset.originalValue = + folderName; + registerFromFileEles.processJapaneseName + .val(folderName) + .trigger('change'); } else { + registerFromFileEles.processLocalName[0].dataset.originalValue = + folderName; registerFromFileEles.processLocalName.val(folderName).trigger('change'); } }; -const checkEmptyDataSourceAndProcessName = () => { - const dbsName = registerFromFileEles.databaseName.val(); - const procNameEn = registerFromFileEles.processEnName.val(); +/** + * Check data source 
name is empty or not
+ * @return {boolean} - true: data source name is empty, false: otherwise
+ */
+function isDataSourceNameEmpty() {
+    removeBorderFromInvalidInput(registerFromFileEles.databaseName);
+    const isEmpty = registerFromFileEles.databaseName.val().trim() === '';
+    if (isEmpty) {
+        addMessengerToProgressBar(
+            registerI18n.i18nDataSourceNameIsEmpty,
+            ICON_STATUS.WARNING,
+        );
+        addBorderToInvalidInput(registerFromFileEles.databaseName);
+    }
 
-    if (!dbsName && !procNameEn) return false;
+    return isEmpty;
+}
 
-    return true;
-};
+/**
+ * Check whether any process name is empty
+ * @return {boolean} - true: at least one process name is empty, false: no process name is empty
+ */
+function isProcessNameEmpty() {
+    let isEmpty = false;
+    document
+        .querySelectorAll(
+            'div[id^="procSettingModal"] form[id^="procCfgForm"] input[name="processName"]',
+        )
+        .forEach((processNameElement) => {
+            const $processNameElement = $(processNameElement);
+            removeBorderFromInvalidInput($processNameElement);
+            if (processNameElement.value.trim() === '') {
+                isEmpty = true;
+                addBorderToInvalidInput($processNameElement);
+            }
+        });
 
-const checkDuplicatedDbsAndProcessName = async () => {
-    [registerFromFileEles.databaseName, registerFromFileEles.processEnName, registerFromFileEles.processJapaneseName, registerFromFileEles.processLocalName].forEach(el => {
-        removeBorderFromInvalidInput(el);
-    })
+    if (isEmpty) {
+        addMessengerToProgressBar(
+            registerI18n.i18nProcessNameIsEmpty,
+            ICON_STATUS.WARNING,
+        );
+    }
+
+    return isEmpty;
+}
+
+/**
+ * Check whether the data source name duplicates an existing one in DB
+ * @return {Promise<boolean>} - true: duplicates an existing data source name in DB, false: unique (not duplicated)
+ */
+async function isDataSourceNameDuplicate() {
+    removeBorderFromInvalidInput(registerFromFileEles.databaseName);
     const dbsName = registerFromFileEles.databaseName.val();
-    const procNameEn = registerFromFileEles.processEnName.val();
-    const procNameJp = registerFromFileEles.processJapaneseName.val();
-    const procNameLocal = registerFromFileEles.processLocalName.val();
-    let isValid = true;
+    let isDuplicate = false;
     const isDuplicatedDbsName = await checkDuplicatedDataSourceName(dbsName);
-    const [isDuplicatedProcNameEn, isDuplicatedProcNameJp, isDuplicatedProcNameLocal] = await checkDuplicatedProcessName(procNameEn, procNameJp, procNameLocal);
     if (isDuplicatedDbsName) {
-        isValid = false;
+        isDuplicate = true;
         // show duplicated dbs msg
-        addMessengerToProgressBar(registerI18n.i18nDataSourceNameIsAlreadyRegistered, ICON_STATUS.WARNING);
+        addMessengerToProgressBar(
+            registerI18n.i18nDataSourceNameIsAlreadyRegistered,
+            ICON_STATUS.WARNING,
+        );
         addBorderToInvalidInput(registerFromFileEles.databaseName);
     }
-    if (isDuplicatedProcNameEn || isDuplicatedProcNameJp || isDuplicatedProcNameLocal) {
-        isValid = false;
-        // show duplicated process msg
-        addMessengerToProgressBar(registerI18n.i18nProcessNameIsAlreadyRegistered, ICON_STATUS.WARNING);
+    return isDuplicate;
+}
 
-        if (isDuplicatedProcNameEn) {
-            addBorderToInvalidInput(registerFromFileEles.processEnName);
-        }
-        if (isDuplicatedProcNameJp) {
-            addBorderToInvalidInput(registerFromFileEles.processJapaneseName);
-        }
-        if (isDuplicatedProcNameLocal) {
-            addBorderToInvalidInput(registerFromFileEles.processLocalName);
+/**
+ * Check whether process names duplicate existing ones in DB or each other
+ * @return {Promise<boolean>} - true: duplicated, false: not duplicated
+ */
+async function isProcessNameDuplicate() {
+    const 
processSystemNameElements = document.querySelectorAll( + 'input[name="processName"]', + ); + const processJapaneseNameElements = document.querySelectorAll( + 'input[name="processJapaneseName"]', + ); + const processLocalNameElements = document.querySelectorAll( + 'input[name="processLocalName"]', + ); + + [ + ...processSystemNameElements, + ...processJapaneseNameElements, + ...processLocalNameElements, + ].forEach((el) => { + removeBorderFromInvalidInput($(el)); + }); + + let isDuplicate = false; + // Check duplicate with exist processes in Database + for (let i = 0; i < processSystemNameElements.length; i++) { + const processSystemNameElement = processSystemNameElements[i]; + const processJapaneseNameElement = processJapaneseNameElements[i]; + const processLocalNameElement = processLocalNameElements[i]; + const [ + isDuplicatedNameSystem, + isDuplicatedNameJapanese, + isDuplicatedNameLocal, + ] = await checkDuplicatedProcessName( + processSystemNameElement.value, + processJapaneseNameElement.value, + processLocalNameElement.value, + ); + if ( + isDuplicatedNameSystem || + isDuplicatedNameJapanese || + isDuplicatedNameLocal + ) { + isDuplicate = true; + if (isDuplicatedNameSystem) { + addBorderToInvalidInput($(processSystemNameElement)); + } + if (isDuplicatedNameJapanese) { + addBorderToInvalidInput($(processJapaneseNameElement)); + } + if (isDuplicatedNameLocal) { + addBorderToInvalidInput($(processLocalNameElement)); + } } } - return isValid; -}; + // Check duplicate each others + [ + processSystemNameElements, + processJapaneseNameElements, + processLocalNameElements, + ].forEach((nameElements) => { + const existNames = []; + nameElements.forEach((nameElement) => { + if (nameElement.value === '' || nameElement.value == null) { + // No check for empty name + return true; + } + + if (existNames.includes(nameElement.value)) { + isDuplicate = true; + addBorderToInvalidInput($(nameElement)); + } else { + existNames.push(nameElement.value); + } + return true; + }); + }); + + if (isDuplicate) { + // show duplicated process msg + addMessengerToProgressBar( + registerI18n.i18nProcessNameIsAlreadyRegistered, + ICON_STATUS.WARNING, + ); + } + + return isDuplicate; +} + +/** + * Add Red Border To Target Input + * @param {jQuery} inputEl - an input jQuery object + */ const addBorderToInvalidInput = (inputEl) => { inputEl.addClass('column-name-invalid'); }; +/** + * Remove Red Border To Target Input + * @param {jQuery} inputEl - an input jQuery object + */ const removeBorderFromInvalidInput = (inputEl) => { inputEl.removeClass('column-name-invalid'); }; +/** + * Check folder Url Info + * @param {string} folderUrl - a folder path + * @param {boolean} isFile - is file or not + * @return {Promise<{ + * status: number, + * url: string, + * is_exists: boolean, + * dir: string, + * not_empty_dir: boolean, + * is_valid: boolean, + * err_msg: string, + * is_valid_file: boolean, + * }>} + */ const checkFolder = async (folderUrl, isFile) => { const data = { url: folderUrl, - isFile: isFile + isFile: isFile, }; - const response = await fetchData('/ap/api/setting/check_folder', JSON.stringify(data), 'POST'); - return response; + return await fetchData( + '/ap/api/setting/check_folder', + JSON.stringify(data), + 'POST', + ); }; -const getLatestRecord = async (data) => { - // '/ap/api/setting/show_latest_records' - try { - const option = { - url: '/ap/api/setting/show_latest_records', - data: data, - dataType: 'json', - type: 'POST', - contentType: false, - processData: false, - cache: false, +/** + * Get latest 
record + * @param data + * @return {Promise} - an object that contains all information of data source and process config + */ +const getLatestRecord = (data) => + new Promise((resolve, reject) => { + try { + const option = { + url: '/ap/api/setting/show_latest_records_for_register_by_file', + data: data, + dataType: 'json', + type: 'POST', + contentType: false, + processData: false, + cache: false, + }; + + $.ajax({ + ...option, + success: (json) => { + resolve(json); + }, + error: (jqXHR, textStatus, errorThrown) => { + reject(jqXHR, textStatus, errorThrown); + }, + }).then(() => {}); + } catch (e) { + console.log(e); + } + }); + +/** + * Render Datasource Config Base on response data + * @param {LatestRecordOfProcess} data - a response data that contains all process config & data source information + * @return {Promise} + */ +async function renderDatasourceConfig(data) { + const urlInfo = await getUrlInfo(); + fillDatasourceName( + urlInfo.isFile ? urlInfo.fileUrl : urlInfo.url, + urlInfo.isFile, + ); + if (data.datasourceConfig?.master_type === 'V2') { + // In case of V2 data + } else { + // TODO: check this code necessary ??? + // In case of OTHER csv + fillProcessName( + urlInfo.isFile ? urlInfo.fileUrl : urlInfo.url, + urlInfo.isFile, + ); + } + + const datasourceNameElement = document.getElementById('databaseName'); + data.datasourceConfig.name = datasourceNameElement.value.trim(); + data.datasourceConfig.csv_detail.is_file_path = urlInfo.isFile; + datasourceNameElement.__cache__ = data.datasourceConfig; +} + +/** + * Clear cache of data source config + */ +function clearCacheDatasourceConfig() { + const datasourceNameElement = document.getElementById('databaseName'); + delete datasourceNameElement.__cache__; +} + +/** + * Handle response data -> render process config UI + * @param {Promise} request + * @return {Promise} + */ +async function handleResponseData(request) { + addMessengerToProgressBar( + registerI18n.i18nScanning, + ICON_STATUS.PROCESSING, + registerSteps.SCANNING, + ); + + const data = await request; + + // render data source config before render process config + await renderDatasourceConfig(data); + + const processConfigs = + /** @type {ProcessData[]} */ + data.datasourceConfig.master_type !== 'V2' + ? await convertStructureData(data) + : convertStructureDataV2(data); + displayPreviewContentData(); + renderProcessConfig(processConfigs); + + addMessengerToProgressBar( + registerI18n.i18nScanning, + ICON_STATUS.SUCCESS, + registerSteps.SCANNING, + true, + ); +} + +/** + * Convert Structure Data from other to V2 + * @param {LatestRecordOfProcess} data - a response data that contains all process config & data source information + * @return {Promise<[{data: {name_local: null, columns: {}[], name: *, name_jp: *, is_csv: boolean, name_en: *, shown_name: *}, rows: {}[]}]>} + */ +async function convertStructureData(data) { + const urlInfo = await getUrlInfo(); + const dataSourceAndProcessName = getDbSourceAndProcessNameFromUrl( + urlInfo.isFile ? 
urlInfo.fileUrl : urlInfo.url,
+        urlInfo.isFile,
+    );
+    const otherData = /** @type OtherProcessData */ data.processConfigs[0];
+    return [
+        {
+            data: {
+                columns: otherData.cols,
+                is_csv: true,
+                name: dataSourceAndProcessName,
+                name_en: dataSourceAndProcessName,
+                name_jp: dataSourceAndProcessName,
+                name_local: null,
+                shown_name: dataSourceAndProcessName,
+                origin_name: dataSourceAndProcessName,
+                dummy_datetime_idx: otherData.dummy_datetime_idx,
+            },
+            rows: otherData.rows,
+        },
+    ];
+}
+
+/**
+ * Convert Structure Data for V2
+ * @param {LatestRecordOfProcess} data - a response data that contains all process config & data source information
+ * @description THIS METHOD IS ONLY USED FOR EDGE SERVER (NOT BRIDGE STATION)
+ * @return {[{data: {name_local: null, columns: {}[], name: *, name_jp: *, is_csv: boolean, name_en: *, shown_name: *}, rows: {}[]}]}
+ */
+function convertStructureDataV2(data) {
+    const isShowJapaneseName = docCookies.getItem('locale') === 'ja';
+    return data.processConfigs.map((processConfig) => {
+        return {
+            data: {
+                columns: processConfig.cols,
+                is_csv: true,
+                name: processConfig.name,
+                name_en: processConfig.name_en,
+                name_jp: processConfig.name_jp,
+                name_local: processConfig.name_local,
+                shown_name: isShowJapaneseName
+                    ? processConfig.name_jp
+                    : processConfig.name_en,
+                origin_name: processConfig.origin_name,
+                dummy_datetime_idx: processConfig.dummy_datetime_idx,
+            },
+            rows: processConfig.rows,
         };
+    });
+}
 
-        $.ajax({
-            ...option,
-            success: async (json) => {
-                prcPreviewData = json;
-                dataGroupType = json.data_group_type;
-                const dummyDatetimeIdx = json.dummy_datetime_idx;
-                json.cols = json.cols.map(col => {
-                    return {
-                        ...col,
-                        is_checked: true,
-                    }
-                })
-                generateProcessList(json.cols, json.rows, dummyDatetimeIdx, true, true, true);
-            }
-        });
+/**
+ * Render (1-n) Process Config(s) for response data
+ * @param {ProcessData[]} data - a response data that contains all process config(s) information
+ */
+function renderProcessConfig(data) {
+    data.forEach((processData, index) => {
+        // TODO: is_checked attribute must be included in response data.
+        processData.data.columns.forEach(
+            (processColumnConfig) => (processColumnConfig.is_checked = true),
+        );
+
+        // This id-setting logic ONLY APPLIES for EDGE SERVER
+        processData.data.id =
+            processData.data.id == null ? 
index : processData.data.id; + + /** @type ProcessConfigSection */ + let processConfigSectionObj; + if (index === 0) { + // In case of main section, no need to render + processConfigSectionObj = + ProcessConfigSection.createProcessConfigSectionForMain( + processData.data, + processData.rows, + ); + } else { + // In case of extend section, need to render + processConfigSectionObj = + ProcessConfigSection.createProcessConfigSectionForExtend( + processData.data, + processData.rows, + ); + + processConfigSectionObj.render(); + } - } catch (e) { - console.log(e); - } -}; + processConfigSectionObj.injectEvents(); + }); +} -const addMessengerToProgressBar = (msg = '', - status= ICON_STATUS.SUCCESS, - step = '', - modify= false, - addJobLink = false, - ) => { +const addMessengerToProgressBar = ( + msg = '', + status = ICON_STATUS.SUCCESS, + step = '', + modify = false, + addJobLink = false, +) => { const progressContent = $(registerFromFileEles.progressDisplay); - let iconClass = '' - let stepClass = '' - if(step){ - stepClass = `progress-msg-${step}` + let iconClass = ''; + let stepClass = ''; + if (step) { + stepClass = `progress-msg-${step}`; } - switch(status) { + switch (status) { case ICON_STATUS.SUCCESS: - iconClass = 'fa-check' + iconClass = 'fa-check'; break; case ICON_STATUS.WARNING: - iconClass = 'fa-triangle-exclamation' + iconClass = 'fa-triangle-exclamation'; break; case ICON_STATUS.PROCESSING: - iconClass = 'fa-solid fa-spinner' + iconClass = 'fa-solid fa-spinner fa-spin'; break; default: break; } - const msgContent = status === ICON_STATUS.WARNING && addJobLink ? `${msg}` : msg; + const msgContent = + status === ICON_STATUS.WARNING && addJobLink + ? `${msg}` + : msg; const msgDiv = `
${msgContent}
`; - if(!modify){ + if (!modify) { progressContent.prepend(msgDiv); - } - else { + } else { $(`.${stepClass}`).remove(); - progressContent.prepend(msgDiv) + progressContent.prepend(msgDiv); } }; const resetProgressBar = () => { $(registerFromFileEles.progressDisplay).empty(); - [registerFromFileEles.databaseName, registerFromFileEles.processEnName, registerFromFileEles.processJapaneseName, registerFromFileEles.processLocalName].forEach(el => { + [ + registerFromFileEles.databaseName, + registerFromFileEles.processEnName, + registerFromFileEles.processJapaneseName, + registerFromFileEles.processLocalName, + ].forEach((el) => { el.val(''); removeBorderFromInvalidInput(el); - }) + }); }; - const resetPreviewTableContent = () => { procModalElements.processColumnsTableBody.empty(); procModalElements.processColumnsSampleDataTableBody.empty(); }; -const checkDuplicatedDataSourceName = async (dbsName = '') => { - // /ap/api/setting/check_duplicated_db_source +/** + * Check Duplicated Data Source Name + * @param {string} datasourceName + * @return {Promise} - true: is duplicate, false: not duplicate + */ +const checkDuplicatedDataSourceName = async (datasourceName = '') => { const data = { - name: dbsName - } - const response = await fetchData('/ap/api/setting/check_duplicated_db_source', JSON.stringify(data), 'POST'); + name: datasourceName, + }; + const response = await fetchData( + '/ap/api/setting/check_duplicated_db_source', + JSON.stringify(data), + 'POST', + ); return response.is_duplicated; }; -const checkDuplicatedProcessName = async (nameEn = '', nameJp = '', nameLocal = '') => { - // /ap/api/setting/check_duplicated_db_source +/** + * Check Duplicated Process Name + * @param {string} nameEn - Name English + * @param {string} nameJp - Name Japanese + * @param {string} nameLocal - Name Local + * @return {Promise} - a list of boolean + * - true: is duplicate | false: not duplicate, <--English--> + * - true: is duplicate | false: not duplicate, <--Japanese--> + * - true: is duplicate | false: not duplicate, <--Local--> + */ +const checkDuplicatedProcessName = async ( + nameEn = '', + nameJp = '', + nameLocal = '', +) => { const data = { name_en: nameEn, name_jp: nameJp, - name_local: nameLocal - } - const response = await fetchData('/ap/api/setting/check_duplicated_process_name', JSON.stringify(data), 'POST'); + name_local: nameLocal, + }; + /** @type {{is_duplicated: boolean[3]}} */ + const response = await fetchData( + '/ap/api/setting/check_duplicated_process_name', + JSON.stringify(data), + 'POST', + ); return response.is_duplicated; }; const getDbSourceAndProcessNameFromUrl = (url, isFile) => { const fullPath = url.replace(/\\/g, '/').split('/'); - const folderName = isFile ? fullPath[fullPath.length - 2] : fullPath[fullPath.length - 1]; - return folderName; + return isFile + ? 
fullPath[fullPath.length - 2] + : fullPath[fullPath.length - 1]; }; -const saveDataSourceAndProc = async () => { - processInfo = null; - // clear old status - // to do: clear all #progressDisplay +/** + * Get Url info + * @return {Promise<{isFile: boolean, url: string, fileUrl: string}>} + */ +function getUrlInfo() { + return new Promise(function (resolve) { + let url = $(registerFromFileEles.folderUrl).val().trim(); + let fileUrl = $(registerFromFileEles.refFileUrl).val().trim(); + checkFolderOrFile(url).then((urlInfo) => { + let isFile; + if (urlInfo.isFile) { + isFile = true; + fileUrl = url; + url = getFolderPathFromFilePath(url); + } else { + isFile = + fileUrl !== '' && + $(registerFromFileEles.containerReferenceFile)[0].style + .display !== 'none'; + } - // check duplicated dbs and process name - const isValid = await checkDuplicatedDbsAndProcessName(); - if (!isValid) return; + resolve({ + url: url, + isFile: isFile, + fileUrl: fileUrl, + }); + }); + }); +} - const isFillDbs = checkEmptyDataSourceAndProcessName(); - if (!isFillDbs) return; - - addMessengerToProgressBar(registerI18n.i18nProcessRegisterStart) - addMessengerToProgressBar(registerI18n.i18nProgressFolderCheck) - let dataSourcePath = ''; - const dataSourceName = $(registerFromFileEles.databaseName).val() - const registerMode = $(registerFromFileEles.checkedDirectoryRadio).val() - const selectJson = getSelectedColumnsAsJson(); - const [procCfgData, unusedColumns] = collectProcCfgData(selectJson); - if(registerMode === "folder") { - dataSourcePath = $(inputDOMByPath.directory).val() - } - else if(registerMode === "file") { - dataSourcePath = $(inputDOMByPath.file).val() - } - const dictCsvInfo = { - name: dataSourceName, - type: "CSV", - csv_detail: { - directory: dataSourcePath, - delimiter: "Auto", - csv_columns: procCfgData.columns, +/** + * Check there must be a main::Datetime column for each process + * @return {boolean} - true: main::Datetime is already selected, false: not have main::Datetime in process config + */ +const isMainDatetimeColumnSelected = () => { + let isSelected = true; + const tableBodyElements = document.querySelectorAll( + 'table[name=processColumnsTable] tbody', + ); + tableBodyElements.forEach((tableBodyElement) => { + const $mainDatetime = $(tableBodyElement).find( + 'td.column-date-type button>span[data-attr-key="is_get_date"]', + ); + + const isExistMainDatetimeColumn = $mainDatetime.length > 0; + const isMainDatetimeColumnChecked = $mainDatetime + .closest('tr') + .find('td.column-raw-name input[type="checkbox"]') + .is(':checked'); + if (!(isExistMainDatetimeColumn && isMainDatetimeColumnChecked)) { + isSelected = false; } - }; + }); - const data = { - proc_config: procCfgData, - import_data: true, - unused_columns: unusedColumns, - csv_info: dictCsvInfo, + if (!isSelected) { + addMessengerToProgressBar( + registerI18n.i18nErrorNoGetdate, + ICON_STATUS.WARNING, + ); } + + return isSelected; +}; + +/** + * Collect Process Data + * @return {RequestProcessData[]} - a list of process data + */ +function collectProcessConfigInfos() { + const processConfigs = []; + const sectionHTMLObjects = document.querySelectorAll( + '[id^="procSettingModal"]', + ); + sectionHTMLObjects.forEach((sectionHTMLObject) => { + const processConfigSection = + sectionHTMLObject.__object__ != null + ? 
sectionHTMLObject.__object__ + : new ProcessConfigSection(sectionHTMLObject); + const requestProcessConfig = + processConfigSection.collectProcessConfig(); + const processConfig = + ProcessConfigSection.splitUsedColumnAndUnusedColumn( + requestProcessConfig, + ); + processConfigs.push(processConfig); + }); + + return processConfigs; +} + +/** + * Collect data source config + * @return {DatasourceConfig} - an object of data source config + */ +function collectDatasourceInfo() { + const datasourceNameElement = document.getElementById('databaseName'); + datasourceNameElement.__cache__.name = datasourceNameElement.value.trim(); + return datasourceNameElement.__cache__; +} + +/** + * Collect all information of data source and process config + * @return {{ + * import_data: boolean, + * proc_configs: RequestProcessData[], + * request_id: string, + * csv_info: DatasourceConfig, + * }} + */ +function collectAllDataInfo() { + const processConfigs = collectProcessConfigInfos(); + const datasourceConfig = collectDatasourceInfo(); + window.RegisterByFileRequestID = create_UUID(); + window.newProcessIds = undefined; + window.sseProcessIds = []; + + return { + csv_info: datasourceConfig, + proc_configs: processConfigs, + import_data: true, + request_id: window.RegisterByFileRequestID, + }; +} + +/** + * Save DataSource And Processes + * @return {Promise} + */ +const saveDataSourceAndProc = async () => { + const isDSNameEmpty = isDataSourceNameEmpty(); + const isProcNameEmpty = isProcessNameEmpty(); + if (isDSNameEmpty || isProcNameEmpty) return; + + // check duplicated dbs and process name + const isDSNameDuplicate = await isDataSourceNameDuplicate(); + const isProcNameDuplicate = await isProcessNameDuplicate(); + if (isDSNameDuplicate || isProcNameDuplicate) return; + + // check is_get_date column is already defined + const isDatetimeColumnSelected = isMainDatetimeColumnSelected(); + if (!isDatetimeColumnSelected) return; + + addMessengerToProgressBar(registerI18n.i18nProcessRegisterStart); + addMessengerToProgressBar(registerI18n.i18nProgressFolderCheck); + + const data = collectAllDataInfo(); try { - $(registerFromFileEles.registerButton).prop('disabled', true).removeClass("btn-primary").addClass("btn-secondary"); - const response = await fetchData('/ap/api/setting/register_source_and_proc', JSON.stringify(data), 'POST'); - processInfo = response.process_info; - registeredProcessId = processInfo.id; + $(registerFromFileEles.registerButton) + .prop('disabled', true) + .removeClass('btn-primary') + .addClass('btn-secondary'); + /** + * @type {{ + * message: string, + * is_error: boolean, + * processIds: number[], + * }} + */ + const response = await fetchData( + '/ap/api/setting/register_source_and_proc', + JSON.stringify(data), + 'POST', + ).catch((err) => { + addMessengerToProgressBar( + err.responseJSON.message, + ICON_STATUS.WARNING, + ); + console.error(`[Backend Error] ${err.responseJSON.detail}`); + }); + + if (response) { + addMessengerToProgressBar(response.message, ICON_STATUS.SUCCESS); + window.newProcessIds = response.processIds; + console.log(response.processIds); + } } catch (e) { - console.log(e) + console.log(e); } }; +/** + * Redirect To CHM Page + * @param processId + * @return {Promise} + */ const redirectToCHMPage = async (processId) => { // go to chm after import data // const processId = processInfo.id; - const res = await fetchData(`/ap/api/setting/redirect_to_chm_page/${processId}`, '', 'GET'); + const res = await fetchData( + `/ap/api/setting/redirect_to_chm_page/${processId}`, 
+ '', + 'GET', + ); + goToOtherPage(res.url, true); +}; + +const redirectToPage = async (processIds, page) => { + const data = { + page: page, + processIds: processIds, + }; + const res = await fetchData( + `/ap/api/setting/redirect_to_page`, + JSON.stringify(data), + 'POST', + ); goToOtherPage(res.url, true); }; +/** + * Update Data Register Status + * @param {{ + * data: { + * RegisterByFileRequestID: string, + * step: string, + * status: string, + * is_first_imported: boolean, + * process_id: number, + * use_dummy_datetime: boolean, + * }, + * }} postDat - a dictionary contains SSE message from Backend + */ const updateDataRegisterStatus = (postDat) => { - if (postDat.data.process_id !== registeredProcessId) return; + if (postDat.data.RegisterByFileRequestID !== window.RegisterByFileRequestID) + return; + + // switch (postDat.data.step) { + // case registerSteps.GEN_DATA_TABLE: + // addMessengerToProgressBar( + // registerI18n.i18nProgressGenDataTable, + // ICON_STATUS.PROCESSING, + // registerSteps.GEN_DATA_TABLE, + // ); + // break; + // + // case registerSteps.SCAN_FILE: + // addMessengerToProgressBar( + // registerI18n.i18nProgressGenDataTable, + // ICON_STATUS.SUCCESS, + // registerSteps.GEN_DATA_TABLE, + // true, + // ); + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanFile, + // ICON_STATUS.PROCESSING, + // registerSteps.SCAN_FILE, + // ); + // break; + // + // case registerSteps.SCAN_MASTER: + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanFile, + // ICON_STATUS.SUCCESS, + // registerSteps.SCAN_FILE, + // true, + // ); + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanMaster, + // ICON_STATUS.PROCESSING, + // registerSteps.SCAN_MASTER, + // ); + // break; + // + // case registerSteps.SCAN_DATA_TYPE: + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanMaster, + // ICON_STATUS.SUCCESS, + // registerSteps.SCAN_MASTER, + // true, + // ); + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanDataType, + // ICON_STATUS.PROCESSING, + // registerSteps.SCAN_DATA_TYPE, + // ); + // break; + // + // case registerSteps.PULL_CSV_DATA: + // addMessengerToProgressBar( + // registerI18n.i18nProgressScanDataType, + // ICON_STATUS.SUCCESS, + // registerSteps.SCAN_DATA_TYPE, + // true, + // ); + // addMessengerToProgressBar( + // registerI18n.i18nProgressPullData, + // ICON_STATUS.PROCESSING, + // registerSteps.PULL_CSV_DATA, + // ); + // break; + // } + // processing - if (postDat.data.status === REGISTER_JOB_STATUS.PROCESSING) { - addMessengerToProgressBar( - registerI18n.i18nProgressImportingData, - ICON_STATUS.PROCESSING, - registerSteps.IMPORTING - ) - } - // failed importing - if (postDat.data.status === REGISTER_JOB_STATUS.FAILED) { - addMessengerToProgressBar( - registerI18n.i18nProgressImportingData, - ICON_STATUS.WARNING, - registerSteps.IMPORTING, - true, - true - ) - } - if (postDat.data.status === REGISTER_JOB_STATUS.DONE) { - // modify processing to check icon - addMessengerToProgressBar( - registerI18n.i18nProgressImportingData, - ICON_STATUS.SUCCESS, - registerSteps.IMPORTING, - true - ) - // add finish item - addMessengerToProgressBar( - registerI18n.i18nProgressFinished, - ICON_STATUS.SUCCESS, - registerSteps.IMPORTING - ) + switch (postDat.data.status) { + case REGISTER_JOB_STATUS.PROCESSING: + // addMessengerToProgressBar( + // registerI18n.i18nProgressPullData, + // ICON_STATUS.SUCCESS, + // registerSteps.PULL_CSV_DATA, + // true, + // ); + addMessengerToProgressBar( + 
registerI18n.i18nProgressImportingData,
+                ICON_STATUS.PROCESSING,
+                registerSteps.IMPORTING,
+                true,
+            );
+            break;
+
+        case REGISTER_JOB_STATUS.FAILED:
+            addMessengerToProgressBar(
+                registerI18n.i18nProgressImportingData,
+                ICON_STATUS.WARNING,
+                registerSteps.IMPORTING,
+                true,
+                true,
+            );
+            break;
+
+        case REGISTER_JOB_STATUS.DONE:
+            // modify processing to check icon
+            addMessengerToProgressBar(
+                registerI18n.i18nProgressImportingData,
+                ICON_STATUS.SUCCESS,
+                registerSteps.IMPORTING,
+                true,
+            );
+            // add finish item
+            addMessengerToProgressBar(
+                registerI18n.i18nProgressFinished,
+                ICON_STATUS.SUCCESS,
+                registerSteps.IMPORTING,
+                true,
+            );
+            break;
    }
 
+    // redirect once the first chunk of data is imported
    if (postDat.data.is_first_imported) {
+        console.log(postDat.data.process_id);
+        if (!window.sseProcessIds.includes(postDat.data.process_id)) {
+            // Add the process id to the ready list of processes that have already imported data
+            window.sseProcessIds.push(postDat.data.process_id);
+        }
+
+        if (
+            window.newProcessIds &&
+            window.newProcessIds.length !== window.sseProcessIds.length
+        ) {
+            // some processes have no data yet; keep waiting for their import
+            return;
+        }
+
+        console.log('Ready to show graph...');
+        // once all processes have data, redirect to the show graph page
        setTimeout(async () => {
-            await redirectToCHMPage(postDat.data.process_id);
+            const pageRedirect = postDat.data.use_dummy_datetime
+                ? 'ap/fpp'
+                : 'ap/chm';
+            await redirectToPage(window.newProcessIds, pageRedirect);
        }, 3000);
    }
};
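The handler above gates navigation until every newly registered process has reported its first imported chunk over SSE. A minimal sketch of that gating condition, assuming window.newProcessIds and window.sseProcessIds are maintained exactly as in the handler (the helper name is illustrative, not part of the patch):

// Illustrative only: true once every newly registered process id has
// appeared in the ready list of processes that already imported data.
const allProcessesImported = (newIds, readyIds) =>
    Array.isArray(newIds) && newIds.every((id) => readyIds.includes(id));
// usage: allProcessesImported(window.newProcessIds, window.sseProcessIds)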
@@ -644,8 +1177,8 @@ const getRequestParams = () => {
         estimationFile,
         dataSourceName,
         procesNameJp,
-        processNameLocal
-    }
+        processNameLocal,
+    };
 };
 
 const handleLoadGUiFromExternalAPIRequest = () => {
@@ -656,7 +1189,7 @@ const handleLoadGUiFromExternalAPIRequest = () => {
         estimationFile,
         dataSourceName,
         procesNameJp,
-        processNameLocal
+        processNameLocal,
     } = getRequestParams();
 
     if (!loadGUIFromUrl) return;
@@ -667,7 +1200,6 @@
     if (isFile) {
         $(registerFromFileEles.directoryRadios).val('file').trigger('change');
-        registerFromFileEles.fileUrl.val(sourceFile).trigger('change');
     } else {
         if (sourceFolder) {
             registerFromFileEles.folderUrl.val(sourceFolder).trigger('change');
@@ -678,13 +1210,15 @@
         }
     }
 
-    setTimeout(()=> {
+    setTimeout(() => {
         // set data source name
         if (dataSourceName) {
             registerFromFileEles.databaseName.val(dataSourceName);
         }
         if (procesNameJp) {
-            registerFromFileEles.processJapaneseName.val(procesNameJp).trigger('change');
+            registerFromFileEles.processJapaneseName
+                .val(procesNameJp)
+                .trigger('change');
         }
         if (processNameLocal) {
             registerFromFileEles.processLocalName.val(processNameLocal);
@@ -695,49 +1229,125 @@
         }
     }, 500);
 
-    clickRegisterButtonWhenEnabled()
+    clickRegisterButtonWhenEnabled();
 };
 
 function clickRegisterButtonWhenEnabled() {
     // poll until all values are filled and button is enabled
-    if($(registerFromFileEles.registerButton).hasOwnProperty('disabled') ||
-        !($(registerFromFileEles.processEnName).val())){
+    if (
+        Object.prototype.hasOwnProperty.call(
+            $(registerFromFileEles.registerButton),
+            'disabled',
+        ) ||
+        !$(registerFromFileEles.processEnName).val()
+    ) {
         setTimeout(clickRegisterButtonWhenEnabled, 500);
     } else {
         // click register data
-        $(registerFromFileEles.registerButton).click();
+        $(registerFromFileEles.registerButton).trigger('click');
    }
}
 
 const disableRegisterDataFileBtn = () => {
-    $(registerFromFileEles.registerButton).prop('disabled', true).removeClass('btn-primary').addClass('btn-secondary');
+    $(registerFromFileEles.registerButton)
+        .prop('disabled', true)
+        .removeClass('btn-primary')
+        .addClass('btn-secondary');
 };
 
 const enableRegisterDataFileBtn = () => {
-    $(registerFromFileEles.registerButton).prop('disabled', false).removeClass('btn-secondary').addClass('btn-primary');
+    $(registerFromFileEles.registerButton)
+        .prop('disabled', false)
+        .removeClass('btn-secondary')
+        .addClass('btn-primary');
 };
 
 const removeQuotesfromInputAndUpdate = (inputEl) => {
-    const url = $(inputEl).val().replace(/"/g, "");
+    const url = $(inputEl).val().replace(/"/g, '');
     $(inputEl).val(url);
-}
+};
+
+const removeLastBackslashFromInputAndUpdate = (inputEl) => {
+    const url = $(inputEl).val().replace(/\\$/g, '');
+    $(inputEl).val(url);
+};
 
 const displayPreviewContentData = () => {
     $(procModalElements.procPreviewSection).show();
-}
+};
 
 const hiddenPreviewContentData = () => {
     $(procModalElements.procPreviewSection).hide();
+};
+
+const showHideRefFile = (isShow) => {
+    const display = isShow ? '' : 'none';
+    $(registerFromFileEles.containerReferenceFile)[0].style.setProperty(
+        'display',
+        display,
+        'important',
+    );
+    if (!isShow) {
+        $(registerFromFileEles.refFileUrl).val('');
+    }
+};
+
+/**
+ * Get Folder Path From File Path
+ * @param {string} filePath - a file path
+ * @return {string} - a folder path
+ */
+function getFolderPathFromFilePath(filePath) {
+    const lastSlashIndex = filePath.lastIndexOf('\\');
+    const lastForwardSlash = filePath.lastIndexOf('/');
+    const lastIndex = Math.max(lastSlashIndex, lastForwardSlash);
+    return filePath.substring(0, lastIndex).replace(/^\/+|\/+$/g, '');
+}
+
+const switchToRegisterByFolder = (event) => {
+    showHideRefFile(true);
+
+    // Change file path to folder path
+    const filePath = $(event.currentTarget).data('url');
+    const folderPath = getFolderPathFromFilePath(filePath);
+    $(registerFromFileEles.folderUrl).val(folderPath);
+
+    handleOnChangeFolderAndFileUrl(false).then(() => {});
+};
+
+/**
+ * Remove all process configs in extend section html
+ */
+function removeExtendSections() {
+    document
        .querySelectorAll('div.section.extend-section')
        .forEach((extendSection) => $(extendSection).remove());
 }
 
-$(document).ready(() => {
-    checkOnFocus = false;
+jQuery(function () {
+    checkOnFocus = false;
     // hide folder/file picker button if there is not server admin
     if (!isAdmin) {
         $('.btn-browse').css('display', 'none');
     }
 
+    $(registerFromFileEles.registerAllFilesButton).on(
+        'click',
+        switchToRegisterByFolder,
+    );
+    $(registerFromFileEles.registerOneFileButton).on('click', () => {
+        showHideRefFile(false);
+        handleOnChangeFolderAndFileUrl(false).then(() => {});
+    });
+
     disableRegisterDataFileBtn();
     handleLoadGUiFromExternalAPIRequest();
-});
\ No newline at end of file
+
+    // This logic is ONLY for EDGE SERVER to avoid bugs related to merge mode
+    setTimeout(() => {
+        procModalElements.proc.off('focusout').on('focusout', () => {
+            checkDuplicateProcessName('data-name-en');
+        });
+    }, 200);
+});
diff --git a/ap/static/setting_module/js/system.js b/ap/static/setting_module/js/system.js
new file mode 100644
index 0000000..6d91878
--- /dev/null
+++ b/ap/static/setting_module/js/system.js
@@ -0,0 +1,188 @@
+const systemElements = {
+    divSystemConfig: '#cfgSystem',
+    backupAndRestoreModal: '#backupAndRestoreModal',
+};
+
+let 
defaultTimeRange = ''; + +const openBackupAndRestoreModal = () => { + // TODO: this processId assignment is wrong + if (!_.isEmpty(processes)) { + processId = Object.keys(processes)[0]; + } + generateHTMLBackupAndRestoreModal(); + hideRadioDefaultInterval(); + switchBackupRestoreTab(); // Switch to 'Backup' tab at default + $(systemElements.backupAndRestoreModal).modal('show'); + defaultTimeRange = $('#datetimeRangePicker').val(); + // get process id +}; + +const closeBackupAndRestoreModal = () => { + // TODO cleanup + $(systemElements.backupAndRestoreModal).modal('hide'); + // reset default datetime + handleSetValueToDateRangePicker(defaultTimeRange, false); + closeCalenderModal(); + processId = undefined; + + // Release cache function after closing this modal + $('#backupAndRestoreModal')[0].cacheFunction = undefined; +}; + +const generateHTMLBackupAndRestoreModal = () => { + const processSelection = $('#backupAndRestoreProcessSelection'); + + const processLists = Object.values(processes).map((process) => { + return ``; + }); + + // const uuid = create_UUID(); + const uuid = 1; + // must set to end_proc because datetime picker use this ... + const processSelectionId = `end_proc_${uuid}`; + processSelection.html( + ` +${i18nCommon.process} +
+ +
+`,
+    );
+
+    addAttributeToElement(processSelection);
+    processSelection.addClass('d-flex align-items-center');
+    $(`#${processSelectionId}`).on('change', (e) => {
+        processId = e.currentTarget.value;
+        setProcessID();
+    });
+    initializeDateTimeRangePicker();
+    showDataFinderButton(processId);
+};
+
+const getBackupAndRestoreInfo = () => {
+    const selectDateTimeRange = $('#datetimeRangePicker').val();
+    const [starting, ending] = selectDateTimeRange.split(
+        DATETIME_PICKER_SEPARATOR,
+    );
+    if (starting && ending) {
+        const startTime = moment.utc(moment(starting)).format(DATETIME_FORMAT);
+        const endTime = moment.utc(moment(ending)).format(DATETIME_FORMAT);
+        return {
+            processId: processId,
+            startTime: startTime,
+            endTime: endTime,
+        };
+    }
+};
+
+const doBackupData = async () => {
+    const data = getBackupAndRestoreInfo();
+    // getBackupAndRestoreInfo() returns undefined when no datetime range is selected
+    if (data?.processId) {
+        await fetch('/ap/api/setting/backup_data', {
+            method: 'POST',
+            body: JSON.stringify({
+                process_id: data.processId,
+                start_time: data.startTime,
+                end_time: data.endTime,
+            }),
+        });
+        showBackupDataToastr();
+    }
+    $(systemElements.backupAndRestoreModal).modal('hide');
+    closeCalenderModal();
+};
+
+const doRestoreData = async () => {
+    const data = getBackupAndRestoreInfo();
+    if (data?.processId) {
+        await fetch('/ap/api/setting/restore_data', {
+            method: 'POST',
+            body: JSON.stringify({
+                process_id: data.processId,
+                start_time: data.startTime,
+                end_time: data.endTime,
+            }),
+        });
+        showRestoreDataToastr();
+    }
+    $(systemElements.backupAndRestoreModal).modal('hide');
+    closeCalenderModal();
+};
+
+const hideRadioDefaultInterval = () => {
+    $('#radioDefaultInterval').parent().addClass('d-none');
+};
+
+/**
+ * Get Data Count For Calendar (Call api to re-get total records of backup or restore)
+ */
+function getDataCountForCalendar() {
+    /** @type{function(): void | {
+     * tableFrom: function(): void,
+     * tableTo: function(): void,
+     * }}
+     * */
+    const cacheFunction = document.getElementById(
+        'backupAndRestoreModal',
+    ).cacheFunction;
+    if (cacheFunction) {
+        if (_.isFunction(cacheFunction)) {
+            cacheFunction();
+        } else {
+            for (const func of Object.values(cacheFunction)) {
+                func();
+            }
+        }
+    }
+}
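Both endpoints above accept the same JSON body, built inline in doBackupData and doRestoreData. A hedged sketch of that shared payload shape (the helper name is illustrative, not part of the patch; field names and UTC formatting are taken from the calls above):

// Illustrative only: the payload posted to backup_data / restore_data.
const buildBackupRestorePayload = ({ processId, startTime, endTime }) => ({
    process_id: processId,
    start_time: startTime, // UTC string formatted with DATETIME_FORMAT
    end_time: endTime, // UTC string formatted with DATETIME_FORMAT
});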
+
+/**
+ * Switch Backup Restore Tab
+ * @param {HTMLLIElement} liElement - an li html element
+ */
+const switchBackupRestoreTab = (
+    liElement = document.getElementById('liBackupTab'),
+) => {
+    // Activate the clicked tab
+    liElement.classList.add('active');
+    liElement.firstElementChild.classList.add('active');
+    liElement.firstElementChild.setAttribute('aria-selected', String(true));
+
+    // Deactivate the other tab
+    const anotherLiElement =
+        liElement.nextElementSibling ?? liElement.previousElementSibling;
+    anotherLiElement.classList.remove('active');
+    anotherLiElement.firstElementChild.classList.remove('active');
+    anotherLiElement.firstElementChild.setAttribute(
+        'aria-selected',
+        String(false),
+    );
+
+    const aElement =
+        /** @type{HTMLAnchorElement} */ liElement.firstElementChild;
+    const currentTab = aElement.getAttribute('href').replace('#', '');
+    $('#idFlagBKRT').val(currentTab);
+    $(`#${aElement.dataset.showButtonId}`).show();
+    $(`#${aElement.dataset.hideButtonId}`).hide();
+    getDataCountForCalendar();
+};
+
+$(() => {
+    $(systemElements.divSystemConfig)[0].addEventListener(
+        'contextmenu',
+        baseRightClickHandler,
+        false,
+    );
+    $(systemElements.divSystemConfig)[0].addEventListener(
+        'mouseup',
+        handleMouseUp,
+        false,
+    );
+});
diff --git a/ap/static/setting_module/js/trace_config.js b/ap/static/setting_module/js/trace_config.js
index 3308952..26e75a5 100644
--- a/ap/static/setting_module/js/trace_config.js
+++ b/ap/static/setting_module/js/trace_config.js
@@ -1,16 +1,9 @@
-/* eslint-disable consistent-return,prefer-destructuring */
-/* eslint-disable array-callback-return */
-/* eslint-disable no-use-before-define */
-/* eslint-disable guard-for-in */
-/* eslint-disable no-restricted-syntax */
-/* eslint-disable no-unused-vars */
-/* eslint-disable no-undef */
-
 // TODO use class
 // global variable to store data of processes
 let processes = {};
 
-const getConfigOption = () => JSON.parse(localStorage.getItem('network-config')) || {};
+const getConfigOption = () =>
+    JSON.parse(localStorage.getItem('network-config')) || {};
 const configOption = getConfigOption();
 
 let hierarchicalDirection = configOption.direction || 'LR';
@@ -51,6 +44,9 @@
 const SEP_LABEL = '\n';
 const SEP_PROC = '-';
 const SEP_TITLE = '
'; +const DEFAULT_DELTA_TIME_DATA_LINK = 0; // minute +const DEFAULT_CUTOFF_DATA_LINK = 60; // minute + // define custom locales const i18nNames = { allDigits: $('#i18nAllDigits').text(), @@ -67,7 +63,8 @@ const i18nNames = { linkWithTime: $('#i18nLinkWithTime').text(), i18nCutOff: $('#i18nCutOff').text(), }; -const locale = docCookies.getItem('locale') === 'ja' ? 'jp' : docCookies.getItem('locale'); +const locale = + docCookies.getItem('locale') === 'ja' ? 'jp' : docCookies.getItem('locale'); const locales = {}; locales[locale] = { edit: $('#i18nEdit').text(), @@ -104,6 +101,7 @@ const tracingElements = { deltaDatetimeVisible: '#delta-datetime:visible', inputDeltaDatetime: '.deltaDatetime', inputCutOff: '.cutOff', + datetimeReprClassName: 'datetimeRepr', }; const destroy = () => { @@ -120,15 +118,20 @@ const cancelEdgeEdit = (callback) => { const isStarType = () => hierarchicalDirection === 'Star'; const getMatchingDigits = (item) => { - const result = $(item).find(`input[name*="${tracingElements.subStrOpt}"]:checked`) + const result = $(item) + .find(`input[name*="${tracingElements.subStrOpt}"]:checked`) .map((i, e) => { const itemId = $(e).attr('id').split('-')[1]; // if ($(`#checkbox-${itemId}`).prop('checked')) { if ($(e).val() === '0') { return []; } - const fromDigit = $(`select[name="${tracingElements.fromDigits}-${itemId}"]`).val(); - const toDigits = $(`select[name="${tracingElements.toDigits}-${itemId}"]`).val(); + const fromDigit = $( + `select[name="${tracingElements.fromDigits}-${itemId}"]`, + ).val(); + const toDigits = $( + `select[name="${tracingElements.toDigits}-${itemId}"]`, + ).val(); return [Number(fromDigit), Number(toDigits)]; // } }) @@ -139,11 +142,13 @@ const getMatchingDigits = (item) => { // TODO refactor i18n const validateSubStr = (selfSubstr, targetSubstr) => { // selfSubstr, targetSubstr [[1,3], [2,4]] - const invalidOpts = self => self.filter(([s, e]) => s > e); + const invalidOpts = (self) => self.filter(([s, e]) => s > e); // Check valid options - if (invalidOpts(selfSubstr).length > 0 - || invalidOpts(targetSubstr).length > 0 - || selfSubstr.length !== targetSubstr.length) { + if ( + invalidOpts(selfSubstr).length > 0 || + invalidOpts(targetSubstr).length > 0 || + selfSubstr.length !== targetSubstr.length + ) { return { is_valid: false, message: $('#i18nInvalidDigit').text(), @@ -152,7 +157,11 @@ const validateSubStr = (selfSubstr, targetSubstr) => { // Check self digits same as target digits const invalidDigits = targetSubstr.filter((e, i) => { - if (selfSubstr[i] !== undefined && selfSubstr[i].length > 0 && e.length > 0) { + if ( + selfSubstr[i] !== undefined && + selfSubstr[i].length > 0 && + e.length > 0 + ) { return e[1] - e[0] !== selfSubstr[i][1] - selfSubstr[i][0]; } }, selfSubstr); @@ -168,107 +177,6 @@ const validateSubStr = (selfSubstr, targetSubstr) => { }; }; -const saveEdgeDataToGlobal = (edgeData, callback) => { - if (typeof edgeData.to === 'object') edgeData.to = edgeData.to.id; - if (typeof edgeData.from === 'object') edgeData.from = edgeData.from.id; - edgeData.arrows = 'to'; - - // get Trace Target Data from modal - const targetProcId = $('select[name="edgeForwardProc"]').val(); - edgeData.target_proc = targetProcId; - const targetCols = []; - const targetOrgCols = []; - const targetSubStrs = []; - $('div[id^="edgeForwardCol-"]').find('.form-group').each(function f(idx) { - const colElement = $(this).find('select[name=forwardCol]'); - targetSubStrs.push(getMatchingDigits($(this))); - const colAlias = $('option:selected', 
colElement).attr('alias'); - const colOrg = $('option:selected', colElement).attr('original'); - if (!isEmpty(colAlias)) targetCols.push(colAlias); - if (!isEmpty(colOrg)) targetOrgCols.push(colOrg); - }); - edgeData.target_col = targetCols; - edgeData.target_orig_col = targetOrgCols; - edgeData.target_substr = targetSubStrs; - - - // get Trace Self Data from modal - const selfProcId = $('select[name="edgeBackProc"]').val(); - edgeData.self_proc = selfProcId; - const selfCols = []; - const selfOrgCols = []; - const selfSubStrs = []; - $('div[id^="edgeBackCol-"]').find('.form-group').each(function f(idx) { - const colElement = $(this).find('select[name=backCol]'); - selfSubStrs.push(getMatchingDigits($(this))); - const colAlias = $('option:selected', colElement).attr('alias'); - const colOrg = $('option:selected', colElement).attr('original'); - if (!isEmpty(colAlias)) selfCols.push(colAlias); - if (!isEmpty(colOrg)) selfOrgCols.push(colOrg); - }); - edgeData.self_col = selfCols; - edgeData.back_orig_col = selfOrgCols; - edgeData.self_substr = selfSubStrs; - - // validate: choose at least 1 column to trace - if (selfCols.length === 0 || targetCols.length === 0) { - displayRegisterMessage( - '#alertMsgCheckSubStr', { - message: i18nNames.i18nNoColumn, - is_error: true, - }, - ); - return; - } - - validEdge = validateSubStr(edgeData.self_substr, edgeData.target_substr); - if (!validEdge.is_valid) { - displayRegisterMessage( - '#alertMsgCheckSubStr', { - message: validEdge.message, - is_error: true, - }, - ); - return; - } - - // save data to external/global dict - mapIdFromIdTo2Edge[`${edgeData.from}-${edgeData.to}`] = edgeData; - - callback(edgeData); - $('#modal-edge-popup').modal('hide'); - return edgeData; -}; - -const objectToArray = obj => Object.keys(obj).map((key) => { - obj[key].id = key; - return obj[key]; -}); - -const resizeExportArea = () => { - exportArea.style.height = `${1 + exportArea.scrollHeight}px`; -}; - -// get alias of a column -const getAliasFromField = (col) => { - const splits = col.split(' as '); - if (splits.length > 0) { - const lastEl = splits[splits.length - 1]; - return lastEl.replace(/"| |'/g, ''); - } - return col; -}; - -// get original column name of a column -const getOrigColumnFromField = (col) => { - const splits = col.split(' as '); - if (splits.length > 0) { - const lastEl = splits[0]; - return lastEl.replace(/"| |'/g, ''); - } - return col; -}; - let targetProc = null; let targetColCandidates = []; let targetColCandidateMasters = []; @@ -284,8 +192,8 @@ let selfColCandidates = []; let selfColCandidateMasters = []; let selfColNames = []; let selfDataTypes = []; -let deltaTimes = ''; -let cutOffs = ''; +let deltaTimes = []; +let cutOffs = []; const layoutOption = () => { if (hierarchicalDirection !== NORMAL_TYPE) { @@ -372,7 +280,6 @@ const drawVisNetwork = (layout = layoutOption()) => { const selfProcId = edge.fromId; const targetProcId = edge.toId; - // delete from graph callback(edgeData); @@ -394,28 +301,37 @@ const drawVisNetwork = (layout = layoutOption()) => { if (hierarchicalDirection) { $('#traceNetworkLayout').val(hierarchicalDirection); - $(`#traceNetworkLayout option[value=${hierarchicalDirection}]`).prop('disabled', true); + $(`#traceNetworkLayout option[value=${hierarchicalDirection}]`).prop( + 'disabled', + true, + ); } - $('#traceNetworkLayout').off('change').on('change', (e) => { - if (e.currentTarget.value) { - hierarchicalDirection = e.currentTarget.value; - } - drawVisNetwork(); - setTimeout(() => { - getNodePositionAndSaveLocal(); - 
// disable current layout to avoid to re-select from dropdown menu - if (hierarchicalDirection) { - $('#traceNetworkLayout option').prop('disabled', false); - $(`#traceNetworkLayout option[value=${hierarchicalDirection}]`).prop('disabled', true); + $('#traceNetworkLayout') + .off('change') + .on('change', (e) => { + if (e.currentTarget.value) { + hierarchicalDirection = e.currentTarget.value; } - }, 500); - }); + drawVisNetwork(); + setTimeout(() => { + getNodePositionAndSaveLocal(); + // disable current layout to avoid to re-select from dropdown menu + if (hierarchicalDirection) { + $('#traceNetworkLayout option').prop('disabled', false); + $( + `#traceNetworkLayout option[value=${hierarchicalDirection}]`, + ).prop('disabled', true); + } + }, 500); + }); // add reset layout button events - $('#resetLayout').off('click').on('click', (e) => { - const layoutSelected = $('#traceNetworkLayout').val(); - $('#traceNetworkLayout').val(layoutSelected).change(); - }); + $('#resetLayout') + .off('click') + .on('click', (e) => { + const layoutSelected = $('#traceNetworkLayout').val(); + $('#traceNetworkLayout').val(layoutSelected).change(); + }); container.addEventListener('mouseleave', () => { moveToOptions.scale = network.getScale(); moveToOptions.position = network.getViewPosition(); @@ -436,11 +352,14 @@ const getNodePositionAndSaveLocal = () => { }; const saveLocalStorage = () => { - localStorage.setItem('network-config', JSON.stringify({ - moveto: moveToOptions, - direction: hierarchicalDirection, - nodesPosition, - })); + localStorage.setItem( + 'network-config', + JSON.stringify({ + moveto: moveToOptions, + direction: hierarchicalDirection, + nodesPosition, + }), + ); }; const handleEditEdge = (edgeData, callback) => { @@ -448,7 +367,8 @@ const handleEditEdge = (edgeData, callback) => { $('#alertMsgCheckSubStr').hide(); // get data from external dict currentEditEdge = edgeData; - const edgeDataFull = mapIdFromIdTo2Edge[`${edgeData.from.id}-${edgeData.to.id}`]; + const edgeDataFull = + mapIdFromIdTo2Edge[`${edgeData.from.id}-${edgeData.to.id}`]; if (edgeDataFull) { edgeData.target_proc = edgeDataFull.target_proc; edgeData.target_col = edgeDataFull.target_col; @@ -456,6 +376,8 @@ const handleEditEdge = (edgeData, callback) => { edgeData.self_col = edgeDataFull.self_col; edgeData.self_substr = edgeDataFull.self_substr; edgeData.target_substr = edgeDataFull.target_substr; + edgeData.delta_time = edgeDataFull.delta_time; + edgeData.cut_off = edgeDataFull.cut_off; } // save edited edge @@ -503,8 +425,10 @@ const onChangeProcs = (edgeData) => { setTimeout(() => { let from = ''; let to = ''; - if (typeof edgeData.to === 'object') edgeData.to = edgeData.to.id; - if (typeof edgeData.from === 'object') edgeData.from = edgeData.from.id; + if (typeof edgeData.to === 'object') + edgeData.to = edgeData.to.id; + if (typeof edgeData.from === 'object') + edgeData.from = edgeData.from.id; edgeData.from = e.currentTarget.value; from = edgeData.from.toString(); to = edgeData.to.toString(); @@ -534,8 +458,10 @@ const onChangeProcs = (edgeData) => { setTimeout(() => { let from = ''; let to = ''; - if (typeof edgeData.to === 'object') edgeData.to = edgeData.to.id; - if (typeof edgeData.from === 'object') edgeData.from = edgeData.from.id; + if (typeof edgeData.to === 'object') + edgeData.to = edgeData.to.id; + if (typeof edgeData.from === 'object') + edgeData.from = edgeData.from.id; edgeData.to = e.currentTarget.value; from = edgeData.from.toString(); to = edgeData.to.toString(); @@ -565,7 +491,7 @@ const 
masterInnerOrder = [ masterDataGroup.EQ_NO, masterDataGroup.PART_NAME, masterDataGroup.PART_NO, - masterDataGroup.ST_NO + masterDataGroup.ST_NO, ]; const reOrderLinkCols = (linkingCols) => { @@ -582,23 +508,26 @@ const reOrderLinkCols = (linkingCols) => { let intCols = []; for (const linkingCol of linkingCols) { if (linkingCol.is_serial_no) { - serialCols.push(linkingCol) + serialCols.push(linkingCol); } else if (linkingCol.is_get_date) { - getDateCols.push(linkingCol) + getDateCols.push(linkingCol); } else if (linkingCol.data_type === DataTypes.DATETIME.name) { - dateTimeCols.push(linkingCol) + dateTimeCols.push(linkingCol); } else if (linkingCol.data_type === DataTypes.DATE.name) { - mainDateCols.push(linkingCol) + mainDateCols.push(linkingCol); } else if (linkingCol.data_type === DataTypes.TIME.name) { - mainTimeCols.push(linkingCol) + mainTimeCols.push(linkingCol); } else if (masterInnerOrder.includes(linkingCol.column_type)) { - masterCols.push(linkingCol) - } else if (linkingCol.is_int_category && linkingCol.data_type === DataTypes.INTEGER.name) { - catCols.push(linkingCol) + masterCols.push(linkingCol); + } else if ( + linkingCol.is_int_category && + linkingCol.data_type === DataTypes.INTEGER.name + ) { + catCols.push(linkingCol); } else if (linkingCol.data_type === DataTypes.STRING.name) { - stringCols.push(linkingCol) - } else if (linkingCol.data_type === DataTypes.INTEGER.name){ - intCols.push(linkingCol) + stringCols.push(linkingCol); + } else if (linkingCol.data_type === DataTypes.INTEGER.name) { + intCols.push(linkingCol); } } getDateCols.sort((a, b) => a.order - b.order); @@ -612,7 +541,8 @@ const reOrderLinkCols = (linkingCols) => { stringCols.sort((a, b) => a.order - b.order); intCols.sort((a, b) => a.order - b.order); // push serial at top of dropdown in linking modal - return [...serialCols, + return [ + ...serialCols, ...getDateCols, ...dateTimeCols, ...mainDateCols, @@ -622,7 +552,8 @@ const reOrderLinkCols = (linkingCols) => { ...masterCols, ...catCols, ...stringCols, - ...intCols]; + ...intCols, + ]; }; const getInforToGenerateColumns = (selfProcId, targetProcId, edgeData) => { @@ -632,11 +563,13 @@ const getInforToGenerateColumns = (selfProcId, targetProcId, edgeData) => { selectedSelfCols = [...edgeData.self_col]; } - const selfSerialColumns = reOrderLinkCols(processes[selfProcId].columns.filter(e => e.is_linking_column)); - selfColCandidates = selfSerialColumns.map(e => e.id); - selfColCandidateMasters = selfSerialColumns.map(e => e.shown_name); - selfColNames = selfSerialColumns.map(e => e.name_en); - selfDataTypes = selfSerialColumns.map(e => e.data_type); + const selfSerialColumns = reOrderLinkCols( + processes[selfProcId].columns.filter((e) => e.is_linking_column), + ); + selfColCandidates = selfSerialColumns.map((e) => e.id); + selfColCandidateMasters = selfSerialColumns.map((e) => e.shown_name); + selfColNames = selfSerialColumns.map((e) => e.name_en); + selfDataTypes = selfSerialColumns.map((e) => e.data_type); if (typeof edgeData.target_col !== 'object') { selectedTargetCols = [edgeData.target_col]; @@ -644,11 +577,13 @@ const getInforToGenerateColumns = (selfProcId, targetProcId, edgeData) => { selectedTargetCols = [...edgeData.target_col]; } - const targetSerialColumns = reOrderLinkCols(processes[targetProcId].columns.filter(e => e.is_linking_column)); - targetColCandidates = targetSerialColumns.map(e => e.id); - targetColCandidateMasters = targetSerialColumns.map(e => e.shown_name); - targetColNames = targetSerialColumns.map(e => e.name_en); - 
targetDataTypes = targetSerialColumns.map(e => e.data_type); + const targetSerialColumns = reOrderLinkCols( + processes[targetProcId].columns.filter((e) => e.is_linking_column), + ); + targetColCandidates = targetSerialColumns.map((e) => e.id); + targetColCandidateMasters = targetSerialColumns.map((e) => e.shown_name); + targetColNames = targetSerialColumns.map((e) => e.name_en); + targetDataTypes = targetSerialColumns.map((e) => e.data_type); targetChosenTraceKeys = []; selfChosenTraceKeys = []; @@ -659,7 +594,6 @@ const getInforToGenerateColumns = (selfProcId, targetProcId, edgeData) => { cutOffs = edgeData.cut_off; }; - const initVisData = (processesArray) => { // clear vis data edges.clear(); @@ -669,7 +603,7 @@ const initVisData = (processesArray) => { // update processes from global const // create Vis nodes from processes data let procTraces = []; - processes = {} + processes = {}; for (const key in processesArray) { const procCopy = { ...processesArray[key] }; procCopy.master = procCopy.shown_name; @@ -684,7 +618,6 @@ const initVisData = (processesArray) => { processes[procCopy.id] = procCopy; } - // create Vis edge from processes trace data // use trace forward to create edges procTraces.forEach((trace) => { @@ -692,15 +625,25 @@ const initVisData = (processesArray) => { from: trace.self_process_id, to: trace.target_process_id, arrows: 'to', - font: { multi: 'html', strokeColor: COLOR.background, align: 'top' }, + font: { + multi: 'html', + strokeColor: COLOR.background, + align: 'top', + }, self_proc: trace.self_process_id, target_proc: trace.target_process_id, - self_col: trace.trace_keys.map(key => key.self_column_id), - target_col: trace.trace_keys.map(key => key.target_column_id), - self_substr: trace.trace_keys.map(key => [key.self_column_substr_from, key.self_column_substr_to]), - target_substr: trace.trace_keys.map(key => [key.target_column_substr_from, key.target_column_substr_to]), - delta_time: trace.trace_keys.map(key => key.delta_time), - cut_off: trace.trace_keys.map(key => key.cut_off), + self_col: trace.trace_keys.map((key) => key.self_column_id), + target_col: trace.trace_keys.map((key) => key.target_column_id), + self_substr: trace.trace_keys.map((key) => [ + key.self_column_substr_from, + key.self_column_substr_to, + ]), + target_substr: trace.trace_keys.map((key) => [ + key.target_column_substr_from, + key.target_column_substr_to, + ]), + delta_time: trace.trace_keys.map((key) => key.delta_time), + cut_off: trace.trace_keys.map((key) => key.cut_off), }; const edgeKey = `${trace.self_process_id}-${trace.target_process_id}`; mapIdFromIdTo2Edge[edgeKey] = edge; @@ -715,31 +658,45 @@ const init = () => { reloadTraceConfigFromDB(); }; - // Generate Sub string options -const digitOptions = position => [...Array(100).keys()].map((x) => { - let digitChecked = ''; - if (position === x + 1) { - digitChecked = 'selected="selected"'; - } else { - digitChecked = ''; - } - return ``; -}, position); - +const digitOptions = (position) => + [...Array(100).keys()].map((x) => { + let digitChecked = ''; + if (position === x + 1) { + digitChecked = 'selected="selected"'; + } else { + digitChecked = ''; + } + return ``; + }, position); const addAutoCheckPartialMatch = () => { // auto select partial-radio $('.partial-digit').each(function f() { $(this).on('change', function autoSelectSubstr() { - $($(this).closest('.partial-option').find('.partial-radio')[0]).prop('checked', 1); + $( + $(this).closest('.partial-option').find('.partial-radio')[0], + ).prop('checked', 1); }); }); }; 
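
Note: the option markup inside digitOptions' template literal is elided in the listing above (the return statement shows an empty template), so the exact attributes are not recoverable from this patch. A minimal sketch of what each entry plausibly renders, with the value/label layout assumed rather than taken from the source:

// Editorial sketch, not part of the patch: digitOptions' template body is
// elided above, so this <option> markup is an assumption. Each entry is a
// 1-based digit position for the substring dropdowns, with the current
// position pre-selected.
const digitOptionsSketch = (position) =>
    [...Array(100).keys()].map((x) => {
        const selected = position === x + 1 ? 'selected="selected"' : '';
        return `<option value="${x + 1}" ${selected}>${x + 1}</option>`;
    });

// Usage: digitOptionsSketch(3).join('') yields 100 options with "3" selected,
// matching how the patch joins digitOptions(starDigit) into fromOptions.
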
-const addGroupListSelection = (parentId, id, itemName, itemVals, itemOrgCols = null, itemAliases = null, - checkedVals = null, itemDisplayNames = [], subStrOpt = [], forwardCol = null, clearOption = true, - columnNames = [], dataTypes = [], chosenOption = []) => { +const addGroupListSelection = ( + parentId, + id, + itemName, + itemVals, + itemOrgCols = null, + itemAliases = null, + checkedVals = null, + itemDisplayNames = [], + subStrOpt = [], + forwardCol = null, + clearOption = true, + columnNames = [], + dataTypes = [], + chosenOption = [], +) => { if (clearOption) $(`#${parentId}`).empty(); if (checkedVals.length == 0) { @@ -759,7 +716,8 @@ const addGroupListSelection = (parentId, id, itemName, itemVals, itemOrgCols = n let endDigit = 1; if (subStrOpt.length > 0 && subStrOpt[i] !== undefined) { // Update Edge - if (!subStrOpt[i].length || subStrOpt[i].some(digit => !digit)) { // all substring digits >= 1 + if (!subStrOpt[i].length || subStrOpt[i].some((digit) => !digit)) { + // all substring digits >= 1 defaultSelected = 'checked="checked"'; } else { subSelected = 'checked="checked"'; @@ -772,13 +730,18 @@ const addGroupListSelection = (parentId, id, itemName, itemVals, itemOrgCols = n const fromOptions = digitOptions(starDigit).join(''); const toOptions = digitOptions(endDigit).join(''); - let options = itemAliases.map((v, k) => { - const selected = `${v}` === `${selector}` ? ' selected="selected"' : ''; - return ``; - }, - selector, - itemDisplayNames).join(''); + }, + selector, + itemDisplayNames, + ) + .join(''); if (isNullOption) { options = ''; @@ -846,7 +809,10 @@ const getSelectedColumns = (allSelectedColElements) => { if (allSelectedColElements.length > 0) { Array.prototype.forEach.call(allSelectedColElements, (element) => { - if (element.value && selectedCols.includes(element.value) === false) { + if ( + element.value && + selectedCols.includes(element.value) === false + ) { selectedCols.push(element.value); } }); @@ -860,7 +826,9 @@ let selfChosenTraceKeys = []; // TODO check validation const updateSelectedColumns = (isForce = false) => { const allSelectedTargetColElements = $('select[name=forwardCol]'); - const allChosenTargetCols = getSelectedColumns(allSelectedTargetColElements); + const allChosenTargetCols = getSelectedColumns( + allSelectedTargetColElements, + ); const allSelectedTargetCols = allSelectedTargetColElements.get(); targetChosenTraceKeys = allChosenTargetCols; let targetColIndex = 0; @@ -868,7 +836,10 @@ const updateSelectedColumns = (isForce = false) => { const currentCardSelector = $(selected).val(); $.each($(selected).find('option'), (key, option) => { const optionVal = $(option).val(); - if (allChosenTargetCols.includes(optionVal) && currentCardSelector !== optionVal) { + if ( + allChosenTargetCols.includes(optionVal) && + currentCardSelector !== optionVal + ) { $(option).attr('disabled', 'disabled'); } else if (!allChosenTargetCols.includes(optionVal)) { $(option).removeAttr('disabled'); @@ -878,7 +849,6 @@ const updateSelectedColumns = (isForce = false) => { targetColIndex++; }); - const allSelectedSelfColElements = $('select[name=backCol]'); const allChosenSelfCols = getSelectedColumns(allSelectedSelfColElements); const allSelectedSelfCols = allSelectedSelfColElements.get(); @@ -888,7 +858,10 @@ const updateSelectedColumns = (isForce = false) => { const currentCardSelector = $(selected).val(); $.each($(selected).find('option'), (key, option) => { const optionVal = $(option).val(); - if (allChosenSelfCols.includes(optionVal) && currentCardSelector 
!== optionVal) { + if ( + allChosenSelfCols.includes(optionVal) && + currentCardSelector !== optionVal + ) { $(option).attr('disabled', 'disabled'); } else if (!allChosenSelfCols.includes(optionVal)) { $(option).removeAttr('disabled'); @@ -911,46 +884,59 @@ const updateSelectedColumns = (isForce = false) => { // } }; - const addTraceKey = (isNew = false) => { let unusedTargetCols = []; let unusedSelfCols = []; if (isNew) { unusedTargetCols = targetColCandidates.filter( - chosen => !targetChosenTraceKeys.find(remain => `${remain}` === `${chosen}`), + (chosen) => + !targetChosenTraceKeys.find( + (remain) => `${remain}` === `${chosen}`, + ), ); unusedSelfCols = selfColCandidates.filter( - chosen => !selfChosenTraceKeys.find(remain => `${remain}` === `${chosen}`), + (chosen) => + !selfChosenTraceKeys.find( + (remain) => `${remain}` === `${chosen}`, + ), ); } const selectedColumns = []; const idPostfix = `-${generateRandomString(5)}`; - const fwdColumns = addGroupListSelection('edgeForwardColParent', `edgeForwardCol${idPostfix}`, 'forwardCol', + const fwdColumns = addGroupListSelection( + 'edgeForwardColParent', + `edgeForwardCol${idPostfix}`, + 'forwardCol', targetColCandidates, - itemOrgCols = targetColCandidates, - itemAliases = targetColCandidates, - checkedVals = isNew ? [unusedTargetCols[0]] : selectedTargetCols, - itemDisplayNames = targetColCandidateMasters, - subStrOpt = isNew ? [] : targetSubStrOpt, - forwardCol = true, - clearOption = !isNew, - columnNames = targetColNames, - columnDataTypes = targetDataTypes, - targetChosenTraceKeys); - - const bwdColumns = addGroupListSelection('edgeBackColParent', `edgeBackCol${idPostfix}`, 'backCol', + (itemOrgCols = targetColCandidates), + (itemAliases = targetColCandidates), + (checkedVals = isNew ? [unusedTargetCols[0]] : selectedTargetCols), + (itemDisplayNames = targetColCandidateMasters), + (subStrOpt = isNew ? [] : targetSubStrOpt), + (forwardCol = true), + (clearOption = !isNew), + (columnNames = targetColNames), + (columnDataTypes = targetDataTypes), + targetChosenTraceKeys, + ); + + const bwdColumns = addGroupListSelection( + 'edgeBackColParent', + `edgeBackCol${idPostfix}`, + 'backCol', selfColCandidates, - itemOrgs = selfColCandidates, - itemAliases = selfColCandidates, - checkedVals = isNew ? [unusedSelfCols[0]] : selectedSelfCols, - itemDisplayNames = selfColCandidateMasters, - subStrOpt = isNew ? [] : selfSubStrOpt, - forwardCol = false, - clearOption = !isNew, - columnNames = selfColNames, - columnDataTypes = selfDataTypes, - selfChosenTraceKeys); + (itemOrgs = selfColCandidates), + (itemAliases = selfColCandidates), + (checkedVals = isNew ? [unusedSelfCols[0]] : selectedSelfCols), + (itemDisplayNames = selfColCandidateMasters), + (subStrOpt = isNew ? 
[] : selfSubStrOpt), + (forwardCol = false), + (clearOption = !isNew), + (columnNames = selfColNames), + (columnDataTypes = selfDataTypes), + selfChosenTraceKeys, + ); for (let i = 0; i < fwdColumns.length; i++) { if (bwdColumns[i] && fwdColumns[i]) { @@ -977,7 +963,7 @@ const addTraceKey = (isNew = false) => { // update selected columns to disable selected columns updateSelectedColumns(); - if (!isNew) { + if (!isNew && deltaTimes !== undefined) { updateDeltaTime(); } @@ -1003,11 +989,9 @@ const saveTraceConfigs = (edgesCfg) => { }, body: JSON.stringify(edgesCfg), }) - .then(response => response.clone().json()) - .then((json) => { - }) - .catch((json) => { - }); + .then((response) => response.clone().json()) + .then((json) => {}) + .catch((json) => {}); }; const saveTraceConfigToDB = () => { @@ -1019,16 +1003,18 @@ const saveTraceConfigToDB = () => { displayRegisterMessage(tracingElements.alertProcLink); - // show msg informLinkingJobStarted(); }; - const getV2OrderedProcesses = async (data) => { - const res = await fetchData('api/setting/get_v2_ordered_processes', JSON.stringify(data), 'POST'); + const res = await fetchData( + 'api/setting/get_v2_ordered_processes', + JSON.stringify(data), + 'POST', + ); return res.ordered_processes; -} +}; const syncVisData = (procs = []) => { initVisData(procs); @@ -1063,7 +1049,7 @@ const reloadTraceConfigFromDB = (isUpdatePosition = true) => { 'Content-Type': 'application/json', }, }) - .then(response => response.clone().json()) + .then((response) => response.clone().json()) .then((json) => { // update new data const { procs } = JSON.parse(json.trace_config); @@ -1114,7 +1100,6 @@ $(() => { // show register modal $('#btn-trace-config-register').click(() => { $('#regenerate-confirm-modal').modal('show'); - }); // show reload confirm modal @@ -1143,14 +1128,23 @@ $(() => { }); }); -const updatePredictionNetwork = (networkContent, predictive, isUpdatePosition) => { +const updatePredictionNetwork = ( + networkContent, + predictive, + isUpdatePosition, +) => { // update prediction to network updateNodeInfo(networkContent.nodes, predictive, isUpdatePosition); updateEdgeInfo(networkContent.edges, predictive); }; // call backend API to save -const calcProcLink = async (url, currentEdges, isPredictive = false, isUpdatePosition = true) => { +const calcProcLink = async ( + url, + currentEdges, + isPredictive = false, + isUpdatePosition = true, +) => { let bodyData = null; if (currentEdges) { bodyData = JSON.stringify(currentEdges); @@ -1161,12 +1155,22 @@ const calcProcLink = async (url, currentEdges, isPredictive = false, isUpdatePos // simulate proc link const simulateProcLink = (currentEdges) => { - calcProcLink('api/setting/simulate_proc_link', currentEdges, true, false).then(); + calcProcLink( + 'api/setting/simulate_proc_link', + currentEdges, + true, + false, + ).then(); }; // calc real proc link' const realProcLink = (isUpdatePosition) => { - calcProcLink('api/setting/count_proc_link', false, false, isUpdatePosition).then(); + calcProcLink( + 'api/setting/count_proc_link', + false, + false, + isUpdatePosition, + ).then(); }; const getProcNameFromLabel = (procId) => { @@ -1178,20 +1182,35 @@ const getProcNameFromLabel = (procId) => { } }; -const updateNodeInfo = (predictionNodes, isPredictive = false, isUpdatePosition = true) => { +const updateNodeInfo = ( + predictionNodes, + isPredictive = false, + isUpdatePosition = true, +) => { const nodesPos = getConfigOption().nodesPosition; for (const procId in predictionNodes) { const totalCount = 
applySignificantDigit(predictionNodes[procId]); const currentProcName = getProcNameFromLabel(procId); const color = isPredictive ? COLOR.prediction : COLOR.real; - const totalTitle = isPredictive ? i18nNames.nodeLinkTitlePred : i18nNames.nodeLinkTitleReal; + const totalTitle = isPredictive + ? i18nNames.nodeLinkTitlePred + : i18nNames.nodeLinkTitleReal; const title = `${totalCount} : ${totalTitle}`; - const xPostion = nodesPos && nodesPos[procId] ? nodesPos[procId].x : null; - const yPostion = nodesPos && nodesPos[procId] ? nodesPos[procId].y : null; - const nodePosition = xPostion !== null && yPostion !== null ? { x: xPostion, y: yPostion } : {}; - const position = nodesPos && Object.keys(nodesPos).length > 0 - && hierarchicalDirection !== NORMAL_TYPE - && isUpdatePosition ? nodePosition : {}; + const xPostion = + nodesPos && nodesPos[procId] ? nodesPos[procId].x : null; + const yPostion = + nodesPos && nodesPos[procId] ? nodesPos[procId].y : null; + const nodePosition = + xPostion !== null && yPostion !== null + ? { x: xPostion, y: yPostion } + : {}; + const position = + nodesPos && + Object.keys(nodesPos).length > 0 && + hierarchicalDirection !== NORMAL_TYPE && + isUpdatePosition + ? nodePosition + : {}; nodes.update({ id: procId, @@ -1220,7 +1239,9 @@ const updateEdgeInfo = (predictionEdges, isPredictive = false) => { continue; } const color = isPredictive ? COLOR.prediction : COLOR.real; - const linkTitle = isPredictive ? i18nNames.edgeLinkTitlePred : i18nNames.edgeLinkTitleReal; + const linkTitle = isPredictive + ? i18nNames.edgeLinkTitlePred + : i18nNames.edgeLinkTitleReal; const title = `${linkCount} : ${linkTitle}`; for (const edgeId of edges.getIds()) { const edgeData = edges.get(edgeId); @@ -1250,39 +1271,45 @@ function networkRightClickHandler(e) { // trigger nodes network.off('oncontext'); - network.on('oncontext', (params) => { - const nodeID = network.getNodeAt(params.pointer.DOM); - const edgeId = network.getEdgeAt(params.pointer.DOM); - const selectedNode = nodeID || params.nodes[0]; - const selectedEdge = edgeId || params.edges[0]; - if (!selectedEdge && !selectedNode) return; - - const menu = $('#contextMenuTraceCfg'); - if (selectedNode) { - // hide edge items - $('.edge-item').hide(); - $(menu).find('li').attr('data-node-id', selectedNode); - network.selectNodes([selectedNode]); - } + network.on( + 'oncontext', + (params) => { + const nodeID = network.getNodeAt(params.pointer.DOM); + const edgeId = network.getEdgeAt(params.pointer.DOM); + const selectedNode = nodeID || params.nodes[0]; + const selectedEdge = edgeId || params.edges[0]; + if (!selectedEdge && !selectedNode) return; + + const menu = $('#contextMenuTraceCfg'); + if (selectedNode) { + // hide edge items + $('.edge-item').hide(); + $(menu).find('li').attr('data-node-id', selectedNode); + network.selectNodes([selectedNode]); + } - if (selectedEdge) { - $('.edge-item').show(); - $(menu).find('li').attr('data-edge-id', selectedEdge); - network.selectEdges([selectedEdge]); - } + if (selectedEdge) { + $('.edge-item').show(); + $(menu).find('li').attr('data-edge-id', selectedEdge); + network.selectEdges([selectedEdge]); + } - // show context menu when right click timeseries - const menuHeight = menu.height(); - const windowHeight = $(window).height(); - const left = params.event.clientX; - let top = params.event.clientY; - if (windowHeight - top < menuHeight) { - top -= menuHeight; - } - menu.css({ - left: `${left}px`, top: `${top}px`, display: 'block', - }); - }, false); + // show context menu when right click 
timeseries + const menuHeight = menu.height(); + const windowHeight = $(window).height(); + const left = params.event.clientX; + let top = params.event.clientY; + if (windowHeight - top < menuHeight) { + top -= menuHeight; + } + menu.css({ + left: `${left}px`, + top: `${top}px`, + display: 'block', + }); + }, + false, + ); return false; } @@ -1331,14 +1358,14 @@ const addEdgesFromNode = (e) => { const nodeId = $('#contextMenuTraceCfg li').attr('data-node-id'); const nodeList = Object.keys(processes); const indexFromNode = nodeList.indexOf(nodeId); - const indexToNode = indexFromNode === (nodeList.length - 1) ? 0 : indexFromNode + 1; + const indexToNode = + indexFromNode === nodeList.length - 1 ? 0 : indexFromNode + 1; const toNodeId = nodeList[indexToNode]; // trigger to add edge to self node if (nodeId) { // add self trace as default - handleAddEdge({ from: nodeId, to: processes[toNodeId].id }, () => { - }); + handleAddEdge({ from: nodeId, to: processes[toNodeId].id }, () => {}); } }; @@ -1346,8 +1373,7 @@ const editSelectedEdge = (e) => { $('#contextMenuTraceCfg').hide(); const edgeId = $('#contextMenuTraceCfg li').attr('data-edge-id'); const edgeData = edges.get(edgeId); - handleEditEdge(edgeData, () => { - }); + handleEditEdge(edgeData, () => {}); }; const removeSelectedEdge = (e) => { @@ -1372,25 +1398,28 @@ const getEdgeFromUI = () => { const targetCols = []; const targetOrgCols = []; const targetSubStrs = []; - $('div[id^="edgeForwardCol-"]').find('.form-group').each(function f(idx) { - const isOptionDateTime = $(this).find(tracingElements.edgeConfigDatetime).length; - const colElement = $(this).find('select[name=forwardCol]'); - const colAlias = $('option:selected', colElement).attr('alias'); - const colOrg = $('option:selected', colElement).attr('original'); - if (!isEmpty(colAlias)) targetCols.push(colAlias); - if (!isEmpty(colOrg)) targetOrgCols.push(colOrg); - - if (isOptionDateTime === 0) { - targetSubStrs.push(getMatchingDigits($(this))); - return; - } - targetSubStrs.push([]); - }); + $('div[id^="edgeForwardCol-"]') + .find('.form-group') + .each(function f(idx) { + const isOptionDateTime = $(this).find( + tracingElements.edgeConfigDatetime, + ).length; + const colElement = $(this).find('select[name=forwardCol]'); + const colAlias = $('option:selected', colElement).attr('alias'); + const colOrg = $('option:selected', colElement).attr('original'); + if (!isEmpty(colAlias)) targetCols.push(colAlias); + if (!isEmpty(colOrg)) targetOrgCols.push(colOrg); + + if (isOptionDateTime === 0) { + targetSubStrs.push(getMatchingDigits($(this))); + return; + } + targetSubStrs.push([]); + }); edgeData.target_col = targetCols; edgeData.target_orig_col = targetOrgCols; edgeData.target_substr = targetSubStrs; - // get Trace Self Data from modal const selfProcId = $('select[name="edgeBackProc"]').val(); edgeData.self_proc = selfProcId; @@ -1417,8 +1446,8 @@ const getEdgeFromUI = () => { if (isOptionDateTime === 0) { selfSubStrs.push(getMatchingDigits($(this))); - deltaTimes.push(0); - cutOffs.push(0); + deltaTimes.push(null); + cutOffs.push(null); } else { selfSubStrs.push([]); const deltaTime = $(this) @@ -1462,7 +1491,10 @@ const saveEditEdge = () => { return; } - validEdge = validateSubStr(edgeData.self_substr, edgeData.target_substr); + const validEdge = validateSubStr( + edgeData.self_substr, + edgeData.target_substr, + ); if (!validEdge.is_valid) { displayRegisterMessage('#alertMsgCheckSubStr', { message: validEdge.message, @@ -1470,17 +1502,14 @@ const saveEditEdge = () => { }); return; 
} - // validate delta time - const isValidDeltaTimes = deltaDatetimes.every((element) => { - if (element === '') return true; - const number = Number(element); - return Number.isInteger(number); - }); - const isValidCutOffs = cutOffs.every((element) => { - if (element === '') return true; - const number = Number(element); - return Number.isInteger(number); - }); + + // validate delta time and cut off + const isValidDeltaTimes = deltaDatetimes + .filter((v) => v !== null) + .every(isValidDatetimeInputValue); + const isValidCutOffs = cutOffs + .filter((v) => v !== null) + .every(isValidDatetimeInputValue); if (!isValidDeltaTimes || !isValidCutOffs) { displayRegisterMessage('#alertMsgCheckSubStr', { message: $('#i18nInvalidDeltaTime').text(), @@ -1489,7 +1518,7 @@ const saveEditEdge = () => { return; } - // save data to external/global dict + // save data to external/global dict drawEdgeToGUI(edgeData); $('#modal-edge-popup').modal('hide'); @@ -1505,7 +1534,7 @@ const drawEdgeToGUI = (edgeData) => { edges.update(edgeData); } currentEditEdge = {}; -} +}; const cancelEditEdge = () => false; @@ -1513,7 +1542,7 @@ const handleSwitchTraceConfig = (e) => { const edgeData = getEdgeFromUI(); const keys = Object.keys(edgeData); const values = Object.values(edgeData); - const replacedKeys = keys.map(k => { + const replacedKeys = keys.map((k) => { let key = k; if (key === 'from') { key = 'to'; @@ -1547,8 +1576,7 @@ const handleSwitchTraceConfig = (e) => { replacedKeys.forEach((item, index) => { newEdgeData[item] = values[index]; }); - handleEditEdge(newEdgeData, () => { - }); + handleEditEdge(newEdgeData, () => {}); }; const updateTraceConfig = (element, index) => { @@ -1563,9 +1591,12 @@ const updateTraceConfig = (element, index) => { if (dataType === DataTypes.DATETIME.name) { const inputElement = `
x -
` +
`; - if (targetColParent.find(tracingElements.edgeConfigDatetime).length === 0) { + if ( + targetColParent.find(tracingElements.edgeConfigDatetime).length === + 0 + ) { targetColParent.append(inputElement); } @@ -1581,41 +1612,121 @@ const updateTraceConfig = (element, index) => { const innerHTML = `
-
- ${i18nNames.linkWithTime} +
+ ${i18nNames.linkWithTime} + + 00:00:00
-
- ${i18nNames.i18nCutOff} +
+ ${i18nNames.i18nCutOff} + + 00:00:00
`; $(elementParent).append(innerHTML); + updateDeltaTimeElement( + index, + DEFAULT_DELTA_TIME_DATA_LINK, + DEFAULT_CUTOFF_DATA_LINK, + ); } return; } if (dataType !== DataTypes.DATETIME.name) { targetColParent.find(tracingElements.edgeConfigDatetime).remove(); - targetColParent.parents().eq(1).find(tracingElements.deltaDatetime).remove(); + targetColParent + .parents() + .eq(1) + .find(tracingElements.deltaDatetime) + .remove(); targetColParent.find(tracingElements.edgeConfigSerial).show(); } -} +}; const updateDeltaTime = () => { - deltaTimes.forEach(function (deltaTime, index) { - const traceDeltaTimeEle = $('#traceInfoModal').find( - `div[name="trace-config-delta-time-${index + 1}"]`, - ); - if (traceDeltaTimeEle.length) { - traceDeltaTimeEle - .find('input[name="deltaDatetime"]') - .first() - .val(deltaTime); - traceDeltaTimeEle - .find('input[name="cutOff"]') - .first() - .val(cutOffs[index]); - } + deltaTimes?.forEach(function (deltaTime, index) { + updateDeltaTimeElement(index + 1, deltaTime, cutOffs[index]); }); }; + +/** + * + * @param {number} index + * @param {number} deltaTime + * @param {number} cutOff + */ +const updateDeltaTimeElement = (index, deltaTime, cutOff) => { + const traceDeltaTimeEle = $('#traceInfoModal').find( + `div[name="trace-config-delta-time-${index}"]`, + ); + if (traceDeltaTimeEle.length) { + traceDeltaTimeEle + .find('input[name="deltaDatetime"]') + .first() + .val(deltaTime) + .trigger('change'); + traceDeltaTimeEle + .find('input[name="cutOff"]') + .first() + .val(cutOff) + .trigger('change'); + } +}; + +/** + * + * @param {object} e + */ +const updateDatetimeReprValue = (e) => { + const elem = $(e); + + const reprElem = elem + .siblings(`span[name="${tracingElements.datetimeReprClassName}"]`) + .first(); + + const { valid, value } = parseDateTimeByMinute(e.value); + if (!valid) { + elem.addClass(BORDER_RED_CLASS); + } else { + elem.removeClass(BORDER_RED_CLASS); + } + + reprElem.text(value); +}; + +/** + * + * @param {string} s + * @return {boolean} + */ +const isValidDatetimeInputValue = (s) => { + return s.trim().length !== 0 && !Number.isNaN(Number(s)); +}; + +/** + * + * @param {string} minuteString + * @return {{valid: boolean, value: string}} + */ +const parseDateTimeByMinute = (minuteString) => { + const isValid = isValidDatetimeInputValue(minuteString); + + const minuteNumber = Number(minuteString); + const hours = Math.floor(minuteNumber / 60); + const minutes = Math.floor(minuteNumber - hours * 60); + const seconds = Math.floor(minuteNumber * 60 - minutes * 60 - hours * 3600); + + const padNumber = (e) => String(e).padStart(2, '0'); + + const parsedValue = isValid + ? 
`${padNumber(hours)}:${padNumber(minutes)}:${padNumber(seconds)}` + : '--:--:--'; + + return { + valid: isValid, + value: parsedValue, + }; +}; diff --git a/ap/static/table_viewer/css/table_viewer.css b/ap/static/table_viewer/css/table_viewer.css index 633543f..fd87d84 100644 --- a/ap/static/table_viewer/css/table_viewer.css +++ b/ap/static/table_viewer/css/table_viewer.css @@ -16,11 +16,13 @@ table.dataTable .odd { background-color: #303030 !important; } -table.dataTable .odd:hover, table.dataTable .even:hover { - background-color: rgba(0,0,0,0.075) !important; +table.dataTable .odd:hover, +table.dataTable .even:hover { + background-color: rgba(0, 0, 0, 0.075) !important; } -table.dataTable thead th, table.dataTable thead td { +table.dataTable thead th, +table.dataTable thead td { border-bottom: none !important; } @@ -69,10 +71,19 @@ table.dataTable thead th, table.dataTable thead td { background-clip: padding-box; border: 1px solid transparent; border-radius: 0.25rem; - -webkit-transition: border-color 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; + -webkit-transition: + border-color 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + box-shadow 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; } .dataTables_length option:hover { @@ -97,10 +108,19 @@ table.dataTable thead th, table.dataTable thead td { background-clip: padding-box; border: 1px solid transparent; border-radius: 0.25rem; - -webkit-transition: border-color 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out; - transition: border-color 0.15s ease-in-out, box-shadow 0.15s ease-in-out, -webkit-box-shadow 0.15s ease-in-out; + -webkit-transition: + border-color 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + box-shadow 0.15s ease-in-out; + transition: + border-color 0.15s ease-in-out, + box-shadow 0.15s ease-in-out, + -webkit-box-shadow 0.15s ease-in-out; } .dataTables_wrapper .dataTables_paginate .ellipsis { @@ -121,7 +141,7 @@ table.dataTable thead th, table.dataTable thead td { border: 1px solid #222; } -.select2-container--default .select2-results__option[aria-selected=true] { +.select2-container--default .select2-results__option[aria-selected='true'] { background-color: #5897fb; } @@ -130,17 +150,27 @@ table.dataTable thead th, table.dataTable thead td { border: 1px solid #222222; } -.select2-container--default .select2-selection--single .select2-selection__rendered { +.select2-container--default + .select2-selection--single + .select2-selection__rendered { color: #ffffff; line-height: 30px; } -.select2-container--default .select2-selection--single .select2-selection__arrow { +.select2-container--default + .select2-selection--single + .select2-selection__arrow { height: 30px; } -.select2-container--default 
.select2-selection--single .select2-selection__arrow b, -.select2-container--default.select2-container--open .select2-selection--single .select2-selection__arrow b { +.select2-container--default + .select2-selection--single + .select2-selection__arrow + b, +.select2-container--default.select2-container--open + .select2-selection--single + .select2-selection__arrow + b { border-width: 6px 3px 0 3px; margin-left: -1px; border-color: #ffffff transparent transparent transparent; @@ -156,11 +186,11 @@ table.dataTable thead th, table.dataTable thead td { color: #ffffff; } -.select2-selection__rendered{ +.select2-selection__rendered { padding-left: 1rem !important; } select.col-md-5, span.select2.select2-container.select2-container--default { max-width: 40vw; min-width: 40vw; -} \ No newline at end of file +} diff --git a/ap/static/table_viewer/js/table_viewer.js b/ap/static/table_viewer/js/table_viewer.js index 7ec5a76..eca067a 100644 --- a/ap/static/table_viewer/js/table_viewer.js +++ b/ap/static/table_viewer/js/table_viewer.js @@ -1,5 +1,3 @@ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ const tableSpecialCharRegex = /\/|\*|"| /g; const cachedColumns = {}; let dataTableInstance = null; @@ -36,13 +34,12 @@ const getAllDatabaseConfigTbView = async () => { 'Content-Type': 'application/json', }, }) - .then(response => response.clone().json()) + .then((response) => response.clone().json()) .catch(); return json; }; -// eslint-disable-next-line no-unused-vars const getColumnNames = async (database, tableName) => { if (!database || !tableName) return {}; @@ -55,14 +52,18 @@ const getColumnNames = async (database, tableName) => { } // get columns from db - const url = new URL('/ap/api/table_viewer/column_names', window.location.href).href; + const url = new URL( + '/ap/api/table_viewer/column_names', + window.location.href, + ).href; const params = { database, table: replacedTableName, }; const paramString = new URLSearchParams(params); - const json = await fetch(`${url}?${paramString.toString()}`) - .then(response => response.clone().json()); + const json = await fetch(`${url}?${paramString.toString()}`).then( + (response) => response.clone().json(), + ); // add columns to cache if (cachedColumns[database]) { @@ -81,15 +82,19 @@ const showSortColumnOptions = async (procId) => { ele.sortSelection.empty(); ele.sortSelection.append(sortSelectionHTML); return; - }; + } - const res = await fetchData(`/ap/api/setting/proc_filter_config/${procId}`, {}, 'GET'); + const res = await fetchData( + `/ap/api/setting/proc_table_viewer_columns/${procId}`, + {}, + 'GET', + ); const procCfg = res.data; const procColumns = res.data.columns; const tableName = procCfg.data_source.db_detail ? 
procCfg.table_name : ''; - setDbsAndTableInfo(procCfg.data_source.id, tableName) + setDbsAndTableInfo(procCfg.data_source.id, tableName); // show loading icon $(ele.tblViewerSpinner).toggleClass('spinner-grow'); @@ -98,7 +103,11 @@ const showSortColumnOptions = async (procId) => { if (procColumns.length) { procColumns.forEach((col) => { const columnName = col.column_raw_name || col.column_name; - sortSelectionHTML += buildOptionHTML(col.column_name, col.shown_name, columnName); + sortSelectionHTML += buildOptionHTML( + col.column_name, + col.shown_name, + columnName, + ); }); } @@ -107,9 +116,14 @@ const showSortColumnOptions = async (procId) => { addAttributeToElement(); }; -const queryRecordsFromDB = ((databaseCode, tableName, - sortColumn = null, sortOrder = 'DESC', limit = null, - callbackFunc = null) => { +const queryRecordsFromDB = ( + databaseCode, + tableName, + sortColumn = null, + sortOrder = 'DESC', + limit = null, + callbackFunc = null, +) => { const data = { database_code: databaseCode, table_name: tableName, @@ -125,7 +139,7 @@ const queryRecordsFromDB = ((databaseCode, tableName, }, body: JSON.stringify(data), }) - .then(response => response.clone().json()) + .then((response) => response.clone().json()) .then((json) => { loadingShow(true); if (callbackFunc) { @@ -137,7 +151,7 @@ const queryRecordsFromDB = ((databaseCode, tableName, loadingHide(); setTimeout(loadingHide, 0); }); -}); +}; const setDbsAndTableInfo = (dbsCode, tableName) => { ele.dbsCodeInput.val(dbsCode); @@ -170,7 +184,10 @@ const getLanguage = () => { ja: 'Japanese', }; const locale = docCookies.getItem('locale'); - const url = new URL(`/ap/static/table_viewer/lang/${langMap[locale]}.json`, window.location.href).href; + const url = new URL( + `/ap/static/table_viewer/lang/${langMap[locale]}.json`, + window.location.href, + ).href; return url; }; @@ -268,28 +285,28 @@ $(() => { cleanViewTable(); loadingShow(); - const { - databaseCode, - tableName, - sortColumn, - sortOrder, - limit, - } = getFormInput(); + const { databaseCode, tableName, sortColumn, sortOrder, limit } = + getFormInput(); if (!databaseCode) { setTimeout(loadingHide, 0); return; } - queryRecordsFromDB(databaseCode, tableName, - sortColumn, sortOrder, limit, - showRecordsToViewTable); + queryRecordsFromDB( + databaseCode, + tableName, + sortColumn, + sortOrder, + limit, + showRecordsToViewTable, + ); }); ele.tableSelection.select2(); ele.sortSelection.select2(); // Load userBookmarkBar $('#userBookmarkBar').show(); - + // show load settings menu handleLoadSettingBtns(); }); diff --git a/ap/static/table_viewer/lang/English.json b/ap/static/table_viewer/lang/English.json index 8977af7..259f9ea 100644 --- a/ap/static/table_viewer/lang/English.json +++ b/ap/static/table_viewer/lang/English.json @@ -1,23 +1,23 @@ -{ - "sEmptyTable": "No data available in table", - "sInfo": "Showing _START_ to _END_ of _TOTAL_ entries", - "sInfoEmpty": "Showing 0 to 0 of 0 entries", - "sInfoFiltered": "(filtered from _MAX_ total entries)", - "sInfoPostFix": "", - "sInfoThousands": ",", - "sLengthMenu": "Show _MENU_ entries", - "sLoadingRecords": "Loading...", - "sProcessing": "Processing...", - "sSearch": "Search:", - "sZeroRecords": "No matching records found", - "oPaginate": { - "sFirst": "First", - "sLast": "Last", - "sNext": "Next", - "sPrevious": "Previous" - }, - "oAria": { - "sSortAscending": ": activate to sort column ascending", - "sSortDescending": ": activate to sort column descending" - } -} \ No newline at end of file +{ + "sEmptyTable": "No data 
available in table", + "sInfo": "Showing _START_ to _END_ of _TOTAL_ entries", + "sInfoEmpty": "Showing 0 to 0 of 0 entries", + "sInfoFiltered": "(filtered from _MAX_ total entries)", + "sInfoPostFix": "", + "sInfoThousands": ",", + "sLengthMenu": "Show _MENU_ entries", + "sLoadingRecords": "Loading...", + "sProcessing": "Processing...", + "sSearch": "Search:", + "sZeroRecords": "No matching records found", + "oPaginate": { + "sFirst": "First", + "sLast": "Last", + "sNext": "Next", + "sPrevious": "Previous" + }, + "oAria": { + "sSortAscending": ": activate to sort column ascending", + "sSortDescending": ": activate to sort column descending" + } +} diff --git a/ap/static/table_viewer/lang/Japanese.json b/ap/static/table_viewer/lang/Japanese.json index 7ceb778..9774cb3 100644 --- a/ap/static/table_viewer/lang/Japanese.json +++ b/ap/static/table_viewer/lang/Japanese.json @@ -1,23 +1,23 @@ -{ - "sEmptyTable": "テーブルにデータがありません", - "sInfo": " _TOTAL_ 件中 _START_ から _END_ まで表示", - "sInfoEmpty": " 0 件中 0 から 0 まで表示", - "sInfoFiltered": "(全 _MAX_ 件より抽出)", - "sInfoPostFix": "", - "sInfoThousands": ",", - "sLengthMenu": "_MENU_ 件表示", - "sLoadingRecords": "読み込み中...", - "sProcessing": "処理中...", - "sSearch": "検索:", - "sZeroRecords": "一致するレコードがありません", - "oPaginate": { - "sFirst": "先頭", - "sLast": "最終", - "sNext": "次", - "sPrevious": "前" - }, - "oAria": { - "sSortAscending": ": 列を昇順に並べ替えるにはアクティブにする", - "sSortDescending": ": 列を降順に並べ替えるにはアクティブにする" - } -} +{ + "sEmptyTable": "テーブルにデータがありません", + "sInfo": " _TOTAL_ 件中 _START_ から _END_ まで表示", + "sInfoEmpty": " 0 件中 0 から 0 まで表示", + "sInfoFiltered": "(全 _MAX_ 件より抽出)", + "sInfoPostFix": "", + "sInfoThousands": ",", + "sLengthMenu": "_MENU_ 件表示", + "sLoadingRecords": "読み込み中...", + "sProcessing": "処理中...", + "sSearch": "検索:", + "sZeroRecords": "一致するレコードがありません", + "oPaginate": { + "sFirst": "先頭", + "sLast": "最終", + "sNext": "次", + "sPrevious": "前" + }, + "oAria": { + "sSortAscending": ": 列を昇順に並べ替えるにはアクティブにする", + "sSortDescending": ": 列を降順に並べ替えるにはアクティブにする" + } +} diff --git a/ap/static/tile_interface/css/tile_interface.css b/ap/static/tile_interface/css/tile_interface.css index d1a5b85..ce5f7e7 100644 --- a/ap/static/tile_interface/css/tile_interface.css +++ b/ap/static/tile_interface/css/tile_interface.css @@ -74,11 +74,11 @@ a.link-address { text-align: center; text-decoration: none; outline: none; - transition: all .3s; + transition: all 0.3s; } .ghost-button:hover { - box-shadow: 0 0 45px rgba(255, 255, 255, .6); + box-shadow: 0 0 45px rgba(255, 255, 255, 0.6); } /* tile search by use */ @@ -115,16 +115,16 @@ a.link-address { border-right: 12px solid transparent; border-top: 12px solid #444444; float: left; - -webkit-transition: -webkit-transform .8s ease-in-out; - -moz-transition: -moz-transform .8s ease-in-out; - -o-transition: -o-transform .8s ease-in-out; - transition: transform .8s ease-in-out; + -webkit-transition: -webkit-transform 0.8s ease-in-out; + -moz-transition: -moz-transform 0.8s ease-in-out; + -o-transition: -o-transform 0.8s ease-in-out; + transition: transform 0.8s ease-in-out; } .btn-collapse.collapsed { - transform:rotate(-90deg); - -ms-transform:rotate(-90deg); - -webkit-transform:rotate(-90deg); - -o-transform:rotate(-90deg); + transform: rotate(-90deg); + -ms-transform: rotate(-90deg); + -webkit-transform: rotate(-90deg); + -o-transform: rotate(-90deg); } .section-name { min-width: 60%; @@ -133,7 +133,8 @@ a.link-address { width: 25px; margin-top: -5px; } -.section-name, .icon-col { +.section-name, +.icon-col { display: flex; } .section-name 
h3 { @@ -160,4 +161,3 @@ a.link-address { .collapsed-btn svg { margin-right: 4px; } - diff --git a/ap/static/tile_interface/js/tile_interface.js b/ap/static/tile_interface/js/tile_interface.js index 4cef678..22455a5 100644 --- a/ap/static/tile_interface/js/tile_interface.js +++ b/ap/static/tile_interface/js/tile_interface.js @@ -1,7 +1,8 @@ -// eslint-disable-next-line no-unused-vars let isOpenNewTab = false; -document.getElementById('content').addEventListener('contextmenu', event => event.preventDefault()); -const setOpenTab = () => isOpenNewTab = true; +document + .getElementById('content') + .addEventListener('contextmenu', (event) => event.preventDefault()); +const setOpenTab = () => (isOpenNewTab = true); const redirectPage = (tile) => { // mark as call page from tile interface, do not apply user setting useTileInterface().set(); diff --git a/ap/static/trace_data/css/trace_data.css b/ap/static/trace_data/css/trace_data.css index 38c1606..c8aa85e 100644 --- a/ap/static/trace_data/css/trace_data.css +++ b/ap/static/trace_data/css/trace_data.css @@ -36,7 +36,7 @@ #plot-cards table thead:after { line-height: 0px; - content: "\200C"; + content: '\200C'; display: block; } @@ -111,12 +111,12 @@ right: 0; bottom: 0; left: 0; - background: rgba(0, 0, 0, .5); + background: rgba(0, 0, 0, 0.5); z-index: 999; } .loading::before { - content: ""; + content: ''; display: block; position: fixed; left: 50%; @@ -130,7 +130,7 @@ } .loading::after { - content: ""; + content: ''; display: block; position: fixed; left: 50%; @@ -140,7 +140,7 @@ border-radius: 40px; margin-top: -10px; margin-left: -10px; - border: 4px solid #60ABB9; + border: 4px solid #60abb9; border-right: 4px solid white; animation: rotate 1s infinite linear; } @@ -149,7 +149,8 @@ min-width: 130px; } -.item-name, .prc-name { +.item-name, +.prc-name { color: #65c5f1; text-align: center; vertical-align: middle; @@ -206,27 +207,26 @@ right: 0; bottom: 0; background-color: #434343; - -webkit-transition: .4s; - transition: .4s; + -webkit-transition: 0.4s; + transition: 0.4s; } .slider:before { position: absolute; - content: ""; + content: ''; height: 28px; width: 28px; left: 0px; bottom: 0px; background-color: white; - -webkit-transition: .4s; - transition: .4s; + -webkit-transition: 0.4s; + transition: 0.4s; } input.primary:checked + .slider { background-color: #375a7f; } - input:focus + .slider { box-shadow: 0 0 1px #375a7f; } @@ -308,7 +308,8 @@ input:checked + .slider:before { text-decoration: underline; } -.hist-summary, .hist-summary-detail { +.hist-summary, +.hist-summary-detail { font-size: 0.8vw; display: none; } @@ -400,7 +401,7 @@ input:checked + .slider:before { .cate-value .cate-tooltip { visibility: hidden; width: 400px; - background-color: rgba(8, 8, 8, .6); + background-color: rgba(8, 8, 8, 0.6); color: #fff; text-align: left; font-size: 11px; @@ -472,10 +473,10 @@ input:checked + .slider:before { .cross.h.left, .cross.v.right, .cross.h.right, -.cross[id$="left-v"], -.cross[id$="left-h"], -.cross[id$="right-v"], -.cross[id$="right-h"], +.cross[id$='left-v'], +.cross[id$='left-h'], +.cross[id$='right-v'], +.cross[id$='right-h'], .cross.cross-anchor { height: fit-content; } @@ -578,7 +579,8 @@ span.btn-anchor { z-index: 100; } -span.btn-anchor.pin, span.btn-anchor:hover { +span.btn-anchor.pin, +span.btn-anchor:hover { color: white; } @@ -608,7 +610,8 @@ span.btn-anchor.pin, span.btn-anchor:hover { text-align: center; } -#serialTable th, #serialTable td { +#serialTable th, +#serialTable td { padding: 0.5rem; } @@ -633,7 +636,7 @@ 
span.btn-anchor.pin, span.btn-anchor:hover { .whisker { max-width: 4vw; min-width: 3vw; - height: calc(15vw - 1rem) !important + height: calc(15vw - 1rem) !important; } .time-series { @@ -676,12 +679,12 @@ span.btn-anchor.pin, span.btn-anchor:hover { .show-more-btn { cursor: pointer; display: inline-block; - padding: 0 .5em; + padding: 0 0.5em; margin-top: 5px; color: #666; line-height: 2; border: 1px solid #ddd; - border-radius: .25em; + border-radius: 0.25em; } .show-more-div { @@ -692,9 +695,9 @@ span.btn-anchor.pin, span.btn-anchor:hover { position: relative; } -.histogram-tab-content.tab-content>.tab-pane:not(:first-child) { +.histogram-tab-content.tab-content > .tab-pane:not(:first-child) { position: absolute; - display: block!important; + display: block !important; width: 100%; height: 100%; visibility: hidden; @@ -704,11 +707,11 @@ span.btn-anchor.pin, span.btn-anchor:hover { z-index: -1; } -.histogram-tab-content.tab-content>.active { - position: static!important; +.histogram-tab-content.tab-content > .active { + position: static !important; display: block; - visibility: visible!important; - opacity: 1!important; + visibility: visible !important; + opacity: 1 !important; z-index: unset; } @@ -726,4 +729,4 @@ span.btn-anchor.pin, span.btn-anchor:hover { height: 100%; font-size: 10px; padding: 0 5px 0 24px; -} \ No newline at end of file +} diff --git a/ap/static/trace_data/js/trace_data.js b/ap/static/trace_data/js/trace_data.js index 2f0393b..f207122 100644 --- a/ap/static/trace_data/js/trace_data.js +++ b/ap/static/trace_data/js/trace_data.js @@ -1,9 +1,3 @@ -/* eslint-disable no-loop-func */ -/* eslint-disable no-restricted-syntax */ -/* eslint-disable guard-for-in */ -/* eslint-disable no-unused-vars */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ const REQUEST_TIMEOUT = setRequestTimeOut(); const MAX_NUMBER_OF_GRAPH = 20; @@ -109,7 +103,8 @@ const i18n = { default: $('#partNoDefaultName').text() || 'Default', timestamp: $('#i18nTimestamp').text() || 'Default', index: $('#i18nIndex').text() || 'Default', - cannotBeDisplayed: $('#i18nCannotBeDisplayed').text() || 'Cannot be displayed', + cannotBeDisplayed: + $('#i18nCannotBeDisplayed').text() || 'Cannot be displayed', thinDataShown: $('#i18nThinDataShown').text(), catLimitMsg: $('#i18nCatLimitedMsg').text().split('BREAK_LINE'), overUniqueLimitLabel: $('#i18nOverUniqueLimitLabel').text(), @@ -136,17 +131,18 @@ const updateIndexInforTable = () => { $('#index-infor-table tbody').append(indexTbodyDOM); }; const triggerSerialTableEvents = () => { - $('.index-inform').unbind('mouseenter').on('mouseenter', () => { - $('.index-inform-content').show(); - updateIndexInforTable(); - }); + $('.index-inform') + .unbind('mouseenter') + .on('mouseenter', () => { + $('.index-inform-content').show(); + updateIndexInforTable(); + }); $('.index-inform-content').on('mouseleave', () => { $('.index-inform-content').hide(); }); }; - $(() => { // generate tab ID while (tabID === null || sessionStorage.getItem(tabID)) { @@ -179,8 +175,13 @@ $(() => { }); // add first condition process - const condProcItem = addCondProc(endProcs.ids, endProcs.names, '', formElements.formID.replace('#', ''), - 'btn-add-cond-proc'); + const condProcItem = addCondProc( + endProcs.ids, + endProcs.names, + '', + formElements.formID.replace('#', ''), + 'btn-add-cond-proc', + ); condProcItem(); // click even of condition proc add button @@ -221,7 +222,12 @@ $(() => { setGraphSetting(); const currentTraceData = graphStore.getTraceData(); // TODO: 
should update than re-draw - drawHistogramsTab(currentTraceData, fppScaleOption.yAxis, false, fppScaleOption.xAxis); + drawHistogramsTab( + currentTraceData, + fppScaleOption.yAxis, + false, + fppScaleOption.xAxis, + ); }); // set copy clipboard for setting information @@ -245,33 +251,52 @@ $(() => { const autoScrollToChart = (milisec = 100) => { // Move screen to graph after pushing グラフ表示 button loadingHide(); - $('html, body').animate({ - scrollTop: $(formElements.traceDataTabs).offset().top + 15, - }, milisec); + $('html, body').animate( + { + scrollTop: getOffsetTopDisplayGraph(formElements.traceDataTabs), + }, + milisec, + ); }; const buildTimeSeriesCardHTML = (chartOption, cssName) => { - const {index} = chartOption; - const {endProcName} = chartOption; - const {sensorId} = chartOption; - const {getProc} = chartOption; - const {getVal} = chartOption; - const {catExpBox} = chartOption; + const { index } = chartOption; + const { endProcName } = chartOption; + const { sensorId } = chartOption; + const { getProc } = chartOption; + const { getVal } = chartOption; + const { catExpBox } = chartOption; const graphCanvasHTML = buildGraphContainerHTML(chartOption); - const {allSummaryData} = chartOption; - const {latestChartInfoIdx} = chartOption; - const {startProc} = chartOption; - const {beforeRankValues} = chartOption; - const {stepChartSummary} = chartOption; - const {isCTCol} = chartOption; + const { allSummaryData } = chartOption; + const { latestChartInfoIdx } = chartOption; + const { startProc } = chartOption; + const { beforeRankValues } = chartOption; + const { stepChartSummary } = chartOption; + const { isCTCol } = chartOption; + const { unit } = chartOption; const generalInfo = { - getProc, getVal, startProc, endProcName, catExpBox, + getProc, + getVal, + startProc, + endProcName, + catExpBox, }; const summaryResultsHTMLs = []; allSummaryData.forEach((summaryOption, idx) => { - const summaryResultsHTML = buildTimeSeriesSummaryResultsHTML(summaryOption, idx, generalInfo, beforeRankValues, stepChartSummary, isCTCol); - const display = `${latestChartInfoIdx}` === `${idx}` ? 'display:block;' : 'display:none;'; + const summaryResultsHTML = buildTimeSeriesSummaryResultsHTML( + summaryOption, + idx, + generalInfo, + beforeRankValues, + stepChartSummary, + isCTCol, + unit, + ); + const display = + `${latestChartInfoIdx}` === `${idx}` + ? 'display:block;' + : 'display:none;'; summaryResultsHTMLs.push(`
${summaryResultsHTML}
`); @@ -335,20 +360,26 @@ const calcContainerWidth = (showScatterPlot = false) => { }; const buildGraphContainerHTML = (chartOption) => { - const {endProcName} = chartOption; - const {index} = chartOption; - const {tsCanvasId} = chartOption; - const {histCanvasId} = chartOption; - const {whiskerCanvasId} = chartOption; - const {sctrCanvasId} = chartOption; - const {showScatterPlot} = chartOption; - const {chartCols} = chartOption; - const {getVal} = chartOption; - const {catExpBox} = chartOption; - const {dicScatterXY} = chartOption; - const {isCTCol} = chartOption; + const { endProcName } = chartOption; + const { index } = chartOption; + const { tsCanvasId } = chartOption; + const { histCanvasId } = chartOption; + const { whiskerCanvasId } = chartOption; + const { sctrCanvasId } = chartOption; + const { showScatterPlot } = chartOption; + const { chartCols } = chartOption; + const { getVal } = chartOption; + const { catExpBox } = chartOption; + const { dicScatterXY } = chartOption; + const { isCTCol } = chartOption; + let { unit } = chartOption; let graphCanvasHTML = ''; let catExpBoxHTML = ''; + if (unit && unit !== '' && unit !== 'Null') { + unit = ` [${unit}]`; + } else { + unit = ''; + } let CTLabel = ''; if (isCTCol) { @@ -363,7 +394,7 @@ const buildGraphContainerHTML = (chartOption) => {
${endProcName} - ${getVal} ${CTLabel} + ${getVal}${unit} ${CTLabel} ${catExpBoxHTML}
@@ -452,7 +483,6 @@ const buildGraphContainerHTML = (chartOption) => {
`; - const colSize = chartCols.histogram; graphCanvasHTML += ` @@ -462,6 +492,9 @@ const buildGraphContainerHTML = (chartOption) => {
+
+ ${unit.trim()} +
`; if (showScatterPlot && dicScatterXY[index - 1]) { @@ -478,7 +511,18 @@ const buildGraphContainerHTML = (chartOption) => { return graphCanvasHTML; }; -const produceExceptionArrayY = (plotdata, yMin, yMax, unlinkedIdxs, noneIdxs, infIdxs, negInfIdxs, negOutlierIdxs, outlierIdxs) => { +const produceExceptionArrayY = ( + plotdata, + yMin, + yMax, + unlinkedIdxs, + noneIdxs, + infIdxs, + negInfIdxs, + negOutlierIdxs, + outlierIdxs, + beforeRankValues, +) => { const arrayYEx = new Array(plotdata.length).fill(null); const plotDataExColor = new Array(plotdata.length).fill(null); @@ -492,7 +536,9 @@ const produceExceptionArrayY = (plotdata, yMin, yMax, unlinkedIdxs, noneIdxs, in plotDataExColor[idx] = CONST.COLOR_INF; } for (const idx of noneIdxs || []) { - arrayYEx[idx] = yMax; + arrayYEx[idx] = beforeRankValues + ? yMax + CONST.RESIZE_RANGE_CHART + : yMax; // Bar chart show NA at yMax plotDataExColor[idx] = CONST.COLOR_NONE; } for (const idx of outlierIdxs || []) { @@ -507,30 +553,41 @@ const produceExceptionArrayY = (plotdata, yMin, yMax, unlinkedIdxs, noneIdxs, in arrayYEx[idx] = yMin; plotDataExColor[idx] = CONST.COLOR_UNLINKED; } - return {arrayYEx, plotDataExColor}; + return { arrayYEx, plotDataExColor }; }; -const getStartEndPoint = (xAxisOption = 'TIME', timesLength = 20, data = {}) => { +const getStartEndPoint = ( + xAxisOption = 'TIME', + timesLength = 20, + data = {}, +) => { if (xAxisOption === 'INDEX') { return [0, Math.max(20, timesLength)]; } - const startDateTime = moment.utc(`${data.COMMON.START_DATE} ${data.COMMON.START_TIME}`).local().format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS); - const endDateTime = moment.utc(`${data.COMMON.END_DATE} ${data.COMMON.END_TIME}`).local().format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS); + const startDateTime = moment + .utc(`${data.COMMON.START_DATE} ${data.COMMON.START_TIME}`) + .local() + .format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS); + const endDateTime = moment + .utc(`${data.COMMON.END_DATE} ${data.COMMON.END_TIME}`) + .local() + .format(moment.HTML5_FMT.DATETIME_LOCAL_SECONDS); return [startDateTime, endDateTime]; }; - const convertToIndex = (times, chartInfo, startPoint, endPoint) => { const actFrom = chartInfo['act-from']; const actTo = chartInfo['act-to']; let fromIndex = startPoint; if (!isEmpty(actFrom)) { - fromIndex = binarySearch(times, createDatetime(actFrom), (x, y) => (x - y)) + 1; + fromIndex = + binarySearch(times, createDatetime(actFrom), (x, y) => x - y) + 1; } let toIndex = endPoint; if (!isEmpty(actTo)) { - toIndex = binarySearch(times, createDatetime(actTo), (x, y) => (x - y)) + 1; + toIndex = + binarySearch(times, createDatetime(actTo), (x, y) => x - y) + 1; } const chartInfoCI = _.cloneDeep(chartInfo); chartInfoCI['act-from'] = fromIndex; @@ -542,7 +599,7 @@ const convertChartInfoToIndex = (data) => { let times = getNode(data, ['times'], []) || []; const [startPoint, endPoint] = getStartEndPoint('INDEX', times.length); const len = data.array_plotdata.length; - times = times.map(x => new Date(x)); + times = times.map((x) => new Date(x)); if (!data.array_plotdata) return; for (let i = 0; i < len; i++) { const chartInfos = data.array_plotdata[i].chart_infos || []; @@ -551,11 +608,21 @@ const convertChartInfoToIndex = (data) => { data.array_plotdata[i].chart_infos_org_ci = []; for (const cIdx in chartInfos) { const chartInfo = chartInfos[cIdx]; - const chartInfoCI = convertToIndex(times, chartInfo, startPoint, endPoint); + const chartInfoCI = convertToIndex( + times, + chartInfo, + startPoint, + endPoint, + ); 
data.array_plotdata[i].chart_infos_ci.push(chartInfoCI); const chartInfoOrg = chartInfosOrg[cIdx]; - const chartInfoOrgCI = convertToIndex(times, chartInfoOrg, startPoint, endPoint); + const chartInfoOrgCI = convertToIndex( + times, + chartInfoOrg, + startPoint, + endPoint, + ); data.array_plotdata[i].chart_infos_org_ci.push(chartInfoOrgCI); } } @@ -577,7 +644,6 @@ const selectScatterXY = (plots) => { return dicIdxs; }; - const traceDataChart = (data, clearOnFlyFilter) => { if (isEmpty(data)) return; @@ -608,7 +674,7 @@ const traceDataChart = (data, clearOnFlyFilter) => { // convert time data to local time, local time is applied to all functions const times = getNode(data, ['times'], []) || []; // data.times = times.map(x => new Date(x)); // array of Date objects, not strings - data.times = times.map(x => moment.utc(x).toDate()); // convert to localtime + data.times = times.map((x) => moment.utc(x).toDate()); // convert to localtime const xAxisOption = data.COMMON.xOption; let [startPoint, endPoint] = getStartEndPoint('TIME', 20, data); @@ -627,7 +693,11 @@ const traceDataChart = (data, clearOnFlyFilter) => { // マスタが存在するならマスタ情報を適用 const endProcId = plotData.end_proc_id; const sensorId = plotData.end_col_id; - const isHideNonePoint = isHideNoneDataPoint(endProcId, sensorId, data.COMMON.remove_outlier); + const isHideNonePoint = isHideNoneDataPoint( + endProcId, + sensorId, + data.COMMON.remove_outlier, + ); const formCommon = data.COMMON; const arrayY = data.array_plotdata[i].array_y; @@ -638,7 +708,9 @@ const traceDataChart = (data, clearOnFlyFilter) => { const slotCount = data.array_plotdata[i].slot_count; const scaleInfo = getScaleInfo(data.array_plotdata[i], scaleOption); const unlinkedIdxs = data.array_plotdata[i].unlinked_idxs; - const noneIdxs = isHideNonePoint ? [] : data.array_plotdata[i].none_idxs; + const noneIdxs = isHideNonePoint + ? [] + : data.array_plotdata[i].none_idxs; const infIdxs = data.array_plotdata[i].inf_idxs; const negInfIdxs = data.array_plotdata[i].neg_inf_idxs; const isCatLimited = data.array_plotdata[i].is_cat_limited || false; @@ -648,17 +720,30 @@ const traceDataChart = (data, clearOnFlyFilter) => { beforeRankValues = makeDictFrom2Arrays(...beforeRankValues); } - const [dictIdx2YValue, arrayYTS] = buildMapIndex2OutlierYValue(data.array_plotdata[i], scaleInfo); - const categoryDistributed = beforeRankValues ? data.array_plotdata[i].category_distributed : null; + const [dictIdx2YValue, arrayYTS] = buildMapIndex2OutlierYValue( + data.array_plotdata[i], + scaleInfo, + ); + const categoryDistributed = beforeRankValues + ? data.array_plotdata[i].category_distributed + : null; // get latest thresholds -> show thresholds in scatter, histogram, summary const filterCond = data.array_plotdata[i].catExpBox - ? (Array.isArray(data.array_plotdata[i].catExpBox) - ? data.array_plotdata[i].catExpBox : [data.array_plotdata[i].catExpBox]) + ? Array.isArray(data.array_plotdata[i].catExpBox) + ? 
data.array_plotdata[i].catExpBox + : [data.array_plotdata[i].catExpBox] : null; - const [chartInfos, chartInfosOrg] = getChartInfo(data.array_plotdata[i], xAxisOption, filterCond); + const [chartInfos, chartInfosOrg] = getChartInfo( + data.array_plotdata[i], + xAxisOption, + filterCond, + ); const [latestChartInfo, latestChartInfoIdx] = chooseLatestThresholds( - chartInfos, chartInfosOrg, null, convertFunc, + chartInfos, + chartInfosOrg, + null, + convertFunc, ); const threshHigh = latestChartInfo['thresh-high']; const threshLow = latestChartInfo['thresh-low']; @@ -667,27 +752,40 @@ const traceDataChart = (data, clearOnFlyFilter) => { // y_min/max are defined in backend -> get only const kdeData = scaleInfo.kde_data; - const yMax = scaleOption === scaleOptionConst.THRESHOLD ? scaleInfo['y-max'] + (scaleInfo['y-max'] * 0.1) : scaleInfo['y-max']; - const yMin = scaleOption === scaleOptionConst.THRESHOLD ? scaleInfo['y-min'] - (scaleInfo['y-min'] * 0.1) : scaleInfo['y-min']; + const yMax = + scaleOption === scaleOptionConst.THRESHOLD + ? scaleInfo['y-max'] + scaleInfo['y-max'] * 0.1 + : scaleInfo['y-max']; + const yMin = + scaleOption === scaleOptionConst.THRESHOLD + ? scaleInfo['y-min'] - scaleInfo['y-min'] * 0.1 + : scaleInfo['y-min']; const outlierIdxs = scaleInfo.upper_outlier_idxs; const negOutlierIdxs = scaleInfo.lower_outlier_idxs; // produce exception y-array and color array from y type - const { - arrayYEx, - plotDataExColor, - } = produceExceptionArrayY(arrayY, yMin, yMax, unlinkedIdxs, noneIdxs, infIdxs, negInfIdxs, negOutlierIdxs, outlierIdxs); - + const { arrayYEx, plotDataExColor } = produceExceptionArrayY( + arrayY, + yMin, + yMax, + unlinkedIdxs, + noneIdxs, + infIdxs, + negInfIdxs, + negOutlierIdxs, + outlierIdxs, + beforeRankValues, + ); // カラム名を取得する。 const columnName = plotData.end_col_show_name; - let {catExpBox} = data.array_plotdata[i]; + let { catExpBox } = data.array_plotdata[i]; if (catExpBox === null) { catExpBox = COMMON_CONSTANT.NA; } - if (typeof (catExpBox) === 'object') { - catExpBox.map(val => (val === null ? COMMON_CONSTANT.NA : val)); + if (typeof catExpBox === 'object') { + catExpBox.map((val) => (val === null ? 
COMMON_CONSTANT.NA : val)); catExpBox = catExpBox.join(' | '); } const isCTCol = isCycleTimeCol(endProcId, sensorId); @@ -695,11 +793,14 @@ const traceDataChart = (data, clearOnFlyFilter) => { const stepChartSummary = data.array_plotdata[i].cat_summary || null; const allSummaryData = []; for (const summaryIdx in data.array_plotdata[i].summaries) { - const summaryData = calculateSummaryData(data.array_plotdata[i].summaries, summaryIdx, isHideNonePoint); + const summaryData = calculateSummaryData( + data.array_plotdata[i].summaries, + summaryIdx, + isHideNonePoint, + ); allSummaryData.push(summaryData); } - // get serial for every datapoint const chartOption = { numCards: data.TBLS, @@ -748,13 +849,17 @@ const traceDataChart = (data, clearOnFlyFilter) => { dicScatterXY, stepChartSummary, isCTCol, + unit: plotData.unit, }; // 起点とターゲット変数工程を比較する。 // make sure process id is integer number before compare together let cardHtml = ''; if (String(endProcId) === String(startProc)) { - cardHtml = buildTimeSeriesCardHTML(chartOption, 'card-border-active'); + cardHtml = buildTimeSeriesCardHTML( + chartOption, + 'card-border-active', + ); } else { cardHtml = buildTimeSeriesCardHTML(chartOption, 'card'); } @@ -817,12 +922,21 @@ const traceDataChart = (data, clearOnFlyFilter) => { // 今回はAjaxでupdateが必要が無いのでオブジェクトを返さない const chartLabels = data.ARRAY_FORMVAL.map( - fv => `${procConfigs[fv.end_proc].name} ${columnName}`, + (fv) => `${procConfigs[fv.end_proc].name} ${columnName}`, ); - const tsChartObject = YasuTsChart($, chartParamObj, chartLabels, tabID, xaxis = xAxisOption, isStepChart = beforeRankValues); + const tsChartObject = YasuTsChart( + $, + chartParamObj, + chartLabels, + tabID, + (xaxis = xAxisOption), + (isStepChart = beforeRankValues), + ); - const hist = beforeRankValues ? StepBarChart($, histParamObj) : YasuHistogram($, histParamObj); + const hist = beforeRankValues + ? StepBarChart($, histParamObj) + : YasuHistogram($, histParamObj); histObjs.push(hist.histObj); // TODO need to add comment // store just been created graph objects to graph storage @@ -833,7 +947,10 @@ const traceDataChart = (data, clearOnFlyFilter) => { // produce scatter plots if (showScatterPlot) { - const dictCanvas2Scatter = produceScatterPlotCharts(data, scaleOption); + const dictCanvas2Scatter = produceScatterPlotCharts( + data, + scaleOption, + ); graphStore.setDctCanvas2Scatter(dictCanvas2Scatter); } @@ -842,11 +959,14 @@ const traceDataChart = (data, clearOnFlyFilter) => { updateThresholdsOnClick(chartOption.tsCanvasId, lastDataPointIndex); // report progress. TODO apply for other pages - loadingUpdate(loadingProgressBackend + i * ((100 - loadingProgressBackend) / (data.TBLS || 1))); + loadingUpdate( + loadingProgressBackend + + i * ((100 - loadingProgressBackend) / (data.TBLS || 1)), + ); } // produce categorical table - produceCategoricalTable(data, options = {chartCols}, clearOnFlyFilter); + produceCategoricalTable(data, (options = { chartCols }), clearOnFlyFilter); // drag and drop timeseries card to save order + redraw scatter plot addTimeSeriesCardSortableEventHandler(); @@ -892,7 +1012,7 @@ const getRowAndSensors = (data) => { sensors.push(plotdata.end_col_id); } - const facet = plotdata.catExpBox ? plotdata.catExpBox.join(' | ') : '' + const facet = plotdata.catExpBox ? 
plotdata.catExpBox.join(' | ') : ''; if (facet && !unitFacet.includes(facet)) { unitFacet.push(facet); @@ -909,9 +1029,13 @@ const getRowAndSensors = (data) => { return data; }; - // build histogram tab -const drawHistogramsTab = (data, scaleOption = fppScaleOption.yAxis, isReset = true, frequencyOption = fppScaleOption.xAxis) => { +const drawHistogramsTab = ( + data, + scaleOption = fppScaleOption.yAxis, + isReset = true, + frequencyOption = fppScaleOption.xAxis, +) => { $(formElements.histogramTab).empty(); $(formElements.histogramTab).css('display', 'block'); @@ -923,7 +1047,9 @@ const drawHistogramsTab = (data, scaleOption = fppScaleOption.yAxis, isReset = t for (let rowIdx = 0; rowIdx < data.row; rowIdx++) { // add row const rowID = `hist-cards-${rowIdx}`; - $(formElements.histogramTab).append(`
`); + $(formElements.histogramTab).append( + `
`, + ); for (let i = 0; i < numChart; i++) { const formVal = data.ARRAY_FORMVAL[i]; const beforeRankValues = data.array_plotdata[i].before_rank_values; @@ -934,27 +1060,45 @@ const drawHistogramsTab = (data, scaleOption = fppScaleOption.yAxis, isReset = t } const filterCond = data.array_plotdata[i].catExpBox - ? (Array.isArray(data.array_plotdata[i].catExpBox) - ? data.array_plotdata[i].catExpBox : [data.array_plotdata[i].catExpBox]) + ? Array.isArray(data.array_plotdata[i].catExpBox) + ? data.array_plotdata[i].catExpBox + : [data.array_plotdata[i].catExpBox] : null; - const [chartInfos, chartInfosOrg] = getChartInfo(data.array_plotdata[i], 'TIME', filterCond); - const [latestChartInfo, latestChartInfoIdx] = chooseLatestThresholds(chartInfos, chartInfosOrg); + const [chartInfos, chartInfosOrg] = getChartInfo( + data.array_plotdata[i], + 'TIME', + filterCond, + ); + const [latestChartInfo, latestChartInfoIdx] = + chooseLatestThresholds(chartInfos, chartInfosOrg); const scaleInfo = getScaleInfo(data.array_plotdata[i], scaleOption); // y_min/max are defined in backend -> get only const kdeData = scaleInfo.kde_data; - const [minY, maxY] = calMinMaxYScale(scaleInfo['y-min'], scaleInfo['y-max'], scaleOption) - const minX = frequencyOption === frequencyOptions.COMMON ? scaleInfo['x-min'] : null; - const maxX = frequencyOption === frequencyOptions.COMMON ? scaleInfo['x-max'] : null; + const [minY, maxY] = calMinMaxYScale( + scaleInfo['y-min'], + scaleInfo['y-max'], + scaleOption, + ); + const minX = + frequencyOption === frequencyOptions.COMMON + ? scaleInfo['x-min'] + : null; + const maxX = + frequencyOption === frequencyOptions.COMMON + ? scaleInfo['x-max'] + : null; const endProcId = data.array_plotdata[i].end_proc_id; const getVal = data.array_plotdata[i].end_col_id; - let {catExpBox} = plotdata; + let { catExpBox } = plotdata; if (catExpBox === null) { catExpBox = COMMON_CONSTANT.NA; } - if (typeof (catExpBox) === 'object') { - catExpBox.map(val => (val === null ? COMMON_CONSTANT.NA : val)); + if (typeof catExpBox === 'object') { + catExpBox.map((val) => + val === null ? COMMON_CONSTANT.NA : val, + ); catExpBox = catExpBox.join(' | '); } @@ -964,28 +1108,60 @@ const drawHistogramsTab = (data, scaleOption = fppScaleOption.yAxis, isReset = t const stepChartSummary = data.array_plotdata[i].cat_summary || null; // create summaries HTMLs - const {end_col_id, end_proc_id, summaries, data_type} = plotdata; - const isHideNonePoint = isHideNoneDataPoint(end_proc_id, end_col_id, data.COMMON.remove_outlier); - const summaryData = calculateSummaryData(summaries, latestChartInfoIdx, isHideNonePoint); + const { end_col_id, end_proc_id, summaries, data_type } = plotdata; + const isHideNonePoint = isHideNoneDataPoint( + end_proc_id, + end_col_id, + data.COMMON.remove_outlier, + ); + const summaryData = calculateSummaryData( + summaries, + latestChartInfoIdx, + isHideNonePoint, + ); const isCategory = plotdata.is_category; const allGroupNames = isCategory - ? getAllGroupOfSensor(data.array_plotdata.filter( - plot => plot.end_col_id === plotdata.end_col_id - )) : []; - - const isLimitCat = plotdata.is_cat_limited || (isCategory && allGroupNames.id.length >= 29); + ? 
getAllGroupOfSensor( + data.array_plotdata.filter( + (plot) => plot.end_col_id === plotdata.end_col_id, + ), + ) + : []; + + const isLimitCat = + plotdata.is_cat_limited || + (isCategory && allGroupNames.id.length >= 29); const generalInfo = { - getVal, startProc, endProcId, + getVal, + startProc, + endProcId, }; - const summariesHTML = buildSummaryResultsHTML(summaryData, `${rowIdx}-${i + 1}`, generalInfo, beforeRankValues, stepChartSummary); - - const catExpBoxCols = [data.COMMON['catExpBox1'], data.COMMON['catExpBox2']].filter(c => c); - - const chartTitle = buildSummaryChartTitle(catExpBox, catExpBoxCols, plotdata.catExpBoxName, false, {}, true); + const summariesHTML = buildSummaryResultsHTML( + summaryData, + `${rowIdx}-${i + 1}`, + generalInfo, + beforeRankValues, + stepChartSummary, + ); + + const catExpBoxCols = [ + data.COMMON['catExpBox1'], + data.COMMON['catExpBox2'], + ].filter((c) => c); + + const chartTitle = buildSummaryChartTitle( + catExpBox, + catExpBoxCols, + plotdata.catExpBoxName, + false, + {}, + true, + ); // create histogram HTMLs - const hisCardBorder = (String(endProcId) === String(startProc)) ? 'his-active' : ''; - const canvasId = `${formElements.histograms}-${rowIdx}-${i + 1}` + const hisCardBorder = + String(endProcId) === String(startProc) ? 'his-active' : ''; + const canvasId = `${formElements.histograms}-${rowIdx}-${i + 1}`; const cardHtml = `
${chartTitle} @@ -1038,7 +1214,10 @@ const loading = $('.loading'); const resetGraphSetting = () => { $(`select[name=${formElements.frequencyScale}]`).val(frequencyOptions.AUTO); - $(`input[name=${formElements.summaryOption}][value=none]`).prop('checked', true); + $(`input[name=${formElements.summaryOption}][value=none]`).prop( + 'checked', + true, + ); $(formElements.yScaleOption).val(1); }; @@ -1050,7 +1229,7 @@ const traceDataWithDBChecking = (action) => { // continue to trace data or export CSV/TSV if (action === 'TRACE-DATA') { - isValid = checkValidations({max: MAX_NUMBER_OF_SENSOR}); + isValid = checkValidations({ max: MAX_NUMBER_OF_SENSOR }); updateStyleOfInvalidElements(); if (!isValid) return; // close sidebar @@ -1062,6 +1241,9 @@ const traceDataWithDBChecking = (action) => { loadingUpdate(5); handleSubmit(true); + + // reset options in dropdown of serialTableModal2 when re-open + lastSelectedOrder = []; } }; const clearTraceResultCards = () => { @@ -1096,7 +1278,7 @@ const updateCategoryOrder = (formData) => { formData.delete(name.order); formData.set(name.xOption, xOption); - updateOrderCols.forEach(orderCol => { + updateOrderCols.forEach((orderCol) => { formData.append(name.process, orderCol.serialProcess); formData.append(name.serial, orderCol.serialColumn); formData.append(name.order, orderCol.serialOrder); @@ -1139,83 +1321,127 @@ const traceData = (clearOnFlyFilter, autoUpdate) => { let formData = collectFormDataTrace(clearOnFlyFilter, autoUpdate); formData = handleXSettingOnGUI(formData); + showGraphCallApi( + '/ap/api/fpp/index', + formData, + REQUEST_TIMEOUT, + async (res) => { + $(formElements.traceDataTabs).css('display', 'block'); + + // sort graphs + if (latestSortColIds && latestSortColIds.length) { + res.ARRAY_FORMVAL = sortGraphs( + res.ARRAY_FORMVAL, + 'GET02_VALS_SELECT', + latestSortColIds, + ); + res.array_plotdata = sortGraphs( + res.array_plotdata, + 'end_col_id', + latestSortColIds, + ); + } + convertChartInfoToIndex(res); + // store trace result + graphStore.setTraceData(_.cloneDeep(res)); - showGraphCallApi('/ap/api/fpp/index', formData, REQUEST_TIMEOUT, async (res) => { - $(formElements.traceDataTabs).css('display', 'block'); - - // sort graphs - if (latestSortColIds && latestSortColIds.length) { - res.ARRAY_FORMVAL = sortGraphs(res.ARRAY_FORMVAL, 'GET02_VALS_SELECT', latestSortColIds); - res.array_plotdata = sortGraphs(res.array_plotdata, 'end_col_id', latestSortColIds); - } - convertChartInfoToIndex(res); + availableOrderingSettings = res.COMMON.available_ordering_columns; + // add datetime and serial columnId to availableOrderingSettings - // store trace result - graphStore.setTraceData(_.cloneDeep(res)); - - availableOrderingSettings = res.COMMON.available_ordering_columns; + for (const procId in availableOrderingSettings) { + const procInfo = procConfigs[procId]; + if (!procInfo) { + // in case of process has been deleted at that time -> 'undefined' process be found + continue; + } + const serialDateTimeColId = procInfo?.columns + .filter((col) => col.is_serial_no || col.is_get_date) + .map((col) => col.id); + + serialDateTimeColId.forEach((columnId) => { + if ( + availableOrderingSettings[procId].indexOf(columnId) < 0 + ) { + availableOrderingSettings[procId].push(columnId); + } + }); + availableOrderingSettings[procId].sort((a, b) => a - b); + } + // add datetime and serial columnId to availableOrderingSettings - end - // TODO: lay nhung column va process o res de disable chinh xac hon. 
- if (clearOnFlyFilter) { - initTableValue(); - } + // TODO: lay nhung column va process o res de disable chinh xac hon. + if (clearOnFlyFilter) { + initTableValue(); + } - res.filter_on_demand.category = orderCategoryWithOrderSeries(res, clearOnFlyFilter); - const { category } = res.filter_on_demand; + res.filter_on_demand.category = orderCategoryWithOrderSeries( + res, + clearOnFlyFilter, + ); + const { category } = res.filter_on_demand; - setGraphSetting(); - // draw + show data to graphs - traceDataChart(res, clearOnFlyFilter); + setGraphSetting(); + // draw + show data to graphs + traceDataChart(res, clearOnFlyFilter); - showInfoTable(res); + showInfoTable(res); - // render cat, category label filer modal - fillDataToFilterModal(res.filter_on_demand, () => { - bindCategorySort(); - handleSubmit(false, false); - }); + // render cat, category label filer modal + fillDataToFilterModal(res.filter_on_demand, () => { + bindCategorySort(); + handleSubmit(false, false); + }); - // Move screen to graph after pushing グラフ表示 button - if (!autoUpdate) { - autoScrollToChart(500); - } + // Move screen to graph after pushing グラフ表示 button + if (!autoUpdate) { + autoScrollToChart(500); + } - // show toastr to inform result was truncated upto 5000 - if (res.is_res_limited) { - showToastrMsg(i18n.traceResulLimited.split('BREAK_LINE').join('
<br>')); - } + // show toastr to inform result was truncated up to 5000 + if (res.is_res_limited) { + showToastrMsg( + i18n.traceResulLimited.split('BREAK_LINE').join('<br>
'), + ); + } - // show toastr to inform result was truncated upto 5000 - if (res.is_thin_data) { - showToastrMsg(i18n.thinDataShown); - } + // show toastr to inform result was truncated upto 5000 + if (res.is_thin_data) { + showToastrMsg(i18n.thinDataShown); + } - // show limit graphs displayed message - if (res.isGraphLimited) { - showToastrMsg(i18nCommon.limitDisplayedGraphs.replace('NUMBER', MAX_NUMBER_OF_GRAPH)); - } + // show limit graphs displayed message + if (res.isGraphLimited) { + showToastrMsg( + i18nCommon.limitDisplayedGraphs.replace( + 'NUMBER', + MAX_NUMBER_OF_GRAPH, + ), + ); + } - setPollingData(formData, handleSubmit, [false, true]); + setPollingData(formData, handleSubmit, [false, true]); - if ((isEmpty(res.array_plotdata) - || isEmpty(res.array_plotdata[0].array_y)) - && (isEmpty(category) - || isEmpty(category[0]))) { - showToastrAnomalGraph(); - } - isShowIndexInGraphArea = false; - }); + if ( + (isEmpty(res.array_plotdata) || + isEmpty(res.array_plotdata[0].array_y)) && + (isEmpty(category) || isEmpty(category[0])) + ) { + showToastrAnomalGraph(); + } + isShowIndexInGraphArea = false; + }, + ); }; - const setGraphSetting = () => { // frequencyScale, yScaleOption - fppScaleOption.xAxis = $(`select[name=${formElements.frequencyScale}]`).val(); + fppScaleOption.xAxis = $( + `select[name=${formElements.frequencyScale}]`, + ).val(); fppScaleOption.yAxis = $(formElements.yScaleOption).val(); }; - const csvExport = async (type) => { const formData = lastUsedFormData || collectFormDataTrace(true); const queryString = genQueryStringFromFormData(formData); @@ -1232,7 +1458,10 @@ const csvExport = async (type) => { const buildMapIndex2OutlierYValue = (plotdata, scaleInfo) => { const dictIdx2YValue = {}; const arrayYTS = [...plotdata.array_y]; - const idxs = [...scaleInfo.lower_outlier_idxs, ...scaleInfo.upper_outlier_idxs]; + const idxs = [ + ...scaleInfo.lower_outlier_idxs, + ...scaleInfo.upper_outlier_idxs, + ]; for (const idx of idxs) { arrayYTS[idx] = null; // it's outlier value -> clear, not shown as normal data dictIdx2YValue[idx] = plotdata.array_y[idx]; @@ -1240,36 +1469,40 @@ const buildMapIndex2OutlierYValue = (plotdata, scaleInfo) => { return [dictIdx2YValue, arrayYTS]; }; -// eslint-disable-next-line no-unused-vars const scrollTSChart = (() => { jQuery.expr.filters.offscreen = (el) => { const rect = el.getBoundingClientRect(); return ( - (rect.x + rect.width) < 0 - || (rect.y + rect.height) < 0 - || (rect.x > window.innerWidth || rect.y > window.innerHeight) + rect.x + rect.width < 0 || + rect.y + rect.height < 0 || + rect.x > window.innerWidth || + rect.y > window.innerHeight ); }; const $window = $(window); let $stickies; const whenScrolling = () => { - const isScrollOverCategoryTabl = $(window).scrollTop() + 385 < $('#cateArea').offset().top; + const isScrollOverCategoryTabl = + $(window).scrollTop() + 385 < $('#cateArea').offset().top; if (isScrollOverCategoryTabl) { - if ($stickies.find('.btn-anchor').hasClass('pin') - && $stickies.hasClass('pinChart')) { + if ( + $stickies.find('.btn-anchor').hasClass('pin') && + $stickies.hasClass('pinChart') + ) { $stickies.removeClass('pinChart'); - $stickies.css({position: ''}); + $stickies.css({ position: '' }); } } else if ($stickies.find('.btn-anchor').hasClass('pin')) { $stickies.addClass('pinChart'); } }; const load = (stickies) => { - if (typeof stickies === 'object' - && stickies instanceof jQuery - && stickies.length > 0 - && stickies.id !== 'cate-card' + if ( + typeof stickies === 'object' && + stickies instanceof 
jQuery && + stickies.length > 0 && + stickies.id !== 'cate-card' ) { let $originWH = $(document).height(); $stickies = stickies.each((_, e) => { @@ -1343,7 +1576,9 @@ const pinTSChart = (chartDOMId) => { scrollTSChart.load(cardEle); } else { // check if category table was pinned, return origin situation - const isCateTablePinned = $(formElements.cateCard).find('.btn-anchor').hasClass('pin'); + const isCateTablePinned = $(formElements.cateCard) + .find('.btn-anchor') + .hasClass('pin'); if (isCateTablePinned) { scrollCategoryTable.load($(formElements.cateCard)); } @@ -1351,7 +1586,7 @@ const pinTSChart = (chartDOMId) => { // remove width + position to make more responsive when unpin if (!cardEle.hasClass('pinChart')) { - cardEle.css({width: '', position: ''}); + cardEle.css({ width: '', position: '' }); } }; @@ -1379,12 +1614,16 @@ const handleXSettingOnGUI = (formData = null, xAxisSettings = null) => { }; const bindXAxisSettings = (procId, columns) => { - const hasDummyDatetime = columns.filter(column => column.is_dummy_datetime); + const hasDummyDatetime = columns.filter( + (column) => column.is_dummy_datetime, + ); let xAxisSettings = { xOption: CONST.XOPT_TIME, }; if (hasDummyDatetime.length) { - const serialCols = columns.filter(column => column.is_serial_no || column.order); + const serialCols = columns.filter( + (column) => column.is_serial_no || column.order, + ); if (serialCols.length) { xAxisSettings.xOption = CONST.XOPT_INDEX; // get first serial or order column as default @@ -1393,7 +1632,7 @@ const bindXAxisSettings = (procId, columns) => { serialColumn: serialCols[0].id, serialOrder: 1, }; - xAxisSettings = {...xAxisSettings, ...serialSettings}; + xAxisSettings = { ...xAxisSettings, ...serialSettings }; handleXSettingOnGUI(null, serialSettings); } } @@ -1431,7 +1670,12 @@ const onChangeYScale = () => { const currentTraceData = graphStore.getTraceData(); - drawHistogramsTab(currentTraceData, fppScaleOption.yAxis, false, fppScaleOption.xAxis); + drawHistogramsTab( + currentTraceData, + fppScaleOption.yAxis, + false, + fppScaleOption.xAxis, + ); }); }; @@ -1446,8 +1690,12 @@ const handleExportData = (type) => { const bindScatterPlotEvents = () => { $(formElements.showScatterPlotSelect).on('change', (e) => { // check facets - const facetLv1 = $('select[name=catExpBox] option:selected[value="1"]').length; - const facetLv2 = $('select[name=catExpBox] option:selected[value="2"]').length; + const facetLv1 = $( + 'select[name=catExpBox] option:selected[value="1"]', + ).length; + const facetLv2 = $( + 'select[name=catExpBox] option:selected[value="2"]', + ).length; if (facetLv1 || facetLv2) { // uncheck facets $('select[name=catExpBox] option[value=""]').prop('selected', true); @@ -1464,4 +1712,4 @@ const goToGraphConfigPageFPP = (url) => { const procId = graphStore.getArrayPlotData(selectedCanvasId).end_proc_id; goToOtherPage(`${url}?proc_id=${procId}`, false); -} \ No newline at end of file +}; diff --git a/ap/static/trace_data/js/trace_data_categorical_table.js b/ap/static/trace_data/js/trace_data_categorical_table.js index a748a1a..dd31713 100644 --- a/ap/static/trace_data/js/trace_data_categorical_table.js +++ b/ap/static/trace_data/js/trace_data_categorical_table.js @@ -1,33 +1,37 @@ /* eslint-disable */ -/* eslint-disable no-restricted-syntax */ // add border for boxes on hover const showBorderWhenHoverCateBox = () => { $('.box-has-data').hover( function hoverIn() { $(this).addClass('cate-box-border'); - }, function hoverOut() { + }, + function hoverOut() { 
$(this).removeClass('cate-box-border'); }, ); }; - -const buildCategoryBoxes = (cateCol, timeCol = null, uiStartTime = null, uiEndTime = null) => { +const buildCategoryBoxes = ( + cateCol, + timeCol = null, + uiStartTime = null, + uiEndTime = null, +) => { let isIndex = false; if (timeCol === null) { isIndex = true; - timeCol = Array.from(Array(cateCol.length), (_, i) => i + 1) + timeCol = Array.from(Array(cateCol.length), (_, i) => i + 1); } const strDtMin = uiStartTime || timeCol[0]; const strDtMax = uiEndTime || timeCol[timeCol.length - 1]; const firstTimeVal = timeCol[0]; - const firstTimeDt = isIndex ? firstTimeVal : new Date(firstTimeVal) + const firstTimeDt = isIndex ? firstTimeVal : new Date(firstTimeVal); const lastTimeVal = timeCol[timeCol.length - 1]; - const lastTimeDt = isIndex ? lastTimeVal : new Date(lastTimeVal) + const lastTimeDt = isIndex ? lastTimeVal : new Date(lastTimeVal); const maxDt = isIndex ? strDtMax : new Date(strDtMax); const minDt = isIndex ? strDtMin : new Date(strDtMin); - const totalLen = (maxDt - minDt); + const totalLen = maxDt - minDt; const len = cateCol.length; const boxes = []; @@ -55,42 +59,43 @@ const buildCategoryBoxes = (cateCol, timeCol = null, uiStartTime = null, uiEndTi if (boxes.length) { // update boxEndPos const prevBoxEndTime = boxes[boxes.length - 1].endTime; - const endTimeObj = isIndex ? prevBoxEndTime : new Date(prevBoxEndTime); + const endTimeObj = isIndex + ? prevBoxEndTime + : new Date(prevBoxEndTime); const prevBoxEndPos = (endTimeObj - minDt) / totalLen; boxes[boxes.length - 1].boxEndPos = prevBoxEndPos; // add empty/gap box - if (boxes.length > 1) { // don't need to add gap box after firstbox - boxes.push( - { - cateName: '', - count: 0, - startTime: prevBoxEndTime, - endTime: timeCol[i], - boxStartPos: prevBoxEndPos, // min = min time of time column. - boxEndPos: (dtVal - minDt) / totalLen, // or just leave it empty - endIndex: i, - }, - ); + if (boxes.length > 1) { + // don't need to add gap box after firstbox + boxes.push({ + cateName: '', + count: 0, + startTime: prevBoxEndTime, + endTime: timeCol[i], + boxStartPos: prevBoxEndPos, // min = min time of time column. + boxEndPos: (dtVal - minDt) / totalLen, // or just leave it empty + endIndex: i, + }); } } // add new box: counting, fix start time, update end time, fix catename, fix boxStartPos - boxes.push( - { - cateName: currentCateVal, - count: 1, - startTime: timeCol[i], - endTime: timeCol[i], - boxStartPos: (dtVal - minDt) / totalLen, // min = min time of time column. - boxEndPos: (dtVal - minDt) / totalLen, // or just leave it empty - startIndex: i, - endIndex: i, - }, - ); + boxes.push({ + cateName: currentCateVal, + count: 1, + startTime: timeCol[i], + endTime: timeCol[i], + boxStartPos: (dtVal - minDt) / totalLen, // min = min time of time column. + boxEndPos: (dtVal - minDt) / totalLen, // or just leave it empty + startIndex: i, + endIndex: i, + }); } else { const prevBoxEndTime = boxes[boxes.length - 1].endTime; - const endTimeObj = isIndex ? prevBoxEndTime : new Date(prevBoxEndTime); + const endTimeObj = isIndex + ? prevBoxEndTime + : new Date(prevBoxEndTime); const prevBoxEndPos = (endTimeObj - minDt) / totalLen; boxes[boxes.length - 1].boxEndPos = prevBoxEndPos; const currentRangePercent = (timeCol[i] - endTimeObj) / totalLen; @@ -133,7 +138,9 @@ const buildCategoryBoxes = (cateCol, timeCol = null, uiStartTime = null, uiEndTi prevCateVal = currentCateVal; } // update boxEndPos of the last box - const endTimeObj = isIndex ? 
boxes[boxes.length - 1].endTime : new Date(boxes[boxes.length - 1].endTime); + const endTimeObj = isIndex + ? boxes[boxes.length - 1].endTime + : new Date(boxes[boxes.length - 1].endTime); boxes[boxes.length - 1].boxEndPos = (endTimeObj - minDt) / totalLen; // last box @@ -152,7 +159,6 @@ const buildCategoryBoxes = (cateCol, timeCol = null, uiStartTime = null, uiEndTi return boxes; }; - // // combine consecutive small boxes together const combineEmptyBoxes = (boxes) => { let isNarrowBoxCombined = false; @@ -166,25 +172,29 @@ const combineEmptyBoxes = (boxes) => { boxes[i].cateName = ''; } - if (boxes[i].count > 0) { // a box has data (count > 0) + if (boxes[i].count > 0) { + // a box has data (count > 0) combinedBoxes.push(boxes[i]); // mark this box as previous non-empty box previousIsEmpty = false; - } else { // an empty box - if (previousIsEmpty) { // append to previous box + } else { + // an empty box + if (previousIsEmpty) { + // append to previous box const lastIdx = combinedBoxes.length - 1; combinedBoxes[lastIdx].boxEndPos = boxes[i].boxEndPos; combinedBoxes[lastIdx].endTime = boxes[i].endTime; isNarrowBoxCombined = true; - } else { // add new empty box + } else { + // add new empty box combinedBoxes.push(boxes[i]); } // mark this box as previous empty box previousIsEmpty = true; } } - return {combinedBoxes, isNarrowBoxCombined}; + return { combinedBoxes, isNarrowBoxCombined }; }; const combineSmallBoxes = (boxes) => { @@ -195,22 +205,27 @@ const combineSmallBoxes = (boxes) => { for (let i = 0; i < numBox; i++) { const count = parseInt(boxes[i].count || 0); - if (boxes[i].boxEndPos - boxes[i].boxStartPos > 0.002 && count > 0) { // a big box has data + if (boxes[i].boxEndPos - boxes[i].boxStartPos > 0.002 && count > 0) { + // a big box has data combinedBoxes.push(boxes[i]); // mark this box as previous non-empty box previousIsSmall = false; - } else { // an empty box or small box - if (previousIsSmall && i < numBox - 1) { // previous is small + current is small -> merge + } else { + // an empty box or small box + if (previousIsSmall && i < numBox - 1) { + // previous is small + current is small -> merge const lastIdx = combinedBoxes.length - 1; combinedBoxes[lastIdx].boxEndPos = boxes[i].boxEndPos; combinedBoxes[lastIdx].endTime = boxes[i].endTime; combinedBoxes[lastIdx].endIndex = boxes[i].endIndex; - combinedBoxes[lastIdx].count = parseInt(combinedBoxes[lastIdx].count || 0) + count; + combinedBoxes[lastIdx].count = + parseInt(combinedBoxes[lastIdx].count || 0) + count; // combinedBoxes[lastIdx].cateName += `.${boxes[i].cateName}`; // TODO display name combinedBoxes[lastIdx].isGroup = true; isNarrowBoxCombined = true; - } else { // add new small box + } else { + // add new small box combinedBoxes.push(boxes[i]); } // mark this box as previous small box @@ -219,11 +234,17 @@ const combineSmallBoxes = (boxes) => { } } } - return {combinedBoxes, isNarrowBoxCombined}; + return { combinedBoxes, isNarrowBoxCombined }; }; - -const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderColumns, isThinData, xAxisOption) => { +const createCateTableHTML = ( + cateBoxes, + cateId, + thinDataGroupCounts, + indexOrderColumns, + isThinData, + xAxisOption, +) => { let tds = ''; if (cateBoxes === null) { tds = ` @@ -238,7 +259,7 @@ const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderC `; } for (const idx in cateBoxes || []) { - const cateBoxIdx = `cate-edge-${cateId}-${idx}` + const cateBoxIdx = `cate-edge-${cateId}-${idx}`; const box = cateBoxes[idx]; 
let widthPercent = box.boxEndPos - box.boxStartPos; if (idx === '0') { @@ -267,7 +288,10 @@ const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderC // } // NA case (have data but it is unknown/undefined data) - if ((box.cateName === '' || box.cateName === null) && box.count > 0) { + if ( + (box.cateName === '' || box.cateName === null) && + box.count > 0 + ) { colorClass = 'box-is-na'; showLabelTooltip = COMMON_CONSTANT.NA; showLabelBox = COMMON_CONSTANT.NA; @@ -278,14 +302,16 @@ const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderC colorClass = 'box-is-group'; showLabelTooltip = i18n.cannotBeDisplayed; showLabelBox = ''; - widthPercent = widthPercent > 0.01 ? widthPercent : widthPercent * 0.85; + widthPercent = + widthPercent > 0.01 ? widthPercent : widthPercent * 0.85; } - let hoverStr = `${i18n.value}: ${showLabelTooltip}
`; let dataCount = 0; if (isThinData) { - dataCount = thinDataGroupCounts.slice(box.startIndex, box.endIndex + 1).reduce((a, b) => a + b, 0); + dataCount = thinDataGroupCounts + .slice(box.startIndex, box.endIndex + 1) + .reduce((a, b) => a + b, 0); } else { dataCount = box.count; } @@ -297,7 +323,13 @@ const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderC for (const aggKey of ['min', 'max', 'value']) { if (dicOrder[aggKey]) { // val.concat(dicOrder[aggKey].slice(box.startIndex, box.endIndex + 1)); - val = [...val, ...dicOrder[aggKey].slice(box.startIndex, box.endIndex + 1)]; + val = [ + ...val, + ...dicOrder[aggKey].slice( + box.startIndex, + box.endIndex + 1, + ), + ]; } } // console.log(val); @@ -306,15 +338,15 @@ const createCateTableHTML = (cateBoxes, cateId, thinDataGroupCounts, indexOrderC hoverStr += `${i18n.minVal}: ${checkTrue(minVal) ? minVal : COMMON_CONSTANT.NA}
<br>`; hoverStr += `${i18n.maxVal}: ${checkTrue(maxVal) ? maxVal : COMMON_CONSTANT.NA}
<br>`; } - } else { hoverStr += `${i18n.startTime}: ${new Date(box.startTime).toLocaleString()}
<br>${i18n.endTime}: ${new Date(box.endTime).toLocaleString()}<br>
`; } - const boxInfo = box.count > 0 - ? `${showLabelBox} ${hoverStr} ` - : ''; + const boxInfo = + box.count > 0 + ? `${showLabelBox} ${hoverStr} ` + : ''; td = `
${leftEdge} @@ -342,13 +374,19 @@ const pinCategoryTable = () => { scrollCategoryTable.load($(formElements.cateCard)); }; - -const createCateCard = (cateNameHTMLs, tableHTMLs, width, cols = {}, traceDat = {}) => { +const createCateCard = ( + cateNameHTMLs, + tableHTMLs, + width, + cols = {}, + traceDat = {}, +) => { const tblWidthCSS = width ? `width: ${width}px;` : ''; // tableHTMLs.length = 1; const cardHtml = `
- ${tableHTMLs.length > 0 ? - `
+ ${ + tableHTMLs.length > 0 + ? `
${createAnchorHTML()}
@@ -370,7 +408,9 @@ const createCateCard = (cateNameHTMLs, tableHTMLs, width, cols = {}, traceDat =
-
` : '' } +
` + : '' + }
`; return cardHtml; }; @@ -382,7 +422,7 @@ const createAnchorHTML = () => { onclick="pinCategoryTable();"> `; -} +}; const resetCateogryTablePosition = () => { $(formElements.cateCard).removeClass('cate-fixed'); @@ -394,14 +434,22 @@ let isDivResizing = false; // adjust category table to be aligned with TS chart const getFirstTSChartId = () => { - const firstChartId = $('#plot-cards').find('canvas[chart-type="timeSeries"]').first().attr('id') || 'chart01'; + const firstChartId = + $('#plot-cards') + .find('canvas[chart-type="timeSeries"]') + .first() + .attr('id') || 'chart01'; return `#${firstChartId}`; -} +}; const getFirstHistChartId = () => { - const firstChartId = $('#plot-cards').find('canvas[chart-type="histogram"]').first().attr('id') || 'hist01'; + const firstChartId = + $('#plot-cards') + .find('canvas[chart-type="histogram"]') + .first() + .attr('id') || 'hist01'; return `#${firstChartId}`; -} +}; const getChartErea = (canvasId) => { for (const idx in Chart.instances) { @@ -432,11 +480,14 @@ const adjustCatetoryTableLength = () => { // set catetable position const chartArea = getChartErea(firstTSChartId); const tableWidth1 = chartArea.right - chartArea.left + 2; // 2 is border width of two sides - const zoomRate = Math.min(1, Math.round(window.devicePixelRatio * 100) / 100); + const zoomRate = Math.min( + 1, + Math.round(window.devicePixelRatio * 100) / 100, + ); const adjust = Math.abs((1 - zoomRate) * 12); const chartOffsetLeft = firstTSChart.offset().left; const cateTblOffsetLeft = chartOffsetLeft + chartArea.left - 1 - adjust; // -1 to reserve left border - cateTableContainer.offset({left: cateTblOffsetLeft}); + cateTableContainer.offset({ left: cateTblOffsetLeft }); const canvasOffsetLeft = firstTSChart.offset().left; const canvasOffsetRight = canvasOffsetLeft + firstTSChart.width(); @@ -448,7 +499,6 @@ const adjustCatetoryTableLength = () => { $(`${formElements.cateTable} table`).each(function f() { $(this).css('width', tableWidth - adjust); }); - } catch (error) { // console.log(error); } @@ -468,19 +518,17 @@ const contentResizeHandler = () => { } } - new ResizeObserver(onResize) - .observe(document.getElementById('content')); + new ResizeObserver(onResize).observe(document.getElementById('content')); }; - -// eslint-disable-next-line no-unused-vars const scrollCategoryTable = (() => { jQuery.expr.filters.offscreen = (el) => { const rect = el.getBoundingClientRect(); return ( - (rect.x + rect.width) < 0 - || (rect.y + rect.height) < 0 - || (rect.x > window.innerWidth || rect.y > window.innerHeight) + rect.x + rect.width < 0 || + rect.y + rect.height < 0 || + rect.x > window.innerWidth || + rect.y > window.innerHeight ); }; const $window = $(window); @@ -488,24 +536,33 @@ const scrollCategoryTable = (() => { const whenScrolling = () => { $stickies.each((i, e) => { - const $cardParent = $(e).parents().filter('form') - .parent() - .parent(); + const $cardParent = $(e).parents().filter('form').parent().parent(); const $thisSticky = $(e); const $stickyPosition = $thisSticky.data('originalPosition'); - if ($stickyPosition <= $window.scrollTop() && !$cardParent.is(':offscreen')) { + if ( + $stickyPosition <= $window.scrollTop() && + !$cardParent.is(':offscreen') + ) { const $nextSticky = $stickies.eq(i + 1); - const $nextStickyPosition = $nextSticky.data('originalPosition') - $thisSticky.data('originalHeight'); + const $nextStickyPosition = + $nextSticky.data('originalPosition') - + $thisSticky.data('originalHeight'); $thisSticky.addClass('cate-fixed'); - if ($nextSticky.length 
> 0 && $thisSticky.offset().top >= $nextStickyPosition) { - $thisSticky.addClass('absolute').css('top', $nextStickyPosition); + if ( + $nextSticky.length > 0 && + $thisSticky.offset().top >= $nextStickyPosition + ) { + $thisSticky + .addClass('absolute') + .css('top', $nextStickyPosition); } // align scrolled category table with plot-cards - const tableLeftOffset = $(formElements.tsPlotCards).offset().left; + const tableLeftOffset = $(formElements.tsPlotCards).offset() + .left; const plotCardWidth = $(formElements.tsPlotCards).outerWidth(); $thisSticky.css('left', tableLeftOffset); $thisSticky.css('width', plotCardWidth); @@ -514,8 +571,11 @@ const scrollCategoryTable = (() => { $thisSticky.removeClass('cate-fixed'); - if ($prevSticky.length > 0 - && $window.scrollTop() <= $thisSticky.data('originalPosition') - $thisSticky.data('originalHeight') + if ( + $prevSticky.length > 0 && + $window.scrollTop() <= + $thisSticky.data('originalPosition') - + $thisSticky.data('originalHeight') ) { $prevSticky.removeClass('absolute').removeAttr('style'); } @@ -523,9 +583,10 @@ const scrollCategoryTable = (() => { }); }; const load = (stickies) => { - if (typeof stickies === 'object' - && stickies instanceof jQuery - && stickies.length > 0 + if ( + typeof stickies === 'object' && + stickies instanceof jQuery && + stickies.length > 0 ) { let $originWH = $(document).height(); $stickies = stickies.each((_, e) => { @@ -562,36 +623,42 @@ const nonNACalcuation = (cates) => { return [0, 0]; } const total = cates.length; - const nonNACount = cates.filter(c => c !== null).length; - return [Number(100 * nonNACount / (total || 1)).toFixed(2), nonNACount, total]; + const nonNACount = cates.filter((c) => c !== null).length; + return [ + Number((100 * nonNACount) / (total || 1)).toFixed(2), + nonNACount, + total, + ]; }; - const orderCategoryWithOrderSeries = (traceData, clearOnFlyFilter) => { const shouldOrderByIndexList = isShowIndexInGraphArea || clearOnFlyFilter; if (!shouldOrderByIndexList) return traceData.filter_on_demand.category; const { category } = traceData.filter_on_demand; + const { indexOrderColumns } = traceData; - const indexOrderColID = indexOrderColumns.map(col => col.id); + const indexOrderColID = indexOrderColumns.map((col) => col.id); let categoryDataAfterOrdering = []; const notMatchedCat = []; - category && category.forEach((cat) => { - const idx = indexOrderColID.indexOf(cat.column_id); - if (idx !== -1) { - // matched - categoryDataAfterOrdering[idx] = cat; - } else { - notMatchedCat.push(cat); - } - }); + category && + category.forEach((cat) => { + const idx = indexOrderColID.indexOf(cat.column_id); + if (idx !== -1) { + // matched + categoryDataAfterOrdering[idx] = cat; + } else { + notMatchedCat.push(cat); + } + }); // remove empty category data - categoryDataAfterOrdering = categoryDataAfterOrdering.filter(cat => cat !== null); + categoryDataAfterOrdering = categoryDataAfterOrdering.filter( + (cat) => cat !== null, + ); // cat which not match in serial ordering: shoow at below of category box categoryDataAfterOrdering = categoryDataAfterOrdering.concat(notMatchedCat); return categoryDataAfterOrdering; -} - +}; const produceCategoricalTable = (traceData, options = {}) => { const { category } = traceData.filter_on_demand; @@ -601,9 +668,13 @@ const produceCategoricalTable = (traceData, options = {}) => { const endDt = traceData.COMMON[CONST.ENDDATE]; const endTm = traceData.COMMON[CONST.ENDTIME]; const fmt = 'YYYY/MM/DD HH:mm'; - const uiStartTime = new Date(formatDateTime(`${startDt} 
${startTm}`, fmt)).toISOString(); - const uiEndTime = new Date(formatDateTime(`${endDt} ${endTm}`, fmt)).toISOString(); - const {thinDataGroupCounts, indexOrderColumns, is_thin_data} = traceData; + const uiStartTime = new Date( + formatDateTime(`${startDt} ${startTm}`, fmt), + ).toISOString(); + const uiEndTime = new Date( + formatDateTime(`${endDt} ${endTm}`, fmt), + ).toISOString(); + const { thinDataGroupCounts, indexOrderColumns, is_thin_data } = traceData; // calculate categorical boxes: start/end position, count, etc const dicAllCateBoxes = {}; @@ -629,12 +700,18 @@ const produceCategoricalTable = (traceData, options = {}) => { if (xAxisOption === 'INDEX') { boxes = buildCategoryBoxes(dicCate.data); } else { - boxes = buildCategoryBoxes(dicCate.data, traceData.times, uiStartTime, uiEndTime); + boxes = buildCategoryBoxes( + dicCate.data, + traceData.times, + uiStartTime, + uiEndTime, + ); } // pre-process boxes code // const { combinedBoxes, isNarrowBoxCombined } = combineEmptyBoxes(boxes); - const {combinedBoxes, isNarrowBoxCombined} = combineSmallBoxes(boxes); + const { combinedBoxes, isNarrowBoxCombined } = + combineSmallBoxes(boxes); boxes = combinedBoxes; isBoxCombined = isNarrowBoxCombined; } @@ -651,7 +728,6 @@ const produceCategoricalTable = (traceData, options = {}) => { [nonNAPercent, nonNACount, total] = nonNACalcuation(dicCate.data); } - const boxName = `${dicCate.proc_master_name} ${dicCate.column_master_name}`; dicAllCateBoxes[boxName] = boxes; @@ -666,10 +742,10 @@ const produceCategoricalTable = (traceData, options = {}) => { ${nonNAPercent}% (${applySignificantDigit(nonNACount)}/${applySignificantDigit(total) || '0'}) - `) + `); }); - cateNameHTMLs.push(``) + cateNameHTMLs.push(``); // show msg to warn about combining narrow boxes if (isBoxCombined) { @@ -682,7 +758,14 @@ const produceCategoricalTable = (traceData, options = {}) => { // generate a table for each categorical field const tblHTMLs = []; Object.values(dicAllCateBoxes).forEach((cateBoxes, idx) => { - const cateHTML = createCateTableHTML(cateBoxes, idx, thinDataGroupCounts, indexOrderColumns, is_thin_data, xAxisOption); + const cateHTML = createCateTableHTML( + cateBoxes, + idx, + thinDataGroupCounts, + indexOrderColumns, + is_thin_data, + xAxisOption, + ); tblHTMLs.push(cateHTML); }); @@ -701,7 +784,13 @@ const produceCategoricalTable = (traceData, options = {}) => { histogram: 3, scatterPlot: 0, }; - const cateCardHTML = createCateCard(cateNameHTMLs, tblHTMLs, tableWidth, cols, traceData); + const cateCardHTML = createCateCard( + cateNameHTMLs, + tblHTMLs, + tableWidth, + cols, + traceData, + ); // clear old result $(formElements.cateCard).empty(); @@ -721,9 +810,10 @@ const produceCategoricalTable = (traceData, options = {}) => { contentResizeHandler(); // 116 13 save focused box for keyboard movement - $('#cateTable table td').on('click', (e) => { // TODO 116 + $('#cateTable table td').on('click', (e) => { + // TODO 116 savedBox = e.currentTarget; - }) + }); initIndexModal(); @@ -735,10 +825,11 @@ const name = { serial: 'TermSerialColumn', order: 'TermSerialOrder', xOption: 'TermXOption', -} +}; let xOption = ''; let selectedSerials = null; let selectedProcess = null; +let selectedProcessSerial = null; // using to check xAxisModal2 let currentTable = null; let currentXOption = ''; let lastSelectedOrder = []; @@ -751,7 +842,7 @@ const initIndexModal = () => { oldXOption = currentXOption; xOptionSwitch.attr(CONST.DEFAULT_VALUE, currentXOption); resetCustomSelect(xOptionSwitch); - } + }; // set default 
checked switch setDefault(); @@ -763,15 +854,30 @@ const initIndexModal = () => { } if (option === CONST.XOPT_INDEX) { showSerialModal(formElements.serialTableModal2); - setSelect2Selection(formElements.serialTable2) - bindDragNDrop($(`${formElements.serialTable2} tbody`), formElements.serialTable2, name.serial); + setSelect2Selection(formElements.serialTable2); + bindDragNDrop( + $(`${formElements.serialTable2} tbody`), + formElements.serialTable2, + name.serial, + ); disableUnselectedOption(selectedSerials, name.serial); disableUnselectedOption(selectedProcess, name.process); $(formElements.btnAddSerial2).unbind('click'); $(formElements.btnAddSerial2).on('click', () => { - addSerialOrderRow(formElements.serialTable2, name.process, name.serial, name.order, null, null, null, null, true).then(() => { + addSerialOrderRow( + formElements.serialTable2, + name.process, + name.serial, + name.order, + null, + null, + null, + null, + true, + ).then(() => { initSelect(); + updateCurrentSelectedProcessSerial(name.serial); disableUnselectedOption(selectedSerials, name.serial); disableUnselectedOption(selectedProcess, name.process); }); @@ -784,7 +890,7 @@ const initIndexModal = () => { oldXOption = xOption; handleSubmit(false); } - }) + }); $(formElements.cancelOrderIndexModal).unbind('click'); $(formElements.cancelOrderIndexModal).on('click', function () { @@ -792,13 +898,18 @@ const initIndexModal = () => { renderTableContent(); if (currentXOption === CONST.XOPT_TIME) { $(formElements.tsXScale).val(currentXOption); - resetCustomSelect($(formElements.tsXScale)) + resetCustomSelect($(formElements.tsXScale)); } - }) + }); $(formElements.okOrderIndexModal).unbind('click'); $(formElements.okOrderIndexModal).on('click', () => { - getLastedSelectedValue(formElements.serialTable2, name.process, name.serial, name.order) + getLastedSelectedValue( + formElements.serialTable2, + name.process, + name.serial, + name.order, + ); xOption = CONST.XOPT_INDEX; currentXOption = xOption; // reset xAxisShowSettings @@ -806,45 +917,73 @@ const initIndexModal = () => { isShowIndexInGraphArea = true; oldXOption = xOption; handleSubmit(false); - }) -} + }); +}; function disableUnselectedOption(selectedSerials, serialName) { if (!selectedSerials) return; const serialSelects = $(`select[name=${serialName}]`); + const procData = getProcessColSelected(); serialSelects.each(function () { const selectElement = $(this); - const selectedVal = selectElement.val(); const options = [...selectElement.find('option')]; + const optionSelected = selectElement.val(); for (const opt of options) { const option = $(opt); const val = option.val(); + const isGetDate = option.attr('data-is-get-date') === 'true'; + const isSerialNo = option.attr('data-is-serial-no') === 'true'; + const procColId = option.attr('data-selected-proc-id'); + const serialColDataId = option.val(); + const currentOption = `${procColId}-${serialColDataId}`; + if (!selectedSerials.has(val)) { - option.attr('disabled', true); - } - if (!selectedSerials.has(selectedVal)) { - selectElement.val('').change(); + if ((isGetDate || isSerialNo) && procData.includes(procColId)) { + option.attr( + 'disabled', + selectedProcessSerial && + selectedProcessSerial.has(currentOption), + ); + } else { + option.attr('disabled', true); + } + } else if (procData.includes(procColId) && optionSelected !== val) { + option.attr( + 'disabled', + selectedProcessSerial && + selectedProcessSerial.has(currentOption), + ); + } else { + option.attr('disabled', false); } } }); } function initSelect() { - 
bindChangeProcessEvent(formElements.serialTable2, name.process, name.serial, () => { - disableUnselectedOption(selectedSerials, name.serial); - disableUnselectedOption(selectedProcess, name.process); - }); + bindChangeProcessEvent( + formElements.serialTable2, + name.process, + name.serial, + () => { + disableUnselectedOption(selectedSerials, name.serial); + disableUnselectedOption(selectedProcess, name.process); + }, + ); updatePriorityAndDisableSelected(formElements.serialTable2, name.serial); - setTimeout(() => { // wait select2 to be shown - bindChangeOrderColEvent(formElements.serialTable2, name.serial, () => { - }); + setTimeout(() => { + // wait select2 to be shown + bindChangeOrderColEvent( + formElements.serialTable2, + name.serial, + () => {}, + ); }, 200); } - function getLastedSelectedValue(tableID, procName, columnName, orderName) { lastSelectedOrder = []; const mainTableBody = $(`${tableID} tbody tr`); @@ -857,48 +996,90 @@ function getLastedSelectedValue(tableID, procName, columnName, orderName) { serialProcess: proc, serialColumn: column, serialOrder: order, - }) - }) + }); + }); - latestIndexOrder = [...lastSelectedOrder] + latestIndexOrder = [...lastSelectedOrder]; } function renderTableContent() { $(`${formElements.serialTable2} tbody`).html(''); // get start proc const startProc = getFirstSelectedProc(); - const hasAvailableOrderColumn = availableOrderingSettings[startProc] && availableOrderingSettings[startProc].length > 0; + // get serial + const procInfo = procConfigs[startProc]; + + const hasAvailableOrderColumn = + availableOrderingSettings[startProc] && + availableOrderingSettings[startProc].length > 0; + const noTableRow = lastSelectedOrder.length <= 0; const isShowDefaultRow = hasAvailableOrderColumn && noTableRow; if (isShowDefaultRow) { - addSerialOrderRow(formElements.serialTable2, name.process, name.serial, name.order, startProc, availableOrderingSettings[startProc][0], null, null, true); + addSerialOrderRow( + formElements.serialTable2, + name.process, + name.serial, + name.order, + startProc, + // sort availableOrderingSettings from min to max + availableOrderingSettings[startProc][0], + null, + null, + true, + ); } else { lastSelectedOrder.forEach((row, i) => { - addSerialOrderRow(formElements.serialTable2, name.process, name.serial, name.order, row.serialProcess, row.serialColumn, row.serialOrder, null, true); - }) + addSerialOrderRow( + formElements.serialTable2, + name.process, + name.serial, + name.order, + row.serialProcess, + row.serialColumn, + row.serialOrder, + null, + true, + ); + }); } setTimeout(() => { selectedSerials = getSelectedOrderCols(); - selectedProcess = getSelectedOrderCols(formElements.serialTable, 'serialProcess'); - const availableProcess = new Set(Object.keys(availableOrderingSettings)); + selectedProcess = getSelectedOrderCols( + formElements.serialTable, + 'serialProcess', + ); + const availableProcess = new Set( + Object.keys(availableOrderingSettings), + ); let availableSerials = []; - Object.values(availableOrderingSettings).forEach(cols => availableSerials.push(...cols)); - availableSerials = availableSerials.map(colID => String(colID)); + Object.values(availableOrderingSettings).forEach((cols) => + availableSerials.push(...cols), + ); + availableSerials = availableSerials.map((colID) => String(colID)); selectedSerials = new Set([...selectedSerials, ...availableSerials]); selectedProcess = new Set([...selectedProcess, ...availableProcess]); // disableUnselectedOption(selectedSerials, name.serial); // 
disableUnselectedOption(selectedProcess, name.process); initSelect(); - }, 2000) + }, 2000); } function initTableValue() { currentXOption = lastUsedFormData.get('xOption'); if (currentXOption === CONST.XOPT_INDEX) { - getLastedSelectedValue(formElements.serialTable, 'serialProcess', 'serialColumn', 'serialOrder') + getLastedSelectedValue( + formElements.serialTable, + 'serialProcess', + 'serialColumn', + 'serialOrder', + ); } - if (isSaveGraphSetting() && hasIndexOrderInGraphSetting(getGraphSettings())) { + if ( + isSaveGraphSetting() && + hasIndexOrderInGraphSetting(getGraphSettings()) + ) { const indexOrder = getIndexOrder(); if (indexOrder.length > 0) { lastSelectedOrder = indexOrder; @@ -916,13 +1097,13 @@ function transformIndexOrderParams(formData) { const key = item[0]; const value = item[1]; if (key === name.process) { - formData.append(name.process, value) + formData.append(name.process, value); } if (key === name.serial) { - formData.append(name.serial, value) + formData.append(name.serial, value); } if (key === name.order) { - formData.append(name.order, value) + formData.append(name.order, value); } } } else { @@ -934,15 +1115,15 @@ function transformIndexOrderParams(formData) { } function removeUnusedFormParams(formData, clearOnFlyFilter = false) { - formData.delete(name.process) - formData.delete(name.serial) - formData.delete(name.order) + formData.delete(name.process); + formData.delete(name.serial); + formData.delete(name.order); if (clearOnFlyFilter) { - if (formData.get('xOption') === CONST.XOPT_TIME) { - formData.delete('serialProcess') - formData.delete('serialColumn') - formData.delete('serialOrder') + if (formData.get('xOption') === CONST.XOPT_TIME) { + formData.delete('serialProcess'); + formData.delete('serialColumn'); + formData.delete('serialOrder'); } } @@ -954,7 +1135,7 @@ const bindGraphScaleEvent = () => { const scaleOption = $(this).children('option:selected').val() || '1'; updateGraphScale(scaleOption); }); -} +}; const initDuplicatedSerial = () => { const key = 'duplicated_serial'; @@ -967,4 +1148,4 @@ const initDuplicatedSerial = () => { lastUsedFormData.set(key, val); handleSubmit(false); }); -} +}; diff --git a/ap/static/trace_data/js/trace_data_cross_hair.js b/ap/static/trace_data/js/trace_data_cross_hair.js index deae83e..0f8856d 100644 --- a/ap/static/trace_data/js/trace_data_cross_hair.js +++ b/ap/static/trace_data/js/trace_data_cross_hair.js @@ -9,35 +9,65 @@ const celes = { cateTable: '#cateTable', }; -const colors = ['#91e2ff', '#9d9a53', '#ae6e54', '#603567', '#00af91', '#d7cece', '#470f0f', '#0f1451', - '#a4b790', '#4eb55d', '#bf4db4', '#ba8534']; +const colors = [ + '#91e2ff', + '#9d9a53', + '#ae6e54', + '#603567', + '#00af91', + '#d7cece', + '#470f0f', + '#0f1451', + '#a4b790', + '#4eb55d', + '#bf4db4', + '#ba8534', +]; /* -* VERTICAL LINE AT CATEGORY TABLE -* */ + * VERTICAL LINE AT CATEGORY TABLE + * */ function removeVerticalLine() { if (isRemoveCrosshair) { - $(celes.crossV).css({display: 'none'}); - $(celes.crossH).css({display: 'none'}); + $(celes.crossV).css({ display: 'none' }); + $(celes.crossH).css({ display: 'none' }); } isRemoveCrosshair = true; } -function showVerticalLine(offsetTop, offsetLeft, lineHeight, isLeft = true, index = 0) { - const color = colors[index] +function showVerticalLine( + offsetTop, + offsetLeft, + lineHeight, + isLeft = true, + index = 0, +) { + const color = colors[index]; const showLine = (crossV, crossH, offsetTop, offsetLeft, lineHeight) => { - crossV.css({display: 'block'}); - crossH.css({display: 
'block'}); - crossH.css({'border-left': `1px solid ${color}`}); - crossV.css({top: offsetTop}); - crossH.css({left: offsetLeft}); - crossH.css({height: lineHeight}); + crossV.css({ display: 'block' }); + crossH.css({ display: 'block' }); + crossH.css({ 'border-left': `1px solid ${color}` }); + crossV.css({ top: offsetTop }); + crossH.css({ left: offsetLeft }); + crossH.css({ height: lineHeight }); }; if (isLeft) { - showLine($(celes.crossVLeft), $(celes.crossHLeft), offsetTop, offsetLeft, lineHeight); + showLine( + $(celes.crossVLeft), + $(celes.crossHLeft), + offsetTop, + offsetLeft, + lineHeight, + ); } else { - showLine($(celes.crossVRight), $(celes.crossHRight), offsetTop, offsetLeft, lineHeight); + showLine( + $(celes.crossVRight), + $(celes.crossHRight), + offsetTop, + offsetLeft, + lineHeight, + ); } } @@ -76,23 +106,25 @@ const genVerticalCrossLineHtml = (edgeLeft, edgeRight, id) => {
`; - }; + } if (idRight) { const offsetTopRight = edgeRight.offset().top; - const offsetLeftRight = edgeRight.offset().left + edgeRight.outerWidth(); + const offsetLeftRight = + edgeRight.offset().left + edgeRight.outerWidth(); const lineHeighRight = $('#baseFooter').offset().top - offsetTopRight; crossRight = `
`; - }; + } return [crossLeft, crossRight]; }; const genAllCrossInLine = (lineId = null) => { - const cateBoxEl = lineId !== null ? `.cate-box[line=${lineId}]` : '.cate-box'; + const cateBoxEl = + lineId !== null ? `.cate-box[line=${lineId}]` : '.cate-box'; $(`${formElements.cateTable} ${cateBoxEl}`).parent().attr('gen-all', 1); $(`${formElements.cateTable} ${cateBoxEl}`).each(function () { genSingleCross(this); @@ -105,7 +137,11 @@ const genSingleCross = (e) => { const edgeLeft = $(e).find('.cate-edge-left'); const edgeRight = $(e).find('.cate-edge-right'); - const [crossLeft, crossRight] = genVerticalCrossLineHtml(edgeLeft, edgeRight, id); + const [crossLeft, crossRight] = genVerticalCrossLineHtml( + edgeLeft, + edgeRight, + id, + ); $('body').append(crossLeft); $('body').append(crossRight); @@ -113,7 +149,9 @@ const genSingleCross = (e) => { const hideAllCrossAnchorInline = (lineId) => { if (lineId) { - $(`${formElements.cateTable} .cate-box[line=${lineId}]`).parent().removeAttr('gen-all'); + $(`${formElements.cateTable} .cate-box[line=${lineId}]`) + .parent() + .removeAttr('gen-all'); $(`.cross[line=${lineId}]`).remove(); } else { $(`${formElements.cateTable} .cate-box`).parent().removeAttr('gen-all'); @@ -122,11 +160,13 @@ const hideAllCrossAnchorInline = (lineId) => { }; const resetPositionOfCrossLine = () => { - const anchorLabels = [...$('.cross-line[data-parent-id]')].map(el => $(el).attr('data-parent-id')); - hideAllCrossAnchorInline(); - for (const parentId of anchorLabels) { - handleShowAnchor($(`[data-id=${parentId}]`)); - } + const anchorLabels = [...$('.cross-line[data-parent-id]')].map((el) => + $(el).attr('data-parent-id'), + ); + hideAllCrossAnchorInline(); + for (const parentId of anchorLabels) { + handleShowAnchor($(`[data-id=${parentId}]`)); + } }; const hideOneCross = (line, col) => { @@ -141,12 +181,12 @@ const showOneCrossAnchor = (e) => { }; const addDrawVerticalLineEvent = () => { - $('.cate-box').on('click', (e) => { const target = e.target.closest('.cate-box'); const [line, col] = $(target).attr('id').split('-'); const id = `cate-edge-${line}-${col}`; - const hasCross = $(`#${id}-right-v`).length || $(`#${id}-left-v`).length; + const hasCross = + $(`#${id}-right-v`).length || $(`#${id}-left-v`).length; if (hasCross) { hideOneCross(line, col); } else { @@ -157,7 +197,11 @@ const addDrawVerticalLineEvent = () => { $('.cate-box').on('dblclick', (e) => { const target = e.target.closest('.cate-box'); const [line, _] = $(target).attr('id').split('-'); - const hasAllCross = $(`${formElements.cateTable} .cate-box[line=${line}]`).parent().attr('gen-all'); + const hasAllCross = $( + `${formElements.cateTable} .cate-box[line=${line}]`, + ) + .parent() + .attr('gen-all'); if (hasAllCross) { hideAllCrossAnchorInline(line); } else { @@ -190,7 +234,13 @@ const addDrawVerticalLineEvent = () => { isLeft = false; } const lineHeight = $('#baseFooter').offset().top - offsetTop; - showVerticalLine(offsetTop, offsetLeft, lineHeight, isLeft, index); + showVerticalLine( + offsetTop, + offsetLeft, + lineHeight, + isLeft, + index, + ); } }); }); @@ -199,7 +249,8 @@ const addDrawVerticalLineEvent = () => { function showVerticalLineOnClick(clickEvent) { // show red vertical line on category table when user click in graph const offsetTop = $(celes.cateTable)[0] - ? $(celes.cateTable).offset().top : $('#plot-cards').offset().top; + ? 
$(celes.cateTable).offset().top + : $('#plot-cards').offset().top; const offsetLeft = clickEvent.native.clientX; const lineHeight = $(celes.cateTable).height() || 0; showVerticalLine(offsetTop, offsetLeft, lineHeight); @@ -208,11 +259,17 @@ function showVerticalLineOnClick(clickEvent) { function removeCrossHairOfChart(graphObj, update = true) { if (!graphObj) return; try { - if (graphObj.options.plugins.annotation.annotations['crosshair-x'] - || graphObj.options.plugins.annotation.annotations['crosshair-y']) { - delete graphObj.options.plugins.annotation.annotations['crosshair-x']; - delete graphObj.options.plugins.annotation.annotations['crosshair-y']; - if (update) graphObj.update(mode = 'none'); + if ( + graphObj.options.plugins.annotation.annotations['crosshair-x'] || + graphObj.options.plugins.annotation.annotations['crosshair-y'] + ) { + delete graphObj.options.plugins.annotation.annotations[ + 'crosshair-x' + ]; + delete graphObj.options.plugins.annotation.annotations[ + 'crosshair-y' + ]; + if (update) graphObj.update((mode = 'none')); } } catch (e) { console.log(e); @@ -221,27 +278,27 @@ function removeCrossHairOfChart(graphObj, update = true) { function removeThresholdsOfChart(graphObj, type = CONST.ALL) { if (!graphObj) return; -// try { -// const verticalIds = [CONST.vUCL, CONST.vLCL, CONST.vUPCL, CONST.vLPCL]; -// const horizonalIds = [CONST.UCL, CONST.LCL, CONST.UPCL, CONST.LPCL]; -// let toBeRemovedIds = horizonalIds + verticalIds; -// if (type === CONST.VERTICAL) { -// toBeRemovedIds = verticalIds; -// } else if (type === CONST.HORIZONTAL) { -// toBeRemovedIds = horizonalIds; -// } -// const lines = graphObj.options.plugins.annotation.annotations; -// const newLines = []; -// for (let i = 0; i < lines.length; i++) { -// if (!toBeRemovedIds.includes(lines[i].id)) { -// newLines.push(lines[i]); -// } -// } -// graphObj.options.plugins.annotation.annotations = newLines; -// graphObj.update(mode='none'); -// } catch (e) { -// console.log(e); -// } + // try { + // const verticalIds = [CONST.vUCL, CONST.vLCL, CONST.vUPCL, CONST.vLPCL]; + // const horizonalIds = [CONST.UCL, CONST.LCL, CONST.UPCL, CONST.LPCL]; + // let toBeRemovedIds = horizonalIds + verticalIds; + // if (type === CONST.VERTICAL) { + // toBeRemovedIds = verticalIds; + // } else if (type === CONST.HORIZONTAL) { + // toBeRemovedIds = horizonalIds; + // } + // const lines = graphObj.options.plugins.annotation.annotations; + // const newLines = []; + // for (let i = 0; i < lines.length; i++) { + // if (!toBeRemovedIds.includes(lines[i].id)) { + // newLines.push(lines[i]); + // } + // } + // graphObj.options.plugins.annotation.annotations = newLines; + // graphObj.update(mode='none'); + // } catch (e) { + // console.log(e); + // } } // remove all cross hair when click outside of graph canvas @@ -272,14 +329,18 @@ $(document).on('click', (e) => { // hide index-inform-content when click const indexInfo = target.closest('.index-inform-content'); if (!indexInfo.length) { - $('.index-inform-content').css({display: 'none'}); + $('.index-inform-content').css({ display: 'none' }); } // hide all tooltips of cate box - $('.cate-tooltip').css({visibility: 'hidden'}); + $('.cate-tooltip').css({ visibility: 'hidden' }); }); -function removeAllCrossHair(updateTS = true, updateHist = true, updateSct = true) { +function removeAllCrossHair( + updateTS = true, + updateHist = true, + updateSct = true, +) { graphStore.getAllTimeSeries().forEach((graphObj) => { removeCrossHairOfChart(graphObj, updateTS); }); @@ -293,7 +354,6 @@ function 
removeAllCrossHair(updateTS = true, updateHist = true, updateSct = true }); } - const scatterHorizontalline = (yValue, color = CONST.CH_OTHER) => ({ type: 'line', id: 'crosshair-y', @@ -326,7 +386,11 @@ const scatterVertialLine = (xValue, color = CONST.CH_OTHER) => ({ }, }); -const histHorizontalLine = (yValue, props=undefined, color = CONST.CH_OTHER) => { +const histHorizontalLine = ( + yValue, + props = undefined, + color = CONST.CH_OTHER, +) => { // histogram is drawing as reverse mode // need to get item in chart from yValue let values = yValue; @@ -402,14 +466,19 @@ const drawCrossHairOnDoubleClick = (clickPosition, selectedCanvasId) => { return; } // draw vertical from corresponding xValue - const scatterDataPoint = scatterChartObject.data.datasets[0].data[clickPosition - 1]; + const scatterDataPoint = + scatterChartObject.data.datasets[0].data[clickPosition - 1]; if (scatterDataPoint) { const cXValue = scatterDataPoint.x; const cYValue = scatterDataPoint.y; if (!isEmpty(cXValue) && !isEmpty(cYValue)) { - scatterChartObject.options.plugins.annotation.annotations['crosshair-x'] = scatterVertialLine(cXValue, color); - scatterChartObject.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(cYValue, color); - scatterChartObject.update(mode = 'none'); + scatterChartObject.options.plugins.annotation.annotations[ + 'crosshair-x' + ] = scatterVertialLine(cXValue, color); + scatterChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = scatterHorizontalline(cYValue, color); + scatterChartObject.update((mode = 'none')); } } } @@ -418,20 +487,29 @@ const drawCrossHairOnDoubleClick = (clickPosition, selectedCanvasId) => { // draw horizontal from yValue const histChartObject = graphStore.getHistById(canvasId); // get time series chart of the row of histogram to get corresponding yValue - const coTimeSeriesGraph = graphStore.getTimeSeriesFromHist(canvasId); + const coTimeSeriesGraph = + graphStore.getTimeSeriesFromHist(canvasId); if (coTimeSeriesGraph) { // get coYValue from clickPosition - const coYValue = coTimeSeriesGraph.data.datasets[0].data[clickPosition]; + const coYValue = + coTimeSeriesGraph.data.datasets[0].data[clickPosition]; if (!isEmpty(coYValue) && histChartObject) { props = undefined; - if (histChartObject.data.rank_values && histChartObject.data.cat_labels) { + if ( + histChartObject.data.rank_values && + histChartObject.data.cat_labels + ) { props = { - rank_values: histChartObject.data.rank_values || undefined, - cat_labels: histChartObject.data.cat_labels || undefined, + rank_values: + histChartObject.data.rank_values || undefined, + cat_labels: + histChartObject.data.cat_labels || undefined, }; } - histChartObject.options.plugins.annotation.annotations['crosshair-y'] = histHorizontalLine(coYValue, props); - histChartObject.update(mode = 'none'); + histChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = histHorizontalLine(coYValue, props); + histChartObject.update((mode = 'none')); } } } @@ -441,26 +519,38 @@ const drawCrossHairOnDoubleClick = (clickPosition, selectedCanvasId) => { // draw horizontal from yValue // corresponding data point may be normal/irregular data point - const coClickedData = tsChartObject.data.datasets[0].data[clickPosition] - || tsChartObject.data.datasets[1].data[clickPosition]; + const coClickedData = + tsChartObject.data.datasets[0].data[clickPosition] || + tsChartObject.data.datasets[1].data[clickPosition]; const xValue = tsChartObject.data.labels[clickPosition]; // dont draw horizontal 
line if that data point is null if (!isEmpty(coClickedData)) { - tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = tsHorizonalLine(coClickedData, color); + tsChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = tsHorizonalLine(coClickedData, color); } // draw vertical from xValue - tsChartObject.options.plugins.annotation.annotations['crosshair-x'] = tsVerticalLine(xValue, color); - tsChartObject.update(mode = 'none'); + tsChartObject.options.plugins.annotation.annotations[ + 'crosshair-x' + ] = tsVerticalLine(xValue, color); + tsChartObject.update((mode = 'none')); } }); }; -const drawCrosshairSingleClick = (clickPosition, xValue, yValue, selectedCanvasId) => { +const drawCrosshairSingleClick = ( + clickPosition, + xValue, + yValue, + selectedCanvasId, +) => { removeAllCrossHair(false, true, true); // find histogram and scatter plot at the same row - const sameRowCanvases = $(`#${selectedCanvasId}`).closest('div .chart-row').find('canvas'); + const sameRowCanvases = $(`#${selectedCanvasId}`) + .closest('div .chart-row') + .find('canvas'); sameRowCanvases.each(function f() { const canvasId = $(this).attr('id'); const chartType = $(this).attr('chart-type'); @@ -472,8 +562,10 @@ const drawCrosshairSingleClick = (clickPosition, xValue, yValue, selectedCanvasI if (chartType === 'scatter') { const scatterChartObject = graphStore.getScatterById(canvasId); if (scatterChartObject) { - scatterChartObject.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(yValue, color); - scatterChartObject.update(mode = 'none'); + scatterChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = scatterHorizontalline(yValue, color); + scatterChartObject.update((mode = 'none')); } } @@ -481,14 +573,21 @@ const drawCrosshairSingleClick = (clickPosition, xValue, yValue, selectedCanvasI const histChartObject = graphStore.getHistById(canvasId); if (histChartObject) { props = undefined; - if (histChartObject.data.rank_values && histChartObject.data.cat_labels) { + if ( + histChartObject.data.rank_values && + histChartObject.data.cat_labels + ) { props = { - rank_values: histChartObject.data.rank_values || undefined, - cat_labels: histChartObject.data.cat_labels || undefined, + rank_values: + histChartObject.data.rank_values || undefined, + cat_labels: + histChartObject.data.cat_labels || undefined, }; } - histChartObject.options.plugins.annotation.annotations['crosshair-y'] = histHorizontalLine(yValue, props, color); - histChartObject.update(mode = 'none'); + histChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = histHorizontalLine(yValue, props, color); + histChartObject.update((mode = 'none')); } } @@ -496,8 +595,10 @@ const drawCrosshairSingleClick = (clickPosition, xValue, yValue, selectedCanvasI // draw vertical from xValue const tsChartObject = graphStore.getTimeSeriesById(canvasId); // draw horizontal from yValue - tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = tsHorizonalLine(yValue, color); - tsChartObject.update(mode = 'none'); + tsChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = tsHorizonalLine(yValue, color); + tsChartObject.update((mode = 'none')); } }); @@ -507,7 +608,8 @@ const drawCrosshairSingleClick = (clickPosition, xValue, yValue, selectedCanvasI if (canvasId === selectedCanvasId) { color = CONST.CH_SELF; } - graphObj.options.plugins.annotation.annotations['crosshair-x'] = tsVerticalLine(xValue, color); - graphObj.update(mode = 'none'); + 
graphObj.options.plugins.annotation.annotations['crosshair-x'] = + tsVerticalLine(xValue, color); + graphObj.update((mode = 'none')); }); }; diff --git a/ap/static/trace_data/js/trace_data_histogram.js b/ap/static/trace_data/js/trace_data_histogram.js index 6a31cd5..5cfe294 100644 --- a/ap/static/trace_data/js/trace_data_histogram.js +++ b/ap/static/trace_data/js/trace_data_histogram.js @@ -1,9 +1,3 @@ -/* eslint-disable prefer-destructuring */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ - -/* eslint-disable no-unused-vars */ - function YasuHistogram($, paramObj) { // 内部変数の初期化 const canvasId = setParam('canvasId', 'hist01'); @@ -24,7 +18,6 @@ function YasuHistogram($, paramObj) { valMax += 0.5; } - // ////////////// プライベート関数の定義 //////////////////// function setParam(key, defaultValue) { if (key in paramObj && !isEmpty(paramObj[key])) { @@ -41,7 +34,7 @@ function YasuHistogram($, paramObj) { const maxKDE = Math.max(...kdes); const maxHist = Math.max(...kdeData.hist_counts); - const transKDE = kdes.map(i => maxHist * i / maxKDE); + const transKDE = kdes.map((i) => (maxHist * i) / maxKDE); const data = { labels, @@ -70,7 +63,7 @@ function YasuHistogram($, paramObj) { borderWidth: 1, }); } - + const genDataTable = (yValue, nTotal) => { const valueLabel = $('#i18nValue').text(); let tblContent = ''; @@ -80,9 +73,9 @@ function YasuHistogram($, paramObj) { return tblContent; }; const externalTooltipHandler = (context) => { - const {chart, tooltip} = context; + const { chart, tooltip } = context; if (!tooltip.dataPoints) return; - const {offsetLeft: positionX, offsetTop: positionY} = chart.canvas; + const { offsetLeft: positionX, offsetTop: positionY } = chart.canvas; const dpIndex = tooltip.dataPoints[0].dataIndex; const histVal = tooltip.dataPoints[1].dataset.data[dpIndex]; let yValue = tooltip.title[0]; @@ -93,7 +86,7 @@ function YasuHistogram($, paramObj) { const topPosition = canvasOffset.top + positionY + tooltip.caretY; genDataPointHoverTable( genDataTable(yValue, histVal), - {x: leftPosition - 192, y: topPosition}, + { x: leftPosition - 192, y: topPosition }, 125, true, chart.canvas.id, @@ -146,7 +139,8 @@ function YasuHistogram($, paramObj) { beginAtZero: true, align: 'end', font: { - family: 'Calibri Light', size: 12, + family: 'Calibri Light', + size: 12, }, }, grid: { @@ -184,7 +178,9 @@ function YasuHistogram($, paramObj) { ticks: { // minRotation: 0, // maxRotation: 0, - sampleSize: beforeRankValues ? Object.keys(beforeRankValues).length : 8, + sampleSize: beforeRankValues + ? 
Object.keys(beforeRankValues).length + : 8, color: CONST.TICK, // show text before ranked instead of ranked value // callback(value, index, values) { @@ -226,16 +222,20 @@ function YasuHistogram($, paramObj) { } if (!isEmpty(threshHigh)) { - config.options.plugins.annotation.annotations.ucl = createHistHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); + config.options.plugins.annotation.annotations.ucl = + createHistHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); } if (!isEmpty(threshLow)) { - config.options.plugins.annotation.annotations.lcl = createHistHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); + config.options.plugins.annotation.annotations.lcl = + createHistHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); } if (!isEmpty(prcMin)) { - config.options.plugins.annotation.annotations.lpcl = createHistHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); + config.options.plugins.annotation.annotations.lpcl = + createHistHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); } if (!isEmpty(prcMax)) { - config.options.plugins.annotation.annotations.upcl = createHistHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); + config.options.plugins.annotation.annotations.upcl = + createHistHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); } const chart = new Chart(ctx, config); @@ -259,7 +259,12 @@ function YasuHistogram($, paramObj) { }; } -const createHistHorizonalThreshold = (threshHold, color = CONST.RED, id = CONST.UCL, borderDash = []) => ({ +const createHistHorizonalThreshold = ( + threshHold, + color = CONST.RED, + id = CONST.UCL, + borderDash = [], +) => ({ type: 'line', id, mode: 'horizontal', diff --git a/ap/static/trace_data/js/trace_data_histogram_with_kde.js b/ap/static/trace_data/js/trace_data_histogram_with_kde.js index bb2ca3c..fca84cd 100644 --- a/ap/static/trace_data/js/trace_data_histogram_with_kde.js +++ b/ap/static/trace_data/js/trace_data_histogram_with_kde.js @@ -1,6 +1,3 @@ -/* eslint-disable guard-for-in */ -/* eslint-disable no-restricted-syntax */ -// eslint-disable-next-line no-unused-vars const HistogramWithDensityCurve = ($, paramObj) => { // ////////////// プライベート関数の定義 //////////////////// const setParam = (key, defaultValue) => { @@ -32,7 +29,7 @@ const HistogramWithDensityCurve = ($, paramObj) => { const maxKDE = Math.max(...kdeData.kde); const maxHist = Math.max(...kdeData.hist_counts); - const transKDE = kdeData.kde.map(i => maxHist * i / maxKDE); + const transKDE = kdeData.kde.map((i) => (maxHist * i) / maxKDE); const kdeDensity = { y: kdeData.hist_labels, x: transKDE, @@ -94,7 +91,8 @@ const HistogramWithDensityCurve = ($, paramObj) => { categoryLabels.reverse(); categoryIds.sort().reverse(); // 4321 categoryLabels.forEach((catName) => { - const categoryCount = plotdata.category_distributed[catName].counts_org; + const categoryCount = + plotdata.category_distributed[catName].counts_org; stepChartDat.push(categoryCount); }); } @@ -103,8 +101,10 @@ const HistogramWithDensityCurve = ($, paramObj) => { const sortedGroups = { id: sortedCat, value: categoryLabels, - } - const xData = !isCatLimited ? genFullCategoryData(sortedCat, stepChartDat, sortedGroups) : []; + }; + const xData = !isCatLimited + ? genFullCategoryData(sortedCat, stepChartDat, sortedGroups) + : []; const barChart = { y: !isCatLimited ? sortedCat : [], x: xData, @@ -126,7 +126,7 @@ const HistogramWithDensityCurve = ($, paramObj) => { isbarchart: true, groupname: { value: sortedGroups.value || [], - } + }, }, }; const data = !beforeRankValues ? 
[histogram, kdeDensity] : [barChart]; @@ -205,7 +205,7 @@ const HistogramWithDensityCurve = ($, paramObj) => { // layout.yaxis.tickangle = 45; // layout.yaxis.tickmode = 'array'; layout.yaxis.tickvals = allGroupNames.id; - layout.yaxis.ticktext = allGroupNames.id.map(cat => ''); + layout.yaxis.ticktext = allGroupNames.id.map((cat) => ''); // layout.yaxis.range = [valMin - 1, valMax + 1]; const minYVal = Math.min(...allGroupNames.id); const maxYVal = Math.max(...allGroupNames.id); @@ -284,17 +284,17 @@ const HistogramWithDensityCurve = ($, paramObj) => { }); } - const hdPlot = document.getElementById(canvasId); - hdPlot.on('plotly_hover', (data) => { - if (data.points) { - drawShapes(data.points[0].x, data.points[0].y, true); - } - showInforTbl(data, true, canvasId); - }) + hdPlot + .on('plotly_hover', (data) => { + if (data.points) { + drawShapes(data.points[0].x, data.points[0].y, true); + } + showInforTbl(data, true, canvasId); + }) .on('plotly_unhover', (data) => { drawShapes(null, null, false); // $('#dp-info-content').hide(); clearHoverTimeOut(); }); -}; \ No newline at end of file +}; diff --git a/ap/static/trace_data/js/trace_data_scatter_plot.js b/ap/static/trace_data/js/trace_data_scatter_plot.js index 8dedd61..b9b860f 100644 --- a/ap/static/trace_data/js/trace_data_scatter_plot.js +++ b/ap/static/trace_data/js/trace_data_scatter_plot.js @@ -1,6 +1,6 @@ let avoidMultiClickCnt = 0; let currentScaleOption; -// eslint-disable-next-line no-unused-vars + const scatterChart = (ctx, data, prop) => { const contextMenu = { scatter: 'contextMenuScatter', @@ -14,7 +14,7 @@ const scatterChart = (ctx, data, prop) => { }; const externalScatterTooltipHandler = (context) => { - const {chart, tooltip} = context; + const { chart, tooltip } = context; if (!tooltip.dataPoints) return; const xValue = applySignificantDigit(tooltip.dataPoints[0].raw.x); const yValue = applySignificantDigit(tooltip.dataPoints[0].raw.y); @@ -26,17 +26,20 @@ const scatterChart = (ctx, data, prop) => { yName = `${prop.yColumnName}@${prop.yProcName}`; } else { xName = `${prop.xColumnName}`; - yName = `${prop.yColumnName}` + yName = `${prop.yColumnName}`; } - const {offsetLeft: positionX, offsetTop: positionY} = chart.canvas; + const { offsetLeft: positionX, offsetTop: positionY } = chart.canvas; // hover information table position const canvasOffset = $(`#${chart.canvas.id}`).offset(); const leftPosition = canvasOffset.left + positionX + tooltip.caretX; const topPosition = canvasOffset.top + positionY + tooltip.caretY; genDataPointHoverTable( - genHoverDataTable([[xName, xValue], [yName, yValue]]), - {x: leftPosition - 192, y: topPosition}, + genHoverDataTable([ + [xName, xValue], + [yName, yValue], + ]), + { x: leftPosition - 192, y: topPosition }, 125, true, chart.canvas.id, @@ -71,7 +74,8 @@ const scatterChart = (ctx, data, prop) => { align: 'end', color: CONST.TICK, font: { - family: 'Calibri Light', size: 12, + family: 'Calibri Light', + size: 12, }, callback(value, index, values) { let showVal = applySignificantDigit(value); @@ -126,7 +130,8 @@ const scatterChart = (ctx, data, prop) => { afterTickToLabelConversion: function adjust(context) { const { ticks } = context; context.ticks[0].label = ''; - if (ticks.length) context.ticks[ticks.length - 1].label = ''; + if (ticks.length) + context.ticks[ticks.length - 1].label = ''; }, afterFit(scaleInstance) { scaleInstance.width = 60; // sets the width to 100px @@ -158,7 +163,7 @@ const scatterChart = (ctx, data, prop) => { tooltip: { displayColors: false, enabled: false, - 
external: externalScatterTooltipHandler, + external: externalScatterTooltipHandler, }, }, events: ['click', 'mousemove'], @@ -188,7 +193,12 @@ const scatterChart = (ctx, data, prop) => { } }, onHover(e) { - const point = this.getElementsAtEventForMode(e, 'nearest', { intersect: true }, false); + const point = this.getElementsAtEventForMode( + e, + 'nearest', + { intersect: true }, + false, + ); // if (point.length) e.target.style.cursor = 'pointer'; // else e.target.style.cursor = 'default'; @@ -227,39 +237,63 @@ const scatterChart = (ctx, data, prop) => { // procThresholds.xLow if (!isEmpty(procThresholds.xLow)) { - chartOptions.plugins.annotation.annotations.vlpcl = createVerticalThreshold(procThresholds.xLow, CONST.BLUE, CONST.vLPCL); + chartOptions.plugins.annotation.annotations.vlpcl = + createVerticalThreshold( + procThresholds.xLow, + CONST.BLUE, + CONST.vLPCL, + ); } // procThresholds.xHigh if (!isEmpty(procThresholds.xHigh)) { - chartOptions.plugins.annotation.annotations.vupcl = createVerticalThreshold(procThresholds.xHigh, CONST.BLUE, CONST.vUPCL); + chartOptions.plugins.annotation.annotations.vupcl = + createVerticalThreshold( + procThresholds.xHigh, + CONST.BLUE, + CONST.vUPCL, + ); } // procThresholds.yLow if (!isEmpty(procThresholds.yLow)) { - chartOptions.plugins.annotation.annotations.lpcl = createHorizonalThreshold(procThresholds.yLow, CONST.BLUE, CONST.LPCL); + chartOptions.plugins.annotation.annotations.lpcl = + createHorizonalThreshold( + procThresholds.yLow, + CONST.BLUE, + CONST.LPCL, + ); } // procThresholds.yHigh if (!isEmpty(procThresholds.yHigh)) { - chartOptions.plugins.annotation.annotations.upcl = createHorizonalThreshold(procThresholds.yHigh, CONST.BLUE, CONST.UPCL); + chartOptions.plugins.annotation.annotations.upcl = + createHorizonalThreshold( + procThresholds.yHigh, + CONST.BLUE, + CONST.UPCL, + ); } // draw line annotation if (!isEmpty(uclThresholds.xLow)) { - chartOptions.plugins.annotation.annotations.vlcl = createVerticalThreshold(uclThresholds.xLow, CONST.RED, CONST.vLCL); + chartOptions.plugins.annotation.annotations.vlcl = + createVerticalThreshold(uclThresholds.xLow, CONST.RED, CONST.vLCL); } if (!isEmpty(uclThresholds.xHigh)) { - chartOptions.plugins.annotation.annotations.vucl = createVerticalThreshold(uclThresholds.xHigh, CONST.RED, CONST.vUCL); + chartOptions.plugins.annotation.annotations.vucl = + createVerticalThreshold(uclThresholds.xHigh, CONST.RED, CONST.vUCL); } if (!isEmpty(uclThresholds.yLow)) { - chartOptions.plugins.annotation.annotations.lcl = createHorizonalThreshold(uclThresholds.yLow, CONST.RED, CONST.LCL); + chartOptions.plugins.annotation.annotations.lcl = + createHorizonalThreshold(uclThresholds.yLow, CONST.RED, CONST.LCL); } if (!isEmpty(uclThresholds.yHigh)) { - chartOptions.plugins.annotation.annotations.ucl = createHorizonalThreshold(uclThresholds.yHigh, CONST.RED, CONST.UCL); + chartOptions.plugins.annotation.annotations.ucl = + createHorizonalThreshold(uclThresholds.yHigh, CONST.RED, CONST.UCL); } // destroy instance @@ -298,7 +332,8 @@ const scatterChart = (ctx, data, prop) => { canvas.addEventListener('contextmenu', showContextMenu, false); canvas.addEventListener('mousedown', handleMouseDown, false); - function handleMouseDown(e) { // later, not just mouse down, + mouseout of menu + function handleMouseDown(e) { + // later, not just mouse down, + mouseout of menu menu.style.display = 'none'; } @@ -319,7 +354,12 @@ function scatterPlotOnDbClick(chart, event) { // removeAllCrossHair(false); // get clicked x,y values to 
draw cross hair lines - const eventElement = chart.getElementsAtEventForMode(event, 'nearest', { intersect: true }, false); + const eventElement = chart.getElementsAtEventForMode( + event, + 'nearest', + { intersect: true }, + false, + ); if (!eventElement || eventElement.length < 1) return; const clickedIdx = eventElement[0].index + 1; @@ -335,7 +375,12 @@ function scatterPlotOnDbClick(chart, event) { function scatterPlotOnClick(chart, event) { // get clicked x,y values to draw cross hair lines - const eventElement = chart.getElementsAtEventForMode(event, 'nearest', { intersect: true }, false); + const eventElement = chart.getElementsAtEventForMode( + event, + 'nearest', + { intersect: true }, + false, + ); if (!eventElement || eventElement.length < 1) { removeAllCrossHair(true, true, true); return; @@ -350,12 +395,16 @@ function scatterPlotOnClick(chart, event) { const xValue = clickedDataPoint.x; const yValue = clickedDataPoint.y; // console.log('xValue=', xValue, ', yValue=', yValue); - chart.options.plugins.annotation.annotations['crosshair-x'] = scatterVertialLine(xValue, CONST.CH_SELF); - chart.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(yValue, CONST.CH_SELF); - chart.update(mode = 'none'); + chart.options.plugins.annotation.annotations['crosshair-x'] = + scatterVertialLine(xValue, CONST.CH_SELF); + chart.options.plugins.annotation.annotations['crosshair-y'] = + scatterHorizontalline(yValue, CONST.CH_SELF); + chart.update((mode = 'none')); // same row TODO use class to get - const sameRowCanvases = $(event.chart.canvas).closest('div .chart-row').find('canvas'); + const sameRowCanvases = $(event.chart.canvas) + .closest('div .chart-row') + .find('canvas'); sameRowCanvases.each(function f() { const canvasId = $(this).attr('id'); const chartType = $(this).attr('chart-type'); @@ -363,16 +412,20 @@ function scatterPlotOnClick(chart, event) { if (chartType === 'histogram') { const histChartObject = graphStore.getHistById(canvasId); if (histChartObject) { - histChartObject.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(yValue); - histChartObject.update(mode = 'none'); + histChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = scatterHorizontalline(yValue); + histChartObject.update((mode = 'none')); } } if (chartType === 'timeSeries') { // draw horizontal from yValue const tsChartObject = graphStore.getTimeSeriesById(canvasId); - tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = tsHorizonalLine(yValue); - tsChartObject.update(mode = 'none'); + tsChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = tsHorizonalLine(yValue); + tsChartObject.update((mode = 'none')); } }); @@ -412,14 +465,17 @@ const drawNextChartCrosshair = (currentCanvasId, clickedIdx) => { const tsChartObject = graphStore.getTimeSeriesById(nextTsCanvasId); const coYValue = tsChartObject.data.datasets[0].data[clickedIdx + 1]; if (!isEmpty(coYValue)) { - tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = tsHorizonalLine(coYValue, CONST.CH_OTHER); - tsChartObject.update(mode = 'none'); + tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = + tsHorizonalLine(coYValue, CONST.CH_OTHER); + tsChartObject.update((mode = 'none')); } }; const getPlotData = (data, xSensorIdx, ySensorIdx) => { - if (xSensorIdx > data.array_plotdata.length - 1 - || ySensorIdx > data.array_plotdata.length - 1) { + if ( + xSensorIdx > data.array_plotdata.length - 1 || + ySensorIdx > 
data.array_plotdata.length - 1 + ) { return null; } const xData = data.array_plotdata[xSensorIdx]; @@ -458,16 +514,29 @@ const generateScatterData = (xArr, yArr) => { return data; }; - const getThresholdData = (data, xSensorIdx, ySensorIdx) => { const chartInfosX = data.array_plotdata[xSensorIdx].chart_infos || []; - const chartInfosXOrg = data.array_plotdata[xSensorIdx].chart_infos_org || []; + const chartInfosXOrg = + data.array_plotdata[xSensorIdx].chart_infos_org || []; const chartInfosY = data.array_plotdata[ySensorIdx].chart_infos || []; - const chartInfosYOrg = data.array_plotdata[ySensorIdx].chart_infos_org || []; - const [latestChartInfoX, _x] = chooseLatestThresholds(chartInfosX, chartInfosXOrg); - const [latestChartInfoY, _y] = chooseLatestThresholds(chartInfosY, chartInfosYOrg); - const xScaleOption = getScaleInfo(data.array_plotdata[xSensorIdx], currentScaleOption); - const yScaleOption = getScaleInfo(data.array_plotdata[ySensorIdx], currentScaleOption); + const chartInfosYOrg = + data.array_plotdata[ySensorIdx].chart_infos_org || []; + const [latestChartInfoX, _x] = chooseLatestThresholds( + chartInfosX, + chartInfosXOrg, + ); + const [latestChartInfoY, _y] = chooseLatestThresholds( + chartInfosY, + chartInfosYOrg, + ); + const xScaleOption = getScaleInfo( + data.array_plotdata[xSensorIdx], + currentScaleOption, + ); + const yScaleOption = getScaleInfo( + data.array_plotdata[ySensorIdx], + currentScaleOption, + ); return { procThresholds: { @@ -520,7 +589,8 @@ const produceScatterPlotCharts = (data, scaleOption = null) => { const scatterData = generateScatterData(pdt.xArr, pdt.yArr); // get threshold data - const { uclThresholds, procThresholds, scaleMinMaxTicks } = getThresholdData(data, xSensorIdx, ySensorIdx); + const { uclThresholds, procThresholds, scaleMinMaxTicks } = + getThresholdData(data, xSensorIdx, ySensorIdx); // chart properties const prop = { @@ -550,8 +620,10 @@ const produceScatterPlotCharts = (data, scaleOption = null) => { const redrawScatterAfterMoveCart = () => { // destroy all scatter plots + reset dict for (const graphIdx in Chart.instances) { - if (Chart.instances[graphIdx].canvas - && $(Chart.instances[graphIdx].canvas).attr('chart-type') === 'scatter') { + if ( + Chart.instances[graphIdx].canvas && + $(Chart.instances[graphIdx].canvas).attr('chart-type') === 'scatter' + ) { try { Chart.instances[graphIdx].destroy(); // destroy scatter plot instances } catch (e) { @@ -581,7 +653,8 @@ const redrawScatterAfterMoveCart = () => { // re-new scatter axies $('.sctr-plot-ts').each((k, scatterContainter) => { // x is vertical axis -> sensor data of the next card - const newXSensorIdx = latestSensorOrders[(k + 1) % latestSensorOrders.length]; + const newXSensorIdx = + latestSensorOrders[(k + 1) % latestSensorOrders.length]; $(scatterContainter).attr('x-sensor-idx', newXSensorIdx); }); @@ -590,13 +663,12 @@ const redrawScatterAfterMoveCart = () => { graphStore.setDctCanvas2Scatter(scatterPlots); }; - const addTimeSeriesCardSortableEventHandler = () => { /* - * When user change order of timeseries card: - * + we store order/position of those column/sensor to db. - * + and re-draw scatter plots. - * */ + * When user change order of timeseries card: + * + we store order/position of those column/sensor to db. + * + and re-draw scatter plots. 
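+     *   (the x axis of each scatter pairs with the next card's sensor, so
+     *   the x/y indices must be re-derived from the new card order; see the
+     *   (k + 1) % length mapping in redrawScatterAfterMoveCart above)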
+ * */ $('.ui-sortable').sortable({ update(event, ui) { // redraw scatter plots @@ -630,71 +702,84 @@ const addTimeSeriesCardSortableEventHandler = () => { orders: procSensorOrders, }), }) - .then(response => response.clone().json()) - .then(() => { - }) - .catch(() => { - }); + .then((response) => response.clone().json()) + .then(() => {}) + .catch(() => {}); }, }); }; - const handleSelectSCPMenuItem = (selectedItem = 'click') => { const selectedCanvasId = graphStore.getSelectedCanvas(); if (!selectedCanvasId) return; const scpChartObj = graphStore.getScatterById(selectedCanvasId); if (!scpChartObj) return; - const lastHoveredDataPoint = graphStore.getLastHoveredDataPoint(selectedCanvasId); + const lastHoveredDataPoint = + graphStore.getLastHoveredDataPoint(selectedCanvasId); if (!lastHoveredDataPoint) return; const { index, datasetIndex } = lastHoveredDataPoint; - const clickedDataPoint = scpChartObj.data.datasets[datasetIndex].data[index]; + const clickedDataPoint = + scpChartObj.data.datasets[datasetIndex].data[index]; const yValue = clickedDataPoint.y; const xValue = clickedDataPoint.x; switch (selectedItem) { - case 'click': { - removeAllCrossHair(true, true, true); - scpChartObj.options.plugins.annotation.annotations['crosshair-x'] = scatterVertialLine(xValue, CONST.CH_SELF); - scpChartObj.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(yValue, CONST.CH_SELF); - scpChartObj.update(mode = 'none'); - - // same row TODO use class to get - const sameRowCanvases = $(`#${selectedCanvasId}`).closest('div .chart-row').find('canvas'); - sameRowCanvases.each(function f() { - const canvasId = $(this).attr('id'); - const chartType = $(this).attr('chart-type'); - - if (chartType === 'histogram') { - const histChartObject = graphStore.getHistById(canvasId); - if (histChartObject) { - histChartObject.options.plugins.annotation.annotations['crosshair-y'] = scatterHorizontalline(yValue); - histChartObject.update(mode = 'none'); + case 'click': { + removeAllCrossHair(true, true, true); + scpChartObj.options.plugins.annotation.annotations['crosshair-x'] = + scatterVertialLine(xValue, CONST.CH_SELF); + scpChartObj.options.plugins.annotation.annotations['crosshair-y'] = + scatterHorizontalline(yValue, CONST.CH_SELF); + scpChartObj.update((mode = 'none')); + + // same row TODO use class to get + const sameRowCanvases = $(`#${selectedCanvasId}`) + .closest('div .chart-row') + .find('canvas'); + sameRowCanvases.each(function f() { + const canvasId = $(this).attr('id'); + const chartType = $(this).attr('chart-type'); + + if (chartType === 'histogram') { + const histChartObject = graphStore.getHistById(canvasId); + if (histChartObject) { + histChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = scatterHorizontalline(yValue); + histChartObject.update((mode = 'none')); + } } - } - if (chartType === 'timeSeries') { - // draw horizontal from yValue - const tsChartObject = graphStore.getTimeSeriesById(canvasId); - tsChartObject.options.plugins.annotation.annotations['crosshair-y'] = tsHorizonalLine(yValue); - tsChartObject.update(mode = 'none'); - } - }); + if (chartType === 'timeSeries') { + // draw horizontal from yValue + const tsChartObject = + graphStore.getTimeSeriesById(canvasId); + tsChartObject.options.plugins.annotation.annotations[ + 'crosshair-y' + ] = tsHorizonalLine(yValue); + tsChartObject.update((mode = 'none')); + } + }); - // draw horizontal crosshair for the next timeseries - drawNextChartCrosshair(selectedCanvasId, index); + // draw 
horizontal crosshair for the next timeseries + drawNextChartCrosshair(selectedCanvasId, index); - break; - } - case 'doubleClick': { - drawCrossHairOnDoubleClick(index + 1, selectedCanvasId); - break; - } - default: + break; + } + case 'doubleClick': { + drawCrossHairOnDoubleClick(index + 1, selectedCanvasId); + break; + } + default: } }; -const createHorizonalThreshold = (threshHold, color = CONST.RED, id = CONST.UCL, borderDash = []) => ({ +const createHorizonalThreshold = ( + threshHold, + color = CONST.RED, + id = CONST.UCL, + borderDash = [], +) => ({ type: 'line', id, mode: 'horizontal', @@ -705,7 +790,12 @@ const createHorizonalThreshold = (threshHold, color = CONST.RED, id = CONST.UCL, borderDash, }); -const createVerticalThreshold = (threshHold, color = CONST.RED, id = CONST.vUCL, borderDash = []) => ({ +const createVerticalThreshold = ( + threshHold, + color = CONST.RED, + id = CONST.vUCL, + borderDash = [], +) => ({ type: 'line', id, mode: 'vertical', diff --git a/ap/static/trace_data/js/trace_data_step_bar_chart.js b/ap/static/trace_data/js/trace_data_step_bar_chart.js index e54970c..844b9eb 100644 --- a/ap/static/trace_data/js/trace_data_step_bar_chart.js +++ b/ap/static/trace_data/js/trace_data_step_bar_chart.js @@ -26,7 +26,6 @@ const StepBarChart = ($, paramObj) => { }; } - // ////////////// プライベート関数の定義 //////////////////// function setParam(key, defaultValue) { if (key in paramObj && !isEmpty(paramObj[key])) { @@ -49,8 +48,12 @@ const StepBarChart = ($, paramObj) => { categoryLabels.reverse(); // get origin step bar chart data - const stepChartDat = categoryLabels.map(label => categoryDistributed[label].pctg); - const shortCatLabels = categoryLabels.map(label => categoryDistributed[label].short_name); + const stepChartDat = categoryLabels.map( + (label) => categoryDistributed[label].pctg, + ); + const shortCatLabels = categoryLabels.map( + (label) => categoryDistributed[label].short_name, + ); const data = { labels: isCatLimited ? [] : shortCatLabels, datasets: [ @@ -78,25 +81,29 @@ const StepBarChart = ($, paramObj) => { return tblContent; }; - const externalTooltipHandler = (context) => { - const {chart, tooltip} = context; + const externalTooltipHandler = (context) => { + const { chart, tooltip } = context; if (!tooltip.dataPoints) return; - const {offsetLeft: positionX, offsetTop: positionY} = chart.canvas; + const { offsetLeft: positionX, offsetTop: positionY } = chart.canvas; const canvasOffset = $(`#${chart.canvas.id}`).offset(); const leftPosition = canvasOffset.left + positionX + tooltip.caretX; const topPosition = canvasOffset.top + positionY + tooltip.caretY; const dataIndex = tooltip.dataPoints[0].dataIndex; const plotData = graphStore.getArrayPlotData(chart.canvas.id); const categoryName = categoryLabels ? 
categoryLabels[dataIndex] : null; - const [cateName, count, ratio] = getStepChartHoverInfo(dataIndex, categoryName, plotData) + const [cateName, count, ratio] = getStepChartHoverInfo( + dataIndex, + categoryName, + plotData, + ); genDataPointHoverTable( genDataTable(cateName, count, ratio), - {x: leftPosition - 192, y: topPosition}, + { x: leftPosition - 192, y: topPosition }, 125, true, chart.canvas.id, ); - } + }; const config = { type: 'bar', @@ -146,7 +153,8 @@ const StepBarChart = ($, paramObj) => { maxTicksLimit: 8, align: 'end', font: { - family: 'Calibri Light', size: 12, + family: 'Calibri Light', + size: 12, }, }, grid: { @@ -190,21 +198,28 @@ const StepBarChart = ($, paramObj) => { }; if (!isEmpty(threshHigh)) { - config.options.plugins.annotation.annotations.ucl = createHistHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); + config.options.plugins.annotation.annotations.ucl = + createHistHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); } if (!isEmpty(threshLow)) { - config.options.plugins.annotation.annotations.lcl = createHistHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); + config.options.plugins.annotation.annotations.lcl = + createHistHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); } if (!isEmpty(prcMin)) { - config.options.plugins.annotation.annotations.lpcl = createHistHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); + config.options.plugins.annotation.annotations.lpcl = + createHistHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); } if (!isEmpty(prcMax)) { - config.options.plugins.annotation.annotations.upcl = createHistHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); + config.options.plugins.annotation.annotations.upcl = + createHistHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); } if (isCatLimited) { config.options.plugins.annotation.annotations['catLimited'] = { - type: 'label', content: [...i18n.catLimitMsg], color: '#65c5f1', font: { size: 9 }, + type: 'label', + content: [...i18n.catLimitMsg], + color: '#65c5f1', + font: { size: 9 }, }; } @@ -230,7 +245,8 @@ const StepBarChart = ($, paramObj) => { }; const getStepChartHoverInfo = (categoryIndex, categoryName, plotDat) => { - let count, ratio = ''; + let count, + ratio = ''; if (!categoryName && plotDat.before_rank_values) { const beforeRankVals = [...plotDat.before_rank_values[0]]; // because current we draw items from bot -> top, diff --git a/ap/static/trace_data/js/trace_data_summary_table.js b/ap/static/trace_data/js/trace_data_summary_table.js index 7b360fe..f71bec7 100644 --- a/ap/static/trace_data/js/trace_data_summary_table.js +++ b/ap/static/trace_data/js/trace_data_summary_table.js @@ -1,9 +1,23 @@ -const buildTimeSeriesSummaryResultsHTML = (summaryOption, tableIndex, generalInfo, beforeRankValues = null, stepChartSummary = null, isCTCol = false) => { +const buildTimeSeriesSummaryResultsHTML = ( + summaryOption, + tableIndex, + generalInfo, + beforeRankValues = null, + stepChartSummary = null, + isCTCol = false, + unit = null, +) => { const { getProc } = generalInfo; const { getVal } = generalInfo; const { catExpBox } = generalInfo; let catExpBoxHtml = ''; + if (unit && unit !== '' && unit !== 'Null') { + unit = ` [${unit}]`; + } else { + unit = ''; + } + let CTLabel = ''; if (isCTCol) { CTLabel = `(${DataTypes.DATETIME.short}) [sec]`; @@ -29,7 +43,7 @@ const buildTimeSeriesSummaryResultsHTML = (summaryOption, tableIndex, generalInf - ${getVal} ${CTLabel} + ${getVal}${unit} ${CTLabel} ${catExpBoxHtml} @@ -37,7 +51,13 @@ const buildTimeSeriesSummaryResultsHTML = 
(summaryOption, tableIndex, generalInf
 `;
-    const summaryHtml = buildSummaryResultsHTML(summaryOption, tableIndex, generalInfo, beforeRankValues, stepChartSummary);
+    const summaryHtml = buildSummaryResultsHTML(
+        summaryOption,
+        tableIndex,
+        generalInfo,
+        beforeRankValues,
+        stepChartSummary,
+    );
     return `
@@ -45,11 +65,13 @@ const buildTimeSeriesSummaryResultsHTML = (summaryOption, tableIndex, generalInf
     ${summaryHtml}
`; - }; const removeClass = (element) => { - const colClasses = element.prop('className').split(' ').filter(x => x.startsWith('col-sm')); + const colClasses = element + .prop('className') + .split(' ') + .filter((x) => x.startsWith('col-sm')); for (const cls of colClasses) { element.removeClass(cls); } @@ -116,7 +138,6 @@ const onChangeSummaryEventHandler = (showScatterPlot) => { }); }; - const onChangeHistSummaryEventHandler = (e) => { let summaryHeight = null; const summaryClass = $(e).val(); @@ -160,9 +181,8 @@ const onChangeHistSummaryEventHandler = (e) => { Plotly.relayout(histogramId, {}); }); - // mark this option as checked and remove others $(e).attr('data-checked', 'true'); $('input[name=summaryOption]:not(:checked)').removeAttr('data-checked'); } -}; \ No newline at end of file +}; diff --git a/ap/static/trace_data/js/trace_data_time_series.js b/ap/static/trace_data/js/trace_data_time_series.js index b505180..01f28d3 100644 --- a/ap/static/trace_data/js/trace_data_time_series.js +++ b/ap/static/trace_data/js/trace_data_time_series.js @@ -1,18 +1,16 @@ -/* eslint-disable */ -/* eslint-disable no-underscore-dangle */ -/* eslint-disable no-undef */ -/* eslint-disable no-use-before-define */ -/* eslint-disable no-unused-vars */ -/* eslint-disable prefer-destructuring */ - let avoidMultiClickCntTS = 0; const THIN_DATA_COUNT = 4000; // limit category label to show as substring // and three dots ('JP0123...') const CAT_LABEL_LIMIT = 12; - -function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIME') { +function YasuTsChart( + $, + paramObj, + chartLabels = null, + tabID = null, + xaxis = 'TIME', +) { const canvasId = setParam('canvasId', 'chart01'); const procId = setParam('procId', null); let tsData = setParam('tsData', []); @@ -52,8 +50,8 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const isThinData = setParam('isThinData', false); if (beforeRankValues) { - minY -= 0.5; - maxY += 0.5; + minY -= CONST.RESIZE_RANGE_CHART; + maxY += CONST.RESIZE_RANGE_CHART; } // NOTE: append start_datetime and end_datetime in GUI to data. 
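+    // assuming CONST.RESIZE_RANGE_CHART keeps the previous 0.5 value, ranked
+    // series get half a category slot of padding at both ends of the y axis
+    // (e.g. ranks 1..3 plot on a y range of [0.5, 3.5], one slot per category)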
@@ -79,7 +77,8 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM } const decimation = { - enabled: true, algorithm: 'min-max', + enabled: true, + algorithm: 'min-max', }; let tickConfig = { @@ -105,7 +104,7 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM let step = (maxTick / tickCount).toFixed(0); const zeroPad = step.length - 1; - const zeroPadStr = ''.padEnd(zeroPad, '0') + const zeroPadStr = ''.padEnd(zeroPad, '0'); let checkAllEndZero = true; for (let i = 1; i < step.length; i++) { if (step[i] !== '0') { @@ -124,7 +123,7 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM for (let i = 1; i <= tickCount; i++) { const label = i * step; if (label < maxTick && label > lastTick) { - ticks.push(label) + ticks.push(label); lastTick = label; } } @@ -173,7 +172,7 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM // external handler of tooltip const externalTooltipHandler = (context) => { // Tooltip Element - const {chart, tooltip} = context; + const { chart, tooltip } = context; const tooltipEl = getOrCreateTooltip(chart); if (!tooltip.dataPoints) return; // Hide if no tooltip @@ -211,7 +210,9 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const y = dataPoint.parsed.y; if (dicRankLabels) { const rankLabel = dicRankLabels[y]; - return rankLabel ? `Cat${(`0${Number(y)}`).slice(-2)}: ${rankLabel}` : COMMON_CONSTANT.NA; + return rankLabel + ? `Cat${`0${Number(y)}`.slice(-2)}: ${rankLabel}` + : COMMON_CONSTANT.NA; } return `${applySignificantDigit(y)}`; @@ -221,8 +222,8 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const datasetIndex = dataPoint.datasetIndex; const dataIndex = dataPoint.dataIndex; let medVal = dataPoint.parsed.y; - let minVal = '' - let maxVal = '' + let minVal = ''; + let maxVal = ''; const inf = CONST.INF; const negInf = CONST.NEG_INF; const noLinked = CONST.NO_LINKED; @@ -238,7 +239,7 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM medVal = COMMON_CONSTANT.NA; } if (unlinkedIdxs.includes(dataIndex)) { - medVal = noLinked; // TODO which label for unlinked data? + medVal = noLinked; // TODO which label for unlinked data? } if (isOutlierValue(dataIndex, outlierDict)) { // Sprint 79 #12: Display actual value if type is OUTLIER @@ -263,7 +264,12 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM medVal = ''; } - if ((COMMON_CONSTANT.NA === medVal && COMMON_CONSTANT.NA === minVal && COMMON_CONSTANT.NA === maxVal) || medVal === noLinked) { + if ( + (COMMON_CONSTANT.NA === medVal && + COMMON_CONSTANT.NA === minVal && + COMMON_CONSTANT.NA === maxVal) || + medVal === noLinked + ) { minVal = ''; maxVal = ''; } @@ -277,7 +283,9 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM if (!threholdInfo['type']) { return i18n.default; } - return (currentLocale === localeConst.JP) ? threholdInfo['type'] : threholdInfo['eng_name']; + return currentLocale === localeConst.JP + ? threholdInfo['type'] + : threholdInfo['eng_name']; }; // get threshold of timerange const getThresholdInfor = (dataPoint) => { @@ -287,44 +295,77 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM // not time value in TSP, this time is time of end proc, not time of start proc const plotData = currentTraceData.array_plotdata[dataIndex]; const filterCond = plotData.catExpBox - ? 
(Array.isArray(plotData.catExpBox) ? plotData.catExpBox : [plotData.catExpBox]) + ? Array.isArray(plotData.catExpBox) + ? plotData.catExpBox + : [plotData.catExpBox] : null; - const [chartInfos, chartInfosOrg] = getChartInfo(plotData, 'TIME', filterCond); - const clickedVal = currentTraceData.array_plotdata[dataIndex].array_x[dataPoint.dataIndex]; - const [latestChartInfo,] = chooseLatestThresholds(chartInfos, chartInfosOrg, clickedVal); + const [chartInfos, chartInfosOrg] = getChartInfo( + plotData, + 'TIME', + filterCond, + ); + const clickedVal = + currentTraceData.array_plotdata[dataIndex].array_x[ + dataPoint.dataIndex + ]; + const [latestChartInfo] = chooseLatestThresholds( + chartInfos, + chartInfosOrg, + clickedVal, + ); const threshHigh = latestChartInfo['thresh-high'] || ''; const threshLow = latestChartInfo['thresh-low'] || ''; const prcMax = latestChartInfo['prc-max'] || ''; const prcMin = latestChartInfo['prc-min'] || ''; - const validFrom = latestChartInfo['act-from'] ? moment(latestChartInfo['act-from']).format('YYYY-MM-DD HH:mm:ss') : ''; - const validTo = latestChartInfo['act-to'] ? moment(latestChartInfo['act-to']).format('YYYY-MM-DD HH:mm:ss') : ''; + const validFrom = latestChartInfo['act-from'] + ? moment(latestChartInfo['act-from']).format( + 'YYYY-MM-DD HH:mm:ss', + ) + : ''; + const validTo = latestChartInfo['act-to'] + ? moment(latestChartInfo['act-to']).format( + 'YYYY-MM-DD HH:mm:ss', + ) + : ''; const filterCol = filterNameByLocale(latestChartInfo); const filterDetail = latestChartInfo['name'] || i18n.default; return { - threshHigh, threshLow, prcMax, prcMin, validFrom, validTo, filterCol, filterDetail - } + threshHigh, + threshLow, + prcMax, + prcMin, + validFrom, + validTo, + filterCol, + filterDetail, + }; }; const currentThreshold = getThresholdInfor(dataPoint, canvasId); const getDatTimeObj = (dataPoint) => { const currentTraceData = graphStore.getTraceData(); - const datetimeCol = currentTraceData.common_info[procId].datetime_col; + const datetimeCol = + currentTraceData.common_info[procId].datetime_col; const canvasId = dataPoint.chart.canvas.id; const dataIndex = $(`#${canvasId}`).attr('plotdata-index') || 0; - const x = currentTraceData.array_plotdata[dataIndex].array_x[dataPoint.dataIndex] + const x = + currentTraceData.array_plotdata[dataIndex].array_x[ + dataPoint.dataIndex + ]; if (isEmpty(x)) { - return {name: '', value: ''}; + return { name: '', value: '' }; } const xLabel = formatDateTime(x); // convert to localtime - return {name: datetimeCol, value: xLabel}; + return { name: datetimeCol, value: xLabel }; }; const getSerialObj = (dataPoint) => { const currentTraceData = graphStore.getTraceData(); - const serialColsName = currentTraceData.common_info[procId].serial_columns || []; + const serialColsName = + currentTraceData.common_info[procId].serial_columns || []; if (isEmpty(serialColsName)) { - return [{name: '', value: ''}]; + return [{ name: '', value: '' }]; } const dataIndex = dataPoint.dataIndex; @@ -332,18 +373,19 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const canvasId = graphStore.getSelectedCanvas(); const canvasIdx = parseInt(canvasId.substring(5, 8)) - 1; // canvasId = 'chart03' -> canvasIdx = 2 if (currentTraceData.is_thin_data) { - serials = currentTraceData.array_plotdata[canvasIdx].serial_data; + serials = + currentTraceData.array_plotdata[canvasIdx].serial_data; } else { serials = currentTraceData.serial_data[procId]; } if (isEmpty(serials)) { - return [{name: '', value: ''}]; + return 
[{ name: '', value: '' }]; } let pointSerials = serials[dataIndex]; if (isEmpty(pointSerials)) { - return [{name: '', value: ''}]; + return [{ name: '', value: '' }]; } - if (typeof (pointSerials) !== 'object') { + if (typeof pointSerials !== 'object') { pointSerials = [pointSerials]; } const serialVal = pointSerials.map((v, i) => { @@ -352,7 +394,7 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM value: v || '', }; }); - serialVal.push({name: '', value: ''}) + serialVal.push({ name: '', value: '' }); return serialVal; }; @@ -362,7 +404,12 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const genDataTable = (dataPoint, outlierDict, currentThreshold) => { let thresholdTr = ''; if (plotDataMin.length || plotDataMax.length) { - const [minVal, medVal, maxVal] = getDatYMinMaxVal(dataPoint, plotDataMin, plotDataMax, outlierDict); + const [minVal, medVal, maxVal] = getDatYMinMaxVal( + dataPoint, + plotDataMin, + plotDataMax, + outlierDict, + ); if (isOutlierValue(dataPoint.dataIndex, outlierDict)) { thresholdTr += genTRItems(i18n.outlierVal, medVal); } else { @@ -373,17 +420,29 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM // show from, to N of slot if (slotFrom && slotFrom[dataPoint.dataIndex]) { - thresholdTr += genTRItems('From', formatDateTime(slotFrom[dataPoint.dataIndex])) + thresholdTr += genTRItems( + 'From', + formatDateTime(slotFrom[dataPoint.dataIndex]), + ); } if (slotTo && slotTo[dataPoint.dataIndex]) { - thresholdTr += genTRItems('To', formatDateTime(slotTo[dataPoint.dataIndex])) + thresholdTr += genTRItems( + 'To', + formatDateTime(slotTo[dataPoint.dataIndex]), + ); } if (slotCount && slotCount[dataPoint.dataIndex]) { - thresholdTr += genTRItems('N', applySignificantDigit(slotCount[dataPoint.dataIndex])) + thresholdTr += genTRItems( + 'N', + applySignificantDigit(slotCount[dataPoint.dataIndex]), + ); } - } else { - const yVal = getDatYVal(dataPoint, beforeRankValues, outlierDict); + const yVal = getDatYVal( + dataPoint, + beforeRankValues, + outlierDict, + ); if (isOutlierValue(dataPoint.dataIndex, outlierDict)) { thresholdTr += genTRItems(i18n.outlierVal, yVal); } else { @@ -393,32 +452,45 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const datetimeVals = getDatTimeObj(dataPoint); const serialVals = getSerialObj(dataPoint); thresholdTr += genTRItems(i18n.dateTime, datetimeVals.value); - serialVals.forEach(serialVal => { + serialVals.forEach((serialVal) => { if (serialVal.value) { thresholdTr += genTRItems(i18n.serial, serialVal.value); } }); } - thresholdTr += genTRItems('', ''); // br // filter - thresholdTr += genTRItems(i18n.attribute, - currentThreshold.filterCol, currentThreshold.filterDetail); + thresholdTr += genTRItems( + i18n.attribute, + currentThreshold.filterCol, + currentThreshold.filterDetail, + ); thresholdTr += genTRItems('', ''); // br // threshold table thresholdTr += genTRItems('', i18n.limit, i18n.procLimit); - thresholdTr += genTRItems(i18n.threshHigh, currentThreshold.threshHigh, currentThreshold.prcMax); - thresholdTr += genTRItems(i18n.threshLow, currentThreshold.threshLow, currentThreshold.prcMin); + thresholdTr += genTRItems( + i18n.threshHigh, + currentThreshold.threshHigh, + currentThreshold.prcMax, + ); + thresholdTr += genTRItems( + i18n.threshLow, + currentThreshold.threshLow, + currentThreshold.prcMin, + ); thresholdTr += genTRItems('', ''); // apply time - thresholdTr += genTRItems(i18n.validFrom, 
currentThreshold.validFrom); + thresholdTr += genTRItems( + i18n.validFrom, + currentThreshold.validFrom, + ); thresholdTr += genTRItems(i18n.validTo, currentThreshold.validTo); return thresholdTr; - } + }; - const {offsetLeft: positionX, offsetTop: positionY} = chart.canvas; + const { offsetLeft: positionX, offsetTop: positionY } = chart.canvas; // Display, position, and set styles for font tooltipEl.style.opacity = 0; @@ -428,29 +500,45 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const topPosition = canvasOffset.top + positionY + tooltip.caretY; genDataPointHoverTable( genDataTable(dataPoint, outlierDict, currentThreshold), - {x: leftPosition - 192, y: topPosition}, + { x: leftPosition - 192, y: topPosition }, 0, true, chart.canvas.id, ); }; - let y_fmt = '' + let y_fmt = ''; const ctx = $(`#${canvasId}`).get(0).getContext('2d'); const fixTick = 8; const config = { - type: 'line', data: {}, options: { - responsive: true, maintainAspectRatio: false, animation: false, spanGaps: true, normalized: true, plugins: { - decimation, annotation: { + type: 'line', + data: {}, + options: { + responsive: true, + maintainAspectRatio: false, + animation: false, + spanGaps: true, + normalized: true, + plugins: { + decimation, + annotation: { annotations: {}, - }, legend: { + }, + legend: { display: false, - }, tooltip: { - enabled: false, position: 'nearest', external: externalTooltipHandler - }, chartAreaBorder: { - borderColor: CONST.COLOR_FRAME_BORDER, borderWidth: 2, borderDash: false, }, - }, scales: { + tooltip: { + enabled: false, + position: 'nearest', + external: externalTooltipHandler, + }, + chartAreaBorder: { + borderColor: CONST.COLOR_FRAME_BORDER, + borderWidth: 2, + borderDash: false, + }, + }, + scales: { x: { parsing: false, afterBuildTicks: function (scale) { @@ -465,17 +553,17 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM let step = Math.floor(xLabels.length / fixTick); - const idxs = [] + const idxs = []; let lastTick; for (let i = 0; i <= fixTick; i++) { - lastTick = i * step - idxs.push(lastTick) + lastTick = i * step; + idxs.push(lastTick); } if (lastTick < xLabels.length - 1) { if (lastTick > xLabels.length - step) { idxs[idxs.length - 1] = xLabels.length - 1; } else { - idxs.push(xLabels.length - 1) + idxs.push(xLabels.length - 1); } } @@ -487,11 +575,15 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM }); scale.ticks = ticks; }, - beforeTickToLabelConversion: function(scale) { + beforeTickToLabelConversion: function (scale) { if (xaxis === 'INDEX') { return; } - scale._unit = getUnitDateTimeFormat(scale.min, scale.max, scale.ticks.length); + scale._unit = getUnitDateTimeFormat( + scale.min, + scale.max, + scale.ticks.length, + ); }, ticks: { callback: function (value) { @@ -521,32 +613,59 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM // return '#555'; // }, font: { - family: 'Calibri Light', size: 12, - }, maxRotation: tickConfig.rotation, minRotation: tickConfig.rotation, // sampleSize: 8, - color: CONST.TICK, align: 'center', //autoSkip: false, + family: 'Calibri Light', + size: 12, + }, + maxRotation: tickConfig.rotation, + minRotation: tickConfig.rotation, // sampleSize: 8, + color: CONST.TICK, + align: 'center', //autoSkip: false, // maxtickslimit: 10, - }, grid: { - color: CONST.GRID, drawTicks: false, drawBorder: false, + }, + grid: { + color: CONST.GRID, + drawTicks: false, + drawBorder: false, }, // 
afterTickToLabelConversion: function adjust(context) { // for (const idx in context.ticks) { // context.ticks[idx].label = (context.ticks[idx].label || '').padStart(tickConfig.pad, ' '); // } // }, - }, y: { - display: true, min: minY, max: maxY, font: { - family: 'Calibri Light', size: 12, - }, afterBuildTicks: function (axis) { + }, + y: { + display: true, + min: minY, + max: maxY, + font: { + family: 'Calibri Light', + size: 12, + }, + afterBuildTicks: function (axis) { if (beforeRankValues) { let ticks = []; for (const key of Object.keys(beforeRankValues)) { - ticks.push({value: key}); + ticks.push({ value: key }); } axis.ticks = []; if (ticks) { - ticks = [{value: String(Number(ticks[0].value) - 0.5)}].concat(ticks); - ticks = ticks.concat([{value: String(Number(ticks[ticks.length - 1].value) + 0.5)}]) - axis.ticks = ticks + ticks = [ + { + value: String( + Number(ticks[0].value) - 0.5, + ), + }, + ].concat(ticks); + ticks = ticks.concat([ + { + value: String( + Number( + ticks[ticks.length - 1].value, + ) + 0.5, + ), + }, + ]); + axis.ticks = ticks; } // limit 8 ticks @@ -556,19 +675,19 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM } let step = Math.floor(yabels.length / fixTick); - step = Math.max(2, step) + step = Math.max(2, step); - const idxs = [] + const idxs = []; let lastTick; for (let i = 0; i <= fixTick; i++) { - lastTick = i * step - idxs.push(lastTick) + lastTick = i * step; + idxs.push(lastTick); } if (lastTick < yabels.length - 1) { if (lastTick > yabels.length - step) { idxs[idxs.length - 1] = yabels.length - 1; } else { - idxs.push(yabels.length - 1) + idxs.push(yabels.length - 1); } } @@ -581,24 +700,30 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM axis.ticks = yticks; // end limit 8 ticks } else { - const ticks = axis.ticks.map(tick => tick.value); + const ticks = axis.ticks.map((tick) => tick.value); y_fmt = getFmtValueOfArray(ticks); } return; - }, afterTickToLabelConversion: function adjust(context) { + }, + afterTickToLabelConversion: function adjust(context) { const ticks = context.ticks; context.ticks[0].label = ''; - if (ticks.length) context.ticks[ticks.length - 1].label = ''; + if (ticks.length) + context.ticks[ticks.length - 1].label = ''; alignLengthTickLabels(context.ticks); - }, afterFit: function (scaleInstance) { + }, + afterFit: function (scaleInstance) { scaleInstance.width = 60; // sets the width to 100px - }, ticks: { + }, + ticks: { labelOffset: beforeRankValues ? -10 : 0, mirror: !!beforeRankValues, padding: beforeRankValues ? -37 : 5, maxRotation: 0, minRotation: 0, - sampleSize: beforeRankValues ? Object.keys(beforeRankValues).length : 8, + sampleSize: beforeRankValues + ? 
Object.keys(beforeRankValues).length + : 8, color: CONST.TICK, maxTicksLimit: 9, // count: 9, // show max 8 tick labels @@ -607,15 +732,23 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM if (isCatLimited) return ''; // String Ranked label - let showVal = applySignificantDigit(value, undefined, y_fmt); + let showVal = applySignificantDigit( + value, + undefined, + y_fmt, + ); if (beforeRankValues) { - showVal = (`0${Number(showVal)}`).slice(-2); + showVal = `0${Number(showVal)}`.slice(-2); showVal = showVal.padEnd(5); const onlyVal = beforeRankValues[value]; - const isNeedToAddDotSymbol = String(onlyVal).length > CAT_LABEL_LIMIT; + const isNeedToAddDotSymbol = + String(onlyVal).length > CAT_LABEL_LIMIT; if (onlyVal !== undefined) { showVal = `Cat${showVal}`; - showVal += String(onlyVal).substring(0, CAT_LABEL_LIMIT); + showVal += String(onlyVal).substring( + 0, + CAT_LABEL_LIMIT, + ); if (isNeedToAddDotSymbol) { showVal += '...'; } @@ -627,13 +760,22 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM } } return showVal; - } - }, grid: { - color: CONST.GRID, drawTicks: false, drawBorder: false, - } + }, + }, + grid: { + color: CONST.GRID, + drawTicks: false, + drawBorder: false, + }, }, - }, onHover(evt, a, chart) { - const item = chart.getElementsAtEventForMode(evt, 'nearest', {intersect: true}, false); // const lastItem = chart.getActiveElements(); + }, + onHover(evt, a, chart) { + const item = chart.getElementsAtEventForMode( + evt, + 'nearest', + { intersect: true }, + false, + ); // const lastItem = chart.getActiveElements(); if (item.length) { // save hovered data index @@ -641,12 +783,13 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const hoveredIndex = item[0].index; const datasetId = item[0].datasetIndex; graphStore.saveHoveredDataPoint(chartCanvasId, { - index: hoveredIndex, datasetIndex: datasetId, + index: hoveredIndex, + datasetIndex: datasetId, }); graphStore.setSelectedCanvas(chartCanvasId); - } - }, elements: { + }, + elements: { point: { pointStyle(ctx) { if (ctx.datasetIndex === CONST.NORMAL_DATASET) { @@ -656,7 +799,8 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM }, }, }, - }, plugins: [chartAreaBorder], + }, + plugins: [chartAreaBorder], }; let xTicks = null; @@ -683,28 +827,41 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const yLabels = isCatLimited ? [] : xLabels; const chartData = isCatLimited ? [] : plotData; - const pointSize = plotData.length <= CONST.SMALL_DATA_SIZE ? 2.5 : plotData.length < 1000 ? 1.5 : 1; + const pointSize = + plotData.length <= CONST.SMALL_DATA_SIZE + ? 2.5 + : plotData.length < 1000 + ? 1.5 + : 1; config.data = { - labels: yLabels, datasets: [{ - label: 'Dataset 1', - data: chartData, - backgroundColor: pointColor, // ts chart dot color - borderColor: pointColor, // link between dot color - borderWidth: 0.5, - showLine: beforeRankValues ? 
false : !(isThinData || plotData.length >= 1000), - pointRadius: pointSize, - order: 0, - pointBackgroundColor: new Array(plotData.length).fill(pointColor), // stepped: !!beforeRankValues, - dictIdx2YValue, - }, { - label: 'Dataset 2', - data: plotDataEx, - pointBackgroundColor: plotDataExColor, - type: 'line', - order: 1, - showLine: false, - },], + labels: yLabels, + datasets: [ + { + label: 'Dataset 1', + data: chartData, + backgroundColor: pointColor, // ts chart dot color + borderColor: pointColor, // link between dot color + borderWidth: 0.5, + showLine: beforeRankValues + ? false + : !(isThinData || plotData.length >= 1000), + pointRadius: pointSize, + order: 0, + pointBackgroundColor: new Array(plotData.length).fill( + pointColor, + ), // stepped: !!beforeRankValues, + dictIdx2YValue, + }, + { + label: 'Dataset 2', + data: plotDataEx, + pointBackgroundColor: plotDataExColor, + type: 'line', + order: 1, + showLine: false, + }, + ], }; if (!isEmpty(minX)) { @@ -730,9 +887,14 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM const prcMin = chartInfo['prc-min']; const startDateTime = convertFunc(startPoint); const endDateTime = convertFunc(endPoint); - let actFrom = isEmpty(chartInfo['act-from']) ? startDateTime : convertFunc(chartInfo['act-from']); - let actTo = isEmpty(chartInfo['act-to']) ? endDateTime : convertFunc(chartInfo['act-to']); - if (endDateTime < actFrom || actTo < startDateTime) { // out of range + let actFrom = isEmpty(chartInfo['act-from']) + ? startDateTime + : convertFunc(chartInfo['act-from']); + let actTo = isEmpty(chartInfo['act-to']) + ? endDateTime + : convertFunc(chartInfo['act-to']); + if (endDateTime < actFrom || actTo < startDateTime) { + // out of range continue; } if (actFrom < startDateTime) { @@ -742,19 +904,23 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM actTo = endDateTime; } if (!isEmpty(threshHigh)) { - config.options.plugins.annotation.annotations[`ucl-${idx}`] = createTSThreshold(threshHigh, CONST.RED, actFrom, actTo); + config.options.plugins.annotation.annotations[`ucl-${idx}`] = + createTSThreshold(threshHigh, CONST.RED, actFrom, actTo); } if (!isEmpty(threshLow)) { - config.options.plugins.annotation.annotations[`lcl-${idx}`] = createTSThreshold(threshLow, CONST.RED, actFrom, actTo); + config.options.plugins.annotation.annotations[`lcl-${idx}`] = + createTSThreshold(threshLow, CONST.RED, actFrom, actTo); } if (!isEmpty(prcMin)) { - config.options.plugins.annotation.annotations[`lpcl-${idx}`] = createTSThreshold(prcMin, CONST.BLUE, actFrom, actTo); + config.options.plugins.annotation.annotations[`lpcl-${idx}`] = + createTSThreshold(prcMin, CONST.BLUE, actFrom, actTo); } if (!isEmpty(prcMax)) { - config.options.plugins.annotation.annotations[`upcl-${idx}`] = createTSThreshold(prcMax, CONST.BLUE, actFrom, actTo); + config.options.plugins.annotation.annotations[`upcl-${idx}`] = + createTSThreshold(prcMax, CONST.BLUE, actFrom, actTo); } } @@ -782,18 +948,27 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM backgroundColor: CONST.COLOR_ERROR_BAR, borderColor: CONST.COLOR_ERROR_BAR, borderWidth: 0.2, - } + }; } } - const stepLineAnnotation = genStepLineAnnotation(plotData, xLabels, pointColor); + const stepLineAnnotation = genStepLineAnnotation( + plotData, + xLabels, + pointColor, + ); if (!isCatLimited && !_.isEmpty(stepLineAnnotation)) { - config.options.plugins.annotation.annotations = 
Object.assign(config.options.plugins.annotation.annotations, stepLineAnnotation); + config.options.plugins.annotation.annotations = Object.assign( + config.options.plugins.annotation.annotations, + stepLineAnnotation, + ); } if (isCatLimited) { config.options.plugins.annotation.annotations['catLimited'] = { - type: 'label', content: [...i18n.catLimitMsg], color: '#65c5f1' + type: 'label', + content: [...i18n.catLimitMsg], + color: '#65c5f1', }; } @@ -806,7 +981,8 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM canvas.addEventListener('contextmenu', rightClickHandler, false); canvas.addEventListener('mousedown', handleMouseDown, false); - function handleMouseDown() { // later, not just mouse down, + mouseout of menu + function handleMouseDown() { + // later, not just mouse down, + mouseout of menu hideFPPContextMenu(); } @@ -824,7 +1000,9 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM top -= menuHeight; } menu.css({ - left: `${left}px`, top: `${top}px`, display: 'block', + left: `${left}px`, + top: `${top}px`, + display: 'block', }); // save selected canvas @@ -836,7 +1014,13 @@ function YasuTsChart($, paramObj, chartLabels = null, tabID = null, xaxis = 'TIM return chart; } -const createTSThreshold = (thresholdVal, color = CONST.BLUE, startPoint = null, endPoint = null, borderDash = []) => ({ +const createTSThreshold = ( + thresholdVal, + color = CONST.BLUE, + startPoint = null, + endPoint = null, + borderDash = [], +) => ({ type: 'box', scaleID: 'y', xMin: startPoint, @@ -856,12 +1040,22 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { // not time value in TSP, this time is time of end proc, not time of start proc const plotData = currentTraceData.array_plotdata[dataIndex]; const filterCond = plotData.catExpBox - ? (Array.isArray(plotData.catExpBox) - ? plotData.catExpBox : [plotData.catExpBox]) + ? Array.isArray(plotData.catExpBox) + ? 
plotData.catExpBox + : [plotData.catExpBox] : null; - const [chartInfos, chartInfosOrg] = getChartInfo(plotData, 'TIME', filterCond); - const clickedVal = currentTraceData.array_plotdata[dataIndex].array_x[clickedIdx]; - const [latestChartInfo, latestIndex] = chooseLatestThresholds(chartInfos, chartInfosOrg, clickedVal); + const [chartInfos, chartInfosOrg] = getChartInfo( + plotData, + 'TIME', + filterCond, + ); + const clickedVal = + currentTraceData.array_plotdata[dataIndex].array_x[clickedIdx]; + const [latestChartInfo, latestIndex] = chooseLatestThresholds( + chartInfos, + chartInfosOrg, + clickedVal, + ); // from latest chartInfo, update histogram, update scatter plot, update summary const threshHigh = latestChartInfo['thresh-high']; const threshLow = latestChartInfo['thresh-low']; @@ -873,7 +1067,9 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { const prcMin = latestChartInfo['prc-min']; // update thresholds - const sameRowCanvases = $(`#${canvasId}`).closest('div .chart-row').find('canvas'); + const sameRowCanvases = $(`#${canvasId}`) + .closest('div .chart-row') + .find('canvas'); sameRowCanvases.each(function f() { const canvasId = $(this).attr('id'); const chartType = $(this).attr('chart-type'); @@ -884,20 +1080,40 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { removeThresholdsOfChart(scatterChartObject, CONST.HORIZONTAL); if (threshHigh !== null) { - scatterChartObject.options.plugins.annotation.annotations.ucl = createHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); + scatterChartObject.options.plugins.annotation.annotations.ucl = + createHorizonalThreshold( + threshHigh, + CONST.RED, + CONST.UCL, + ); } if (threshLow !== null) { - scatterChartObject.options.plugins.annotation.annotations.lcl = createHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); + scatterChartObject.options.plugins.annotation.annotations.lcl = + createHorizonalThreshold( + threshLow, + CONST.RED, + CONST.LCL, + ); } if (prcMax !== null) { - scatterChartObject.options.plugins.annotation.annotations.upcl = createHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); + scatterChartObject.options.plugins.annotation.annotations.upcl = + createHorizonalThreshold( + prcMax, + CONST.BLUE, + CONST.UPCL, + ); } if (prcMin !== null) { - scatterChartObject.options.plugins.annotation.annotations.lpcl = createHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); + scatterChartObject.options.plugins.annotation.annotations.lpcl = + createHorizonalThreshold( + prcMin, + CONST.BLUE, + CONST.LPCL, + ); } - scatterChartObject.update(mode = 'none'); + scatterChartObject.update('none'); // update next scatter plot thresholds const scatterCanvasIds = []; @@ -906,9 +1122,10 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { }); const numOfCarts = scatterCanvasIds.length; const currentRowIdx = scatterCanvasIds.indexOf(canvasId); - const prevRowPos = (currentRowIdx + 1 - 1) || numOfCarts; + const prevRowPos = currentRowIdx + 1 - 1 || numOfCarts; const prevScatterCanvasId = scatterCanvasIds[prevRowPos - 1]; - const prevScatterChartObject = graphStore.getScatterById(prevScatterCanvasId); + const prevScatterChartObject = + graphStore.getScatterById(prevScatterCanvasId); if (!prevScatterChartObject) { return; } @@ -916,23 +1133,41 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { removeThresholdsOfChart(prevScatterChartObject, CONST.VERTICAL); if (threshHigh !== null) { - prevScatterChartObject.options.plugins.annotation.annotations.vucl = 
createVerticalThreshold(threshHigh, CONST.RED, CONST.vUCL); + prevScatterChartObject.options.plugins.annotation.annotations.vucl = + createVerticalThreshold( + threshHigh, + CONST.RED, + CONST.vUCL, + ); } if (threshLow !== null) { - prevScatterChartObject.options.plugins.annotation.annotations.vlcl = createVerticalThreshold(threshLow, CONST.RED, CONST.vLCL); + prevScatterChartObject.options.plugins.annotation.annotations.vlcl = + createVerticalThreshold( + threshLow, + CONST.RED, + CONST.vLCL, + ); } if (prcMax !== null) { - prevScatterChartObject.options.plugins.annotation.annotations.vupcl = createVerticalThreshold(prcMax, CONST.BLUE, CONST.vUPCL); + prevScatterChartObject.options.plugins.annotation.annotations.vupcl = + createVerticalThreshold( + prcMax, + CONST.BLUE, + CONST.vUPCL, + ); } if (prcMin !== null) { - prevScatterChartObject.options.plugins.annotation.annotations.vlpcl = createVerticalThreshold(prcMin, CONST.BLUE, CONST.vLPCL); + prevScatterChartObject.options.plugins.annotation.annotations.vlpcl = + createVerticalThreshold( + prcMin, + CONST.BLUE, + CONST.vLPCL, + ); } - prevScatterChartObject.update(mode = 'none'); + prevScatterChartObject.update('none'); } - - } // update histogram of the same row if (chartType === 'histogram') { @@ -940,26 +1175,47 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { if (histChartObject) { removeThresholdsOfChart(histChartObject); if (threshHigh !== null) { - histChartObject.options.plugins.annotation.annotations.ucl = createHorizonalThreshold(threshHigh, CONST.RED, CONST.UCL); + histChartObject.options.plugins.annotation.annotations.ucl = + createHorizonalThreshold( + threshHigh, + CONST.RED, + CONST.UCL, + ); } if (threshLow !== null) { - histChartObject.options.plugins.annotation.annotations.lcl = createHorizonalThreshold(threshLow, CONST.RED, CONST.LCL); + histChartObject.options.plugins.annotation.annotations.lcl = + createHorizonalThreshold( + threshLow, + CONST.RED, + CONST.LCL, + ); } if (prcMax !== null) { - histChartObject.options.plugins.annotation.annotations.upcl = createHorizonalThreshold(prcMax, CONST.BLUE, CONST.UPCL); + histChartObject.options.plugins.annotation.annotations.upcl = + createHorizonalThreshold( + prcMax, + CONST.BLUE, + CONST.UPCL, + ); } if (prcMin !== null) { - histChartObject.options.plugins.annotation.annotations.lpcl = createHorizonalThreshold(prcMin, CONST.BLUE, CONST.LPCL); + histChartObject.options.plugins.annotation.annotations.lpcl = + createHorizonalThreshold( + prcMin, + CONST.BLUE, + CONST.LPCL, + ); } - histChartObject.update(mode = 'none'); + histChartObject.update('none'); } - } }); // update to show summary corresponding to the click point - const allSummaries = $(`#${canvasId}`).closest('div .chart-row').find('.summary'); + const allSummaries = $(`#${canvasId}`) + .closest('div .chart-row') + .find('.summary'); allSummaries.each(function showHideSummary() { if ($(this).hasClass(`summary-${latestIndex}`)) { $(this).css('display', 'block'); @@ -972,7 +1228,6 @@ const updateThresholdsOnClick = (canvasId, clickedIdx) => { if (!plotData.before_rank_values) { drawSensorWhisker(dataIndex, plotData, clickedVal, cfgYMin, cfgYMax); } - }; function timeSeriesOnClick(chart, event) { @@ -980,8 +1235,14 @@ function timeSeriesOnClick(chart, event) { showVerticalLineOnClick(event); // get clicked position from event - const eventElement = chart.getElementsAtEventForMode(event, 'nearest', {intersect: true}, false); - if (!eventElement || eventElement.length < 1) { // click 
outside datapoint + const eventElement = chart.getElementsAtEventForMode( + event, + 'nearest', + { intersect: true }, + false, + ); + if (!eventElement || eventElement.length < 1) { + // click outside datapoint removeAllCrossHair(true, true, true); return; } @@ -1002,14 +1263,21 @@ function timeSeriesOnClick(chart, event) { // on DOUBLE CLICK: draw all crosshair line on double click setTimeout(() => { if (curCnt === avoidMultiClickCntTS) { - if (numDataPoints < 100000) drawCrossHairOnDoubleClick(clickPosition, canvasId); + if (numDataPoints < 100000) + drawCrossHairOnDoubleClick(clickPosition, canvasId); } }, 200); } else { setTimeout(() => { if (curCnt === avoidMultiClickCntTS) { // on SINGLE CLICK - if (numDataPoints < 100000) drawCrosshairSingleClick(clickPosition, xValue, yValue, canvasId); + if (numDataPoints < 100000) + drawCrosshairSingleClick( + clickPosition, + xValue, + yValue, + canvasId, + ); // from xValue -> find latest chartInfo updateThresholdsAllTSP(clickPosition - 1); @@ -1033,7 +1301,7 @@ const hideGraphScaleMenu = () => { const showGraphScaleMenu = (menuItem) => { const optionElement = $(menuItem); const offset = optionElement.offset(); - const menuTS = formElements.menuTS; // TODO + const menuTS = formElements.menuTS; // TODO const offsetTS = menuTS.offset(); const width = menuTS.width(); const menuGraphScale = formElements.menuScale; @@ -1045,14 +1313,16 @@ const showGraphScaleMenu = (menuItem) => { top -= menuHeight - optionElement.outerHeight(); } menuGraphScale.css({ - left: `${offset.left + width}px`, top: `${top}px`, display: 'block', - }) -} + left: `${offset.left + width}px`, + top: `${top}px`, + display: 'block', + }); +}; let menuItemOffset = {}; const saveOffset = (menuItem = null) => { menuItemOffset = $(menuItem).offset(); -} +}; const handleSelectTSMenuItem = (selectedItem = 'click', menuItem = null) => { if (selectedItem === 'graphScale') { @@ -1066,9 +1336,9 @@ const handleSelectTSMenuItem = (selectedItem = 'click', menuItem = null) => { display: 'block', }); filterInfoTable.offset({ - top: menuItemOffset.top, // TODO fix case clicking at bottom + top: menuItemOffset.top, // TODO fix case clicking at bottom left: menuItemOffset.left, - }) + }); } const selectedCanvasId = graphStore.getSelectedCanvas(); @@ -1078,19 +1348,26 @@ const handleSelectTSMenuItem = (selectedItem = 'click', menuItem = null) => { } const tsChartObject = graphStore.getTimeSeriesById(selectedCanvasId); - const lastHoveredDataPoint = graphStore.getLastHoveredDataPoint(selectedCanvasId); + const lastHoveredDataPoint = + graphStore.getLastHoveredDataPoint(selectedCanvasId); if (!lastHoveredDataPoint) { hideFPPContextMenu(); return; } - const {index: clickPosition, datasetIndex} = lastHoveredDataPoint; - const yValue = tsChartObject.data.datasets[datasetIndex].data[clickPosition]; + const { index: clickPosition, datasetIndex } = lastHoveredDataPoint; + const yValue = + tsChartObject.data.datasets[datasetIndex].data[clickPosition]; const xValue = tsChartObject.data.labels[clickPosition]; switch (selectedItem) { case 'click': { - drawCrosshairSingleClick(clickPosition, xValue, yValue, selectedCanvasId); + drawCrosshairSingleClick( + clickPosition, + xValue, + yValue, + selectedCanvasId, + ); break; } case 'doubleClick': { @@ -1102,12 +1379,15 @@ const handleSelectTSMenuItem = (selectedItem = 'click', menuItem = null) => { const isThinData = currentTraceData.is_thin_data; let cycleId; if (isThinData) { - cycleId = graphStore.getArrayPlotData(selectedCanvasId).cycle_ids[clickPosition]; + 
cycleId = + graphStore.getArrayPlotData(selectedCanvasId).cycle_ids[ + clickPosition + ]; } else { cycleId = currentTraceData.cycle_ids[clickPosition]; } // const xTime = graphStore.getArrayPlotData(selectedCanvasId).array_x[clickPosition]; - let xTime =currentTraceData.times[clickPosition]; + let xTime = currentTraceData.times[clickPosition]; const formDat = lastUsedFormData || null; let queryString = genQueryStringFromFormData(formDat); @@ -1139,7 +1419,11 @@ const updateGraphScale = (scaleOption = '1') => { const endProcId = formVal.end_proc; const sensorId = formVal.GET02_VALS_SELECT; - const isHideNonePoint = isHideNoneDataPoint(endProcId, sensorId, currentTraceData.COMMON.remove_outlier); + const isHideNonePoint = isHideNoneDataPoint( + endProcId, + sensorId, + currentTraceData.COMMON.remove_outlier, + ); const beforeRankValues = plotData.before_rank_values; if (beforeRankValues) { continue; @@ -1154,21 +1438,55 @@ const updateGraphScale = (scaleOption = '1') => { const scaleInfo = getScaleInfo(plotData, scaleOption); - const [minY, maxY] = calMinMaxYScale(scaleInfo['y-min'], scaleInfo['y-max'], scaleOption); + const [minY, maxY] = calMinMaxYScale( + scaleInfo['y-min'], + scaleInfo['y-max'], + scaleOption, + ); const outlierIdxs = scaleInfo.upper_outlier_idxs; const negOutlierIdxs = scaleInfo.lower_outlier_idxs; const kdeDat = scaleInfo.kde_data; - const [dictIdx2YValue, arrayYTS] = buildMapIndex2OutlierYValue(plotData, scaleInfo); - const { - arrayYEx, plotDataExColor - } = produceExceptionArrayY(arrayY, minY, maxY, unlinkedIdxs, noneIdxs, infIdxs, negInfIdxs, negOutlierIdxs, outlierIdxs); + const [dictIdx2YValue, arrayYTS] = buildMapIndex2OutlierYValue( + plotData, + scaleInfo, + ); + const { arrayYEx, plotDataExColor } = produceExceptionArrayY( + arrayY, + minY, + maxY, + unlinkedIdxs, + noneIdxs, + infIdxs, + negInfIdxs, + negOutlierIdxs, + outlierIdxs, + beforeRankValues, + ); - updateScales(i + 1, minY, maxY, kdeDat, arrayYEx, plotDataExColor, arrayYTS, dictIdx2YValue); + updateScales( + i + 1, + minY, + maxY, + kdeDat, + arrayYEx, + plotDataExColor, + arrayYTS, + dictIdx2YValue, + ); } }; -const updateScales = (row, newMin, newMax, kdeDat, arrayYEx, plotDataExColor, arrayYTS, dictIdx2YValue) => { +const updateScales = ( + row, + newMin, + newMax, + kdeDat, + arrayYEx, + plotDataExColor, + arrayYTS, + dictIdx2YValue, +) => { const createCanvasId = (graphType, row) => graphType + `${row}`; const updateMinMaxY = (chartObject, minY, maxY, kdeDat = null) => { if (isEmpty(chartObject)) return; @@ -1181,11 +1499,19 @@ const updateScales = (row, newMin, newMax, kdeDat, arrayYEx, plotDataExColor, ar chartObject.data.datasets[1].data = kdeObj.transKDE; } chartObject.update(); - } - const tsChartObject = graphStore.getTimeSeriesById(createCanvasId('chart0', row)); - const histChartObject = graphStore.getHistById(createCanvasId('hist0', row)); - const whiskerChartObject = graphStore.getWhiskerById(createCanvasId('whisker0', row)); - const scatterChartObject = graphStore.getScatterById(createCanvasId('sctr0', row)); + }; + const tsChartObject = graphStore.getTimeSeriesById( + createCanvasId('chart0', row), + ); + const histChartObject = graphStore.getHistById( + createCanvasId('hist0', row), + ); + const whiskerChartObject = graphStore.getWhiskerById( + createCanvasId('whisker0', row), + ); + const scatterChartObject = graphStore.getScatterById( + createCanvasId('sctr0', row), + ); if (tsChartObject) { tsChartObject.data.datasets[0].data = arrayYTS; @@ -1195,30 +1521,32 @@ const 
updateScales = (row, newMin, newMax, kdeDat, arrayYEx, plotDataExColor, ar updateMinMaxY(tsChartObject, newMin, newMax); } - updateMinMaxY(histChartObject, newMin, newMax, kdeDat); // TODO update minX, maxX for histogram + updateMinMaxY(histChartObject, newMin, newMax, kdeDat); // TODO update minX, maxX for histogram updateMinMaxY(whiskerChartObject, newMin, newMax); updateMinMaxY(scatterChartObject, newMin, newMax); -} +}; const handleGraphScaleClick = (option = '1') => { hideFPPContextMenu(); updateGraphScale(option); $('select[name=tsScaleY]').val(option); -} +}; const hideFPPContextMenu = () => { // hideGraphScaleMenu(); - $('.context-menu').css({display: 'none'}); - $('.context-menu-2nd').css({display: 'none'}); + $('.context-menu').css({ display: 'none' }); + $('.context-menu-2nd').css({ display: 'none' }); }; const deleteThisRow = (self, isGraphArea) => { - const tableId = isGraphArea ? formElements.serialTable2 : formElements.serialTable; + const tableId = isGraphArea + ? formElements.serialTable2 + : formElements.serialTable; $(self).closest('tr').remove(); - + updateCurrentSelectedProcessSerial(name.serial); if (isGraphArea) { - initSelect() + initSelect(); disableUnselectedOption(selectedSerials, name.serial); disableUnselectedOption(selectedProcess, name.process); } else { @@ -1226,7 +1554,13 @@ const deleteThisRow = (self, isGraphArea) => { } }; -const htmlOrderColRowTemplate = (priority, processSelectHTML, serialSelectHTML, orderSelectHTML, isGraphArea) => ` +const htmlOrderColRowTemplate = ( + priority, + processSelectHTML, + serialSelectHTML, + orderSelectHTML, + isGraphArea, +) => ` ${priority} ${processSelectHTML} @@ -1244,11 +1578,11 @@ const htmlOrderColRowTemplate = (priority, processSelectHTML, serialSelectHTML, `; - const buildProcessColumnHTML = (selectedProcId, name = 'serialProcess') => { const procOptions = []; for (const procId in procConfigs) { - const selected = Number(procId) === Number(selectedProcId) ? 'selected' : ''; + const selected = + Number(procId) === Number(selectedProcId) ? 
'selected' : ''; const option = ``; procOptions.push(option); } @@ -1259,21 +1593,27 @@ const buildProcessColumnHTML = (selectedProcId, name = 'serialProcess') => { `; }; -const buildColumnHTML = (serialCols, tableId = formElements.serialTable, name = 'serialColumn', selectedCol = null) => { +const buildColumnHTML = ( + serialCols, + tableId = formElements.serialTable, + name = 'serialColumn', + selectedCol = null, + selectedProcId, +) => { const defaultOption = ''; const optionHTMLs = [defaultOption]; const selectedOrderCols = getSelectedOrderCols(tableId, name); let alreadySet = false; for (const idx in serialCols) { const col = serialCols[idx]; - let optionHTML = ``; // TODO no need, order alphabet + let optionHTML = ``; // TODO no need, order alphabet if (selectedCol) { if (col.id === selectedCol) { - optionHTML = ``; + optionHTML = ``; alreadySet = true; } } else if (!alreadySet && !selectedOrderCols.has(`${col.id}`)) { - optionHTML = ``; + optionHTML = ``; alreadySet = true; } optionHTMLs.push(optionHTML); @@ -1293,11 +1633,20 @@ const buildOrderHTML = (orderName, selectedOrder = null) => { return `` - + `; }; -const createOrderColRowHTML = async (selectedProcId, tableId = formElements.serialTable, processName = 'serialProcess', serialName = 'serialColumn', orderName = 'serialOrder', selectedCol = null, selectedOrder = null, priority = null, isGraphArea = false) => { +const createOrderColRowHTML = async ( + selectedProcId, + tableId = formElements.serialTable, + processName = 'serialProcess', + serialName = 'serialColumn', + orderName = 'serialOrder', + selectedCol = null, + selectedOrder = null, + priority = null, + isGraphArea = false, +) => { const calcPriority = () => $(`${tableId} tbody tr`).length + 1; // get serial @@ -1308,24 +1657,48 @@ const createOrderColRowHTML = async (selectedProcId, tableId = formElements.seri // sort serial & datetime to show first const newSortedCols = orderSeriesCols(columns); for (const col of newSortedCols) { - if (col.is_serial_no || col.is_get_date || CfgProcess_CONST.CATEGORY_TYPES.includes(col.data_type)) { + if ( + col.is_serial_no || + col.is_get_date || + CfgProcess_CONST.CATEGORY_TYPES.includes(col.data_type) + ) { orderCols.push(col); } } - const processSelectHTML = buildProcessColumnHTML(selectedProcId, processName); - const columnSelectHTML = buildColumnHTML(orderCols, tableId, serialName, selectedCol); + const processSelectHTML = buildProcessColumnHTML( + selectedProcId, + processName, + ); + const columnSelectHTML = buildColumnHTML( + orderCols, + tableId, + serialName, + selectedCol, + selectedProcId, + ); const orderSelectHTML = buildOrderHTML(orderName, selectedOrder); - return htmlOrderColRowTemplate(priority || calcPriority(), processSelectHTML, columnSelectHTML, orderSelectHTML, isGraphArea); + return htmlOrderColRowTemplate( + priority || calcPriority(), + processSelectHTML, + columnSelectHTML, + orderSelectHTML, + isGraphArea, + ); }; -const getSelectedOrderCols = (tableId = formElements.serialTable, serialName = 'serialColumn') => { +const getSelectedOrderCols = ( + tableId = formElements.serialTable, + serialName = 'serialColumn', +) => { const numRows = $(`${tableId} tbody tr`).length; if (!numRows) return new Set(); const selectedCols = $(`select[name=${serialName}]`); - return new Set(selectedCols.find(':selected').map(function getVal() { - return $(this).val(); - }),); + return new Set( + selectedCols.find(':selected').map(function getVal() { + return $(this).val(); + }), + ); }; const disableSelectedSerials = 
(selectedSerials, name = 'serialColumn') => { @@ -1347,17 +1720,28 @@ const disableSelectedSerials = (selectedSerials, name = 'serialColumn') => { }); }; -const disableSelectedOption = (tableId = formElements.serialTable, serialName = 'serialColumn') => { +const disableSelectedOption = ( + tableId = formElements.serialTable, + serialName = 'serialColumn', +) => { const selectedSerials = getSelectedOrderCols(tableId, serialName); disableSelectedSerials(selectedSerials, serialName); }; -const updatePriorityAndDisableSelected = (tableId = formElements.serialTable, serialName = 'serialColumn') => { +const updatePriorityAndDisableSelected = ( + tableId = formElements.serialTable, + serialName = 'serialColumn', +) => { updatePriority(tableId); disableSelectedOption(tableId, serialName); }; -const bindChangeProcessEvent = (tableId = formElements.serialTable, processName = 'serialProcess', serialName = 'serialColumn', callback = null) => { +const bindChangeProcessEvent = ( + tableId = formElements.serialTable, + processName = 'serialProcess', + serialName = 'serialColumn', + callback = null, +) => { $(`select[name=${processName}]`).each(function changeProc() { const binded = $(this).data('bind-on-change'); if (!binded) { @@ -1366,16 +1750,20 @@ const bindChangeProcessEvent = (tableId = formElements.serialTable, processName $(this).data('bind-on-change', 1); const selectedProcId = $(this).val(); - const orderColElement = $(this).closest('tr').find(`select[name=${serialName}]`); + const orderColElement = $(this) + .closest('tr') + .find(`select[name=${serialName}]`); if (isEmpty(selectedProcId)) { // empty proc -> empty column orderColElement.empty().select2({ - placeholder: `${i18nCommon.search}...`, allowClear: true, width: 'auto', + placeholder: `${i18nCommon.search}...`, + allowClear: true, + width: 'auto', language: { noResults: function () { - return i18nCommon.notApplicable; - } - } + return i18nCommon.notApplicable; + }, + }, }); return; } @@ -1385,38 +1773,96 @@ const bindChangeProcessEvent = (tableId = formElements.serialTable, processName const columns = procInfo.getColumns(); const selectedVal = orderColElement.val(); - const selectedSerials = getSelectedOrderCols(tableId, serialName); + const selectedSerialCols = getSelectedOrderCols( + tableId, + serialName, + ); let alreadyPickedOrderCol = false; let defaultOrderCol = ''; - const orderCols = [{id: '', text: '---', selected: true}]; + const orderCols = [{ id: '', text: '---', selected: true }]; // sort serial & datetime to show first const newSortedCols = orderSeriesCols(columns); for (const col of newSortedCols) { - if (col.is_serial_no || col.is_get_date || CfgProcess_CONST.CATEGORY_TYPES.includes(col.data_type)) { - const orderObject = {id: col.id, text: col.shown_name, title: col.name_en}; - - const isColSelectedOnSameElement = `${selectedVal}` === `${col.id}`; - const isColSelected = selectedSerials.has(col.id) || selectedSerials.has(`${col.id}`); + if ( + col.is_serial_no || + col.is_get_date || + CfgProcess_CONST.CATEGORY_TYPES.includes(col.data_type) + ) { + const orderObject = { + id: col.id, + text: col.shown_name, + title: col.name_en, + 'data-is-get-date': col.is_get_date, + 'data-is-serial-no': col.is_serial_no, + 'data-selected-proc-id': selectedProcId, + }; + const procData = getProcessColSelected(); + const currentOption = `${selectedProcId}-${col.id}`; + const isColSelectedOnSameElement = + `${selectedVal}` === `${col.id}`; + const isColSelected = + selectedSerialCols.has(col.id) || + 
selectedSerialCols.has(`${col.id}`); if (!isColSelectedOnSameElement && isColSelected) { orderObject.disabled = true; - } else if (!alreadyPickedOrderCol) { - defaultOrderCol = col.id; - alreadyPickedOrderCol = true; + } else if ( + !alreadyPickedOrderCol && + !selectedSerials.has(col.id) + ) { + if ( + (col.is_get_date || col.is_serial_no) && + procData.includes(selectedProcId) + ) { + if ( + !selectedProcessSerial || + !selectedProcessSerial.has(currentOption) + ) { + defaultOrderCol = col.id; + alreadyPickedOrderCol = true; + } + } } orderCols.push(orderObject); } } orderColElement.empty().select2({ - placeholder: `${i18nCommon.search}...`, allowClear: true, width: 'auto', data: orderCols, + placeholder: `${i18nCommon.search}...`, + allowClear: true, + width: 'auto', + data: orderCols, language: { noResults: function () { - return i18nCommon.notApplicable; - } - } + return i18nCommon.notApplicable; + }, + }, }); + orderColElement + .select2() + .find('option') + .each(function () { + const selectElement = $(this); + const optionValue = selectElement.val(); + const orderCol = orderCols.find( + (col) => `${col.id}` === `${optionValue}`, + ); + selectElement + .attr( + 'data-is-get-date', + orderCol['data-is-get-date'], + ) + .attr( + 'data-is-serial-no', + orderCol['data-is-serial-no'], + ) + .attr( + 'data-selected-proc-id', + orderCol['data-selected-proc-id'], + ); + }); + setSelect2Selection(tableId); if (defaultOrderCol) { @@ -1428,27 +1874,100 @@ const bindChangeProcessEvent = (tableId = formElements.serialTable, processName }); }; -const bindChangeOrderColEvent = (tableId = formElements.serialTable, name = 'serialColumn', callback = null) => { - $(`select[name=${name}]`).on('change', () => { +const bindChangeOrderColEvent = ( + tableId = formElements.serialTable, + name = 'serialColumn', + callback = null, +) => { + const serialSelectEl = $(`select[name=${name}]`); + serialSelectEl.on('select2:select', (e) => { + // catch event select option with select2 + updateCurrentSelectedProcessSerial('TermSerialColumn'); + updateSelectedProcessSerial(true, e); + disableUnselectedOption(selectedSerials, 'TermSerialColumn'); + enableOptionSelectedValue(e); if (callback) { callback(); } else { disableSelectedOption(tableId, name); } }); + + serialSelectEl.on('select2:unselecting', (e) => { + // catch event unselect option with select2 + updateCurrentSelectedProcessSerial('TermSerialColumn'); + updateSelectedProcessSerial(false, e); + disableUnselectedOption(selectedSerials, 'TermSerialColumn'); + }); + + serialSelectEl.off('select2:opening'); + serialSelectEl.on('select2:opening', (e) => { + // catch event open drop down with select2 + updateCurrentSelectedProcessSerial('TermSerialColumn'); + disableUnselectedOption(selectedSerials, 'TermSerialColumn'); + enableOptionSelectedValue(e); + }); + const updateSelectedProcessSerial = (isSelect, e) => { + let serialColDataId; + const currentElement = $(e.currentTarget); + const parentElement = currentElement.parents().eq(1); + const serialProcId = parentElement + .find(`select[name="TermSerialProcess"] option:selected`) + .val(); + if (isSelect) { + serialColDataId = e.params.data.id; + selectedProcessSerial.add(`${serialProcId}-${serialColDataId}`); + } else { + serialColDataId = e.params.args.data.id; + selectedProcessSerial.delete(`${serialProcId}-${serialColDataId}`); + } + }; + const enableOptionSelectedValue = (e) => { + $(e.currentTarget).find('option:selected').attr('disabled', false); + }; }; -const showIndexOrderingSetting = async (tableId = 
formElements.serialTable, processName = 'serialProcess', serialName = 'serialColumn', orderName = 'serialOrder') => { +const updateCurrentSelectedProcessSerial = (serialName) => { + const currentProcessSerial = new Set(); + const serialSelects = $(`select[name=${serialName}]`); + serialSelects.each(function () { + const selectElement = $(this); + const parentElement = selectElement.parents().eq(1); + const serialProcId = parentElement + .find(`select[name="TermSerialProcess"] option:selected`) + .val(); + const serialColDataId = selectElement.find(':selected').val(); + if (serialColDataId) { + currentProcessSerial.add(`${serialProcId}-${serialColDataId}`); + } + }); + selectedProcessSerial = currentProcessSerial; +}; + +const showIndexOrderingSetting = async ( + tableId = formElements.serialTable, + processName = 'serialProcess', + serialName = 'serialColumn', + orderName = 'serialOrder', +) => { // get serial for start proc const startProc = getFirstSelectedProc(); // add to modal const serialTableBody = $(`${tableId} tbody`); serialTableBody.empty(); - const serialOrderRowHTML = await createOrderColRowHTML(startProc, tableId, processName, serialName, orderName); + const serialOrderRowHTML = await createOrderColRowHTML( + startProc, + tableId, + processName, + serialName, + orderName, + ); serialTableBody.html(serialOrderRowHTML); // set value of serialColumn is first value - $(`select[name=${serialName}]`).val($(`select[name=${serialName}] option:nth-child(2)`).val()) + $(`select[name=${serialName}]`).val( + $(`select[name=${serialName}] option:nth-child(2)`).val(), + ); setSelect2Selection(); bindChangeProcessEvent(tableId, processName, serialName); @@ -1458,28 +1977,52 @@ const showIndexOrderingSetting = async (tableId = formElements.serialTable, proc bindDragNDrop(serialTableBody, tableId, serialName); }; -const bindDragNDrop = (serialTableBody, tableId = formElements.serialTable, serialName = 'serialColumn') => { +const bindDragNDrop = ( + serialTableBody, + tableId = formElements.serialTable, + serialName = 'serialColumn', +) => { // drag & drop for tables serialTableBody.sortable({ - helper: dragDropRowInTable.fixHelper, update: () => { - updatePriorityAndDisableSelected(tableId, serialName) + helper: dragDropRowInTable.fixHelper, + update: () => { + updatePriorityAndDisableSelected(tableId, serialName); }, }); }; -const addSerialOrderRow = async (tableId = formElements.serialTable, processName = 'serialProcess', serialName = 'serialColumn', orderName = 'serialOrder', selectedProc = null, selectedCol = null, selectedOrder = null, priority = null, isGraphArea = false) => { +const addSerialOrderRow = async ( + tableId = formElements.serialTable, + processName = 'serialProcess', + serialName = 'serialColumn', + orderName = 'serialOrder', + selectedProc = null, + selectedCol = null, + selectedOrder = null, + priority = null, + isGraphArea = false, +) => { if (!tableId) { tableId = formElements.serialTable; } const startProc = selectedProc ? 
selectedProc : getFirstSelectedProc(); const serialTableBody = $(`${tableId} tbody`); - const serialOrderRowHTML = await createOrderColRowHTML(startProc, tableId, processName, serialName, orderName, selectedCol, selectedOrder, priority, isGraphArea); + const serialOrderRowHTML = await createOrderColRowHTML( + startProc, + tableId, + processName, + serialName, + orderName, + selectedCol, + selectedOrder, + priority, + isGraphArea, + ); serialTableBody.append(serialOrderRowHTML); setSelect2Selection(tableId); bindDragNDrop(serialTableBody, tableId, serialName); }; - const showSerialModal = (tableId = formElements.serialTableModal) => { $(tableId).modal('show'); }; @@ -1513,18 +2056,20 @@ const bindXAxisEvents = () => { $('#xOption').data('change-val-only', false); }); - $(formElements.btnAddSerial).off('click').on('click', () => { - addSerialOrderRow().then(() => { - bindChangeProcessEvent(); - updatePriorityAndDisableSelected(); - setTimeout(() => { // wait select2 to be shown - bindChangeOrderColEvent(); - }, 200); + $(formElements.btnAddSerial) + .off('click') + .on('click', () => { + addSerialOrderRow().then(() => { + bindChangeProcessEvent(); + updatePriorityAndDisableSelected(); + setTimeout(() => { + // wait select2 to be shown + bindChangeOrderColEvent(); + }, 200); + }); }); - }); }; - const genStepLineAnnotation = (plotData, xLabels, pointColor) => { const annotations = {}; const startPoints = []; @@ -1533,8 +2078,8 @@ const genStepLineAnnotation = (plotData, xLabels, pointColor) => { for (let i in plotData) { i = Number(i); let val = plotData[i]; - let nextVal = (i < plotData.length - 1) ? plotData[i + 1] : null; - let prevVal = (i > 0) ? plotData[i - 1] : null; + let nextVal = i < plotData.length - 1 ? plotData[i + 1] : null; + let prevVal = i > 0 ? plotData[i - 1] : null; const pointPosition = Number(xLabels[i]) ? 
i : xLabels[i]; // start if prev != val and next == val @@ -1568,10 +2113,27 @@ const genStepLineAnnotation = (plotData, xLabels, pointColor) => { const orderSeriesCols = (columns) => { // sort serial & datetime to show first - const serialCols = columns.filter(col => col.is_serial_no); - const datetimeCols = columns.filter(col => col.is_get_date); - const normalCols = columns.filter(col => !col.is_serial_no && !col.is_get_date); + const serialCols = columns.filter((col) => col.is_serial_no); + const datetimeCols = columns.filter((col) => col.is_get_date); + const normalCols = columns.filter( + (col) => !col.is_serial_no && !col.is_get_date, + ); return [...serialCols, ...datetimeCols, ...normalCols]; }; +const getProcessColSelected = () => { + const formData = collectFormData('#traceDataForm'); + const procData = []; + const regex = /^end_proc\d+$/; + + for (const item of formData.entries()) { + const key = item[0]; + const value = item[1]; + if (regex.test(key)) { + procData.push(value); + } + } + + return procData; +};
diff --git a/ap/static/trace_data/js/trace_data_whisker_plot.js b/ap/static/trace_data/js/trace_data_whisker_plot.js
index 65b19cd..debc7b8 100644
--- a/ap/static/trace_data/js/trace_data_whisker_plot.js
+++ b/ap/static/trace_data/js/trace_data_whisker_plot.js
@@ -218,17 +218,28 @@ const whiskerPlot = (ctx, prop) => {
return chart; }; - const getChartInforIndex = (plotData, clickedVal) => { const chartInfosY = plotData.chart_infos || []; const chartInfosYOrg = plotData.chart_infos_org || []; - const [latestChartInfoY, idx] = chooseLatestThresholds(chartInfosY, chartInfosYOrg, clickedVal); + const [latestChartInfoY, idx] = chooseLatestThresholds( + chartInfosY, + chartInfosYOrg, + clickedVal, + ); return idx; }; -const genProp = (plotData, chartInforIndex = null, cfgYMin = null, cfgYMax = null) => { - const nonParametric = getNode(plotData.summaries[chartInforIndex], ['non_parametric']) || {}; - const stats = getNode(plotData.summaries[chartInforIndex], ['basic_statistics']) || {}; +const genProp = ( + plotData, + chartInforIndex = null, + cfgYMin = null, + cfgYMax = null, +) => { + const nonParametric = + getNode(plotData.summaries[chartInforIndex], ['non_parametric']) || {}; + const stats = + getNode(plotData.summaries[chartInforIndex], ['basic_statistics']) || + {}; let yMax = cfgYMax; let yMin = cfgYMin; @@ -278,16 +289,42 @@ const showWhiskerHover = (e) => { // show hover const canvasId = e.chart.canvas.id; - const rec = $(`#${canvasId}`)[0].getBoundingClientRect(); + const pos = calWhiskerParamTblRect(canvasId); const hoverElement = $(`#${canvasId}Hover`); - hoverElement.css('left', rec.x + rec.width - 35); - hoverElement.css('top', rec.y); + hoverElement.css('left', pos.left); + hoverElement.css('top', pos.top); hoverElement.css('display', 'block'); hoverElement.css('visibility', 'visible'); hoverElement.css('opacity', '1'); // #s120b14 todo pin from mouse position to use copy button }; +const calWhiskerParamTblRect = (whiskerId) => { + const offsetTop = 0; + const whiskerHoverElem = $(`#${whiskerId}Hover`); + const whiskerRect = $(`#${whiskerId}`)[0].getBoundingClientRect(); + const tblOffsetTop = + parseInt(whiskerHoverElem.css('padding-top').slice(0, -2)) + + parseInt(whiskerHoverElem.css('margin-top').slice(0, -2)); + const tblOffsetBottom = + parseInt(whiskerHoverElem.css('padding-bottom').slice(0, -2)) + + parseInt(whiskerHoverElem.css('margin-bottom').slice(0, -2)); + const tblHeight = + whiskerHoverElem.height() + tblOffsetBottom + tblOffsetTop - 35; + const 
tblVisible = $(window).height() - (whiskerRect.y + tblOffsetTop); + const overTblBottom = tblHeight - tblVisible; + const posLeft = whiskerRect.x + whiskerRect.width - 35; + let posTop = whiskerRect.y > 0 ? whiskerRect.y : offsetTop; + if (overTblBottom > 0) { + if (whiskerRect.y > overTblBottom) { + posTop = whiskerRect.y - overTblBottom; + } else { + posTop = offsetTop; + } + } + return { left: posLeft, top: posTop }; +}; + const hideAllWhiskerHover = () => { $('.whisker-hover').css('display', 'none'); }; @@ -314,7 +351,13 @@ const produceWhiskerPlots = (data) => { }); }; -const drawSensorWhisker = (sensorIdx, plotData, clickedVal = null, cfgYMin = null, cfgYMax = null) => { +const drawSensorWhisker = ( + sensorIdx, + plotData, + clickedVal = null, + cfgYMin = null, + cfgYMax = null, +) => { const canvasId = `whisker0${parseInt(sensorIdx) + 1}`; const canvasElement = $(`#${canvasId}`); const chartInforIndex = getChartInforIndex(plotData, clickedVal);
diff --git a/ap/table_viewer/controllers.py b/ap/table_viewer/controllers.py
index 32c4ac2..3016efe 100644
--- a/ap/table_viewer/controllers.py
+++ b/ap/table_viewer/controllers.py
@@ -17,7 +17,7 @@
@table_viewer_blueprint.route('/table_viewer') def index(): - all_procs = get_all_process() + all_procs = get_all_process(with_parent=False) output_dict = { 'page_title': _('Table Viewer'), 'procs': all_procs,
diff --git a/ap/templates/500.html b/ap/templates/500.html
index cd0fa29..272069c 100644
--- a/ap/templates/500.html
+++ b/ap/templates/500.html
@@ -1,13 +1,16 @@
{% extends "base.html" %} {% block asset %} - + {% endblock %} {% block midbody %} -
-

{{ _("Sorry, something went wrong") }}

-
-{% endblock %}
\ No newline at end of file
+

+    {{ _("Sorry, something went wrong") }}
+
+

+
+{% endblock %}
diff --git a/ap/templates/aggregate_plot/aggregate_plot.html b/ap/templates/aggregate_plot/aggregate_plot.html
index 88ca7e4..4952fbc 100644
--- a/ap/templates/aggregate_plot/aggregate_plot.html
+++ b/ap/templates/aggregate_plot/aggregate_plot.html
@@ -14,62 +14,85 @@
- {{ macros.label(_("Target period")) }} - {{ macros.paste_btn() }} + {{ macros.label(_("Target period")) }} + {{ macros.paste_btn() }}
- {{macros.multiple_target_period(DivisionNumberHover=_('Division number Hover AgP'), rlp=True, cyclicCalender=True, dataNumber=True, WindowLengthHover=_('Window length Hover AgP'), IntervalHover=_('Interval Hover AgP'))}} + {{ macros.multiple_target_period(DivisionNumberHover=_('Division number Hover AgP'), rlp=True, cyclicCalender=True, dataNumber=True, WindowLengthHover=_('Window length Hover AgP'), IntervalHover=_('Interval Hover AgP')) }}
{{ macros.cond_proc([],'', width=2, title=_('Graph Filter: Limits data to be displayed to a subset.'), pysm=3, is_optional=true) }} -
-
- {{ macros.order_btn() }} +
+
+ {{ macros.order_btn() }} {{ macros.preprocess_options(isRealOnlyMsg=_('The target is a variable whose data type is Real')) }} - - {{ macros.export_gui() }} -
-
- - {{ macros.cat_label_filter_modal() }} + + {{ macros.export_gui() }} +
+
+ + {{ macros.cat_label_filter_modal() }} {{ macros.end_col_order_modal() }} {{ macros.end_col_order_modal(graphArea='1') }} {{ macros.jump_modal() }} -
+ -