Merge pull request #17 from apdn7/feature/v4.5.2
Feature/v4.5.2
apdn7 authored Feb 7, 2024
2 parents 472bf54 + 98285eb commit b424a8a
Showing 149 changed files with 1,644 additions and 606 deletions.
3 changes: 2 additions & 1 deletion README.md
@@ -5,7 +5,8 @@
# Analysis Platform

```
2024-01-15: Released version 4.5.1! see RELEASE.md for details.
2024-02-07: (Hotfix) Released version 4.5.2, which fixes the issue where graph display was completely disabled.
Please refer to the "Upgrade" section to upgrade AP+DN7.
```

Analysis Platform is an open source web application to import, connect and visualize factory IoT data. It helps to collect, link and integrate data from multiple data sources.
22 changes: 22 additions & 0 deletions RELEASE.md
@@ -1,5 +1,27 @@
# Releases

## v4.5.2

This version includes the following bug fixes:

* (Important) Fixed an issue where graph display was completely disabled (from Feb. 5, 2024) in AP+DN7 versions >= 4.2.0
* Fixed an issue where historical data older than one month was not being loaded from the database
* Fixed an issue where AP+DN7 could not display graphs after `Process Name` was edited
* Fixed an issue with the limit on the number of variables that can be selected in each visualization
* Fixed an issue with the data count when a `process_id` column exists in the data source

Please follow the instructions in the manuals below to upgrade AP+DN7.

* EN: [Upgrade Manual](https://github.com/apdn7/AnalysisPlatform/files/12557931/AP%2BDN7_upgrade_manual_En_v4.1.1_r2.pdf)
* JP: [Upgrade Manual](https://github.com/apdn7/AnalysisPlatform/files/12557930/AP%2BDN7_upgrade_manual_Jp_v4.1.1_r2.pdf)

Improvements

* Common
  * Implemented the `Jump` function in all visualizations except PCA and COG
* Users can now experiment with various visualizations more easily than before
<img src="https://github.com/apdn7/AnalysisPlatform/assets/106378158/59cfc500-2d2d-41c7-9718-cb8ac8aea035" alt="JumpFunction" width="700">

## v4.5.1

This version is a minor update including some bug fixes
2 changes: 1 addition & 1 deletion VERSION
@@ -1,4 +1,4 @@
v4.5.1.faa01916
v4.5.2.4ce7870a
1
OSS

4 changes: 2 additions & 2 deletions ap/__init__.py
@@ -32,6 +32,7 @@
EXTERNAL_API,
INIT_APP_DB_FILE,
INIT_BASIC_CFG_FILE,
LAST_REQUEST_TIME,
LOG_LEVEL,
PARTITION_NUMBER,
REQUEST_THREAD_ID,
@@ -84,7 +85,7 @@
}

# last request time
dic_request_info = {'last_request_time': datetime.utcnow()}
dic_request_info = {LAST_REQUEST_TIME: datetime.utcnow()}

# ############## init application metadata db ###############
db_engine = None
@@ -335,7 +336,6 @@ def before_request_callback():
)

if not is_ignore_content and request.blueprint != EXTERNAL_API:
dic_request_info['last_request_time'] = datetime.utcnow()
bind_user_info(request)

if not dic_config.get(TESTING):
8 changes: 3 additions & 5 deletions ap/api/aggregate_plot/services.py
@@ -83,6 +83,9 @@ def gen_agp_data(dic_param: DicParam):
dic_param = filter_cat_dict_common(df, dic_param, cat_exp, cat_procs, graph_param)
export_data = df

# calculate cycle_time and replace target column
convert_datetime_to_ct(df, graph_param)

# chunk data by cyclic terms
if graph_param.common.cyclic_div_num:
df = get_df_chunk_cyclic(df, dic_param)
@@ -157,8 +160,6 @@ def gen_df_direct_term(dic_param, dic_cat_filters, use_expired_cache):

df_term[DIVIDE_FMT_COL] = f'{term[START_DT]} | {term[END_DT]}'

convert_datetime_to_ct(df_term, graph_param)

if df is None:
df = df_term.copy()
else:
@@ -196,9 +197,6 @@ def gen_agp_data_from_df(df: pd.DataFrame, graph_param: DicParam) -> List[Dict[A
plot_data = []
target_vars = graph_param.common.sensor_cols

# calculate cycle_time and replace target column
convert_datetime_to_ct(df, graph_param)

str_cols = []

# each target var be shown on one chart (barchart or line chart)
6 changes: 4 additions & 2 deletions ap/api/setting_module/services/data_import.py
Expand Up @@ -149,7 +149,7 @@ def import_data(
# return commit_error

if job_id:
save_proc_data_count(df, get_date_col, job_id)
save_proc_data_count(df, get_date_col, job_id, proc_id)

return cycles_len

@@ -1448,9 +1448,11 @@ def get_df_first_n_last(df: DataFrame, first_count=10, last_count=10):


@log_execution_time()
def save_proc_data_count(df, get_date_col, job_id):
def save_proc_data_count(df, get_date_col, job_id, proc_id):
if not df.size:
return None
# assign proc_id
df[ProcDataCount.process_id.key] = proc_id
# group data by datetime time
df[get_date_col] = df[get_date_col].apply(
lambda x: '{}'.format(
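The extra `proc_id` argument tags every row with its owning process before counting, so a `process_id` column that already exists in the source data can no longer skew the per-process record counts. A rough pandas sketch of that counting step (column names and the hourly bucket are assumptions, not the project's exact implementation):

```python
import pandas as pd

def count_records_per_hour(df: pd.DataFrame, date_col: str, proc_id: int) -> pd.DataFrame:
    """Count imported rows per process and per hourly time bucket."""
    if df.empty:
        return pd.DataFrame(columns=['process_id', date_col, 'count'])
    df = df.copy()
    df['process_id'] = proc_id  # overwrite any process_id that came from the data source
    df[date_col] = pd.to_datetime(df[date_col]).dt.floor('h')  # bucket rows by hour
    return df.groupby(['process_id', date_col]).size().reset_index(name='count')

# usage
df = pd.DataFrame({'time': ['2024-02-07 10:05', '2024-02-07 10:40', '2024-02-07 11:02']})
print(count_records_per_hour(df, 'time', proc_id=3))
```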
79 changes: 51 additions & 28 deletions ap/api/setting_module/services/factory_import.py
@@ -1,4 +1,4 @@
from datetime import datetime
from datetime import datetime, timedelta

import pandas as pd
from pandas import DataFrame
@@ -17,7 +17,6 @@
get_df_first_n_last,
get_new_adding_columns,
import_data,
save_proc_data_count,
save_sensors,
validate_datetime,
write_duplicate_import,
@@ -31,6 +30,7 @@
add_days,
add_double_quotes,
add_years,
calculator_day_ago,
convert_time,
)
from ap.common.constants import DATETIME_DUMMY, MSG_DB_CON_FAILED, DataType, JobStatus
@@ -147,7 +147,7 @@ def import_factory(proc_id):
}

# get factory max date
fac_max_date, is_tz_col = get_factory_max_date(proc_cfg)
fac_min_date, fac_max_date, is_tz_col = get_factory_min_max_date(proc_cfg)

inserted_row_count = 0
calc_range_days_func = calc_sql_range_days()
@@ -177,6 +177,15 @@
end_time, range_day=sql_day, is_tz_col=is_tz_col
)
else:
min_date = convert_time(fac_min_date, return_string=False)
min_date = min_date - timedelta(minutes=1)
max_date = convert_time(fac_max_date, return_string=False)
filter_time = get_future_import_first_time(min_date, max_date, is_tz_col)

if not filter_time:
limit_date = add_days(days=-SQL_DAYS_AGO)
limit_date = limit_date.replace(hour=0, minute=0, second=0, microsecond=0)
filter_time = convert_time(limit_date)
start_time, end_time, filter_time = get_sql_range_time(filter_time, is_tz_col=is_tz_col)

# no data in range, stop
@@ -338,18 +347,24 @@ def _calc_sql_range_days(cur_day_cnt, cur_record_cnt):


@log_execution_time()
def get_sql_range_time(
filter_time=None, range_day=SQL_DAY, start_days_ago=SQL_DAYS_AGO, is_tz_col=False
):
# if there is no data , this poling is the first time, so get data of n days ago.
limit_date = add_days(days=-start_days_ago)
limit_date = limit_date.replace(hour=0, minute=0, second=0, microsecond=0)
def get_future_import_first_time(min_time, max_time, is_tz_col):
if not max_time:
return None

if filter_time:
filter_time = max(convert_time(filter_time), convert_time(limit_date))
else:
filter_time = convert_time(limit_date)
if calculator_day_ago(min_time, is_tz_col) <= SQL_DAYS_AGO:
return min_time

if calculator_day_ago(max_time, is_tz_col) < SQL_DAYS_AGO:
return None

if not min_time:
return None

return min_time


@log_execution_time()
def get_sql_range_time(filter_time=None, range_day=SQL_DAY, is_tz_col=False):
# start time
start_time = convert_time(filter_time, return_string=False)

@@ -443,37 +458,39 @@ def get_factory_data(proc_cfg, column_names, auto_increment_col, start_time, end


@log_execution_time()
def get_factory_max_date(proc_cfg):
def get_factory_min_max_date(proc_cfg):
"""
get factory min and max date
"""

with DbProxy(proc_cfg.data_source) as db_instance:
# gen sql
agg_results = []
get_date_col = add_double_quotes(proc_cfg.get_auto_increment_col_else_get_date())
orig_tblname = proc_cfg.table_name.strip('"')
if not isinstance(db_instance, mysql.MySQL):
table_name = add_double_quotes(orig_tblname)
else:
table_name = orig_tblname

sql = f'select max({get_date_col}) from {table_name}'
if isinstance(db_instance, mssqlserver.MSSQLServer):
sql = f'select convert(varchar(30), max({get_date_col}), 127) from {table_name}'
_, rows = db_instance.run_sql(sql, row_is_dict=False)

if not rows:
return None

out = rows[0][0]
for agg_func in ['MIN', 'MAX']:
sql = f'select {agg_func}({get_date_col}) from {table_name}'
if isinstance(db_instance, mssqlserver.MSSQLServer):
sql = f'select convert(varchar(30), {agg_func}({get_date_col}), 127) from {table_name}'
_, rows = db_instance.run_sql(sql, row_is_dict=False)

if out == DATETIME_DUMMY:
return None
if not rows:
return None

agg_results.append(rows[0][0])
min_time, max_time = agg_results
if max_time == DATETIME_DUMMY:
return None, None, False
is_tz_col = db_instance.is_timezone_hold_column(orig_tblname, get_date_col)
out = format_factory_date_to_meta_data(out, is_tz_col)
min_time = format_factory_date_to_meta_data(min_time, is_tz_col)
max_time = format_factory_date_to_meta_data(max_time, is_tz_col)

return out, is_tz_col
return min_time, max_time, is_tz_col


SQL_PAST_DAYS_AGO = 1
@@ -638,7 +655,9 @@ def factory_past_data_transform(proc_id):
cols = next(data)
remain_rows = tuple()
inserted_row_count = 0
has_data = False
for rows in data:
has_data = True
is_import, rows, remain_rows = gen_import_data(rows, remain_rows, auto_increment_idx)
if not is_import:
continue
@@ -719,7 +738,11 @@ def factory_past_data_transform(proc_id):
log_str = 'FACTORY PAST DATA IMPORT SQL(days={}, records={}, range={}-{})'
logger.info(log_str.format(SQL_PAST_DAYS_AGO, total_row, start_time, end_time))

yield 100
if not has_data:
gen_import_job_info(job_info, 0, start_time, end_time)
job_info.auto_increment_col_timezone = is_tz_col
job_info.percent = 100
yield job_info


@log_execution_time()
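Reading the new `get_future_import_first_time` together with its caller above, the first polling window is now derived from the table's MIN/MAX dates instead of a fixed N-days-ago cutoff: import starts from the oldest record when the whole table fits inside the window, and also when even the newest record is older than the cutoff (which is what restores the import of historical data older than one month); the fixed cutoff remains only as a fallback when the data straddles it. A hedged, self-contained restatement of that decision (function and constant names here are illustrative, not the project's):

```python
from datetime import datetime

SQL_DAYS_AGO = 30  # assumed cutoff; the real value comes from the project's configuration

def days_ago(ts: datetime, now: datetime) -> float:
    return (now - ts).total_seconds() / 86400

def choose_first_import_time(min_time, max_time, now=None):
    """Pick the start of the first factory-import window, or None to fall back to the fixed cutoff."""
    now = now or datetime.utcnow()
    if not min_time or not max_time:
        return None
    if days_ago(min_time, now) <= SQL_DAYS_AGO:
        return min_time   # whole table lies inside the window: import from the start
    if days_ago(max_time, now) < SQL_DAYS_AGO:
        return None       # data straddles the cutoff: fall back to now - SQL_DAYS_AGO
    return min_time       # even the newest row is old: still import from the start

# example: a table whose data ended two years ago is now imported from its oldest record
print(choose_first_import_time(datetime(2021, 1, 1), datetime(2022, 1, 1), now=datetime(2024, 2, 7)))
```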
6 changes: 3 additions & 3 deletions ap/api/setting_module/services/polling_frequency.py
@@ -14,7 +14,7 @@
)
from ap.api.setting_module.services.process_delete import add_del_proc_job
from ap.common.common_utils import add_seconds
from ap.common.constants import CfgConstantType, DBType
from ap.common.constants import CfgConstantType, DBType, LAST_REQUEST_TIME
from ap.common.logger import log_execution_time, logger
from ap.common.scheduler import JobType, add_job_to_scheduler, remove_jobs, scheduler_app_context
from ap.setting_module.models import CfgConstant, CfgProcess, JobManagement
@@ -85,7 +85,7 @@ def add_import_job(

add_job_to_scheduler(job_id, job_name, trigger, import_func, run_now, dic_import_param)

add_idle_mornitoring_job()
# add_idle_mornitoring_job()

# double check
attempt = 0
@@ -131,7 +131,7 @@ def idle_monitoring(_job_id=None, _job_name=None):
"""
# check last request > now() - 5 minutes
last_request_time = dic_request_info.get('last_request_time', datetime.utcnow())
last_request_time = dic_request_info.get(LAST_REQUEST_TIME, datetime.utcnow())
if last_request_time > add_seconds(seconds=-5 * 60):
return

Expand Down
12 changes: 8 additions & 4 deletions ap/api/setting_module/services/save_load_user_setting.py
@@ -38,12 +38,13 @@ def transform_settings(mapping_groups):


def transform_setting(src_vals, des_vals):
dic_src_checkboxes, dic_src_others = group_by_name(src_vals)
dic_des_checkboxes, dic_des_others = group_by_name(des_vals)
dic_src_checkboxes, dic_src_others, dic_src_datetime_picker = group_by_name(src_vals)
dic_des_checkboxes, dic_des_others, dic_des_datetime_picker = group_by_name(des_vals)
checkbox_vals = mapping_checkbox_radio(dic_src_checkboxes, dic_des_checkboxes)
datetimepicker_vals = mapping_checkbox_radio(dic_src_datetime_picker, dic_des_datetime_picker)
other_vals = mapping_others(dic_src_others, dic_des_others)

return other_vals + checkbox_vals
return other_vals + checkbox_vals + datetimepicker_vals


def mapping_checkbox_radio(dic_src, dic_des):
@@ -118,6 +119,7 @@ def mapping_others(dic_src, dic_des):
def group_by_name(vals):
dic_checkboxes = defaultdict(list)
dic_others = defaultdict(list)
dic_datetime_picker = defaultdict(list)
for dic_vals in vals:
setting = UserSettingDetail(dic_vals)
if not setting.name:
@@ -127,6 +129,8 @@
if setting.name == 'cat_filter':
continue
dic_checkboxes[setting.name.lower()].append(setting)
elif setting.name == 'DATETIME_RANGE_PICKER':
dic_datetime_picker[setting.id].append(setting)
else:
short_name, _ = split_str_and_last_number(setting.name)
short_name = short_name.lower()
@@ -137,7 +141,7 @@
}
dic_others = {key: sorted(vals, key=lambda x: x.name) for key, vals in dic_others.items()}

return dic_checkboxes, dic_others
return dic_checkboxes, dic_others, dic_datetime_picker


def map_form(dic_src_vals, dic_des_vals):
6 changes: 5 additions & 1 deletion ap/api/trace_data/services/time_series_chart.py
@@ -4,6 +4,7 @@
import traceback
from collections import Counter, defaultdict
from copy import deepcopy
from datetime import datetime
from itertools import groupby
from math import ceil
from typing import Dict, List
@@ -14,7 +15,7 @@
from pandas import DataFrame, Series
from sqlalchemy import and_

from ap import db
from ap import db, dic_request_info
from ap.api.common.services.services import convert_datetime_to_ct, get_filter_on_demand_data
from ap.api.trace_data.services.proc_link import TraceGraph
from ap.api.trace_data.services.regex_infinity import (
@@ -1883,6 +1884,9 @@ def get_data_from_db(
use_expired_cache=False,
with_categorized_real=False,
):
# the system is busy
dic_request_info[LAST_REQUEST_TIME] = datetime.utcnow()

# DEBUG Function
df, actual_total_record, duplicated_serials_number = get_df_from_db(
graph_param, is_save_df_to_file, _use_expired_cache=use_expired_cache
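The `LAST_REQUEST_TIME` changes in `ap/__init__.py`, `polling_frequency.py`, and this file work together as a simple idle detector: data requests refresh a shared timestamp, and a scheduled job checks whether the app has been quiet for several minutes before running idle-time work. A minimal standalone sketch of that pattern (module and function names are illustrative, not the project's API):

```python
from datetime import datetime, timedelta

LAST_REQUEST_TIME = 'last_request_time'
IDLE_THRESHOLD = timedelta(minutes=5)

# shared request-info dict, analogous to dic_request_info
request_info = {LAST_REQUEST_TIME: datetime.utcnow()}

def mark_busy():
    """Called when a data request starts, so the app counts as busy."""
    request_info[LAST_REQUEST_TIME] = datetime.utcnow()

def is_idle(now=None):
    """True if no data request has been seen for IDLE_THRESHOLD."""
    now = now or datetime.utcnow()
    return now - request_info[LAST_REQUEST_TIME] > IDLE_THRESHOLD

# a background job would skip idle-time maintenance while the app is busy
if is_idle():
    print('no recent requests - safe to run idle-time maintenance')
else:
    print('app is busy - skip')
```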