fix(backend): add binlog_info field to tendbcluster backup aggregation TencentBlueKing#7144
iSecloud authored and zhangzhw8 committed Sep 26, 2024
1 parent 8676f0c commit a4acc85
Showing 2 changed files with 5 additions and 29 deletions.
31 changes: 2 additions & 29 deletions dbm-ui/backend/db_services/mysql/fixpoint_rollback/handlers.py
@@ -138,35 +138,6 @@ def _format_job_backup_log(self, raw_backup_logs: List[str]) -> List[Dict[str, A
def _get_log_from_bklog(collector: str, start_time: datetime, end_time: datetime, query_string="*") -> List[Dict]:
return BKLogHandler.query_logs(collector, start_time, end_time, query_string)

@staticmethod
def _format_backup_for_tendb(raw_log: Dict[str, Any], backup_log: Dict[str, Any]) -> Dict[str, Any]:
"""
Further format the backup logs of a tendb cluster
@param backup_log: log entry already aggregated by aggregate_backup_log_by_id
"""
# Initialize the role collections and drop fields that are no longer needed
if "remote_node" not in backup_log:
backup_log["remote_node"] = {}
backup_log["spider_node"], backup_log["spider_slave"] = [], []
delete_fields = [
"mysql_host",
"mysql_port",
"master_host",
"master_port",
"mysql_role",
"binlog_info",
"data_schema_grant",
]
for field in delete_fields:
backup_log.pop(field)

# Within the same backup_id, take the smallest backup_begin_time, the largest backup_end_time and the largest consistent_backup_time.
# Since the timestamps compare lexicographically, the strings can be compared directly as times
backup_log["backup_begin_time"] = min(backup_log["backup_begin_time"], raw_log["backup_begin_time"])
backup_log["backup_end_time"] = min(backup_log["backup_end_time"], raw_log["backup_end_time"])
backup_log["backup_time"] = max(backup_log["backup_time"], raw_log["consistent_backup_time"])
return backup_log

def aggregate_tendb_dbbackup_logs(self, backup_logs: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
"""
Aggregate tendb mysql_backup_result logs, grouping the mysql backup records by backup_id
@@ -221,6 +192,7 @@ def insert_log_into_node(_backup_node, _log):
return None

if not _backup_node or (
# TODO: this condition is always true and can be removed later
_log["backup_host"] not in _backup_node
and (
# Conditions under which the existing entry can be overwritten:
@@ -249,6 +221,7 @@ def insert_log_into_node(_backup_node, _log):
_backup_node["mysql_role"] = _log["mysql_role"]
_backup_node["host"], _backup_node["port"] = _log["backup_host"], _log["backup_port"]
_backup_node["file_list_details"] = []
_backup_node["binlog_info"] = _log.get("binlog_info")

# Update the backup time and insert the file list details
insert_time_field(_backup_node, _log)
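For context, a minimal sketch (hypothetical field names, simplified from the real aggregate_tendb_dbbackup_logs) of the aggregation idea used above: records sharing a backup_id are merged, the time window is widened with min/max on the ISO-8601 strings (which compare lexicographically), and each host's binlog_info is carried onto its node entry, as this commit now does for _backup_node.

from typing import Any, Dict, List

def aggregate_by_backup_id(raw_logs: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
    """Merge per-host backup records that share a backup_id into one entry."""
    grouped: Dict[str, Dict[str, Any]] = {}
    for log in raw_logs:
        entry = grouped.setdefault(
            log["backup_id"],
            {
                "backup_id": log["backup_id"],
                "backup_begin_time": log["backup_begin_time"],
                "backup_end_time": log["backup_end_time"],
                "backup_time": log["consistent_backup_time"],
                "remote_node": {},
            },
        )
        # ISO-8601 timestamps compare lexicographically, so min/max on the raw
        # strings picks the earliest begin time and the latest end time.
        entry["backup_begin_time"] = min(entry["backup_begin_time"], log["backup_begin_time"])
        entry["backup_end_time"] = max(entry["backup_end_time"], log["backup_end_time"])
        entry["backup_time"] = max(entry["backup_time"], log["consistent_backup_time"])
        # Carry each host's binlog position onto its node entry, mirroring the
        # binlog_info field added to _backup_node in this commit.
        entry["remote_node"][log["backup_host"]] = {
            "host": log["backup_host"],
            "port": log["backup_port"],
            "mysql_role": log["mysql_role"],
            "binlog_info": log.get("binlog_info"),
        }
    return list(grouped.values())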
3 changes: 3 additions & 0 deletions dbm-ui/backend/ticket/builders/redis/redis_toolbox_cut_off.py
@@ -125,6 +125,9 @@ def patch_ticket_detail(self):
"location_spec": {"city": cluster.region, "sub_zone_ids": []},
"affinity": cluster.disaster_tolerance_level,
}
# If the role is proxy, spread the hosts across at least two machine rooms
if role == InstanceRole.REDIS_PROXY.value:
resource_spec[resource_role].update(group_count=2)
elif role == InstanceRole.REDIS_SLAVE.value:
# When replacing a slave, it must be in a different machine room from its paired master in the current cluster
redis_slaves = StorageInstance.objects.prefetch_related("as_receiver", "machine").filter(
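For illustration, a hedged sketch of the shape one resource_spec entry could take after patch_ticket_detail runs for a proxy role (hypothetical keys and values, not taken from the repository); group_count=2 asks the resource pool to spread the replacement proxies across at least two machine rooms:

resource_spec = {
    "proxy": {                       # hypothetical resource_role key
        "spec_id": 1,                # hypothetical spec id
        "count": 2,                  # hypothetical number of hosts to apply for
        "location_spec": {"city": "region-1", "sub_zone_ids": []},
        "affinity": "CROS_SUBZONE",  # placeholder for cluster.disaster_tolerance_level
        "group_count": 2,            # new: proxies must span at least two machine rooms
    }
}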
