From a0cf31369b345727f35c54ff0b18785add5b0c4d Mon Sep 17 00:00:00 2001 From: Gonzalo Rafuls Date: Wed, 7 Feb 2024 19:16:27 +0100 Subject: [PATCH] feat: added group_by on hosts and schedules removed ansible facts gathering test fixes Closes: https://github.com/redhat-performance/quads/issues/464 Change-Id: I01871d3d209d25877f57fac262bb7b185dd6028b --- conf/quads.yml | 9 -- quads/quads_api.py | 16 ++-- quads/server/blueprints/hosts.py | 9 +- quads/server/blueprints/schedules.py | 2 +- quads/server/dao/baseDao.py | 38 +++++++-- quads/server/dao/host.py | 35 +++++--- quads/server/dao/schedule.py | 85 ++++++++++++++++--- quads/tools/create_input_assignments.py | 108 +++++------------------- quads/tools/reports.py | 26 ++---- tests/api/test_hosts.py | 72 ++++++++++------ tests/api/test_schedules.py | 6 +- tests/cli/test_report.py | 9 +- 12 files changed, 217 insertions(+), 198 deletions(-) diff --git a/conf/quads.yml b/conf/quads.yml index da9502656..13d007d3b 100644 --- a/conf/quads.yml +++ b/conf/quads.yml @@ -123,15 +123,6 @@ json_web_path: /var/www/html/cloud # number of days of retaining old .json files json_retention_days: 0 -# Whether or not you want the QUADS host to gather and display ansible facts in -# an HTMl page, you need ansible-cmdb rpm for this functionality which can be -# got from https://github.com/fboender/ansible-cmdb/releases -gather_ansible_facts: false - -# this is where we place the generated ansible configuration management database -# html -ansible_facts_web_path: /var/www/html/ansible_facts - # untouchable_hosts are hosts that should be avoided by QUADS in any way. # use this to define hosts QUADS should never move. 
untouchable_hosts: foreman.example.com c08-h30-r630.example.com diff --git a/quads/quads_api.py b/quads/quads_api.py index 95b5d556a..af3c515e7 100644 --- a/quads/quads_api.py +++ b/quads/quads_api.py @@ -37,15 +37,11 @@ def __init__(self, config: Config): self.config = config self.base_url = config.API_URL self.session = requests.Session() - self.auth = HTTPBasicAuth( - self.config.get("quads_api_username"), self.config.get("quads_api_password") - ) + self.auth = HTTPBasicAuth(self.config.get("quads_api_username"), self.config.get("quads_api_password")) # Base functions def get(self, endpoint: str) -> Response: - _response = self.session.get( - os.path.join(self.base_url, endpoint), verify=False, auth=self.auth - ) + _response = self.session.get(os.path.join(self.base_url, endpoint), verify=False, auth=self.auth) if _response.status_code == 500: raise APIServerException("Check the flask server logs") if _response.status_code == 400: @@ -85,9 +81,7 @@ def patch(self, endpoint, data) -> Response: return _response def delete(self, endpoint) -> Response: - _response = self.session.delete( - os.path.join(self.base_url, endpoint), verify=False, auth=self.auth - ) + _response = self.session.delete(os.path.join(self.base_url, endpoint), verify=False, auth=self.auth) if _response.status_code == 500: raise APIServerException("Check the flask server logs") if _response.status_code == 400: @@ -105,6 +99,10 @@ def get_hosts(self) -> List[Host]: hosts.append(host_obj) return hosts + def get_host_models(self): + response = self.get("hosts?group_by=model") + return response.json() + def filter_hosts(self, data) -> List[Host]: url_params = url_parse.urlencode(data) response = self.get(f"hosts?{url_params}") diff --git a/quads/server/blueprints/hosts.py b/quads/server/blueprints/hosts.py index 54241c5da..9cc85d520 100644 --- a/quads/server/blueprints/hosts.py +++ b/quads/server/blueprints/hosts.py @@ -26,7 +26,14 @@ def get_hosts() -> Response: else: _hosts = HostDao.get_hosts() - 
return jsonify([_host.as_dict() for _host in _hosts]) + + if _hosts and type(_hosts[0]) is Host: + return jsonify([_host.as_dict() for _host in _hosts]) + else: + for _host in _hosts: + return jsonify([tuple(_host) for _host in _hosts]) + + return jsonify(_hosts) @host_bp.route("/") diff --git a/quads/server/blueprints/schedules.py b/quads/server/blueprints/schedules.py index 22443dbd4..75a72bfae 100644 --- a/quads/server/blueprints/schedules.py +++ b/quads/server/blueprints/schedules.py @@ -16,7 +16,7 @@ def get_schedules() -> Response: if request.args: try: - _schedules = ScheduleDao.filter_schedules(**request.args) + _schedules = ScheduleDao.filter_schedule_dict(request.args) except (EntryNotFound, InvalidArgument) as ex: response = { "status_code": 400, diff --git a/quads/server/dao/baseDao.py b/quads/server/dao/baseDao.py index 7abed0788..f2ca096a2 100644 --- a/quads/server/dao/baseDao.py +++ b/quads/server/dao/baseDao.py @@ -1,6 +1,7 @@ from quads.server.models import Interface, Disk, Memory, Processor, Host, db from flask import current_app from sqlalchemy.exc import SQLAlchemyError +from sqlalchemy import func FILTERING_OPERATORS = { "==": "eq", @@ -67,9 +68,21 @@ def safe_commit() -> bool: return False @classmethod - def create_query_select(cls, model, filters=None, columns=None): - query_columns = cls.create_query_columns(model=model, columns=columns) - query = db.session.query(*query_columns).distinct(model.id) + def create_query_select(cls, model, filters=None, columns=None, group_by=None): + """ + Create a query to select data from a model with filters and columns. + :param model: The model to query. + :param filters: A list of filter expressions. + :param columns: A list of columns to select. + :param group_by: A column to group by. + :return: The query result. 
+ """ + if group_by: + group_by_column = cls.get_group_by_column(model=model, group_by=group_by) + query_columns = [group_by_column, func.count(group_by_column)] + else: + query_columns = cls.create_query_columns(model=model, columns=columns) + query = db.session.query(*query_columns) for expression in filters: try: column_name, op, value = expression @@ -99,12 +112,12 @@ def create_query_select(cls, model, filters=None, columns=None): % FILTERING_OPERATORS[op] ) except IndexError: # pragma: no cover - raise Exception( - "Invalid filter operator: %s" % FILTERING_OPERATORS[op] - ) + raise Exception("Invalid filter operator: %s" % FILTERING_OPERATORS[op]) if value == "null": value = None query = query.filter(getattr(column, attr)(value)) + if group_by: + query = query.group_by(group_by_column) return query.all() @classmethod @@ -114,8 +127,15 @@ def create_query_columns(cls, model, columns): cols = [] for column in columns: - attr = getattr(model, column, None) - if not attr: + _attr = getattr(model, column, None) + if not _attr: raise Exception("Invalid column name %s" % column) - cols.append(attr) + cols.append(_attr) return cols + + @classmethod + def get_group_by_column(cls, model, group_by): + _attr = getattr(model, group_by) + if not _attr: + raise Exception("Invalid column name %s" % group_by) + return _attr diff --git a/quads/server/dao/host.py b/quads/server/dao/host.py index 88ad4e840..2737a049d 100644 --- a/quads/server/dao/host.py +++ b/quads/server/dao/host.py @@ -1,6 +1,6 @@ from typing import List, Optional -from sqlalchemy import Boolean +from sqlalchemy import Boolean, func from sqlalchemy.orm import RelationshipProperty from sqlalchemy.orm.relationships import Relationship @@ -19,9 +19,7 @@ class HostDao(BaseDao): @classmethod - def create_host( - cls, name: str, model: str, host_type: str, default_cloud: str - ) -> Host: + def create_host(cls, name: str, model: str, host_type: str, default_cloud: str) -> Host: _host_obj = cls.get_host(name) if 
_host_obj: raise EntryExisting @@ -95,10 +93,16 @@ def get_hosts() -> List[Host]: hosts = db.session.query(Host).all() return hosts + @staticmethod + def get_host_models(): + host_models = db.session.query(Host.model, func.count(Host.model)).group_by(Host.model).all() + return host_models + @staticmethod def filter_hosts_dict(data: dict) -> List[Host]: filter_tuples = [] operator = "==" + group_by = None for k, value in data.items(): fields = k.split(".") if len(fields) > 2: @@ -115,6 +119,10 @@ def filter_hosts_dict(data: dict) -> List[Host]: operator = OPERATORS[op] break + if fields[0].lower() == "group_by": + first_field = value + group_by = value + k = value field = Host.__mapper__.attrs.get(first_field) if not field: raise InvalidArgument(f"{k} is not a valid field.") @@ -133,17 +141,16 @@ def filter_hosts_dict(data: dict) -> List[Host]: if first_field.lower() in MAP_HOST_META.keys(): if len(fields) > 1: field_name = f"{first_field.lower()}.{field_name.lower()}" - filter_tuples.append( - ( - field_name, - operator, - value, + + if fields[0].lower() != "group_by": + filter_tuples.append( + ( + field_name, + operator, + value, + ) ) - ) - if filter_tuples: - _hosts = HostDao.create_query_select(Host, filters=filter_tuples) - else: - _hosts = HostDao.get_hosts() + _hosts = HostDao.create_query_select(Host, filters=filter_tuples, group_by=group_by) return _hosts @staticmethod diff --git a/quads/server/dao/schedule.py b/quads/server/dao/schedule.py index 90efad4c6..0f26cc0ee 100644 --- a/quads/server/dao/schedule.py +++ b/quads/server/dao/schedule.py @@ -1,9 +1,11 @@ from datetime import datetime from typing import List, Type -from sqlalchemy import and_ +from sqlalchemy import and_, Boolean, func +from sqlalchemy.orm import RelationshipProperty +from sqlalchemy.orm.relationships import Relationship from quads.server.dao.assignment import AssignmentDao -from quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument, SQLError +from 
quads.server.dao.baseDao import BaseDao, EntryNotFound, InvalidArgument, SQLError, OPERATORS, MAP_HOST_META from quads.server.dao.cloud import CloudDao from quads.server.dao.host import HostDao from quads.server.models import db, Host, Schedule, Cloud, Assignment @@ -11,9 +13,7 @@ class ScheduleDao(BaseDao): @classmethod - def create_schedule( - cls, start: datetime, end: datetime, assignment: Assignment, host: Host - ) -> Schedule: + def create_schedule(cls, start: datetime, end: datetime, assignment: Assignment, host: Host) -> Schedule: _schedule_obj = Schedule(start=start, end=end, assignment=assignment, host=host) db.session.add(_schedule_obj) cls.safe_commit() @@ -90,6 +90,73 @@ def get_future_schedules(host: Host = None, cloud: Cloud = None) -> List[Schedul future_schedules = query.all() return future_schedules + @staticmethod + def filter_schedule_dict(data: dict) -> List[Schedule]: + filter_tuples = [] + date_fields = ["start", "end", "build_start", "build_end"] + operator = "==" + group_by = None + for k, value in data.items(): + fields = k.split(".") + if len(fields) > 2: + raise InvalidArgument(f"Too many arguments: {fields}") + + first_field = fields[0] + field_name = fields[-1] + if "__" in k: + for op in OPERATORS.keys(): + if op in field_name: + if first_field == field_name: + first_field = field_name[: field_name.index(op)] + field_name = field_name[: field_name.index(op)] + operator = OPERATORS[op] + break + + if value.lower() == "none": + value = None + + if fields[0].lower() == "group_by": + first_field = value + group_by = value + k = value + field = Schedule.__mapper__.attrs.get(first_field) + if not field: + raise InvalidArgument(f"{k} is not a valid field.") + if ( + type(field) != RelationshipProperty + and type(field) != Relationship + and type(field.columns[0].type) == Boolean + ): + value = value.lower() in ["true", "y", 1, "yes"] + else: + if first_field.lower() == "host": + host = HostDao.get_host(value) + if not host: + raise 
EntryNotFound(f"Host not found: {value}") + value = host + field_name = first_field + + if first_field in date_fields: + try: + if value: + value = datetime.strptime(value, "%Y-%m-%dT%H:%M") + except ValueError: + raise InvalidArgument(f"Invalid date format for {first_field}: {value}") + + if fields[0].lower() != "group_by": + filter_tuples.append( + ( + field_name, + operator, + value, + ) + ) + try: + _schedules = ScheduleDao.create_query_select(Schedule, filters=filter_tuples, group_by=group_by) + except Exception as e: + raise InvalidArgument(str(e)) + return _schedules + @staticmethod def filter_schedules( start: datetime = None, @@ -116,9 +183,7 @@ def filter_schedules( end_date = datetime.strptime(end, "%Y-%m-%dT%H:%M") end = end_date except ValueError: - raise InvalidArgument( - "end argument must be a datetime object or a correct datetime format string" - ) + raise InvalidArgument("end argument must be a datetime object or a correct datetime format string") elif not isinstance(end, datetime): raise InvalidArgument("end argument must be a datetime object") query = query.filter(Schedule.end <= end) @@ -135,9 +200,7 @@ def filter_schedules( return filter_schedules @staticmethod - def get_current_schedule( - date: datetime = None, host: Host = None, cloud: Cloud = None - ) -> List[Type[Schedule]]: + def get_current_schedule(date: datetime = None, host: Host = None, cloud: Cloud = None) -> List[Type[Schedule]]: query = db.session.query(Schedule) if cloud: query = query.join(Assignment).filter(Assignment.cloud == cloud) diff --git a/quads/tools/create_input_assignments.py b/quads/tools/create_input_assignments.py index 585175879..34c3313a1 100755 --- a/quads/tools/create_input_assignments.py +++ b/quads/tools/create_input_assignments.py @@ -43,8 +43,6 @@ def print_summary(): _headers.append("**OSPENV**") if Config["openshift_management"]: _headers.append("**OCPINV**") - if Config["gather_ansible_facts"]: - _headers.append("**HWFACTS**") _summary.append("| %s 
|\n" % " | ".join(_headers)) _summary.append("| %s |\n" % " | ".join(["---" for _ in range(len(_headers))])) @@ -70,13 +68,9 @@ def print_summary(): style_tag_end = "" if cloud["validated"] or cloud_name == "cloud01": style_tag_start = '' - instack_link = os.path.join( - Config["quads_url"], "cloud", "%s_instackenv.json" % cloud_name - ) + instack_link = os.path.join(Config["quads_url"], "cloud", "%s_instackenv.json" % cloud_name) instack_text = "download" - ocpinv_link = os.path.join( - Config["quads_url"], "cloud", "%s_ocpinventory.json" % cloud_name - ) + ocpinv_link = os.path.join(Config["quads_url"], "cloud", "%s_ocpinventory.json" % cloud_name) ocpinv_text = "download" status = ( '%.0f%%' - % (percent, percent, " ".join(classes), percent) + 'class="%s">%.0f%%' % (percent, percent, " ".join(classes), percent) ) _data = [ @@ -127,63 +118,17 @@ def print_summary(): link, ] - if Config["gather_ansible_facts"]: - factstyle_tag_end = "" - if os.path.exists( - os.path.join( - Config["ansible_facts_web_path"], - "ansible_facts", - "%s_overview.html" % cloud_specific_tag, - ) - ): - factstyle_tag_start = '' - ansible_facts_link = os.path.join( - Config["quads_url"], - "ansible_facts", - "%s_overview.html" % cloud_specific_tag, - ) - else: - factstyle_tag_start = '' - ansible_facts_link = os.path.join( - Config["quads_url"], "underconstruction" - ) - if cloud_name == "cloud01": - _data.append("") + _data.append(status) + if cloud_name == "cloud01": + if Config["openshift_management"] or Config["openstack_management"]: _data.append("") - _data.append(status) - _data.append("") - else: - _data.append( - "%s%s%s" - % (instack_link, style_tag_start, instack_text, style_tag_end) - ) - _data.append( - "%s%s%s" - % (ocpinv_link, style_tag_start, ocpinv_text, style_tag_end) - ) - _data.append(status) - _data.append( - "%sinventory%s" - % (ansible_facts_link, factstyle_tag_start, factstyle_tag_end) - ) else: - _data.append(status) - if cloud_name == "cloud01": - if 
Config["openstack_management"]: - _data.append("") - if Config["openshift_management"]: - _data.append("") - else: - if Config["openstack_management"]: - _data.append( - "%s%s%s" - % (instack_link, style_tag_start, instack_text, style_tag_end) - ) - if Config["openshift_management"]: - _data.append( - "%s%s%s" - % (ocpinv_link, style_tag_start, ocpinv_text, style_tag_end) - ) + text = "" + if Config["openstack_management"]: + text = instack_text + if Config["openshift_management"]: + text = ocpinv_text + _data.append("%s%s%s" % (ocpinv_link, style_tag_start, text, style_tag_end)) _summary.append("| %s |\n" % " | ".join(_data)) @@ -215,10 +160,7 @@ def print_unmanaged(hosts): if not host_obj: short_host = real_host.split(".")[0] - lines.append( - "| %s | console |\n" - % (short_host, host) - ) + lines.append("| %s | console |\n" % (short_host, host)) return lines @@ -229,10 +171,7 @@ def print_faulty(broken_hosts): lines.append("| %s |\n" % " | ".join(["---" for _ in range(len(_headers))])) for host in broken_hosts: short_host = host.name.split(".")[0] - lines.append( - "| %s | console |\n" - % (short_host, host.name) - ) + lines.append("| %s | console |\n" % (short_host, host.name)) return lines @@ -288,14 +227,10 @@ def main(): lines = [] all_hosts = loop.run_until_complete(foreman.get_all_hosts()) - blacklist = re.compile( - "|".join([re.escape(word) for word in Config["exclude_hosts"].split("|")]) - ) + blacklist = re.compile("|".join([re.escape(word) for word in Config["exclude_hosts"].split("|")])) broken_hosts = quads.filter_hosts({"broken": False}) - domain_broken_hosts = [ - host for host in broken_hosts if Config["domain"] in host.name - ] + domain_broken_hosts = [host for host in broken_hosts if Config["domain"] in host.name] mgmt_hosts = {} for host, properties in all_hosts.items(): @@ -320,16 +255,11 @@ def main(): name = cloud["name"] owner = cloud["owner"] lines.append("### \n" % name.strip()) - lines.append( - "### **%s : %s (%s) -- %s**\n\n" - % 
(name.strip(), cloud["count"], cloud["description"], owner) - ) + lines.append("### **%s : %s (%s) -- %s**\n\n" % (name.strip(), cloud["count"], cloud["description"], owner)) lines.extend(print_header()) _cloud_obj = quads.get_cloud(name) _hosts = sorted( - quads.filter_hosts( - {"cloud": _cloud_obj.name, "retired": False, "broken": False} - ), + quads.filter_hosts({"cloud": _cloud_obj.name, "retired": False, "broken": False}), key=lambda x: x.name, ) for host in _hosts: diff --git a/quads/tools/reports.py b/quads/tools/reports.py index fd9a44012..be8ac7c89 100644 --- a/quads/tools/reports.py +++ b/quads/tools/reports.py @@ -32,15 +32,13 @@ def report_available(_logger, _start, _end): total_allocated_month = 0 total_hosts = len(hosts) for _date in date_span(start, end): - total_allocated_month += len( - quads.get_current_schedules({"date": _date.strftime("%Y-%m-%dT%H:%M")}) - ) + total_allocated_month += len(quads.get_current_schedules({"date": _date.strftime("%Y-%m-%dT%H:%M")})) days += 1 utilized = total_allocated_month * 100 // (total_hosts * days) _logger.info(f"Percentage Utilized: {utilized}%") - # TODO: This should return future schedules as well - schedules = quads.get_current_schedules() + payload = {"build_start__ne": None, "build_end__ne": None} + schedules = quads.get_schedules(payload) total = timedelta() for schedule in schedules: if schedule.build_end and schedule.build_start: @@ -106,12 +104,7 @@ def report_available(_logger, _start, _end): def report_scheduled(_logger, months, year): headers = ["Month", "Scheduled", "Systems", "% Utilized"] - _logger.info( - f"{headers[0]:<8}| " - f"{headers[1]:>8}| " - f"{headers[2]:>8}| " - f"{headers[3]:>11}| " - ) + _logger.info(f"{headers[0]:<8}| " f"{headers[1]:>8}| " f"{headers[2]:>8}| " f"{headers[3]:>11}| ") now = datetime.now() now = now.replace(year=year, hour=22, minute=0, second=0) @@ -140,18 +133,11 @@ def process_scheduled(_logger, month, now): utilization = 0 for date in date_span(start, end): days 
+= 1 - scheduled_count += len( - quads.get_current_schedules({"date": date.strftime("%Y-%m-%dT%H:%M")}) - ) + scheduled_count += len(quads.get_current_schedules({"date": date.strftime("%Y-%m-%dT%H:%M")})) if hosts and days: utilization = scheduled_count * 100 // (days * hosts) f_month = f"{start.month:02}" - _logger.info( - f"{start.year}-{f_month:<3}| " - f"{scheduled:>9}| " - f"{hosts:>8}| " - f"{utilization:>10}%| " - ) + _logger.info(f"{start.year}-{f_month:<3}| " f"{scheduled:>9}| " f"{hosts:>8}| " f"{utilization:>10}%| ") def report_detailed(_logger, _start, _end): diff --git a/tests/api/test_hosts.py b/tests/api/test_hosts.py index 9258ca375..bc460c820 100644 --- a/tests/api/test_hosts.py +++ b/tests/api/test_hosts.py @@ -53,10 +53,7 @@ def test_invalid_undefined_model(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] - == "Model R999 does not seem to be part of the defined models on quads.yml" - ) + assert response.json["message"] == "Model R999 does not seem to be part of the defined models on quads.yml" @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_invalid_missing_name(self, test_client, auth, prefill): @@ -141,10 +138,7 @@ def test_invalid_undefined_default_cloud(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] - == f"Default Cloud not found: {host_request['default_cloud']}" - ) + assert response.json["message"] == f"Default Cloud not found: {host_request['default_cloud']}" @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_valid_multi(self, test_client, auth, prefill): @@ -168,9 +162,7 @@ def test_valid_multi(self, test_client, auth, prefill): assert response.json["model"] == req["model"].upper() assert response.json["host_type"] == req["host_type"] assert 
response.json["default_cloud_id"] == response.json["cloud_id"] - duration = datetime.utcnow() - datetime.strptime( - response.json["created_at"], "%a, %d %b %Y %H:%M:%S GMT" - ) + duration = datetime.utcnow() - datetime.strptime(response.json["created_at"], "%a, %d %b %Y %H:%M:%S GMT") assert duration.total_seconds() < 5 @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) @@ -190,9 +182,7 @@ def test_invalid_host_already_exists(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] == f"Host {HOST_1_REQUEST['name']} already exists" - ) + assert response.json["message"] == f"Host {HOST_1_REQUEST['name']} already exists" class TestGetHosts: @@ -308,6 +298,45 @@ def test_valid_filter_alias(self, test_client, auth, prefill): assert response.json[0]["host_type"] == HOST_2_REQUEST["host_type"] assert response.json[0]["default_cloud_id"] == response.json[0]["cloud_id"] + @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) + def test_valid_group_by(self, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth token and clouds and hosts from TestCreateHosts + | WHEN: User tries to get hosts that have different host_type than "scalelab" (not equal to) + | THEN: User should be able to get the host + """ + auth_header = auth.get_auth_header() + response = unwrap_json( + test_client.get( + "/api/v3/hosts?group_by=model", + headers=auth_header, + ) + ) + assert response.status_code == 200 + assert len(response.json) == 2 + assert response.json[0][0] == "FC640" + assert response.json[0][1] == 1 + assert response.json[1][0] == "R640" + assert response.json[1][1] == 1 + + @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) + def test_valid_group_by_bad(self, test_client, auth, prefill): + """ + | GIVEN: Defaults, auth token and clouds and hosts from TestCreateHosts + | WHEN: User tries to get hosts that have different 
host_type than "scalelab" (not equal to) + | THEN: User should be able to get the host + """ + auth_header = auth.get_auth_header() + response = unwrap_json( + test_client.get( + "/api/v3/hosts?group_by=bad", + headers=auth_header, + ) + ) + assert response.status_code == 400 + assert response.json["error"] == "Bad Request" + assert response.json["message"] == "bad is not a valid field." + @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_invalid_filter_by_undefined_cloud(self, test_client, auth, prefill): """ @@ -343,10 +372,7 @@ def test_invalid_filter_too_many_args(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] - == f"Too many arguments: {too_many_args_filter.split('=')[0].split('.')}" - ) + assert response.json["message"] == f"Too many arguments: {too_many_args_filter.split('=')[0].split('.')}" @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_invalid_filter_by_invalid_field(self, test_client, auth, prefill): @@ -365,10 +391,7 @@ def test_invalid_filter_by_invalid_field(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] - == f"{invalid_field_filter.split('=')[0]} is not a valid field." - ) + assert response.json["message"] == f"{invalid_field_filter.split('=')[0]} is not a valid field." 
class TestUpdateHosts: @@ -437,10 +460,7 @@ def test_invalid_undefined_default_cloud(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert ( - response.json["message"] - == f"Cloud not found: {host_request['default_cloud']}" - ) + assert response.json["message"] == f"Cloud not found: {host_request['default_cloud']}" @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_invalid_undefined_cloud(self, test_client, auth, prefill): diff --git a/tests/api/test_schedules.py b/tests/api/test_schedules.py index 601e802fb..3da81ba3d 100644 --- a/tests/api/test_schedules.py +++ b/tests/api/test_schedules.py @@ -359,9 +359,7 @@ def test_invalid_filter(self, test_client, auth, prefill): ) assert response.status_code == 400 assert response.json["error"] == "Bad Request" - assert response.json["message"] == ( - "start argument must be a datetime object or a correct datetime format string" - ) + assert response.json["message"] == "Invalid date format for start: invalid" @pytest.mark.parametrize("prefill", prefill_settings, indirect=True) def test_valid_filter(self, test_client, auth, prefill): @@ -375,7 +373,7 @@ def test_valid_filter(self, test_client, auth, prefill): resp = SCHEDULE_1_RESPONSE.copy() response = unwrap_json( test_client.get( - f"/api/v3/schedules?host={hostname}", + f"/api/v3/schedules?host.name={hostname}", headers=auth_header, ) ) diff --git a/tests/cli/test_report.py b/tests/cli/test_report.py index b49c9436d..54a9a53f2 100644 --- a/tests/cli/test_report.py +++ b/tests/cli/test_report.py @@ -10,10 +10,9 @@ def test_report_available(self): self.quads_cli_call("report_available") assert self._caplog.messages[0].startswith("QUADS report for ") assert self._caplog.messages[1] == "Percentage Utilized: 25%" - assert self._caplog.messages[2] == "Average build delta: 0:00:00" - assert self._caplog.messages[3] == "Server Type | Total| Free| Scheduled| 2 weeks| 4 weeks" + 
assert self._caplog.messages[2] == "Server Type | Total| Free| Scheduled| 2 weeks| 4 weeks" + assert self._caplog.messages[3] == "R930 | 1| 0| 100%| 1| 1" assert self._caplog.messages[4] == "R640 | 1| 0| 100%| 0| 0" - assert self._caplog.messages[5] == "R930 | 1| 0| 100%| 1| 1" def test_report_scheduled(self): today = datetime.now() @@ -26,7 +25,7 @@ def test_report_scheduled(self): else: past_date = f"{today.year}-{today.month - 1:02d}" assert self._caplog.messages[0] == "Month | Scheduled| Systems| % Utilized| " - assert self._caplog.messages[1] == f"{today.year}-{today.month:02d} | 1| 2| 25%| " + assert self._caplog.messages[1] == f"{today.year}-{today.month:02d} | 0| 2| 25%| " assert self._caplog.messages[2] == f"{past_date} | 0| 2| 0%| " def test_report_scheduled_no_args(self): @@ -48,7 +47,7 @@ def test_report_scheduled_year(self): else: past_date = f"{today.year}-{today.month - 1:02d}" assert self._caplog.messages[0] == "Month | Scheduled| Systems| % Utilized| " - assert self._caplog.messages[1] == f"{today.year}-{today.month:02d} | 1| 2| 25%| " + assert self._caplog.messages[1] == f"{today.year}-{today.month:02d} | 0| 2| 25%| " assert self._caplog.messages[2] == f"{past_date} | 0| 2| 0%| " def test_report_detailed(self):