diff --git a/RedfishInteropValidator.py b/RedfishInteropValidator.py index 189c76d..3a6a739 100644 --- a/RedfishInteropValidator.py +++ b/RedfishInteropValidator.py @@ -3,7 +3,6 @@ # Copyright 2016 Distributed Management Task Force, Inc. All rights reserved. # License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md -import io import os import sys import re @@ -12,476 +11,25 @@ import logging import json import traverseService as rst -import jsonschema import argparse -from enum import Enum +from io import StringIO from commonProfile import getProfiles, checkProfileAgainstSchema +from traverseService import AuthenticationError +from tohtml import renderHtml, writeHtml +from metadata import setup_schema_pack +import commonInterop rsvLogger = rst.getLogger() -config = {'WarnRecommended': False} +VERBO_NUM = 15 +logging.addLevelName(VERBO_NUM, "VERBO") -class sEnum(Enum): - FAIL = 'FAIL' - PASS = 'PASS' - WARN = 'WARN' - -class msgInterop: - def __init__(self, name, entry, expected, actual, success): - self.name = name - self.entry = entry - self.expected = expected - self.actual = actual - if isinstance(success, bool): - self.success = sEnum.PASS if success else sEnum.FAIL - else: - self.success = success - self.parent = None - - -def validateRequirement(entry, decodeditem, conditional=False): - """ - Validates Requirement entry - """ - propDoesNotExist = (decodeditem == 'DNE') - rsvLogger.info('Testing ReadRequirement \n\texpected:' + str(entry) + ', exists: ' + str(not propDoesNotExist)) - # If we're not mandatory, pass automatically, else fail - # However, we have other entries "IfImplemented" and "Conditional" - # note: Mandatory is default!! if present in the profile. Make sure this is made sure. 
- originalentry = entry - if entry == "IfImplemented" or (entry == "Conditional" and conditional): - entry = "Mandatory" - paramPass = not entry == "Mandatory" or \ - entry == "Mandatory" and not propDoesNotExist - if entry == "Recommended" and propDoesNotExist: - rsvLogger.info('\tItem is recommended but does not exist') - if config['WarnRecommended']: - rsvLogger.error('\tItem is recommended but does not exist, escalating to WARN') - paramPass = sEnum.WARN - - rsvLogger.info('\tpass ' + str(paramPass)) - if not paramPass: - rsvLogger.error('\tNoPass') - return msgInterop('ReadRequirement', originalentry, 'Must Exist' if entry == "Mandatory" else 'Any', 'Exists' if not propDoesNotExist else 'DNE', paramPass),\ - paramPass - - -def isPropertyValid(profilePropName, rObj): - node = rObj.typeobj - while node is not None: - for prop in node.propList: - if profilePropName == prop.propChild: - return None, True - node = node.parent - rsvLogger.error('{} - Does not exist in ResourceType Schema, please consult profile provided'.format(profilePropName)) - return msgInterop('PropertyValidity', profilePropName, 'Should Exist', 'in ResourceType Schema', False), False - - -def validateMinCount(alist, length, annotation=0): - """ - Validates Mincount annotation - """ - rsvLogger.info('Testing minCount \n\texpected:' + str(length) + ', val:' + str(annotation)) - paramPass = len(alist) >= length or annotation >= length - rsvLogger.info('\tpass ' + str(paramPass)) - if not paramPass: - rsvLogger.error('\tNoPass') - return msgInterop('MinCount', length, '<=', annotation if annotation > len(alist) else len(alist), paramPass),\ - paramPass - - -def validateSupportedValues(enumlist, annotation): - """ - Validates SupportedVals annotation - """ - rsvLogger.info('Testing supportedValues \n\t:' + str(enumlist) + ', exists:' + str(annotation)) - for item in enumlist: - paramPass = item in annotation - if not paramPass: - break - rsvLogger.info('\tpass ' + str(paramPass)) - if not 
paramPass: - rsvLogger.error('\tNoPass') - return msgInterop('SupportedValues', enumlist, 'included in...', annotation, paramPass),\ - paramPass - - -def findPropItemforString(propObj, itemname): - """ - Finds an appropriate object for an item - """ - node = propObj.typeobj - while node is not None: - for prop in node.propList: - decodedName = prop.name.split(':')[-1] - if itemname == decodedName: - return prop - node = node.parent - return None - - -def validateWriteRequirement(propObj, entry, itemname): - """ - Validates if a property is WriteRequirement or not - """ - rsvLogger.info('writeable \n\t' + str(entry)) - permission = 'Read' - expected = "OData.Permission/ReadWrite" if entry else "Any" - if entry: - targetProp = findPropItemforString(propObj, itemname.replace('#', '')) - propAttr = None - if targetProp is not None: - propAttr = targetProp.propDict.get('OData.Permissions') - if propAttr is not None: - permission = propAttr.get('EnumMember', 'Read') - paramPass = permission \ - == "OData.Permission/ReadWrite" - else: - paramPass = False - else: - paramPass = True - - rsvLogger.info('\tpass ' + str(paramPass)) - if not paramPass: - rsvLogger.error('\tNoPass') - return msgInterop('WriteRequirement', entry, expected, permission, paramPass),\ - paramPass - - -def checkComparison(val, compareType, target): - """ - Validate a given comparison option, given a value and a target set - """ - rsvLogger.info('Testing a comparison \n\t' + str((val, compareType, target))) - vallist = val if isinstance(val, list) else [val] - paramPass = False - if compareType == "AnyOf": - for item in vallist: - paramPass = item in target - if paramPass: - break - else: - continue - - if compareType == "AllOf": - alltarget = set() - for item in vallist: - paramPass = item in target and item not in alltarget - if paramPass: - alltarget.add(item) - if len(alltarget) == len(target): - break - else: - continue - paramPass = len(alltarget) == len(target) - if compareType == 
"LinkToResource": - vallink = val.get('@odata.id') - success, decoded, code, elapsed = rst.callResourceURI(vallink) - if success: - ourType = decoded.get('@odata.type') - if ourType is not None: - SchemaType = rst.getType(ourType) - paramPass = SchemaType in target - else: - paramPass = False - else: - paramPass = False - - if compareType == "Equal": - paramPass = val == target - if compareType == "NotEqual": - paramPass = val != target - if compareType == "GreaterThan": - paramPass = val > target - if compareType == "GreaterThanOrEqual": - paramPass = val >= target - if compareType == "LessThan": - paramPass = val < target - if compareType == "LessThanOrEqual": - paramPass = val <= target - if compareType == "Absent": - paramPass = val == 'DNE' - if compareType == "Present": - paramPass = val != 'DNE' - rsvLogger.info('\tpass ' + str(paramPass)) - if not paramPass: - rsvLogger.error('\tNoPass') - return msgInterop('Comparison', target, compareType, val, paramPass),\ - paramPass - - -def validateMembers(members, entry, annotation): - """ - Validate an entry of Members and its count annotation - """ - rsvLogger.info('Testing members \n\t' + str((members, entry, annotation))) - if not validateRequirement('Mandatory', members): - return False - if "MinCount" in entry: - mincount, mincountpass = validateMinCount(members, entry["MinCount"], annotation) - mincount.name = 'MembersMinCount' - return mincount, mincountpass - - -def validateMinVersion(fulltype, entry): - """ - Checks for the minimum version of a resource's type - """ - fulltype = fulltype.replace('#', '') - rsvLogger.info('Testing minVersion \n\t' + str((fulltype, entry))) - # If fulltype doesn't contain version as is, try it as v#_#_# - versionSplit = entry.split('.') - versionNew = 'v' - for x in versionSplit: - versionNew = versionNew + x + '_' - versionNew = versionNew[:-1] - # get version from payload - v_payload = rst.getNamespace(fulltype).split('.', 1)[-1] - # use string comparison, given version 
numbering is accurate to regex - paramPass = v_payload >= (versionNew if 'v' in v_payload else entry) - rsvLogger.info('\tpass ' + str(paramPass)) - if not paramPass: - rsvLogger.error('\tNo Pass') - return msgInterop('MinVersion', '{} ({})'.format(entry, versionNew), '<=', fulltype, paramPass),\ - paramPass - - -def checkConditionalRequirement(propResourceObj, entry, decodedtuple, itemname): - """ - Returns boolean if entry's conditional is true or false - """ - rsvLogger.info('Evaluating conditionalRequirements') - if "SubordinateToResource" in entry: - isSubordinate = False - # iterate through parents via resourceObj - # list must be reversed to work backwards - resourceParent = propResourceObj.parent - for expectedParent in reversed(entry["SubordinateToResource"]): - if resourceParent is not None: - parentType = resourceParent.typeobj.stype - isSubordinate = parentType == expectedParent - rsvLogger.info('\tsubordinance ' + - str(parentType) + ' ' + str(isSubordinate)) - resourceParent = resourceParent.parent - else: - rsvLogger.info('no parent') - isSubordinate = False - return isSubordinate - if "CompareProperty" in entry: - decodeditem, decoded = decodedtuple - # find property in json payload by working backwards thru objects - # decoded tuple is designed just for this piece, since there is - # no parent in dictionaries - comparePropName = entry["CompareProperty"] - while comparePropName not in decodeditem and decoded is not None: - decodeditem, decoded = decoded - compareProp = decodeditem.get(comparePropName, 'DNE') - return checkComparison(compareProp, entry["Comparison"], entry.get("CompareValues", []))[1] - - -def validatePropertyRequirement(propResourceObj, entry, decodedtuple, itemname, chkCondition=False): - """ - Validate PropertyRequirements - """ - msgs = [] - counts = Counter() - decodeditem, decoded = decodedtuple - if entry is None or len(entry) == 0: - rsvLogger.debug('there are no requirements for this prop') - else: - 
rsvLogger.info('propRequirement with value: ' + str(decodeditem if not isinstance( - decodeditem, dict) else 'dict')) - # If we're working with a list, then consider MinCount, Comparisons, then execute on each item - # list based comparisons include AnyOf and AllOf - if isinstance(decodeditem, list): - rsvLogger.info("inside of a list: " + itemname) - if "MinCount" in entry: - msg, success = validateMinCount(decodeditem, entry["MinCount"], - decoded[0].get(itemname.split('.')[-1] + '@odata.count', 0)) - msgs.append(msg) - msg.name = itemname + '.' + msg.name - for k, v in entry.get('PropertyRequirements', {}).items(): - # default to AnyOf if Comparison is not present but Values is - comparisonValue = v.get("Comparison", "AnyOf") if v.get("Values") is not None else None - if comparisonValue in ["AllOf", "AnyOf"]: - msg, success = (checkComparison([val.get(k, 'DNE') for val in decodeditem], - comparisonValue, v["Values"])) - msgs.append(msg) - msg.name = itemname + '.' + msg.name - cnt = 0 - for item in decodeditem: - listmsgs, listcounts = validatePropertyRequirement( - propResourceObj, entry, (item, decoded), itemname + '#' + str(cnt)) - counts.update(listcounts) - msgs.extend(listmsgs) - cnt += 1 - - else: - # consider requirement before anything else - # problem: if dne, skip? - - # Read Requirement is default mandatory if not present - msg, success = validateRequirement(entry.get('ReadRequirement', 'Mandatory'), decodeditem) - msgs.append(msg) - msg.name = itemname + '.' + msg.name - - if "WriteRequirement" in entry: - msg, success = validateWriteRequirement(propResourceObj, entry["WriteRequirement"], itemname) - msgs.append(msg) - msg.name = itemname + '.' 
+ msg.name - if "ConditionalRequirements" in entry: - innerList = entry["ConditionalRequirements"] - for item in innerList: - if checkConditionalRequirement(propResourceObj, item, decodedtuple, itemname): - rsvLogger.info("\tCondition DOES apply") - conditionalMsgs, conditionalCounts = validatePropertyRequirement( - propResourceObj, item, decodedtuple, itemname, chkCondition = True) - counts.update(conditionalCounts) - for item in conditionalMsgs: - item.name = item.name.replace('.', '.Conditional.', 1) - msgs.extend(conditionalMsgs) - else: - rsvLogger.info("\tCondition does not apply") - if "MinSupportValues" in entry: - msg, success = validateSupportedValues( - decodeditem, entry["MinSupportValues"], - decoded[0].get(itemname.split('.')[-1] + '@Redfish.AllowableValues', [])) - msgs.append(msg) - msg.name = itemname + '.' + msg.name - if "Comparison" in entry and not chkCondition and\ - entry["Comparison"] not in ["AnyOf", "AllOf"]: - msg, success = checkComparison(decodeditem, entry["Comparison"], entry.get("Values",[])) - msgs.append(msg) - msg.name = itemname + '.' + msg.name - if "PropertyRequirements" in entry: - innerDict = entry["PropertyRequirements"] - if isinstance(decodeditem, dict): - for item in innerDict: - rsvLogger.info('inside complex ' + itemname + '.' + item) - complexMsgs, complexCounts = validatePropertyRequirement( - propResourceObj, innerDict[item], (decodeditem.get(item, 'DNE'), decodedtuple), item) - msgs.extend(complexMsgs) - counts.update(complexCounts) - else: - rsvLogger.info('complex {} is missing or not a dictionary'.format(itemname + '.' 
+ item, None)) - return msgs, counts - - -def validateActionRequirement(propResourceObj, entry, decodedtuple, actionname): - """ - Validate Requirements for one action - """ - decodeditem, decoded = decodedtuple - counts = Counter() - msgs = [] - rsvLogger.info('actionRequirement \n\tval: ' + str(decodeditem if not isinstance( - decodeditem, dict) else 'dict') + ' ' + str(entry)) - if "ReadRequirement" in entry: - # problem: if dne, skip - msg, success = validateRequirement(entry.get('ReadRequirement', "Mandatory"), decodeditem) - msgs.append(msg) - msg.name = actionname + '.' + msg.name - propDoesNotExist = (decodeditem == 'DNE') - if propDoesNotExist: - return msgs, counts - # problem: if dne, skip - if "Parameters" in entry: - innerDict = entry["Parameters"] - for k in innerDict: - item = innerDict[k] - annotation = decodeditem.get(str(k) + '@Redfish.AllowableValues', 'DNE') - # problem: if dne, skip - # assume mandatory - msg, success = validateRequirement(item.get('ReadRequirement', "Mandatory"), annotation) - msgs.append(msg) - msg.name = actionname + '.Parameters.' + msg.name - if annotation == 'DNE': - continue - if "ParameterValues" in item: - msg, success = validateSupportedValues( - item["ParameterValues"], annotation) - msgs.append(msg) - msg.name = actionname + '.' + msg.name - if "RecommendedValues" in item: - msg, success = validateSupportedValues( - item["RecommendedValues"], annotation) - msg.name = msg.name.replace('Supported', 'Recommended') - if config['WarnRecommended'] and not success: - rsvLogger.error('\tRecommended parameters do not all exist, escalating to WARN') - msg.success = sEnum.WARN - elif not success: - rsvLogger.error('\tRecommended parameters do not all exist, but are not Mandatory') - msg.success = sEnum.PASS - - msgs.append(msg) - msg.name = actionname + '.' 
+ msg.name - # consider requirement before anything else, what if action - # if the action doesn't exist, you can't check parameters - # if it doesn't exist, what should not be checked for action - return msgs, counts - - -def validateInteropResource(propResourceObj, interopDict, decoded): - """ - Base function that validates a single Interop Resource by its entry - """ - msgs = [] - rsvLogger.info('### Validating an InteropResource') - rsvLogger.debug(str(interopDict)) - counts = Counter() - # decodedtuple provides the chain of dicts containing dicts, needed for CompareProperty - decodedtuple = (decoded, None) - if "MinVersion" in interopDict: - msg, success = validateMinVersion(propResourceObj.typeobj.fulltype, interopDict['MinVersion']) - msgs.append(msg) - if "PropertyRequirements" in interopDict: - # problem, unlisted in 0.9.9a - innerDict = interopDict["PropertyRequirements"] - for item in innerDict: - vmsg, isvalid = isPropertyValid(item, propResourceObj) - if not isvalid: - msgs.append(vmsg) - vmsg.name = '{}.{}'.format(item, vmsg.name) - continue - rsvLogger.info('### Validating PropertyRequirements for {}'.format(item)) - pmsgs, pcounts = validatePropertyRequirement( - propResourceObj, innerDict[item], (decoded.get(item, 'DNE'), decodedtuple), item) - rsvLogger.info(pcounts) - counts.update(pcounts) - msgs.extend(pmsgs) - if "ActionRequirements" in interopDict: - innerDict = interopDict["ActionRequirements"] - actionsJson = decoded.get('Actions', {}) - decodedInnerTuple = (actionsJson, decodedtuple) - for item in innerDict: - actionName = '#' + propResourceObj.typeobj.stype + '.' 
+ item - rsvLogger.info(actionName) - amsgs, acounts = validateActionRequirement(propResourceObj, innerDict[item], (actionsJson.get( - actionName, 'DNE'), decodedInnerTuple), actionName) - rsvLogger.info(acounts) - counts.update(acounts) - msgs.extend(amsgs) - if "CreateResource" in interopDict: - rsvLogger.info('Skipping CreateResource') - pass - if "DeleteResource" in interopDict: - rsvLogger.info('Skipping DeleteResource') - pass - if "UpdateResource" in interopDict: - rsvLogger.info('Skipping UpdateResource') - pass - - for item in msgs: - if item.success == sEnum.WARN: - counts['warn'] += 1 - elif item.success == sEnum.PASS: - counts['pass'] += 1 - elif item.success == sEnum.FAIL: - counts['fail.{}'.format(item.name)] += 1 - rsvLogger.info(counts) - return msgs, counts +def verboseout(self, message, *args, **kws): + if self.isEnabledFor(VERBO_NUM): + self._log(VERBO_NUM, message, args, **kws) +logging.Logger.verboseout = verboseout def checkPayloadConformance(uri, decoded): @@ -522,60 +70,109 @@ def checkPayloadConformance(uri, decoded): return success, messages +def setupLoggingCaptures(): + class WarnFilter(logging.Filter): + def filter(self, rec): + return rec.levelno == logging.WARN + + errorMessages = StringIO() + warnMessages = StringIO() + fmt = logging.Formatter('%(levelname)s - %(message)s') + errh = logging.StreamHandler(errorMessages) + errh.setLevel(logging.ERROR) + errh.setFormatter(fmt) + + warnh = logging.StreamHandler(warnMessages) + warnh.setLevel(logging.WARN) + warnh.addFilter(WarnFilter()) + warnh.setFormatter(fmt) + + rsvLogger.addHandler(errh) + rsvLogger.addHandler(warnh) + + yield + + rsvLogger.removeHandler(errh) + rsvLogger.removeHandler(warnh) + warnstrings = warnMessages.getvalue() + warnMessages.close() + errorstrings = errorMessages.getvalue() + errorMessages.close() + + yield warnstrings, errorstrings + + def validateSingleURI(URI, profile, uriName='', expectedType=None, expectedSchema=None, expectedJson=None, parent=None): """ 
Validates a single URI that is given, returning its ResourceObject, counts and links """ # rs-assertion: 9.4.1 # Initial startup here - errorMessages = io.StringIO() - fmt = logging.Formatter('%(levelname)s - %(message)s') - errh = logging.StreamHandler(errorMessages) - errh.setLevel(logging.ERROR) - errh.setFormatter(fmt) - # rsvLogger.addHandler(errh) + # Initial startup here + lc = setupLoggingCaptures() + next(lc) # Start counts = Counter() results = OrderedDict() messages = [] - success = True + + results[uriName] = {'uri': URI, 'success': False, 'counts': counts, + 'messages': messages, 'errors': '', 'warns': '', + 'rtime': '', 'context': '', 'fulltype': ''} # check for @odata mandatory stuff # check for version numbering problems # check id if its the same as URI - # check @odata.context instead of local. Realize that @odata is NOT a - # "property" + # check @odata.context instead of local. Realize that @odata is NOT a "property" # Attempt to get a list of properties - successGet, jsondata, status, rtime = rst.callResourceURI(URI) + if URI is None: + if parent is not None: + parentURI = parent.uri + else: + parentURI = '...' + URI = parentURI + '...' 
+ if expectedJson is None: + successGet, jsondata, status, rtime = rst.callResourceURI(URI) + else: + successGet, jsondata = True, expectedJson successPayload, odataMessages = checkPayloadConformance(URI, jsondata if successGet else {}) if not successPayload: counts['failPayloadError'] += 1 - rsvLogger.error(str(URI) + ': payload error, @odata property nonvalid') - # rsvLogger.removeHandler(errh) - # return False, counts, results, None, propResourceObj - # Generate dictionary of property info + rsvLogger.error(str(URI) + ': payload error, @odata property non-conformant',) + # Generate dictionary of property info try: - propResourceObj = rst.ResourceObj( - uriName, URI, expectedType, expectedSchema, expectedJson, parent) - if not propResourceObj.initiated: + propResourceObj = rst.createResourceObject( + uriName, URI, expectedJson, expectedType, expectedSchema, parent) + if not propResourceObj: counts['problemResource'] += 1 - success = False - results[uriName] = (URI, success, counts, messages, - errorMessages, None, None) + results[uriName]['warns'], results[uriName]['errors'] = next(lc) return False, counts, results, None, None - except Exception as e: + except AuthenticationError: + raise # re-raise exception + except Exception: + rsvLogger.exception("") counts['exceptionResource'] += 1 - success = False - results[uriName] = (URI, success, counts, messages, - errorMessages, None, None) + results[uriName]['warns'], results[uriName]['errors'] = next(lc) return False, counts, results, None, None - counts['passGet'] += 1 - results[uriName] = (str(URI) + ' ({}s)'.format(propResourceObj.rtime), success, counts, messages, errorMessages, propResourceObj.context, propResourceObj.typeobj.fulltype, propResourceObj.jsondata) + + # if URI was sampled, get the notation text from rst.uri_sample_map + sample_string = rst.uri_sample_map.get(URI) + sample_string = sample_string + ', ' if sample_string is not None else '' + + results[uriName]['uri'] = (str(URI)) + 
results[uriName]['samplemapped'] = (str(sample_string)) + results[uriName]['rtime'] = propResourceObj.rtime + results[uriName]['context'] = propResourceObj.context + results[uriName]['origin'] = propResourceObj.schemaObj.origin + results[uriName]['fulltype'] = propResourceObj.typeobj.fulltype + results[uriName]['success'] = True + + rsvLogger.info("\t URI {}, Type ({}), GET SUCCESS (time: {})".format(URI, propResourceObj.typeobj.stype, propResourceObj.rtime)) uriName, SchemaFullType, jsondata = propResourceObj.name, propResourceObj.typeobj.fulltype, propResourceObj.jsondata SchemaNamespace, SchemaType = rst.getNamespace( @@ -585,7 +182,7 @@ def validateSingleURI(URI, profile, uriName='', expectedType=None, expectedSchem if SchemaType not in objRes: rsvLogger.debug( - 'No Such Type in sample {} {}.{}, skipping'.format(URI, SchemaNamespace, SchemaType)) + '\nNo Such Type in sample {} {}.{}, skipping'.format(URI, SchemaNamespace, SchemaType)) else: rsvLogger.info("\n*** %s, %s", uriName, URI) rsvLogger.debug("\n*** %s, %s, %s", expectedType, @@ -593,11 +190,11 @@ def validateSingleURI(URI, profile, uriName='', expectedType=None, expectedSchem objRes = objRes.get(SchemaType) rsvLogger.info(SchemaType) try: - propMessages, propCounts = validateInteropResource( + propMessages, propCounts = commonInterop.validateInteropResource( propResourceObj, objRes, jsondata) messages = messages.extend(propMessages) counts.update(propCounts) - except Exception as ex: + except Exception: rsvLogger.exception("Something went wrong") rsvLogger.error( 'Could not finish validation check on this payload') @@ -605,9 +202,9 @@ def validateSingleURI(URI, profile, uriName='', expectedType=None, expectedSchem rsvLogger.info('%s, %s\n', SchemaFullType, counts) # Get all links available + results[uriName]['warns'], results[uriName]['errors'] = next(lc) rsvLogger.debug(propResourceObj.links) - rsvLogger.removeHandler(errh) return True, counts, results, propResourceObj.links, propResourceObj @@ 
-624,8 +221,7 @@ def validateURITree(URI, uriName, profile, expectedType=None, expectedSchema=Non # Resource level validation rcounts = Counter() rmessages = [] - rsuccess = True - rerror = io.StringIO() + rerror = StringIO() objRes = dict(profile.get('Resources')) @@ -641,39 +237,46 @@ def validateURITree(URI, uriName, profile, expectedType=None, expectedSchema=Non serviceVersion = profile.get("Protocol") if serviceVersion is not None: serviceVersion = serviceVersion.get('MinVersion', '1.0.0') - msg, mpss = validateMinVersion(thisobj.jsondata.get("RedfishVersion", "0"), serviceVersion) + msg, mpss = commonInterop.validateMinVersion(thisobj.jsondata.get("RedfishVersion", "0"), serviceVersion) rmessages.append(msg) currentLinks = [(l, links[l], thisobj) for l in links] + # todo : churning a lot of links, causing possible slowdown even with set checks while len(currentLinks) > 0: newLinks = list() for linkName, link, parent in currentLinks: - if refLinks is not currentLinks and ('Links' in linkName.split('.', 1)[0] or 'RelatedItem' in linkName.split('.', 1)[0] or 'Redundancy' in linkName.split('.', 1)[0]): - refLinks.append((linkName, link, parent)) - continue - if link[0] in allLinks: + linkURI, autoExpand, linkType, linkSchema, innerJson = link + + if linkURI in allLinks or linkType == 'Resource.Item': continue - linkURI, autoExpand, linkType, linkSchema, innerJson = link + if refLinks is not currentLinks and ('Links' in linkName.split('.') or 'RelatedItem' in linkName.split('.') or 'Redundancy' in linkName.split('.')): + refLinks.append((linkName, link, parent)) + continue if autoExpand and linkType is not None: linkSuccess, linkCounts, linkResults, innerLinks, linkobj = \ validateSingleURI(linkURI, profile, "{} -> {}".format(uriName, linkName), linkType, linkSchema, innerJson, parent=parent) else: linkSuccess, linkCounts, linkResults, innerLinks, linkobj = \ - validateSingleURI(linkURI, profile, "{} -> {}".format(uriName, linkName), parent=parent) + 
validateSingleURI(linkURI, profile, "{} -> {}".format(uriName, linkName), linkType, linkSchema, parent=parent) + + allLinks.add(linkURI) + + if not linkSuccess: + continue innerLinksTuple = [(l, innerLinks[l], linkobj) for l in innerLinks] newLinks.extend(innerLinksTuple) results.update(linkResults) + SchemaType = rst.getType(linkobj.typeobj.fulltype) # Check schema level for requirements - SchemaType = rst.getType(linkobj.typeobj.fulltype) if SchemaType in objRes: traverseLogger.info("Checking service requirement for {}".format(SchemaType)) - req = objRes[SchemaType].get("ReadRequirement", "Mandatory") - msg, pss = validateRequirement(req, None) - if pss and objRes[SchemaType].get('mark', False) == False: + req = objRes[SchemaType].get("ReadRequirement", "Mandatory") + msg, pss = commonInterop.validateRequirement(req, None) + if pss and not objRes[SchemaType].get('mark', False): rmessages.append(msg) msg.name = SchemaType + '.' + msg.name objRes[SchemaType]['mark'] = True @@ -682,9 +285,9 @@ def validateURITree(URI, uriName, profile, expectedType=None, expectedSchema=Non innerList = objRes[SchemaType]["ConditionalRequirements"] newList = list() for condreq in innerList: - condtrue = checkConditionalRequirement(linkobj, condreq, (linkobj.jsondata, None), None) + condtrue = commonInterop.checkConditionalRequirement(linkobj, condreq, (linkobj.jsondata, None), None) if condtrue: - msg, cpss = validateRequirement(condreq.get("ReadRequirement", "Mandatory"), None) + msg, cpss = commonInterop.validateRequirement(condreq.get("ReadRequirement", "Mandatory"), None) if cpss: rmessages.append(msg) msg.name = SchemaType + '.Conditional.' 
+ msg.name @@ -693,40 +296,47 @@ def validateURITree(URI, uriName, profile, expectedType=None, expectedSchema=Non else: newList.append(condreq) objRes[SchemaType]["ConditionalRequirements"] = newList - - currentLinks = newLinks - if len(currentLinks) == 0 and len(refLinks) > 0: - refLinks = OrderedDict() + + if refLinks is not currentLinks and len(newLinks) == 0 and len(refLinks) > 0: currentLinks = refLinks + else: + currentLinks = newLinks # interop service level checks finalResults = OrderedDict() for left in objRes: - resultEnum = sEnum.FAIL + resultEnum = commonInterop.sEnum.FAIL if URI != "/redfish/v1": - resultEnum = sEnum.WARN + resultEnum = commonInterop.sEnum.WARN traverseLogger.info("We are not validating root, warn only") if not objRes[left].get('mark', False): - req = objRes[left].get("ReadRequirement", "Mandatory") + req = objRes[left].get("ReadRequirement", "Mandatory") rmessages.append( - msgInterop(left + '.ReadRequirement', req, 'Must Exist' if req == "Mandatory" else 'Any', 'DNE', resultEnum)) + commonInterop.msgInterop(left + '.ReadRequirement', req, 'Must Exist' if req == "Mandatory" else 'Any', 'DNE', resultEnum)) if "ConditionalRequirements" in objRes[left]: innerList = objRes[left]["ConditionalRequirements"] for condreq in innerList: req = condreq.get("ReadRequirement", "Mandatory") rmessages.append( - msgInterop(left + '.Conditional.ReadRequirement', req, 'Must Exist' if req == "Mandatory" else 'Any', 'DNE', resultEnum)) + commonInterop.msgInterop(left + '.Conditional.ReadRequirement', req, 'Must Exist' if req == "Mandatory" else 'Any', 'DNE', resultEnum)) for item in rmessages: - if item.success == sEnum.WARN: + if item.success == commonInterop.sEnum.WARN: rcounts['warn'] += 1 - elif item.success == sEnum.PASS: + elif item.success == commonInterop.sEnum.PASS: rcounts['pass'] += 1 - elif item.success == sEnum.FAIL: + elif item.success == commonInterop.sEnum.FAIL: rcounts['fail.{}'.format(item.name)] += 1 - finalResults['n/a'] = ("Service 
Level Requirements", rcounts.get('fail', 0) == 0, rcounts, rmessages, rerror, "n/a", "n/a") + finalResults['n/a'] = {'uri': "Service Level Requirements", 'success':rcounts.get('fail', 0) == 0,\ + 'counts':rcounts,\ + 'messages':rmessages, 'errors':rerror.getvalue(), 'warns': '',\ + 'rtime':'', 'context':'', 'fulltype':''} + for l in sorted(allLinks): + print(l) + print(len(allLinks)) finalResults.update(results) + rerror.close() return validateSuccess, counts, finalResults, refLinks, thisobj @@ -736,95 +346,121 @@ def validateURITree(URI, uriName, profile, expectedType=None, expectedSchema=Non ############################################################# -def main(argv): - # Set config +validatorconfig = {'payloadmode': 'Default', 'payloadfilepath': None, 'logpath': './logs'} + +def main(arglist=None, direct_parser=None): + """ + Main program + """ argget = argparse.ArgumentParser(description='tool for testing services against an interoperability profile') - argget.add_argument('--ip', type=str, help='ip to test on [host:port]') - argget.add_argument('--cache', type=str, help='cache mode [Off, Fallback, Prefer] followed by directory', nargs=2) - argget.add_argument('-u', '--user', default=None, type=str, help='user for basic auth') - argget.add_argument('-p', '--passwd', default=None, type=str, help='pass for basic auth') - argget.add_argument('--dir', type=str, default='./SchemaFiles/metadata', help='directory for local schema files') + + # config + argget.add_argument('-c', '--config', type=str, help='config file (overrides other params)') + + # tool + argget.add_argument('--schemadir', type=str, default='./SchemaFiles/metadata', help='directory for local schema files') + argget.add_argument('--schema_pack', type=str, default='', help='Deploy DMTF schema from zip distribution, for use with --localonly (Specify url or type "latest", overwrites current schema)') + argget.add_argument('--desc', type=str, default='No desc', help='sysdescription for identifying logs') 
+ argget.add_argument('--logdir', type=str, default='./logs', help='directory for log files') + argget.add_argument('--payload', type=str, help='mode to validate payloads [Tree, Single, SingleFile, TreeFile] followed by resource/filepath', nargs=2) + argget.add_argument('--sample', type=int, default=0, help='sample this number of members from large collections for validation; default is to validate all members') + argget.add_argument('--linklimit', type=str, help='Limit the amount of links in collections, formatted TypeName:## TypeName:## ..., default LogEntry:20 ', nargs='*') + argget.add_argument('-v', action='store_true', help='verbose log output to stdout') + argget.add_argument('--debug_logging', action="store_const", const=logging.DEBUG, default=logging.INFO, + help='Output debug statements to text log, otherwise it only uses INFO') + argget.add_argument('--verbose_checks', action="store_const", const=VERBO_NUM, default=logging.INFO, + help='Show all checks in logging') + argget.add_argument('--nooemcheck', action='store_true', help='Don\'t check OEM items') + + # service + argget.add_argument('-i', '--ip', type=str, help='ip to test on [host:port]') + argget.add_argument('-u', '--user', default='', type=str, help='user for basic auth') + argget.add_argument('-p', '--passwd', default='', type=str, help='pass for basic auth') argget.add_argument('--timeout', type=int, default=30, help='requests timeout in seconds') argget.add_argument('--nochkcert', action='store_true', help='ignore check for certificate') argget.add_argument('--nossl', action='store_true', help='use http instead of https') - argget.add_argument('--authtype', type=str, default='Basic', help='authorization type (None|Basic|Session)') - argget.add_argument('--localonly', action='store_true', help='only use local schema') + argget.add_argument('--forceauth', action='store_true', help='force authentication on unsecure connections') + argget.add_argument('--authtype', type=str, default='Basic', 
help='authorization type (None|Basic|Session|Token)') + argget.add_argument('--localonly', action='store_true', help='only use locally stored schema on your harddrive') argget.add_argument('--service', action='store_true', help='only use uris within the service') argget.add_argument('--suffix', type=str, default='_v1.xml', help='suffix of local schema files (for version differences)') argget.add_argument('--ca_bundle', default="", type=str, help='path to Certificate Authority bundle file or directory') - argget.add_argument('--http_proxy', type=str, default=None, help='URL for the HTTP proxy') - argget.add_argument('--https_proxy', type=str, default=None, help='URL for the HTTPS proxy') + argget.add_argument('--token', default="", type=str, help='bearer token for authtype Token') + argget.add_argument('--http_proxy', type=str, default='', help='URL for the HTTP proxy') + argget.add_argument('--https_proxy', type=str, default='', help='URL for the HTTPS proxy') + argget.add_argument('--cache', type=str, help='cache mode [Off, Fallback, Prefer] followed by directory', nargs=2) - # Config information unrelated to Traversal - argget.add_argument('-c', '--config', type=str, help='config file (overrides other params)') - argget.add_argument('--desc', type=str, default='No desc', help='sysdescription for identifying logs') - argget.add_argument('--payload', type=str, help='mode to validate payloads [Tree, Single, SingleFile, TreeFile] followed by resource/filepath', nargs=2) - argget.add_argument('--logdir', type=str, default='./logs', help='directory for log files') - argget.add_argument('-v', action='store_true', help='verbose log output to stdout') - # Config information unique to Interop Validator argget.add_argument('profile', type=str, default='sample.json', help='interop profile with which to validate service against') argget.add_argument('--schema', type=str, default=None, help='schema with which to validate interop profile against') 
argget.add_argument('--warnrecommended', action='store_true', help='warn on recommended instead of pass') - - args = argget.parse_args() - # Can set verbose no matter config or not - if args.v: - rst.ch.setLevel(logging.DEBUG) - + args = argget.parse_args(arglist) - # Set config - try: - if args.config is not None: - rst.setConfig(args.config) - rst.isConfigSet() - elif args.ip is not None: - rst.setConfigNamespace(args) - rst.isConfigSet() - else: - rsvLogger.info('No ip or config specified.') - argget.print_help() - return 1 - except Exception as ex: - rsvLogger.exception("Something went wrong") # Printout FORMAT - return 1 + # set up config + if direct_parser is not None: + try: + cdict = rst.convertConfigParserToDict(direct_parser) + rst.setConfig(cdict) + except Exception as ex: + rsvLogger.exception("Something went wrong") + return 1, None, 'Config Parser Exception' + elif args.config is None and args.ip is None: + rsvLogger.info('No ip or config specified.') + argget.print_help() + return 1, None, 'Config Incomplete' + else: + try: + rst.setByArgparse(args) + except Exception: + rsvLogger.exception("Something went wrong") + return 1, None, 'Config Exception' + + config = rst.config # Set interop config items config['WarnRecommended'] = rst.config.get('warnrecommended', args.warnrecommended) - config['profile'] = args.profile - config['schema'] = args.schema - - # Strings - config_str = "" - for cnt, item in enumerate(sorted(list(rst.config.keys() - set(['systeminfo', 'configuri', 'targetip', 'configset', 'password']))), 1): - config_str += "{}: {}, ".format(str(item), str(rst.config[item] if rst.config[item] != '' else 'None')) - if cnt % 6 == 0: - config_str += '\n' - - inner_config_str = "" - for cnt, item in enumerate(sorted(list(config.keys() - set(['systeminfo', 'configuri', 'targetip', 'configset', 'password']))), 1): - inner_config_str += "{}: {}, ".format(str(item), str(config[item] if config[item] != '' else 'None')) - if cnt % 6 == 0: - 
inner_config_str += '\n' - - sysDescription, ConfigURI = (rst.config['systeminfo'], rst.config['configuri']) - logpath = rst.config['logpath'] + commonInterop.config['WarnRecommended'] = config['WarnRecommended'] + config['profile'] = args.profile + config['schema'] = args.schema + + # Setup schema store + if config['schema_pack'] is not None and config['schema_pack'] != '': + httpprox = config['httpproxy'] + httpsprox = config['httpsproxy'] + proxies = {} + proxies['http'] = httpprox if httpprox != "" else None + proxies['https'] = httpsprox if httpsprox != "" else None + setup_schema_pack(config['schema_pack'], config['metadatafilepath'], proxies, config['timeout']) # Logging config + logpath = config['logpath'] startTick = datetime.now() if not os.path.isdir(logpath): os.makedirs(logpath) fmt = logging.Formatter('%(levelname)s - %(message)s') - fh = logging.FileHandler(datetime.strftime(startTick, os.path.join(logpath, "ConformanceLog_%m_%d_%Y_%H%M%S.txt"))) - fh.setLevel(logging.DEBUG) + fh = logging.FileHandler(datetime.strftime(startTick, os.path.join(logpath, "InteropLog_%m_%d_%Y_%H%M%S.txt"))) + fh.setLevel(min(args.debug_logging, args.verbose_checks)) fh.setFormatter(fmt) - rsvLogger.addHandler(fh) # Printout FORMAT + rsvLogger.addHandler(fh) + + # Then start service + try: + currentService = rst.startService() + except Exception as ex: + rsvLogger.error("Service could not be started: {}".format(ex)) + return 1, None, 'Service Exception' + + metadata = currentService.metadata + sysDescription, ConfigURI = (config['systeminfo'], config['targetip']) + + # start printing rsvLogger.info('ConfigURI: ' + ConfigURI) - rsvLogger.info(inner_config_str) - rsvLogger.info('System Info: ' + sysDescription) # Printout FORMAT - rsvLogger.info(config_str) - rsvLogger.info('Start time: ' + startTick.strftime('%x - %X')) # Printout FORMAT + rsvLogger.info('System Info: ' + sysDescription) + rsvLogger.info('Profile:' + config['profile']) + rsvLogger.info('\n'.join( + ['{}: 
{}'.format(x, config[x]) for x in sorted(list(config.keys() - set(['systeminfo', 'targetip', 'password', 'description', 'profile'])))])) + rsvLogger.info('Start time: ' + startTick.strftime('%x - %X')) # Interop Profile handling profile = schema = None @@ -857,7 +493,7 @@ def main(argv): rsvLogger.error('File not found {}'.format(rst.config.get('payloadfilepath'))) return 1 - results = None + results = None for profile in profiles: profileName = profile.get('ProfileName') if 'Single' in rst.config.get('payloadmode'): @@ -871,139 +507,71 @@ def main(argv): results = resultsNew else: for item in resultsNew: - print(item) - innerCounts = results[item][2] - innerCounts.update(resultsNew[item][2]) + innerCounts = results[item]['counts'] + innerCounts.update(resultsNew[item]['counts']) if item in results: - for x in resultsNew[item][3]: + for x in resultsNew[item]['messages']: x.name = profileName + ' -- ' + x.name - results[item][3].extend(resultsNew[item][3]) - else: - newKey = profileName + '...' 
+ key - input(newKey) - results[newKey] = resultsNew[key] + results[item]['messages'].extend(resultsNew[item]['messages']) #resultsNew = {profileName+key: resultsNew[key] for key in resultsNew if key in results} #results.update(resultsNew) + finalCounts = Counter() nowTick = datetime.now() - rsvLogger.info('Elapsed time: ' + str(nowTick-startTick).rsplit('.', 1)[0]) # Printout FORMAT - if rst.currentSession.started: - rst.currentSession.killSession() - - # Handle Schema Level validations - - # Render html - htmlStrTop = 'Conformance Test Summary\ - \ - ' - - htmlStrBodyHeader = '\ - \ - \ - \ - \ - \ - \ - \ - ' - - htmlStr = '' - - rsvLogger.info(len(results)) - for cnt, item in enumerate(results): - printPayload = False - innerCounts = results[item][2] - finalCounts.update(innerCounts) - if results[item][3] is not None and len(results[item][3]) == 0: - continue - htmlStr += '' - htmlStr += '".format(json.dumps(results[item][7], - indent=4, separators=(',', ': ')) if len(results[item]) >= 8 else "n/a") - - htmlStr += '
##### Redfish Conformance Test Report #####
System: ' + ConfigURI + '
' + str(inner_config_str.replace('\n', '
')) + '
Description: ' + sysDescription + '
' + str(config_str.replace('\n', '
')) + '
Start time: ' + (startTick).strftime('%x - %X') + '
Run time: ' + str(nowTick-startTick).rsplit('.', 1)[0] + '
' - htmlStr += ''.format(results[item][0], cnt, cnt) - htmlStr += ''.format(item, results[item][5], results[item][6]) - htmlStr += '' - htmlStr += '' - htmlStr += '
{}
\ -
Show results
\ -
URI: {}
XML: {}
type: {}
GET Success' if results[item] - [1] else 'class="fail"> GET Failure') + '' + rsvLogger.info('Elapsed time: {}'.format(str(nowTick-startTick).rsplit('.', 1)[0])) + + finalCounts.update(metadata.get_counter()) + for item in results: + innerCounts = results[item]['counts'] + # detect if there are error messages for this resource, but no failure counts; if so, add one to the innerCounts + counters_all_pass = True for countType in sorted(innerCounts.keys()): if innerCounts.get(countType) == 0: continue + if any(x in countType for x in ['problem', 'fail', 'bad', 'exception']): + counters_all_pass = False if 'fail' in countType or 'exception' in countType: - rsvLogger.error('{} {} errors in {}'.format(innerCounts[countType], countType, results[item][0].split(' ')[0])) + rsvLogger.error('{} {} errors in {}'.format(innerCounts[countType], countType, results[item]['uri'])) innerCounts[countType] += 0 - htmlStr += '
{p}: {q}
'.format( - p=countType, - q=innerCounts.get(countType, 0), - style='class="fail log"' if 'fail' in countType or 'exception' in countType else 'class="warn log"' if 'warn' in countType.lower() else 'class=log') - htmlStr += '
' - if results[item][4] is not None: - htmlStr += '' - htmlStr += "
'.format(cnt) - if results[item][3] is not None: - for i in results[item][3]: - htmlStr += '' - htmlStr += '' - htmlStr += '' - htmlStr += '' - htmlStr += '' - htmlStr += ''.format(str(i.success.value).lower(), str(i.success.value)) - htmlStr += '' - htmlStr += '
Name Entry Value must be Service Value Success
' + str(i.name) + '' + str(i.entry) + '' + str(i.expected) + '' + str(i.actual) + '{}
' + str(results[item][4].getvalue()).replace('\n', '
') + '
Payload\ -

{}

\ -
' - - htmlStrTotal = '
Final counts: ' - for countType in sorted(finalCounts.keys()): - if finalCounts.get(countType) == 0: - continue - htmlStrTotal += '{p}: {q}, '.format(p=countType, q=finalCounts.get(countType, 0)) - htmlStrTotal += '
Expand All
' - htmlStrTotal += '
Collapse All
' + error_messages_present = False + if results[item]['errors'] is not None and len(results[item]['errors']) > 0: + error_messages_present = True + if results[item]['warns'] is not None and len(results[item]['warns']) > 0: + innerCounts['warningPresent'] = 1 + if counters_all_pass and error_messages_present: + innerCounts['failErrorPresent'] = 1 - htmlPage = htmlStrTop + htmlStrBodyHeader + htmlStrTotal + htmlStr - - with open(datetime.strftime(startTick, os.path.join(logpath, "ConformanceHtmlLog_%m_%d_%Y_%H%M%S.html")), 'w') as f: - f.write(htmlPage) + finalCounts.update(results[item]['counts']) fails = 0 - for key in finalCounts: - if 'problem' in key or 'fail' in key or 'exception' in key: + for key in [key for key in finalCounts.keys()]: + if finalCounts[key] == 0: + del finalCounts[key] + continue + if any(x in key for x in ['problem', 'fail', 'bad', 'exception']): fails += finalCounts[key] + tool_version = '0.0' + + html_str = renderHtml(results, finalCounts, tool_version, startTick, nowTick) + + lastResultsPage = datetime.strftime(startTick, os.path.join(logpath, "InteropHtmlLog%m_%d_%Y_%H%M%S.html")) + + writeHtml(html_str, lastResultsPage) + success = success and not (fails > 0) rsvLogger.info(finalCounts) if not success: - rsvLogger.info("Validation has failed: %d problems found", fails) + rsvLogger.info("Validation has failed: {} problems found".format(fails)) else: rsvLogger.info("Validation has succeeded.") status_code = 0 - return status_code + return status_code, lastResultsPage, 'Validation done' if __name__ == '__main__': - sys.exit(main(sys.argv)) + status_code, lastResultsPage, exit_string = main() + sys.exit(status_code) diff --git a/RedfishLogo.py b/RedfishLogo.py new file mode 100644 index 0000000..5f52892 --- /dev/null +++ b/RedfishLogo.py @@ -0,0 +1,13 @@ +# Copyright Notice: +# Copyright 2018 Distributed Management Task Force, Inc. All rights reserved. +# License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md + +""" +Redfish Logo + +File : RedfishLogo.py + +Brief : This file contains the Base64 encoded image data for the Redfish Logo +""" + +logo = "R0lGODlhLAHTAHAAACH5BAEAAPwALAAAAAAsAdMAhwAAAAAAMwAAZgAAmQAAzAAA/wArAAArMwArZgArmQArzAAr/wBVAABVMwBVZgBVmQBVzABV/wCAAACAMwCAZgCAmQCAzACA/wCqAACqMwCqZgCqmQCqzACq/wDVAADVMwDVZgDVmQDVzADV/wD/AAD/MwD/ZgD/mQD/zAD//zMAADMAMzMAZjMAmTMAzDMA/zMrADMrMzMrZjMrmTMrzDMr/zNVADNVMzNVZjNVmTNVzDNV/zOAADOAMzOAZjOAmTOAzDOA/zOqADOqMzOqZjOqmTOqzDOq/zPVADPVMzPVZjPVmTPVzDPV/zP/ADP/MzP/ZjP/mTP/zDP//2YAAGYAM2YAZmYAmWYAzGYA/2YrAGYrM2YrZmYrmWYrzGYr/2ZVAGZVM2ZVZmZVmWZVzGZV/2aAAGaAM2aAZmaAmWaAzGaA/2aqAGaqM2aqZmaqmWaqzGaq/2bVAGbVM2bVZmbVmWbVzGbV/2b/AGb/M2b/Zmb/mWb/zGb//5kAAJkAM5kAZpkAmZkAzJkA/5krAJkrM5krZpkrmZkrzJkr/5lVAJlVM5lVZplVmZlVzJlV/5mAAJmAM5mAZpmAmZmAzJmA/5mqAJmqM5mqZpmqmZmqzJmq/5nVAJnVM5nVZpnVmZnVzJnV/5n/AJn/M5n/Zpn/mZn/zJn//8wAAMwAM8wAZswAmcwAzMwA/8wrAMwrM8wrZswrmcwrzMwr/8xVAMxVM8xVZsxVmcxVzMxV/8yAAMyAM8yAZsyAmcyAzMyA/8yqAMyqM8yqZsyqmcyqzMyq/8zVAMzVM8zVZszVmczVzMzV/8z/AMz/M8z/Zsz/mcz/zMz///8AAP8AM/8AZv8Amf8AzP8A//8rAP8rM/8rZv8rmf8rzP8r//9VAP9VM/9VZv9Vmf9VzP9V//+AAP+AM/+AZv+Amf+AzP+A//+qAP+qM/+qZv+qmf+qzP+q///VAP/VM//VZv/Vmf/VzP/V////AP//M///Zv//mf//zP///wAAAAAAAAAAAAAAAAj/APcJHEiwoMGDCBMqXMiwocOF52C9gnXuocWLGDNq3Mixo8ePIENqbJeqZCpwtl65E8mypcuXMGPKnLmvHiyTqWDZ2vkqFc2fQIMKHUr0YK9UPWHdzMYLpa2SFYtKnUq1qtWB7lJ9e6rK1jdVr2yhzAbWFqyVV/fRc4c2rdu3cBty3alN7M2wtsy9Aovy1S2r9MyWpBi3sOG3JO+CqyWWcU6xgnVCTtVu6q3JqtiZpXe4s+eg9JTaqlUWsi1tSJ2+olYLZdPB9Ya622mu19NX53hFtPW5t2+WR3eeRmp6Z05ek2Eh75sq5V+gT83pfo2bFy92t7IR+829u8WszReD/+tZl/G38SVd3wyPnNrJ221hEuvZPLe5m7CG1a4Nrrn3/wAWlNJeOKnS3E5OPaUTSv29Ms16NzHn30v1NHdfKgZiWFJeuuV1WypRBShib+eEl1Iq2hj4ijatOQXOXeaYVlKGYfESI2olbQechbZEhJSK2vRizi21tYPfOTvFN+KSboUWVlgJxigcglQmCE5TMTbVVHFNTcTZR1nl1YuN19lnjn5j1kbmOcLkdk4tzzEpZ1VPCedUa4wx6NVoVDbF2JQu7rSllFeq0otHsNSyHzi9MHplL0LaIqR959zipo1CvjjMnJzKdqBXKNVSF4J/WoncVlUi56GHV4qlJYOp6P+I0VG8hGPj
LZhKF+ml0wmZq5CQbhlbp8TGJFFxCxbXGJV7mhZojE5B66qHMeoFTjQWhXalmtIN2RSa0tmHq5vA6mrOfue8smmx7IZ0VGsnsibeifThNa8t2SQ6rS1N3cTsqqJ5yMtNhzZ0lJQSqaIUkTYmPFFY9tmYkk7TnQNsLf4q2e7GD7kjEUrr9WSWcapMY2DJn46WoSouMtdTq96u1xxy901TkkoKgTcMOBnilEptMpfknqS53WISRRXzYqB7jfLG8dNy4eUYgaqYx1Wegr0Ca05IncoThns5u5SBKMWIX2NxElTPiT/zsuGVL1qYnq/+Soceaq8kLXJJvLT/s1PBUAdOEDHp6YRjXalMI9w36e201WO1YPgNLA+6ZovNOpWUZdzNieyazRLzgpqO2KYraUm39AfLfqLXV1K5XGHXOIqXlrw6VLqVaMuwgj+9tkmOm6U1V1uBbKJx7ymOPKEYiyUylWGZUzJtKXVF/YZqmdWtgTEi1WjDM3PfI3W96O53Tm1KDEs7SoOo385gAd47u8HhPd5pf96mE8YbwrsaXgjaC5TstBOTVYtK5tiQrYymuKI1qD85kZL0DkSNV/CnF0lJYCr00yEMhStSESzXdEoCrvfx7Evz4xQ9JoInAxVHPG8TC9kCSLkqnUhV0bpNqwQmGH7ZZWbg6J4q/wY2s/HxTXW40s1erpS48vGLZ6lIn90MVK7y6YaE+wFWO0SWwjkZzSs6WQ9PXiSqp7Bmf8Rxyl2QlRM9JQhDCPRQKrIxrZepqnoXEsv3XNigc0kqgxjK1W0sVhskIYV99gEW7gipyOao4i30IMa6uugQwp1kIou5SVcmNxqnuBBeQ0NVNtpIqgfaYhfUQw9klrOTuv2QQzECy4W0kSuoPCUVYzrHhcAxDBcC6ym0jBEIVXGpcgXSTW3qBTtukhunVcVJidsJRzIBADEYQAydqpdwwkKWV0zuRVB6xcs6eTRx5kRrWNOknmQEi7JNS4O0aZ/CBPOYy/1BJxPRXPskk/8hJq5mNGK8VPde4bcsDqkkiARWbo7pTKlEIxy2GEaJutIfFFpEDDfYx3aIAQA5HcUkdVlRSi53oFowZlQbOo3KbuIeF5qGMdPIW5/AAUEfQouJXSEUfWyWHrMlDico6cUoeToYEJrEZjmt2OWGyMj9YFFI6SPJSYxUFGxpdBrDaEc75mESA/nhkRZRRkYBkAkDZGISmRiRMoijDbzghY7HGlV5wGiWxQQQUM5qpYtYaUNBQYZBr3IWzVblokhFCjLnUipik3ZYXprrl1ciVyH1KAxYTAVfvWjHMLJKjIL2IhzD8MtD0LqPFShDDGnFpnf0sY+HpkJUzQsLLCaXJ33/6Us8CyoVAbcCLz7dSU+MkZblgiioHYIDVwJz1K2EySjpcIhIvkJSU3yVxehqqR25nE5t2CGkhJ6pkG3aWYiEItSsahYdmtWsO9Kr1YgooyFn3YcB0GDafWT0PySRrdZmy1988cR5L7qfTjhJxt9OqUX/wqtwWLmcLbnmgMiRrg9plqUeIcluEbYFdnTFqA5dsJA2upTFapU0yYY3G2kDymxuwd6smlernGWvLVJ8kHrEoLT7QMNZ09obq9bEXxPxr2z/m5IX/RdrdOzkn+A14OLodjHmcOcTXQXYaUmYSO/sUV6uhKss0Wy5vPjehce0rV5JJ2no6luv2FTd+8gq/yiX0ap6XdwOdNDZxe4wb0Qm2VqDoOG++5jEjbtjtObRcciHNjIdMbaVusxVjXpdlpX+KhwpVXqVOcyyO6fTqgtLKi8x0qWWIxZZ61xYN7kUE4jJ9avtAkuZypyxUPQRDY+x487naEeus3qOYawXxlqVZHt3p5BMxOAGMVDtb9whTtkeWiL5SlSi7neaII90cvlkEUpQFVvI9NY8WGuMvlDVqnIXl5VGpjJyJLbgbU3Jh6c+rK20m6oSbwmR/DkLUSCaXidCtHx3xi6k2pFngkuUF0aTn4hYKxCuoOgVGKNaV/cr
qp4YqHgyk1nVqMTT/aGkQDGM3IxSusMZUe+WPv/DFdzWowq/5aZnGVpdhjVUC0K2r0DNHZPDgYUchb+EtcsYyDCaw+uP+iyz6kU5sFGOE4uKyJLarMsoYZGNfDqolUnZkL5yUsO9PGgrEIopZG7yoESpyCsGgkVb11NuFDWuR/lsp0Qi2KO7lMVug5G7zaDboPRgCkI8MZBuhIE3zfkNHLyDiY8FEpoNwRiKkAELNXx9vgy1o7PgeQVEIaiTEf3OZHF/mIFY5DywmH1DbyfLgZhjONRT9GtVSind8wJH5AnGnRvKUji6doukQLCZRsONrl5mm5KEw1++Qo2Gz3GO8WSmF5rkBdNwNpQv+iu9zNTsPEpEmWGQfegtT2//TgjeDmX0h8WxAtBRzBmWwSymrZ1rvUjnOBzWgDE1fzrnifAp9tEMjZ1xM0Bu8x7Khy8H0i8sYx0oYTKv8R5tZCOqkyV59Bp5gR/hEja9wjfcZw7cR2M04THNMQ8Rkl7cQ3lDRxntYCBZZTOdJVE5QWfBRw8b4nSeMXSoUBZKwTiqEG2k0RwhFTmbJBniVDX4VEF/FU1udDbGgyrO0xy1QA3ttBz+oicTtGU6hGW34SG5AUF+gxzesx8WaBu5ITIXRjaXYkDAohk+MROJ13C2wD5adRMsllU9kVkuhiEvplV/gILEoEHrRQzz0B/gAB5G4nOHsRbugCQxJRF4kyg43lJ1I3MzRVY9JkJ2O8EYAnSJH9ccxRNDYlE4qRFAqvAN68YcfiUyoaYgHdYqgYRlOdFM6XIcHAg+sMBdRNRhQ1KHYGiIiicQQ7c+wCYycmYg4YBdWoVQd1YSBXcU65NVs5ETa5UK7rB9sECDvSFUYoQxPbgTh6YK5eGIT6E4mJgTZXQ5HzMle5Esz+MUjyFAg+Uvf9U+rgFFrYIcoPMon/gzQ9IfqnAuQhIh1mEr6ZJUsfiGmKKBZjFraiFGEpVZxIAaACdVvLBeWYVQndVZUkV5+UWR7fBFMrhB62V+Df8VF4tXEFKVL1oDFizUTjdjZPxzIHQ0Q4txdnsyHtNQKum4FbzwFQfyIEf4HleCHKMEPV3xKkghXa1Sh3JEMYViIdTlL0hSkJeCIimhMTRxC+szgnQmjJrFTJuFXXgIY712SFnFjJRnfifBVSAyD531UXz2G9FAjokyGPkChHjBSfAHMrc0UnMXKLfUTl8DQD2RZBgSXHu5JXnhQvthO1yWE/3IL7bCjxt2Sx2GLzkFXfIYZvdRFrnhHrhEE9HAcAIxH+AAlriEXbumfOnVILnWKMgIbAZicFLVa6Z5C2vVFblmJK84kr8RDvtFOWChITkxOdK2jeOBHjB3EvryQ3b/JUM8ZTKkhBTCWRbzeHuI6TMb8iqumQrWUYHYyZ3VEmZDaR8OdzQ0IZoDAQs36DPGiHRymF620DMa8obYRZGvSQyjhJ0Ep0E+Aw70UCIAMjHYZnr5YhYY1xPo1EpVR3VUZyf6EjakMjESMW1jh5LnOCjVU1ytxIgLUorIARa69D3awKHtZEW6cR9aI0gDWhlDgUHDcBoTCg69Rpub1VbFaF4Do3aYdHnsFRHrU3B+oxR1wSj9NjI6cQ6S5A66EaDiZKD+lSjNM1umIXXFQW5WoyfFwyx6ckd7RWV2AmGgllwkxmXdIqbS5Y8R5i25kV2MhBLWGBPEsDq5RpvAhqO7/6ZZM/piwzAPLZZe5oVnlDcPvuaMlPdr60UPBDcPswEgoYEv0mag7QRx+OSk+1NXbsUs+dNJBERA1Rkog6UlVCYloLo5g9UoYmgd5iIpidVcNDNmBoVBt4CeIFGSNZE6w9ALt8pd2NWQeZpedFqWBWeCxmhnPCpncias7pAM6ZUMf6hZgDgMgdGLAlEP0JAQ1ToQxIAktwCtEOFW/RUWdUFgDxpgTzZ2f9Vby6KlzCItrAQtrGQbWvhE3zOmEmM3x1Ut3zM+aWKq2mVdukQYs0oQQbcPXShRutae7ECjuzaj
eGqwLtaaxfqnwfaVF6lewMYWl8enyqBV67WoMCGrVv8pdN8JC7RKEBOxaOA4ZOgURnaFMffzJ1CiW6UkFtHCGOVwcu1qU0+EofVoK1OmK+PTYZgSlG5CLdSFcKxTRaoSsh0RGJaia8PQNxI1tb1mDuzjsMbYb8DGXnYYsXKGkX7Ko3nGpzGWZ/TAa0CRHwnxl2DTMxVRsqaDSbLFaDyRWyybTgjmOICJrpqorjM1ZXvFKB5iG1FGM82lctOlJZGZakl7QbqxYfv6WFLCEkbCJrlhjLrRDmeysA5rMVCLp2J5q+zVghYrrDzaWYi6pxSJseW3Vb3ggS5xDn8AVgfBcjE1I/lJGQhRIcKZGqeXOOtBeqPUMz45QMFpL/snnXj/oVOh6CECdDLUg4Ds4Z3CqXmmxi8mITGNQiAz8gq+Yil/mUW39GYbQQwKMnGXK7omkacX0lU94muapCGwkFm5yX0t54zQhxN+QZHxKZ06YSRMyxEDWxP7oAxLwZux4Q6oUEFcgx+DkQp/kBCU87zEQXUq8sAfF78Esiz0QUoGqCIG4pN21bZP1HsrQx/UchfH4bMNzDU/w4oVhJvg88DR5CZA+IrTgVQ3yBnXWsAKsQz6wDsIcjQ40Z43c5oSZXcd3Dco1xM2ow0uZjSWN7Y3AxYZ4mu3xL0TMhO3QFQhO7tcY3FTVxImw5sNFxbn8T/wNzz5SE96UhInNRhhgyo3/0FutyQlNgNBlpPHmlGFTZkvc0S4NxFMQvSiqjMRDcQvgqe5uqkcb2IgT/gK3NU9tahZooURwzANXYkizsh9mgVBu0cZUgsVW9WROSGCJ4Go9OA260MMNaWC4kcZazEPqAEOycA97qAPRvKmLAEeVowKbSgQWWFOKLMeU9cTqIAQ6XIsYjTIjMBJG9JNfCIYdUE5OjFKo8J5q1Iasgclm7hunygZgYQfWnITijIu6cF9uJEapqY5lUl3UKE6qYYiYVaaqYAKAWwQanerhvSGyhRap+l4r2OMCIV05pgV/fiVhChVmWWRYNmMWvUUt0APN9iVvMgS+pDFArSGBmFGP/9icQQCIanwXgZBOCCzjYPRPIzTHIqxbZl4P1BkL2GTIDbzQhhSco9pPBYULTXUKsr3YMTRnUhyEySWQKogDJp5S9hVHYMENPVxJvsEC7JqFIMhftPAYuVzFAltJJoFFRLFfS42DLtHUASjVQ29QdhltScolhgysQ3ykbYQDi4RmtGAQh+FH3PH0QURfCWTFDYD0tA5XlgBrhF3SROKKnC0Ip3TSsMJPcfzMlgDoTRlHADEjwekOgqYF0lRzrJ4OgnYfDaSnQsFIuYQmbJISObIaccxKTfRPQ2lD0HHWub7cePTz1arTO/ZtV7d1fdbtS8YIX+qjHgmVZhXIvmxVe7/YDTVWBK8LBKNpyIIylMIwbZt68IdbJXMpl+FCTYBFo4n8norvTiBx8Y17RRfMTw4RDZb0jXqQUrTZYFhVtO1QXvcKSaKnAq2IkSoahsxlRuYMki6ESGWe4I9Mhjkuw+uopvvK1X+bJoJ+9W9RBkzWtBapdwMDboktLVtXZbjl3QWArstYTQ3gzlQyDXKPHHBCSEVdBMHrlFaA66+K3bNsyF4M83WnD/8mE7Hs5eUFoof95iNEjmrIzDWTDPknN8uJF26dHdXhEv7wRUg5prz61zHYR8AeRwv8iUeU0jDgDdwCNy3PUqeq0yz+T6vuTNIgcq/GpvCilCXxw7jR3kk/xXMLgEeLXwXHWyNH0l10tnXJQPGek0QuvMkpmdxjZHjlrht7sfBPukY9TSTAvjjPlRTNDNDgzKF0gHfXggi7MYLaEjfOYeBZqIgk3WUmTWAY2JAEjVjKcHPfQOV2KUgq4mHuoqrWOSC/VhwEcJszeHhKIh0Jzionny2RvKWIREN9UBrNfGXDuLAmkQgm0KrGkIgD/LRt2vG+8BAwil3P0LEKBGcMeeEDurjPVh1ThyFvTVOV7IX3ZQhO3Gm
d4FcEJR2qeAeAXmQJ4EpBLLR9VHb8Zk39gE2fQ4i9ougr5NF54MbmcUz9EEjeZprpEyHNKIhLHZL9I4iFetrr7nnG/9C5y3BfQIfnGSswsSEEJZE7xvNcgmRdXO0cZaZdSzkfN0uUs2ZhQnSVjHn8vP4GNf50fLCYIMcOhJR7eKkXFoWNkVbdSdDz0mrOuTCjw7CF1HbvgZCRxYzyd53EmiS9XanHNj14NyXWWJPwStiXnPnwGGL61yLH/kMEo1XEjhid/lixSgZ6IKeOCwfwwi1u5LaLBizaAHWZHnFICdFs/9S833ll1Tol1ZGLVSWrwyyKwoYMQzDHzYSLrlS+dqlXWvKam4iWWJvtRZz9bmJalbbnlBLmwzrq8fKXgfbYr86uu2AEj8h4tYNFkSvknEvjQlh+ypsEkw7MOCUg/sjGtr/SEbTpo2tJBx44m3r9Pw7cbPTUo+WI4FTBqrSxTAcAjsS49+saqqPpZldplAV82qZWzHcxSanPvp9k0vCgJpiT5uhj7kNm7ViL7oGa4ddu7Wlm2cA4Y6XrXr7DB5EmFDhQob73KWCqOpVKlgSU1mElerVRImvMtZCqC8hPVjTIkIM19BgNlsbbdWCZYulNlvgXn2LaSsmOJg1Y36zhbOmLaLgiNYaCvSlUV5GiZorGnWgUXBNbZm7BRVq04HmBoZrmrVm015XeYHlJdZcL15Qz6U15/Vtr7Vyeb292xbv23a8er0FPMxrO7q9+p6jK1ixuXOCGxvmRbjx5GG92A07/9eOsOZhm9t17tXZnWF3nTtrhnVO5WrWIWGhUpUqW8RX00q+UjVbG25YuCdmdLeanrtz54Kv1ik0GzhYSF+1JPrzlc2ksJQSDRoT6VKiSqlij0q0a9SqRQeOtXXOFtirY7dypduV6zmv4OjaKuw2rzmwgPP6R8wczP6LLL+/CiNMwHOEQUwywx7EzLMHNTMswso+o4xCzSg0LbNe3OkrltZGXK0X2mKraDaKNJJIlYxq6y03j1IhkbV6NorpuZaYs0UbpFgCaqfmckpqOvKGckqqocoxD5yqqOLKrLGebK+t9Gzxq7y3wKoKrF7sGwivrPICR7C78GMnLwIBY/C/xP/eWquvwwZs8C/HIsNMGMLYgWyzyxozZ7PTOsts0HZKK02zRE2DpZ0aH0UIGog0gmii32LjyFJKM6LIt1RsgZSh55rThtMha3nOoo52lGlSioZ6qdVXYTVqo1eggrUll3BtyihYchqoqd4qmm68Hj2K6SxemhJmt5joO3Mgj/Dz6ktbXNztKrYAgwmWuhDU6RWwDouMF49qQQyzsj51sczJwv21FgyHYW5YbQit7NdhzSHmsw1hASfUUG+5SDYXU9GmYBZTocY3ai6KDWGIYMkmI1AFPqgliptb8bmZZFNYlWlewrThiF8COCOJfIS1llRMUgW78rKJTRWjoHJq0iv/jYrY1fZchajhtuhSJeIny2LHlmliq2otcICO7Sq+XBUMQY5SKZMxywiG6F4+zZnUpEo1i0js2AyzBWiIbhkmHIhgntjfW2ApCGMSH6q0YNxYrG1TjaYxZKJsPE1471Rusdul5yZa+hVqdGopap8m6hEimS2vdeJUkDIKqItwy8aqzH0DzxyVazNv4vIS9jZtmwdyOZWtMM3IL6/YiUVzbU2nKC5eVM4LRYgWBGzviQ7UM23bPm0w7VcC5NoyiAoEO5XGXralneuxnicjWC5jh3XUHLWbRFtQuYhmgxf+bcVpfDOJ2BY9mqjjZSAlRqPeZEJx71qC1IhNkLK05kSN/2WOWxzTLkKkWkgugFhKG0WehrX2cMocTMPSBZ+DFajgRifMO8sFD3eOStniYVfBigUzoo34WCw+JEwFXSZWvb9EhlPnsNha1BO1pzXqTFjDTGcmgpiL+KsdFmtHbBqDIYocMRXhME07LHex8rXGRHr73MJotrwVvQhTsnnR53LTm0mRbzXRMAgvJiaTD2Znc/FaIFJiAhHm
PBApEtkJBSfinbQZZTaq4AVQbAKwyE1lIFhT46eoIpGn1GR4lKtSbOqjCq9oBCxjAqLl1sILpvWFLrhZS0sQY5LDfOkVkRle1cBmmBx2BomWMZEqkqYR7bEyk6nwTNoalREBHWoYFP85ThVZE0FMFbNjX8wNi2jmGzyCTiMesc0rEgaI56GxIfq4hTQ1Qg2YDI4nxMIawGBkHUd+inQ6sQlFrlPHn4TnWjqCVVM0WROm1PEpiaxSBHnXuk9+allO82Bb8kIXF65lNlfJyhXrQp8rVu1rf7EYoAbUDl1uhnKTkWIM3QaLKCIRIh0yUaMgYpjPpEaYI+JU4Yz5x/apjFIWMdn75tewTmGqYpViGz2U4Q5llOVFz5kYPM+1TOhYDidIuUgtEkZBo0BkO7HCnlO8MxGaJClnRoLgkx6YloO254O/GxlXwME09QyNjm9Jj1sm0rReUI5cFxnoXSDyrcRUT4d3OVD/A7/xoIxkDTPDS1s21OXEMkEkl03kJTF0UreTIkdvsVnmilrUNc11yrLeqw3thrW33sxoRa4aXEtogiKleoQlv6JjTWYEubFi7TdAQao2VCFI1TKSVkTRiHmmJLmbiYdpXbmIliZ4jrWahZOf8ko4rvKq/djOYsrqa34kSSDD2s5qMSNeudrBOuIlzBaYaczxwMbRJSLxbJwh4SvmkdpgNlYl9MibRYJmqfhx8TdiW6r+9Oub2dwGYTPqSHMm+CtpbmSZzHFOUPDYrZcBDCmxIwpvXSdHp1DOKZyL3K26Ax4PbmVKkPQV8yBIkd+ROCs2AWTT0jLXsqrHK9FlTESt/6QRNr3lYaqkUwCrNickNmaFe4olOwimig39EmsZ5YwTYUGSw7l3RJCFMjPrZ5GrLfVg3sNI2eg3rE7tr8HGW1m3wPEH26BqWCzJlFEa+EyXNvV9nf2Imh2ZG5nF6mnPSRLOHvidqjCtLLz71LEQVk6ncMp2Y8JNVcSCGHZEDS8TxA4sGrat9CTyW3UZRmzwqkPBOC+IkCbKUjszRZ5NIzTeG0rUoNJeJzcEvqaCiPpatJuCkdGLs6mf+gQXI9BdbWOWitiLhvUNoOKROgiGrE5gwqKlWe45XBRZcMPzm6CQR2W4ckpT/VlPLGHuZz2b52cnpQ0r0YVr+ymLXP35lv+nRVtnDK3LRf7CoPwEt2qBWSEsMxoycGivfq56hWYi6KpSqabVrYHhZE31PtpILL7A3k2LaHZr7/VmN6YlVsXeXFoCWxwmpRoVThxME9UOO1fSjBdPsFMLo7AsV0TB6jeC5auhWGUq5rk5T6p6pWUxZ+dfgkt7ygMVutRnSn6xinkAdKZK+ydBu4OMYPiTPX1/qbdBxEy2bzGv4tREPUs+OEopxXAqP/ZlDtsm3475G9zAxLQ13TKyRlWxxfUIJjAh9o7qvrhBJmnu3MEwOrezTu4Qnlaiww7Oco6foecTKmDxCrXaYxfA1AUra6ph5TPflmH8Rxh12fSDJuMXxDz/hh2Amfe85FQoQ1WoMYwyeNhbwzAxgqwkHTGZ4aScPshi9r8SYbjF6eeswW32184qcI6gQyrVNj87naMnkgjJZyRVnypPoorin6SVJ7XlFm3pkla+FJcxzYet9DlxCCu90LvAaaE6XOhfGKND+nSGMY4hnvZOv3rVeygz/8cQ7dGMQsmMfwkY2bubgZOslzqmhYEytYOfzuoIF6GfTmmYYdmYiukWt2MjUpmOUumJaWGJDqyFPyiEP9iCQqiCQmDBQkjBFoTBFyyERWDBRdgCGgyDGbxBG9RBGhSDGTSEReACRqBBRpjBH1yEHzRCRgiDIGQEJywEIpTCKAyDRWAEOjFYhCRcBEOQQiusQkZgQikMgy5kQi8kQzC0wiyUQiLEwjXUQjBEQziEQzFYwzFkBEkgjCT8i1dgLAT/rBF6gCG+EZ4syoibgsDc8yLQ6Y2bkgnjewW66zi6c5ZG5ECYsIlaQZmX0IZFAIBO9MRPBMVQFMVR
JMVSNMVTRMVUVMVVZMUwaIcV2IIwgAGO8kO7qYfj0ojZsI34ob2JkSnC0Z9oEjY80p+NQD44w4m425/e+Kb/uruY8JEt68QDOIAAoEZrrMZr1EZqzEZrxEZvzMZwBMdx7MZyNEdyLEdyVMduXMd2TMdyFIBOFIN5pMd6tEd7RIN7nEc0iAEA2IJeCIMYoMNDqsXy0YdwyJTK8o1EXJ5EXJHi+5uFpLuaoazNehmFScYv0hueCJk/6MQH0IEcUIAcEEmShIAZ/yjJHDhJHUiAHNABkCxJGoBJCMgBmaSBkgxJmnzJkHyAm9TJB5gBlnRJoHzJoQzKlixKmRxKHZiBnxzKpyzKlwzKB1hKlazKn5zKHAgAAGg1TQAAKuCFLSDCGygYVECcgrRFhJwU75mvYqqNGaEYYryNiJmGBvKs4uMInFigt3ORCLIN1HqVpdoNbSiETpyBBKBKxJyBB0iAxXTMloTMHIjMlgTKBGjMB0hMyZTMyzzMw8wBxjxMoGRMyRTNxpzMzVxMyfRMxTTNx3TNyTxM05TN0FzNz1zMrYQGJ8sEAKiCcxCDMNiCKpCgDyIGtCyfeuiF/JKvMGLLhFGFBhochf9byBnxiMGknf5Csxf5hgVbCuw5p5boRNtsScfUTMp0TNE8z/MkycrUTNEsz8+0zdQkT6B8T/TMTPvUzNQ0z8ykzPdsSfgEUKr0T83cSsZShn3IzWXQBzQ60Nw8UAQtiNysh9w8iN1UAQ2JjdOgqE+hB+MsH0Asic9pSL4BNvrxDW2iCI3DLAmES2SphYcBzHrRSzIaNKXiRACIzNFEzAGFTfuMTf98zPLsTMykTdDUUR5VTR89Us5c0sV0T8uMzcWUzQF9ANw0CGVgxVB80En4SgKEDQ65M/w4CJEwzofwiF74kA5djXpwooJZpoZ804ybHzjrsokcnKSyDuqMOKX/ujsDoh+jKEwAEFL0PE3FXNL3RFIlpUr8NE8kLVTSTM/SdNT6XM8ZSE0e1YFOrBsszdJPfNDdpALtGTVcIqmQcgejKE4PNQgFdJWY2Dp3IFOFSM619BQ6hURazYg3q0CKeJFJ/I01iwnreJEIyonm6MT9PE/GtEwdVdLJVNLNhNTKrE3OPE1JfVRkVVL9TEwpVU3LhMzKtM/I3MoH5VQACABrlAEEmAF1RQAESNd2ddd27cQtBQAVmAwYwpce8hdbOMuQqEWVqqxJ+YO15NdhgNWDaIcJUsTPqcDeaBiM65TdqKNseNHc2piJwbVTYZpkO61jHdAxIAMyAAKQJYMc/yAD/bxW/xTNNFAD9dzP+oTMBCiGbGVMZY1NxMTZZbXMASVJlBRQzSTJ8fRRCNDUKx1Fa1zUpKRKmpxXgwBVUTUy1oMhIzqiYTAIaypITuGshWmcnhEb/bmFv7iFN6OIX3xYmuKyLqNO1EKWjGQJzBFWili2IQFPQZ3MBNAE1lAGSlhW/bRN95TSvDVUIOVPoN2H8/xWYliGZaCHZVAGx62Hx23cx43cZUiDy0wANWgNODBU9bTSfeDUCSWGSbiBTwwAdSXUxWzafdjN3oyQjyKpivoMw+ghVd0HEgK+ibhASmlAimxVzalOtcUjYdSfmyqz9JG73JKYIamfdJIm5//4SJfN2xGJBsJ9zMoszhwI0hydVsnM28v1z1QlETUYTZRsjctNTfssWtDtRIXQBBXwxABoT81cXQtdkM+4iOzBjL7yjGEgmDNVVXrIL05hH5ixlTFau5uyNbiMH74EGouTLF9riYKJtphZtohRBY8EgEhNADh4FGLY2ZfF3Csd3G/12281CE0I4c28nxpRYSlNg9ZoWWl1z8/lVIbwSk9kT8RcXS4N1eIYhvxylQoR1R6aB4Rs4autxfwRHu+xDeArmGkoBLNNyNsoRF5zRNYxvu3cCIZ5xEY0UY0YTJ/omOgl1OndB5JU4xxgWTRGCEiFTL89COvd4c2cXmJ40s//1ARNUIY93uMk9mNi0ARQIAbtTUwd
OIiQdEk1VuT6PM8CNVqubAgY8MRGrd8uvRB6wWKSiiIT6bfPoAk0IlOslb0JWh8q8z2J66ITPRj6qUuHVUZjlLuNycTn4FOS4w5bVjlYCM8cxduD2FGclVIEyAHxNYj2TF/PPIhNIGYR/tvFRICDiIYVbkwEUFacjeFj9lZv1dEZ+IE5LtxIFedxjeTVEANPLM9OVFMuFYMN4eQNQa8AjCINbZQx7cOwUwb18ZuVckguo7tHrDgEiruKqZhhO5UMPBZlgwVnGRKhAJgbhU0EoARgzs9wbUmEiIYfZVI3JtQkjdSJpuhClc/M/53j/rTN+S1p9SzfbIUAAwCATe3EGxADNMgEhihdABAA+gWAT/XHzZiQJQoiQUmyfvmMzpibe65FrtHah0EmYFORZZrTjMW45HM7H7k4kGPe/TmtB7O7mBjB3ujl9vxlg6Dmit5MhFBhZGZMY1ZhZ/7WBEiIRD3PSSjpbjXkzhyDkB5Ubs3jBLBhUYwBq02IFehEB1BdAGjh3XRFICtqCdmQJSJADe2M9UiIWA27elCeL8qU3KspLF6m6ozlMzM+DswWqx4SNANBQmoJnIBo/wTpfUBdDi5PuK7rEI7mhIjjOq5MzUUIOJjWwiXpY3bZJwWCgzhhR47ZxczUl47kUf80ZmjoxAA4yUvmggyJEMOA7MoYlERJMn/phVoIDmioB1KWvYw6mKV6kdxbnkc80cu6KeQ7xoJOvr3Tux1JMLyDxkBV6w4GZpoF2uDeBzWwZpSkTIUgA8Ne1O11gHtO68j87dcmTR59gLw2CCMVZ/wUzb8uRTFAiHPe4AfoYQBY7Al556DWDF8gqaIGke2mqH41TqUxu73pr4UZRiujVc96LDMzHEyhiVKJHMvp8VpNBY/lTARAYya9cMgk03r4T8gkg4UIYXHOAYWghyUtXATg7TQGykg1XOEuXEZ9z/UtV1K8AYSI7hy4ZCqICQ1xopgxomtpVcoYjYIxWHaABVb/q0V6qB8EKiaH/K+X2giJgNFMWZmuiSAYsb1XQKpfhI4pqw0NZtQjp9n0HOt9UNbPrPQ37msWhnIf9WU0Ls9wlUwK34dtztnuzVFyZt9TLHOD8HCiBQA1BdXYEBRXKZPPOHTZ+g30mpRXSJT1IghVPQcM5ghcCzZFXFG6A6MVMtHh5VWkkohBEiOKIYoD2w6PJdQs516axfKDGIM4Hut68GCDoAfWjNYpPwhQv/DyzHKPVlJENgiWZdkxUAN6TwMyyNaWhORVP0W6XnUcuGQxMIlb/42M2LrM6KvOmIchM+q5ytAVn5heEGWDoFDZ04dbowiRecRjv0htIiNipLuK/2GaQP84uKSYpFqq5ECRlNHvu0VjBKjWdT8I8k3N2wZd2tbmZwbKB90Hlj2IH8hRIbVMf9dybHXMb2YN0VRSDUfFB+1EBAhxMcgIw9Dd7gGiz1CiKHqZZMio7dEMYoAhejCd9kLqVts35gQ+iuGNLjLERBQttouNb8iW+umWxXkV55CI7agFIi/PcS914D7uizYIvk1MnFeGSj/8zmVMhBD8fWjrbnVZv5/UxleJjhbN5YbpVGz1fjzXnXZaEX+ae6nadpiIrXOlGGoHdBjVQzmLzqB1QtEIg21x2z30+ekiSmE7MtKmlWlYaOoUnXjib8rILw5RaKwFQwhPHz3y7v+t4/88CDhQ15bMcs5Fd7L2TzTm2+pfhmaGz3CFcCS3T1JXicYUT3EFAAod81MsiHM+gBAPA+8yoiC++owSkNOAiH7RUIhAFE4KOIMFiHm2UrnbZ/AgwoQKFzJMSA8WtVSvqL1S9SpVNliwVKWCle0VrIodX10E+WrgtJAVbXmsBStVKlsmbWmDSdMlyFosXwEAkCPBjAdBEVA6mCDBzxxClyZFmuDBQUpKg6o5OOaosoOajk6FevDB0YPRZjht2jTNQbNll+r4+tSs0KlIl86A0LOeQWU99/Lt2xPNvkx8swYGIGagrXPterVr9zLcsMYv
bzU+Rwxmu8iZ22GO3Gv/mqrGw9wN7HhQ375oDVezVtgLZsXYFrONtOgxle2UIDO+BAnTJc5pJ8GdDNlRpq1pqbTVYm6r59ygZDUdRCC3aVDsOQ6mAfoAwbK0YMcgRCA9QVWDW2eAT/v0PHY47pPSdUreIFmyS72X7Z8jAACE6eUXgT3FsA8aPQFImGBh1ETZMOcMM0xNtrQjIUcSeoabaMRk1tmFEhHjTmOI0eNHL62puOJBL32EG0kvwViTiy9xRJxxqmTUETUugQNjSLZkYwtF09gCDks22SITdGrJZ9BR2b33gFD+HfRDUgMcVM9+4RmkxnsJIHQUWNQZNMZ+SvWX3j45SEcXXT+kpRR2/0xN+eZdeRW45z6TDGaQYFu8ZOFimV0EYTs/EjrPOTB51lg7HI2WGUftzDPMQ6mEMw9HqdDDIqiruQOTRBzd1pFEMPp2kUWongRbpy/B8g0sMrnUEXCyKqfKHz3RF1QCRUF5nVNS/vSAmfvo8BRSBylTLFpG/fSGVtaRRa1BcAAF11DJqmVsswaBRdavVbr5gFkAQqPnngQiyNenfQJQhXKZMSZZKuBENkxpGskISzskbkjQZqQmCVMv84gUU6gNLxQObDX5Vmqpr2hTEUcVKSdTTRzRxpFMxCEZsUu0wqRjrwDAyV6yUaLrX1kCeLmPdUhtpx5XYCH0UwJ4GXTuVP8HEcNsnWSxqea3VJIx38pwOoXuAwD6PGC7fe0jxp+FUaGKvooxNoxNEeJLairURCaMwJylslmEMp4MTjseup1Kig7bvU89F4XkkUa67b2bxTnNGhJxHNvS3N6wNLdkrdocjiRLtfT0JrDJWtc05egKzVUC3taX7LPR7rNVlUGJ5Z1aPz3ZJrBdNXXfPpm/We4MSalrEDFV97XCPn0xCEAYmSkWoWaQfg2phJCOdu++AW8GqfIkOn/hYrDEe3fDjcY2UuAX/713RiB9P/hJr4CTq/m1FHdSLeAU0tPLTa1+1NPg8jyzDmD95O1aYho1M1wSQJg2Ye47wmoTzJoip5//3ak+PyHXe+wCgKnpji8H8h2gAMCFxgjjaxL64KOSp5kJPW9faZuUh4bhocy4IxkCU4bzbHEL7NmtN98zCd9wGDjFecRWGqGVTGABDiEWh1bf2AmTADC7oxzQP+PS31GStQxr6UxaxercQWb2LOwI0D2yE8rREmgfKx6rXF2pHVJutw+qVRANgsman4LXC68Nr45su9CjqKfCxtxrhSaE1ApZOA93RIYe7hCGLQpCw1C54xUpWU5FsmERHNIKJELiocVg4b2S8dBHJ2HJkYTICPjBBwEt60/t4nIU0bXpWOzJoneMtZAzvoxNYCrg0QoYlLYwMCmy649QJEjBCvZk/x8rwGBhtnBHDRWKhCAUTWaIF7DmjTBtKyRGZK4Zt8iQCBaLdNhAdJhD7wFOI+Yr3+HIyRJauURIQtKJTngCgOi87IA1ow9/cjDA0XFllwfJRANn4ICZGWQZsdyP6DRxz/4kYBLuKWACYEdPMcJlQeyqYAzeyBcvxdEd5+BFoYShPDyyjZDUK6HzPORHEsHQhZZaIQznQY+4zcND5/hmwzIlkfCFpFXeC8mO/mVJWjHHNkB0SY5S4atYHmV+zKJfUNJQjIRsZU0HARMtw8WdgGr1WQl8QBi/+JMFIvCgmUtdntZIzAAR6HdV0FehGHOLWkGGeow5R60Sucdz3IIltf+CKwohJxoStfAcS2IMId0xjxniNHu4GQlMPgabnopEkhsLUi1IVSvzychjKZtLU5KlCWIogxigUAZBEUK6pjyAGPNZC1kQ4lXa+Q8/dqpSsOaEOaRINAe0pFwD/wOAdam1gmJIkF9+t7W1Fao0MdkMY14DE7PxkWykUsVMI+NcxhCDHcRglMEUS0h63LSx4JQIRTqyN9wMKRXTUN96X9Gc2DzScTKZJJJgsdQ3YRFUPPtVAogbu5U5BS2o6U7RvgPAOyXFAWEEMH14WVaK
Nk1qF62aRpEZqJrAtTQPaiY7TNUR0TQqFeaYI8TWNhpbWCRD0JTuoGh6Idea12Gjig3/33Y0scj1lG8w0YhEZLIjx53kfSoDbrJYo4z/PnBbtd0ibMHiv7HcFo3IsorTeJas+gA3AaJLSurMtTI1snFPuSsQYeIY49I4Jl+aCceNhpGSZJRIFYnMzDwQNo+XTKPNvRivdBMVk5nSoxe2qDH2BmKbV2GkxxnJyMYGMjiJ2Fdvi5OnfxBQlXqgJiHL0IQmcoAAok0Jac6CormcQh0wUc6V6NHKV2FHS9chRRNw0AREU4cUCBhgghcuUIaTm8Ew/AgyjUpYjPcVE4FwKDM0ulRmXnLXVIzmIoqB1DRWHBNCwqK8iL4bPWiDw5g4Eleb7ciQPlk25HzjRn818qwTUTDqowiA3vKm31R8aS6ezfusSNHPPmCmSlHTL7j8KThwwQVVVMMnoEGx6D7OvKcbVC3Nhkk2Hmti7BG24xYSMelA9EXYi0AGU9GmW2K5udh8Gf/62zjljMVO5psh7oYjPppVqYDIqlroxMi61B9Y7NSU+u3Hyq0DpnR+C6cEN/xcYi0WulL5dF0/QAdpvRoajjuJrMfAMGI47teznnWwi8FngdpYZJJ3EQtJMzI/AliJOtK8l1xbMxmiM6YmRBrMuNy8KlGFychd1Iqws1atWueNWEKcyf0cgg/8+a90TVvaObCArF3i5V8WJqXbrz9qdNbXbyAGGoNKMIDR5GL2VSHkbeYWJFGpoiDVSLo1ZoXWjltjCLnncPQd0e6ISE+FE8SSbBYnuDpcbwo3yiM3funNP7quMW9WAv+2LFP3zxezv/SrH0TADjO9QRp5DsSupJn/95I0pNCPzTynAm3cHAbd388oW1yv9zWWtN5MwhHmDGlWLR6ikADJRTQJ0Ukf0z1f0wydGYmRv7kS9TUNRUXgT3weTvmJGCDELcCVtRXPvkAMwExIo3BNLwwRh7xfm5mDSbVDYthf79FDaYAGTAhHkMygcrTPSXRKRKRMDiiAUuiA6zgffSwgrf1gEJaFEAod1NCJ0P1gdiRgEQ5h/DBF1QEIooEfQjRSL7wEXPVChHhIOOVeO1wE2ahCLaRNYyBMOyQDX7EgG56DbfDGOhHO3kTOOgVREikIAABIAOghH+ZhH+5hHgaiHgoiIf5hIR7iICYiIi5iIiqiIzIiIQIA/wyswAFMYgwcwArAwCVq4gHEgCbGACh6YieKoifGAIBcoEI0SmKIzfOMIKJkRjgcyQhuhsB8VwZqF+mxIQviyEl8RCcBB3P00HDohH6t1Z6oAF8g414oY08wozE+Y184YwVJ4zHuBQwACDRWDTICBkPIEB994whRU3axUDRJTzu4IO/pojruwzCQIZENEVHxWH4RY/ogCTjc45HYIz7eoz7yYz7+oz4eiS2Yw5Hwgj2awz0SJEIipEEuZDiYw0OCwzkw5DmAA0JOpESagzlM5EZqZC905ERyZDtoZEiG5EeOpGJsJPWYg10lii34zDoahGN4EEoNDzTd0TRtk/XEZP9MQgwOIUc2NEeuiAwotVOtDMmR6ERSLsnhMOWSQM5TDuSSGCQvCCQ4VCVBTiWSZCVBGpYt8AJBVmVVjiBVjuVA9gIvhAMv3AIvVOVHndiJ8UIvqGUvfORHgqVdwmVe8sJH7eWF1AJj8eQ+xKLwcNPxjIZmpBDuacYKCmZMPsQ5FU6t8ND51ErJ1OERpdOStJMtHBE+diZTZiZU2qNAliZpUmVfUeWSKORXDiQ4kOVA3oJGtuVVDiRYftRH2QJZWiRY9mZf6qU58KVw4mZe2uVHJQosKBJPukO3pR03fVAz5V7zGBpMOmZMqmL5tASRMQ5vcFYdbpYQgeb5bCaS6IT/PZqnUkKOQa5ma14l5CBJBmJle8rQVRrkRy5JXyFJWgZnLDKlQt6loZmDbMrmXQpnYgynXMKlheQlv3yUS1hnLN7CMABWTg4DOgwDd0mGclqnY9IIxYTM
RQhHp0jEyJCKcuCGU/7Ij0HlwsBGVPJCLXQK5BCkdXWEPY7oybTm3IBMcLoNDHbbWZpDp3ylgMKljLzCXhoWqQzDiSlpjVqnPsACKnTK8ewdZjhGYHIoh9KDe5mKUkqMbbSYTmTMesXEEdkIjAgkbagXSsTEkbDK2tVm2YAECcYEL8iK+DgSLBjkjzVHqjipv4hhcKLlyeRLgvKlzKnCoJ6YosEEXIIN/4pqg4wcmnWOyo8xZgymwlx1mpZ26j5IV74oTouZg/mURgCGJ5KogvDVREzQhlMOyjfY4x/AAkFGkqKtp6m650oQR77Y5o+8gjkMCkSCJUfwKa3ipauOIEyQYGLEJW60T0e8ZRYqBy9gBl9KGjv0AjtQCEx0aokFjIy0wx9km6eWq0G4YEwQEbk1pU1MaqVZEkmkW0d4piM5JUosCcgkh5sapEUQ5FXCgvANinveqsC2ZbVyDYu9QlqCpRZWpdxdxFUGKEcQpEXw5UfCxERKCly6l3GWhqfqgyowRkVMaJaaa7lyhk7AhOMI0Y8YycWQ4ePERn4dSb4uybJG5XGQ5v/bkSZuiOX5qAKtoqh9lkaw9ipZFutAZANavmZsTORACmlMfCRcXuty3OVHyWW1psL4sYPHIWk7gFRFeFO5eqXjbKjJmmxy9AbyKdqtlCqSXIR9CaTMHsmzRuVFHBGuBpmcGirkkOjaheWvhoPe8CcvuCo4WASBFq02JOgcwS1eDmc4FWdIwkS2fhRubOQc2QIqmG2nVufZfu4+WCoR2Wyr1mxvDOObxsR47ipoouh7KppVdoRq0kjCbqXNqsKd7utVJp5EBGdirF1utuWg4CZeEmRKAGeCcsRHTqhEDANYDsMrpCPoTu/0quKQqCgJngRzkKDjtM+STMQnkWBm0m3/qaqm3jqs6lalsEJsgCasOdgE4cKEw+7pROZubW6kMETEWdon1dIv454YZmys1n6UNuQi9R6wyULmeh3OjRDjSKToin6vjTIlbuyCaQbZU+4qn9povgqoGCaSonzU4WptciDsBuumggorgSKrRUircL4v3ZyY4NpZYyKwDX+u2tjExRRqDGbw28LsU5KKGOZsaUbEwHYWqxhqo5KNclqqEIPlGD6WRg4oOwxvW25k/1osFn8UR2SrOUDqTiaEatwwGWspxECOJunNcpgnU6roUo7MS3DYi9rscEhlzNkGe/KipiwE7EpkX+HfvqJlcJ7D4M5RW4pwRwCnXTqKkXJu/xk/cqe24ysc0fjOLGhq5VOycWmWpme6ZlXWZn2ypr9+5RA5skHUg/de8dTi5rAKqEF+7VseKG4y6V3qJTuMn1dCsi5/LjgYgn2NZ/sQIziIpia/rkDWQlbi6nt+ch77LCzUDWtkoXuCpWxS5dIKMlrS5dQq8okVJ1i+ginvsjg7Jj2QRMhYZVP+VRuT51NasGk6pXyuJyhjZa3UH2tEKZII8sIapAjL5VUKw1VeLV8NcvHipjAQcsmOs0JrKWcsCRApnkNn8iYbM676qzKTcmv6K8A4DF5dZTXX7wgKw0PyJYG+prQqssUSx0KvtMlCTGhG9ESzcS2IpWlWJUar5v9VDqtMJHSouN7VjqChAShBvzBI8eVBI+grtANLL3W5cuknEZl1hSfOCsfrDrFFRCWadoQ92w2XpqlGFulGxMQWL7FF2CVgMjVan+xHyMRjLYxVZlaaFuTJSFKIfmVvYGyN5bBugoMw3DUKoy+MuNeeJmdaF3bnLsnG2KOl3Ve+kOhmJh7F9mqaulw9CGsv5J+hfqRl06jWGrZnd6qlrvPN5utxECSN2DSv0i1B2J87LG7TbrYVE6TYfjZtc+j5jKfauhNxWATk3O1THmzIqGMGmo/9Hm2+qGAi1bZycyhpzEraTjQGkyrQgnJJbDVrL24vuGpcStLh8PRyf7c6OsZx3Motb3/vVfett/FkX4HMIufLO4A3fFtnaUBOZk7wyDjOLghRp+pDhQRnsKZ3fAf4OjZ0
ejrSJ/GCxLCEUpdrI2kEOAg4hDum4pRmsfatpkY4hmc4QryD6urNMU+D52q4iAu46wnkESHpiKe4hquE+aj/uItreIi/uIzPOI3XuI3fOI7nuI7vOI/3uI//OJAHuRgLOZEzxBgPuUIcuctx6j4weZE/uZbGOJTruDJUuZUrmRiEouhlQu9ZeWlN+Y7jQF8YsLPsSVY4uZLfTV8kGZi7XJq3xhi/OYdiTdYkRJlZjUGkgSeSR6d5X6gsAzK1uf0RkxhwOegi117s00FUjWrQ+V7gVKALet913VpJwuc6ek8oury0Czv6haFjjz5EuqR/G6Wv1YGYLKJnukLAgF+ggTJAwyQcSKr3BBd8k6iPeo0d0zOeerlieoAohKcrhJ/0BSpiD6DXOa4jGqtDo0OV66wr+p0vxJ1rusOMebK7/1yp98QNTMLWZTmf9LqoS1xPFHtCZBg30lCoI/u1N9YB9MWnH8Ss98S7c+izJ4S4G0ZDQAMxZIKUV7u6r/s3ZTsANHu5+8UNgLu6B9u891403DrA05DAL7xB6DpfOLuolxnBs6HDP/zdUHxPZDxC+DoAIHyiFzxfSDxlbzzHO8yy74UbMESw+Vqn1jtC3DvKu5y1r3zA9wXIw7tfMERWyLlstcYyoIacz4yvp5bNoztDFH1CQEOkp9Zp4E1q6PxCRDxDDHvFt0YmZALYZUIuUvtC7LvWzTvNl/nJ342XEEMafD2ZL7q6K0MmTMLXvQGNqYaTW71BCHzPG4Rf8PpCZP+Cx/dFDIi9Qoi8gRCGr39K1omewMdA6GU5jYmB6Dl+QqCGPsjAnpz7lgQ64mt7v1u9wL+8QsQ75yNEPayVkukOYNT75xOIoQ++sFcQsP/JvRcIm+u9QbR72jtLJgg89/3TMwZ9Xgh/Bc09Br1+sO9Dr239QSg/X/AOQlhNzJu57icE8FfQwStE9evO2xujwGcF9Pe+wMsWgRhA9ucizz/j9SNEyxujtJ9/oXM7xfmFCtQ+gcTADYBiu4QHQIQBMJBgQYOZ9u1bYTBhwhsGYzTcl4mgAWUS6xk0KGZSpkliNA68KJFkSZMnUaZUuZJlS5cvW8YIOXPgCmgmA2hEWJL/osE0EntuLAky5EViaDoSJXjD46RJaIglNMCwocaSygCIMTkTjcmHBrvCFDuWbFmzZyXKpKlRq8mgBEe6tVpV58lJRUliLbizpFqCEiGajEsyZNSTbNEmVryYsUu/a8XwNfkYQFiUlA3v02jZpFK4JIkZnHRyYUHABuu53JzSM4AbjWHHln32AEONEVPqNa1S98Cwd6mihKaRHsm3ACRLnLo7IV6Wc1Eexz2benXrJCnvCx04ukEYmoZpyjSefHnurdumLP1ZYm/kJ7M3BA5x8OHgJ91Pv76fP2MYVFsDYLSTAlyLpoQoS64zg5Yx7iD7mGuOpgEhHEg/kzIq6ML+njjsEKb49lmuoPoa+srAExFEbaXj6tuOIAqxu09Cmm4gUbOCXktpuIIM8NDHH1mqLcKQ4DvRSKlkPIlF0B40ScSBvDKQsxkHSg8/7oDMUssUI3SRoA33MbFKNBpBAykxkCLTzI/SRIpLgm5Sacn2RCsyQozEvI0wHFXK8MstAQXyvzsDnHIfLgyyMSaDMkOptfqOU5BKKFmjyUqhdMQy/9BN+QOxIcoAaHQfNOokK8GV8myxyb6SdAtUAOIyKEeU8uPU1v2E/KskIiXyEoAeVxIjhvpI1XAljSBdlaQnAXDpuPcmnfVKY2+tdjZPG3r2Qo1EvWqpXqEjENm8Si1pPUpbOi49TFPS1Np3FRtU16EQ+zTcrcrVc9pEmdzrJGbzisFKjMCi69vc3IVX4bKwVY7bhtwTySRfJXao3n7HBbcgGNO6Tykb3aNQVt4SXtjkl3JFF1+D4tznVaaUqccjZn0jOKRMLlJGmTxHdNDflXWNuNuMbyRoYKABAPPkpVVqGCiNVjjNyIQLnBpWcje2c96QYnCDGE0KlHogaQUz
CHxYptFeSV6Vo2Q34hPJNtjIR30mSNIQ77N6oEbZ3dfCtAFPyek9ldX7aInOPbBY9hryleOGEtfYyORGVqnkwAO/t0KCUmuoah4VlWhxrhOi++mfd5URmlf5JfxvhKnFXPbqiBF2IzQ6b2kSMQ0QI3TqlEGDshjQ+H32xAICADs=" diff --git a/SelfSignedCerts.md b/SelfSignedCerts.md new file mode 100644 index 0000000..8bed05f --- /dev/null +++ b/SelfSignedCerts.md @@ -0,0 +1,100 @@ +Copyright 2018 Distributed Management Task Force, Inc. All rights reserved. + +# Self-signed Certificate Walkthrough + + +## Introduction + +The Redfish Service Validator (as well as several of the other tools in the [DMTF github organization](https://github.com/DMTF)) uses the `requests` Python package for sending HTTP requests to a Redfish service. The `requests` package provides a feature to facilitate communicating with services that are protected with a server certificate that is self-signed or signed by a certificate authority (CA) that is not included in standard CA bundles. + +Users of the Redfish Service Validator can make use of this feature by specifying their own certificate bundle via the `--ca_bundle` command-line parameter: + +``` +--ca_bundle CA_BUNDLE path to Certificate Authority bundle file or directory +``` + +The bundle can also be specified via the `CertificateBundle = CA_BUNDLE` option in the `config.ini` file. + +While specifying the bundle to use is quite simple, creating the bundle to be used can be more challenging. This document provides one possible set of steps to create a bundle for use with the Redfish Service Validator (or any other tool/code that takes a certificate bundle as an option). + +## Steps + +Steps to create a certificate bundle directory that works with the [verify=CA_BUNDLE option](http://docs.python-requests.org/en/master/user/advanced/#ssl-cert-verification) in the python `requests` library: + +The steps below use a couple of commands from the openssl package. If it's not already installed on your system, go ahead and find and install it. 
+ +The steps here also assume you already have a Redfish service up and running that uses a self-signed cert. + +**Step 1.** Get the certificate for your service into a PEM format file. + +If you were the one that generated your self-signed cert, you may already have this at hand. But if not, you can get it from the running service using openssl. Run this openssl command, substituting the proper IP address and port for your service: + +``` +$ openssl s_client -connect <IP address>:<port> -showcerts +``` + +This will generate a couple (or a few) screenfuls of output. Scroll back through the output and look for a block of text that begins with "-----BEGIN CERTIFICATE-----" and ends with "-----END CERTIFICATE-----". Copy that text (including the "-----BEGIN CERTIFICATE-----" and "-----END CERTIFICATE-----" lines) and paste them into a text file. Name the text file with a '.pem' extension, for example, `myhost.pem`. + +**Step 2.** Place the file into a directory you will use for your ca_bundle directory for self-signed certs. + +For this example, I'll use a directory called `/tmp/ca_certs` and a PEM file called `myhost.pem` (from Step 1). + +``` +$ mkdir /tmp/ca_certs +$ cp myhost.pem /tmp/ca_certs/ +$ ls /tmp/ca_certs/ +myhost.pem +``` + +**Step 3.** Rehash the ca_certs directory. + +This is the step that makes your ca_certs directory recognized as a CA_BUNDLE directory that can be used by various runtimes to validate server certificates. + +``` +$ c_rehash /tmp/ca_certs/ +Doing /tmp/ca_certs/ +$ ls /tmp/ca_certs/ +889897a9.0 myhost.pem +``` + +You'll notice that now an additional file has been added to your `/tmp/ca_certs` directory. The name of the file is based on a hash of the public cert in the .pem file. + +Note: If you have additional server certificates that you want to add to the bundle of trusted certs, simply repeat steps 1, 2 and 3 for each server certificate. They can all go into the same ca_certs bundle directory. 
+ +**Step 4.** You _may_ need to perform this step depending on whether the hostname where your service is running matches the `CN` or one of the `Subject Alternative Name` values in your certificate. + +Rerun the openssl command from Step 1 that displays the contents of your certificate. Look through the output for the `Subject Alternative Name` values (if present) or the `CN` (Common Name) value. You want to be sure the hostname you use to access the service matches one of those names. + +For example, let's assume your certificate has a `CN` value of `CN=idrac`. Example: + +``` +$ openssl s_client -connect : -showcerts +CONNECTED(00000003) +depth=0 C = US, ST = New York, L = Metropolis, O = Acme Inc., OU = Remote Access Group, CN = idrac, emailAddress = info@example.com +... +``` + +You would want to be sure you specify a hostname of `idrac` when accessing the service. If that is already the correct hostname for your service, you don't need to perform this step. But if not, you may need to create an entry in your local /etc/hosts file (or equivalent) to map the IP address of your service to that hostname. 
+ +So, if the CN looks like 'CN=idrac' and the IP address of your service is 127.0.0.1, add an entry like this to your /etc/hosts: + +``` +127.0.0.1 idrac +``` + +**Step 5.** Use the new ca_certs directory + +You should now be able to use the new ca_certs dir you created with the Redfish Service Validator tool like this: + +``` +$ python3 RedfishServiceValidator.py -i idrac --authtype Basic -u <user> -p <password> --ca_bundle /tmp/ca_certs --payload Single /redfish/v1/Systems +``` + +Or in another python program using the `requests` library like this: + +``` +r = requests.get('https://idrac/redfish/v1', verify='/tmp/ca_certs') +``` + + + \ No newline at end of file diff --git a/cache/README.md b/cache/README.md new file mode 100644 index 0000000..81ed323 --- /dev/null +++ b/cache/README.md @@ -0,0 +1,11 @@ +To use cache, place modified files into this directory similar to Redfish mockup directory structure. + +To replace your ServiceRoot, place 'index.json' in './cache/redfish/v1/', then use command line options: + + + +`python3 RedfishServiceValidator.py --cache Prefer ./cache` + + + +Or the config file option listed in config.ini diff --git a/commonInterop.py b/commonInterop.py new file mode 100644 index 0000000..0c34e51 --- /dev/null +++ b/commonInterop.py @@ -0,0 +1,481 @@ + +# Copyright Notice: +# Copyright 2016 Distributed Management Task Force, Inc. All rights reserved. +# License: BSD 3-Clause License. 
# Copyright Notice:
# Copyright 2016 Distributed Management Task Force, Inc. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md

import io
import os
import sys
import re
from datetime import datetime
from collections import Counter, OrderedDict
import logging
import json
import traverseService as rst
import jsonschema
import argparse
from enum import Enum
from io import StringIO

from commonProfile import getProfiles, checkProfileAgainstSchema
from traverseService import AuthenticationError
from tohtml import renderHtml, writeHtml

rsvLogger = rst.getLogger()

# Module-wide options; WarnRecommended escalates missing Recommended items to WARN.
config = {'WarnRecommended': False}


class sEnum(Enum):
    """Tri-state outcome of a single interop check."""
    FAIL = 'FAIL'
    PASS = 'PASS'
    WARN = 'WARN'


class msgInterop:
    """A single interop validation result message.

    success may be given as a bool (coerced to PASS/FAIL) or as an sEnum
    member (e.g. WARN), which is stored as-is.
    """
    def __init__(self, name, entry, expected, actual, success):
        self.name = name
        self.entry = entry
        self.expected = expected
        self.actual = actual
        if isinstance(success, bool):
            self.success = sEnum.PASS if success else sEnum.FAIL
        else:
            self.success = success
        self.parent = None


def validateRequirement(entry, decodeditem, conditional=False):
    """Validate a ReadRequirement entry against a decoded payload item.

    :param entry: profile requirement ("Mandatory", "Recommended",
        "IfImplemented", "Conditional", ...); Mandatory is the default
        used by callers when the profile omits the entry.
    :param decodeditem: decoded payload value, or the sentinel 'DNE' when
        the property does not exist in the payload.
    :param conditional: when True, "Conditional" is treated as "Mandatory".
    :return: (msgInterop, result) tuple; result is a bool or sEnum.WARN.
    """
    propDoesNotExist = (decodeditem == 'DNE')
    rsvLogger.info('Testing ReadRequirement \n\texpected:' + str(entry) + ', exists: ' + str(not propDoesNotExist))
    originalentry = entry
    # "IfImplemented" and a satisfied "Conditional" are as strict as Mandatory.
    if entry == "IfImplemented" or (entry == "Conditional" and conditional):
        entry = "Mandatory"
    # Only Mandatory entries can fail, and only when the property is absent.
    paramPass = (not propDoesNotExist) if entry == "Mandatory" else True
    if entry == "Recommended" and propDoesNotExist:
        rsvLogger.info('\tItem is recommended but does not exist')
        if config['WarnRecommended']:
            rsvLogger.error('\tItem is recommended but does not exist, escalating to WARN')
            paramPass = sEnum.WARN

    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNoPass')
    return msgInterop('ReadRequirement', originalentry,
                      'Must Exist' if entry == "Mandatory" else 'Any',
                      'Exists' if not propDoesNotExist else 'DNE',
                      paramPass), paramPass


def isPropertyValid(profilePropName, rObj):
    """Check that a profile-listed property exists in the resource's schema.

    :return: (msgInterop or None, bool) -- (None, True) when the property is found.
    """
    for prop in rObj.getResourceProperties():
        if profilePropName == prop.propChild:
            return None, True
    rsvLogger.error('{} - Does not exist in ResourceType Schema, please consult profile provided'.format(profilePropName))
    return msgInterop('PropertyValidity', profilePropName, 'Should Exist', 'in ResourceType Schema', False), False


def validateMinCount(alist, length, annotation=0):
    """Validate a MinCount requirement against a list and its @odata.count annotation."""
    rsvLogger.info('Testing minCount \n\texpected:' + str(length) + ', val:' + str(annotation))
    paramPass = len(alist) >= length or annotation >= length
    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNoPass')
    # report whichever count is larger: the annotation or the actual list length
    return msgInterop('MinCount', length, '<=', max(annotation, len(alist)), paramPass),\
        paramPass


def validateSupportedValues(enumlist, annotation):
    """Validate that every value required by the profile appears in the
    allowable-values annotation.

    BUG FIX: the original loop left paramPass unbound (UnboundLocalError)
    when enumlist was empty; an empty requirement now passes vacuously.
    all() short-circuits at the first missing value, like the original break.
    """
    rsvLogger.info('Testing supportedValues \n\t:' + str(enumlist) + ', exists:' + str(annotation))
    paramPass = all(item in annotation for item in enumlist)
    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNoPass')
    return msgInterop('SupportedValues', enumlist, 'included in...', annotation, paramPass),\
        paramPass
def findPropItemforString(propObj, itemname):
    """Find the schema property object whose prefix-stripped name matches itemname."""
    for prop in propObj.getResourceProperties():
        decodedName = prop.name.split(':')[-1]
        if itemname == decodedName:
            return prop
    return None


def validateWriteRequirement(propObj, entry, itemname):
    """Validate a WriteRequirement entry: when required, the property's
    OData.Permissions annotation must allow ReadWrite.

    :return: (msgInterop, bool) tuple.
    """
    rsvLogger.info('writeable \n\t' + str(entry))
    permission = 'Read'
    expected = "OData.Permission/ReadWrite" if entry else "Any"
    if entry:
        targetProp = findPropItemforString(propObj, itemname.replace('#', ''))
        propAttr = None
        if targetProp is not None:
            propAttr = targetProp.propDict.get('OData.Permissions')
        if propAttr is not None:
            permission = propAttr.get('EnumMember', 'Read')
            # NOTE(review): schema EnumMember values are usually spelled
            # "OData.Permissions/..." (plural); confirm this comparison
            # string against the schema before relying on it.
            paramPass = permission \
                == "OData.Permission/ReadWrite"
        else:
            paramPass = False
    else:
        paramPass = True

    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNoPass')
    return msgInterop('WriteRequirement', entry, expected, permission, paramPass),\
        paramPass


def checkComparison(val, compareType, target):
    """Validate a Comparison requirement between a payload value and target values.

    :param val: decoded payload value (scalar, list, or link object)
    :param compareType: profile Comparison keyword (AnyOf, AllOf, Equal, ...)
    :param target: the profile's comparison value(s)
    :return: (msgInterop, bool) tuple.
    """
    rsvLogger.info('Testing a comparison \n\t' + str((val, compareType, target)))
    vallist = val if isinstance(val, list) else [val]
    paramPass = False
    if compareType == "AnyOf":
        # at least one payload value must appear in target
        paramPass = any(item in target for item in vallist)

    if compareType == "AllOf":
        # every target value must be covered by some payload value
        alltarget = {item for item in vallist if item in target}
        paramPass = len(alltarget) == len(target)
    if compareType == "LinkToResource":
        vallink = val.get('@odata.id')
        success, decoded, code, elapsed = rst.callResourceURI(vallink)
        if success:
            # the linked resource's type must be one of the target types
            ourType = decoded.get('@odata.type')
            if ourType is not None:
                SchemaType = rst.getType(ourType)
                paramPass = SchemaType in target
            else:
                paramPass = False
        else:
            paramPass = False

    if compareType == "Equal":
        paramPass = val == target
    if compareType == "NotEqual":
        paramPass = val != target
    if compareType == "GreaterThan":
        paramPass = val > target
    if compareType == "GreaterThanOrEqual":
        paramPass = val >= target
    if compareType == "LessThan":
        paramPass = val < target
    if compareType == "LessThanOrEqual":
        paramPass = val <= target
    if compareType == "Absent":
        paramPass = val == 'DNE'
    if compareType == "Present":
        paramPass = val != 'DNE'
    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNoPass')
    return msgInterop('Comparison', target, compareType, val, paramPass),\
        paramPass


def validateMembers(members, entry, annotation):
    """Validate a Members array and its count annotation.

    BUG FIX: the original tested `if not validateRequirement(...)`, but that
    function returns a (msg, bool) tuple, which is always truthy, so the
    Mandatory check could never fail.  Unpack the tuple and test the boolean.
    """
    rsvLogger.info('Testing members \n\t' + str((members, entry, annotation)))
    msg, success = validateRequirement('Mandatory', members)
    if not success:
        return False
    if "MinCount" in entry:
        mincount, mincountpass = validateMinCount(members, entry["MinCount"], annotation)
        mincount.name = 'MembersMinCount'
        return mincount, mincountpass


def validateMinVersion(fulltype, entry):
    """Check that a resource type's version is at least the profile's MinVersion."""
    fulltype = fulltype.replace('#', '')
    rsvLogger.info('Testing minVersion \n\t' + str((fulltype, entry)))
    # convert the profile's dotted version "1.0.2" to payload style "v1_0_2"
    versionNew = 'v' + '_'.join(entry.split('.'))
    # the version lives in the namespace portion, e.g. Power.v1_0_0
    v_payload = rst.getNamespace(fulltype).split('.', 1)[-1]
    # string comparison is adequate while each version field stays one digit
    paramPass = v_payload >= (versionNew if 'v' in v_payload else entry)
    rsvLogger.info('\tpass ' + str(paramPass))
    if not paramPass:
        rsvLogger.error('\tNo Pass')
    return msgInterop('MinVersion', '{} ({})'.format(entry, versionNew), '<=', fulltype, paramPass),\
        paramPass


def checkConditionalRequirement(propResourceObj, entry, decodedtuple, itemname):
    """Return True when a ConditionalRequirements entry applies to this resource."""
    rsvLogger.info('Evaluating conditionalRequirements')
    if "SubordinateToResource" in entry:
        # walk up the resource's parents, matching the profile chain backwards
        isSubordinate = False
        resourceParent = propResourceObj.parent
        for expectedParent in reversed(entry["SubordinateToResource"]):
            if resourceParent is not None:
                parentType = resourceParent.typeobj.stype
                isSubordinate = parentType == expectedParent
                rsvLogger.info('\tsubordinance ' +
                               str(parentType) + ' ' + str(isSubordinate))
                resourceParent = resourceParent.parent
            else:
                rsvLogger.info('no parent')
                isSubordinate = False
        return isSubordinate
    if "CompareProperty" in entry:
        decodeditem, decoded = decodedtuple
        # decodedtuple chains each payload dict to its parent, so the compared
        # property can be located by walking back out of nested objects
        comparePropName = entry["CompareProperty"]
        while comparePropName not in decodeditem and decoded is not None:
            decodeditem, decoded = decoded
        compareProp = decodeditem.get(comparePropName, 'DNE')
        return checkComparison(compareProp, entry["Comparison"], entry.get("CompareValues", []))[1]


def validatePropertyRequirement(propResourceObj, entry, decodedtuple, itemname, chkCondition=False):
    """Validate the PropertyRequirements entry for one property.

    :param decodedtuple: (value, parent-tuple) chain of decoded payload items
    :param chkCondition: True while re-entering for a ConditionalRequirement,
        to avoid re-running the entry's own Comparison
    :return: (list of msgInterop, Counter) tuple.
    """
    msgs = []
    counts = Counter()
    decodeditem, decoded = decodedtuple
    if entry is None or len(entry) == 0:
        rsvLogger.debug('there are no requirements for this prop')
    else:
        rsvLogger.info('propRequirement with value: ' + str(decodeditem if not isinstance(
            decodeditem, dict) else 'dict'))
        # Lists get MinCount and list-level comparisons first, then each item
        # is validated individually.
        if isinstance(decodeditem, list):
            rsvLogger.info("inside of a list: " + itemname)
            if "MinCount" in entry:
                msg, success = validateMinCount(decodeditem, entry["MinCount"],
                                                decoded[0].get(itemname.split('.')[-1] + '@odata.count', 0))
                msgs.append(msg)
                msg.name = itemname + '.' + msg.name
            for k, v in entry.get('PropertyRequirements', {}).items():
                # default to AnyOf if Comparison is not present but Values is
                comparisonValue = v.get("Comparison", "AnyOf") if v.get("Values") is not None else None
                if comparisonValue in ["AllOf", "AnyOf"]:
                    msg, success = (checkComparison([val.get(k, 'DNE') for val in decodeditem],
                                                    comparisonValue, v["Values"]))
                    msgs.append(msg)
                    msg.name = itemname + '.' + msg.name
            cnt = 0
            for item in decodeditem:
                listmsgs, listcounts = validatePropertyRequirement(
                    propResourceObj, entry, (item, decoded), itemname + '#' + str(cnt))
                counts.update(listcounts)
                msgs.extend(listmsgs)
                cnt += 1

        else:
            # Read Requirement is default mandatory if not present
            msg, success = validateRequirement(entry.get('ReadRequirement', 'Mandatory'), decodeditem)
            msgs.append(msg)
            msg.name = itemname + '.' + msg.name

            if "WriteRequirement" in entry:
                msg, success = validateWriteRequirement(propResourceObj, entry["WriteRequirement"], itemname)
                msgs.append(msg)
                msg.name = itemname + '.' + msg.name
            if "ConditionalRequirements" in entry:
                innerList = entry["ConditionalRequirements"]
                for item in innerList:
                    if checkConditionalRequirement(propResourceObj, item, decodedtuple, itemname):
                        rsvLogger.info("\tCondition DOES apply")
                        conditionalMsgs, conditionalCounts = validatePropertyRequirement(
                            propResourceObj, item, decodedtuple, itemname, chkCondition=True)
                        counts.update(conditionalCounts)
                        for item in conditionalMsgs:
                            item.name = item.name.replace('.', '.Conditional.', 1)
                        msgs.extend(conditionalMsgs)
                    else:
                        rsvLogger.info("\tCondition does not apply")
            if "MinSupportValues" in entry:
                # BUG FIX: validateSupportedValues takes (enumlist, annotation);
                # the original passed three arguments (TypeError when reached).
                msg, success = validateSupportedValues(
                    entry["MinSupportValues"],
                    decoded[0].get(itemname.split('.')[-1] + '@Redfish.AllowableValues', []))
                msgs.append(msg)
                msg.name = itemname + '.' + msg.name
            if "Comparison" in entry and not chkCondition and\
                    entry["Comparison"] not in ["AnyOf", "AllOf"]:
                msg, success = checkComparison(decodeditem, entry["Comparison"], entry.get("Values", []))
                msgs.append(msg)
                msg.name = itemname + '.' + msg.name
            if "PropertyRequirements" in entry:
                innerDict = entry["PropertyRequirements"]
                if isinstance(decodeditem, dict):
                    for item in innerDict:
                        rsvLogger.info('inside complex ' + itemname + '.' + item)
                        complexMsgs, complexCounts = validatePropertyRequirement(
                            propResourceObj, innerDict[item], (decodeditem.get(item, 'DNE'), decodedtuple), item)
                        msgs.extend(complexMsgs)
                        counts.update(complexCounts)
                else:
                    # BUG FIX: original referenced loop variable `item`, which is
                    # undefined in this branch, and passed a stray extra format arg.
                    rsvLogger.info('complex {} is missing or not a dictionary'.format(itemname))
    return msgs, counts
def validateActionRequirement(propResourceObj, entry, decodedtuple, actionname):
    """Validate the requirements for a single Action entry.

    :return: (list of msgInterop, Counter) tuple.
    """
    decodeditem, decoded = decodedtuple
    counts = Counter()
    msgs = []
    rsvLogger.info('actionRequirement \n\tval: ' + str(decodeditem if not isinstance(
        decodeditem, dict) else 'dict') + ' ' + str(entry))
    if "ReadRequirement" in entry:
        msg, success = validateRequirement(entry.get('ReadRequirement', "Mandatory"), decodeditem)
        msgs.append(msg)
        msg.name = actionname + '.' + msg.name
    if decodeditem == 'DNE':
        # action itself is absent; its parameters cannot be inspected
        return msgs, counts
    if "Parameters" in entry:
        for paramName, paramEntry in entry["Parameters"].items():
            annotation = decodeditem.get(str(paramName) + '@Redfish.AllowableValues', 'DNE')
            # parameters are assumed mandatory unless stated otherwise
            msg, success = validateRequirement(paramEntry.get('ReadRequirement', "Mandatory"), annotation)
            msgs.append(msg)
            msg.name = actionname + '.Parameters.' + msg.name
            if annotation == 'DNE':
                continue
            if "ParameterValues" in paramEntry:
                msg, success = validateSupportedValues(
                    paramEntry["ParameterValues"], annotation)
                msgs.append(msg)
                msg.name = actionname + '.' + msg.name
            if "RecommendedValues" in paramEntry:
                msg, success = validateSupportedValues(
                    paramEntry["RecommendedValues"], annotation)
                msg.name = msg.name.replace('Supported', 'Recommended')
                if config['WarnRecommended'] and not success:
                    rsvLogger.error('\tRecommended parameters do not all exist, escalating to WARN')
                    msg.success = sEnum.WARN
                elif not success:
                    rsvLogger.error('\tRecommended parameters do not all exist, but are not Mandatory')
                    msg.success = sEnum.PASS

                msgs.append(msg)
                msg.name = actionname + '.' + msg.name
    return msgs, counts


def validateInteropResource(propResourceObj, interopDict, decoded):
    """Validate one resource payload against its interop profile entry."""
    msgs = []
    rsvLogger.info('### Validating an InteropResource')
    rsvLogger.debug(str(interopDict))
    counts = Counter()
    # chain of payload dicts containing dicts, needed for CompareProperty
    decodedtuple = (decoded, None)
    if "MinVersion" in interopDict:
        msg, success = validateMinVersion(propResourceObj.typeobj.fulltype, interopDict['MinVersion'])
        msgs.append(msg)
    if "PropertyRequirements" in interopDict:
        for propName, propEntry in interopDict["PropertyRequirements"].items():
            vmsg, isvalid = isPropertyValid(propName, propResourceObj)
            if not isvalid:
                msgs.append(vmsg)
                vmsg.name = '{}.{}'.format(propName, vmsg.name)
                continue
            rsvLogger.info('### Validating PropertyRequirements for {}'.format(propName))
            pmsgs, pcounts = validatePropertyRequirement(
                propResourceObj, propEntry, (decoded.get(propName, 'DNE'), decodedtuple), propName)
            rsvLogger.info(pcounts)
            counts.update(pcounts)
            msgs.extend(pmsgs)
    if "ActionRequirements" in interopDict:
        actionsJson = decoded.get('Actions', {})
        decodedInnerTuple = (actionsJson, decodedtuple)
        for actionItem, actionEntry in interopDict["ActionRequirements"].items():
            actionName = '#' + propResourceObj.typeobj.stype + '.' + actionItem
            rsvLogger.info(actionName)
            amsgs, acounts = validateActionRequirement(propResourceObj, actionEntry, (actionsJson.get(
                actionName, 'DNE'), decodedInnerTuple), actionName)
            rsvLogger.info(acounts)
            counts.update(acounts)
            msgs.extend(amsgs)
    # Create/Delete/Update requirements are not yet implemented
    if "CreateResource" in interopDict:
        rsvLogger.info('Skipping CreateResource')
    if "DeleteResource" in interopDict:
        rsvLogger.info('Skipping DeleteResource')
    if "UpdateResource" in interopDict:
        rsvLogger.info('Skipping UpdateResource')

    # roll individual message results up into the counters
    for item in msgs:
        if item.success == sEnum.WARN:
            counts['warn'] += 1
        elif item.success == sEnum.PASS:
            counts['pass'] += 1
        elif item.success == sEnum.FAIL:
            counts['fail.{}'.format(item.name)] += 1
    rsvLogger.info(counts)
    return msgs, counts
+ item + rsvLogger.info(actionName) + amsgs, acounts = validateActionRequirement(propResourceObj, innerDict[item], (actionsJson.get( + actionName, 'DNE'), decodedInnerTuple), actionName) + rsvLogger.info(acounts) + counts.update(acounts) + msgs.extend(amsgs) + if "CreateResource" in interopDict: + rsvLogger.info('Skipping CreateResource') + pass + if "DeleteResource" in interopDict: + rsvLogger.info('Skipping DeleteResource') + pass + if "UpdateResource" in interopDict: + rsvLogger.info('Skipping UpdateResource') + pass + + for item in msgs: + if item.success == sEnum.WARN: + counts['warn'] += 1 + elif item.success == sEnum.PASS: + counts['pass'] += 1 + elif item.success == sEnum.FAIL: + counts['fail.{}'.format(item.name)] += 1 + rsvLogger.info(counts) + return msgs, counts + diff --git a/commonRedfish.py b/commonRedfish.py new file mode 100644 index 0000000..8d59622 --- /dev/null +++ b/commonRedfish.py @@ -0,0 +1,88 @@ + +import re + +""" + Power.1.1.1.Power , Power.v1_0_0.Power +""" + + +versionpattern = 'v[0-9]_[0-9]_[0-9]' +urlpattern = 'v[0-9]_[0-9]_[0-9]' + +def parseURL(string: str): + """parseURL + + :param string: url in question + :type string: str + """ + pass + +def isNonService(uri): + """ + Checks if a uri is within the service + """ + return uri is not None and 'http' in uri[:8] + + +def getNamespace(string: str): + """getNamespace + + Gives namespace of a type string, version included + + :param string: A type string + :type string: str + """ + if '#' in string: + string = string.rsplit('#', 1)[1] + return string.rsplit('.', 1)[0] + +def getVersion(string: str): + """getVersion + + Gives version stripped from type/namespace string, if possible + + :param string: A type/namespace string + :type string: str + """ + regcap = re.search(versionpattern, string) + return regcap.group() if regcap else None + + +def getNamespaceUnversioned(string: str): + """getNamespaceUnversioned + + Gives namespace of a type string, version NOT included + + :param string: 
# Copyright Notice:
# Copyright 2018 Distributed Management Task Force, Inc. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md

import os
import time
from collections import Counter, OrderedDict, defaultdict
import traverseService as rst

from io import BytesIO
import requests
import zipfile

EDM_NAMESPACE = "http://docs.oasis-open.org/odata/ns/edm"
EDMX_NAMESPACE = "http://docs.oasis-open.org/odata/ns/edmx"
EDM_TAGS = ['Action', 'Annotation', 'Collection', 'ComplexType', 'EntityContainer', 'EntityType', 'EnumType', 'Key',
            'Member', 'NavigationProperty', 'Parameter', 'Property', 'PropertyRef', 'PropertyValue', 'Record',
            'Schema', 'Singleton', 'Term', 'TypeDefinition']
EDMX_TAGS = ['DataServices', 'Edmx', 'Include', 'Reference']


live_zip_uri = 'http://redfish.dmtf.org/schemas/DSP8010_2018.1.zip'


def setup_schema_pack(uri, local_dir, proxies, timeout):
    """Download a DSP8010 schema pack zip and unpack its XML schemas into local_dir.

    :param uri: zip location, or 'latest' for the default DMTF 2018.1 pack
    :param local_dir: destination directory for the .xml members
    :param proxies: proxies dict passed through to requests
    :param timeout: request timeout in seconds
    :return: True always (errors are logged, not raised)
    """
    rst.traverseLogger.info('Unpacking schema pack...')
    if uri == 'latest':
        uri = live_zip_uri
    try:
        response = requests.get(uri, timeout=timeout, proxies=proxies)
        expCode = [200]
        elapsed = response.elapsed.total_seconds()
        statusCode = response.status_code
        rst.traverseLogger.debug('{}, {}, {},\nTIME ELAPSED: {}'.format(statusCode,
                                 expCode, response.headers, elapsed))
        if statusCode in expCode:
            # Buffer the payload once; the original built a second BytesIO just
            # to re-test it, and used an inverted `if not ...: pass else:` branch.
            payload = BytesIO(response.content)
            if zipfile.is_zipfile(payload):
                payload.seek(0)
                zf = zipfile.ZipFile(payload)
                for name in zf.namelist():
                    if '.xml' in name:
                        # flatten the archive layout: keep only the basename
                        cpath = '{}/{}'.format(local_dir, name.split('/')[-1])
                        rst.traverseLogger.debug((name, cpath))
                        item = zf.open(name)
                        with open(cpath, 'wb') as f:
                            f.write(item.read())
                        item.close()
                zf.close()
    except Exception:
        # best-effort: log and continue; callers do not depend on failure details
        rst.traverseLogger.error("A problem when getting resource has occurred {}".format(uri))
        rst.traverseLogger.warn("output: ", exc_info=True)
    return True
def bad_edm_tags(tag):
    """True for a tag in the Edm namespace whose name is not a known Edm element."""
    return tag.namespace == EDM_NAMESPACE and tag.name not in EDM_TAGS


def bad_edmx_tags(tag):
    """True for a tag in the Edmx namespace whose name is not a known Edmx element."""
    return tag.namespace == EDMX_NAMESPACE and tag.name not in EDMX_TAGS


def other_ns_tags(tag):
    """True for a tag belonging to neither the Edm nor the Edmx namespace."""
    return tag.namespace != EDM_NAMESPACE and tag.namespace != EDMX_NAMESPACE


def reference_missing_uri_attr(tag):
    """True for an edmx:Reference element missing its required Uri attribute."""
    return tag.name == 'Reference' and tag.get('Uri') is None


def include_missing_namespace_attr(tag):
    """True for an edmx:Include element missing its required Namespace attribute."""
    return tag.name == 'Include' and tag.get('Namespace') is None


def format_tag_string(tag):
    """Render a soup tag as 'prefix:name attr="value" ...' for reporting."""
    if tag.prefix is None:
        tag_name = tag.name
    else:
        tag_name = tag.prefix + ':' + tag.name
    tag_attr = ''.join('{}="{}" '.format(attr, tag.attrs[attr]) for attr in tag.attrs)
    return (tag_name + ' ' + tag_attr).strip()
    ' + for entry in entries: + html_str += '
  • {}
  • '.format(entry) + html_str += '
' + return html_str + + +def tag_list_html(tags_dict): + html_str = '
    ' + for tag in tags_dict: + html_str += '
  • {} {}
  • ' \ + .format(tag, '(' + str(tags_dict[tag]) + ' occurrences)' if tags_dict[tag] > 1 else '') + html_str += '
' + return html_str + + +class Metadata(object): + metadata_uri = '/redfish/v1/$metadata' + schema_type = '$metadata' + + def __init__(self, logger): + logger.info('Constructing metadata...') + self.success_get = False + self.uri_to_namespaces = defaultdict(list) + self.elapsed_secs = 0 + self.metadata_namespaces = set() + self.service_namespaces = set() + self.schema_store = dict() + self.bad_tags = dict() + self.bad_tag_ns = dict() + self.refs_missing_uri = dict() + self.includes_missing_ns = dict() + self.bad_schema_uris = set() + self.bad_namespace_include = set() + self.counter = OrderedCounter() + self.logger = logger + self.redfish_extensions_alias_ok = False + + start = time.time() + self.schema_obj = rst.rfSchema.getSchemaObject(Metadata.schema_type, Metadata.metadata_uri) + self.md_soup = None + self.service_refs = None + uri = Metadata.metadata_uri + + self.elapsed_secs = time.time() - start + if self.schema_obj: + self.md_soup = self.schema_obj.soup + self.service_refs = self.schema_obj.refs + self.success_get = True + # set of namespaces included in $metadata + self.metadata_namespaces = {k for k in self.service_refs.keys()} + # create map of schema URIs to namespaces from $metadata + for k in self.service_refs.keys(): + self.uri_to_namespaces[self.service_refs[k][1]].append(self.service_refs[k][0]) + logger.debug('Metadata: uri = {}'.format(uri)) + logger.debug('Metadata: metadata_namespaces: {} = {}' + .format(type(self.metadata_namespaces), self.metadata_namespaces)) + # check for Redfish alias for RedfishExtensions.v1_0_0 + ref = self.service_refs.get('Redfish') + if ref is not None and ref[0] == 'RedfishExtensions.v1_0_0': + self.redfish_extensions_alias_ok = True + logger.debug('Metadata: redfish_extensions_alias_ok = {}'.format(self.redfish_extensions_alias_ok)) + # check for XML tag problems + self.check_tags() + # check that all namespace includes are found in the referenced schema + self.check_namespaces_in_schemas() + logger.debug('Metadata: 
bad_tags = {}'.format(self.bad_tags)) + logger.debug('Metadata: bad_tag_ns = {}'.format(self.bad_tag_ns)) + logger.debug('Metadata: refs_missing_uri = {}'.format(self.refs_missing_uri)) + logger.debug('Metadata: includes_missing_ns = {}'.format(self.includes_missing_ns)) + logger.debug('Metadata: bad_schema_uris = {}'.format(self.bad_schema_uris)) + logger.debug('Metadata: bad_namespace_include = {}'.format(self.bad_namespace_include)) + for schema in self.service_refs: + name, uri = self.service_refs[schema] + self.schema_store[name] = rst.rfSchema.getSchemaObject(name, uri) + if self.schema_store[name] is not None: + for ref in self.schema_store[name].refs: + pass + else: + logger.warning('Metadata: getSchemaDetails() did not return success') + + def get_schema_obj(self): + return self.schema_obj + + def get_soup(self): + return self.md_soup + + def get_service_refs(self): + return self.service_refs + + def get_metadata_namespaces(self): + return self.metadata_namespaces + + def get_service_namespaces(self): + return self.service_namespaces + + def add_service_namespace(self, namespace): + self.service_namespaces.add(namespace) + + def get_missing_namespaces(self): + return self.service_namespaces - self.metadata_namespaces + + def get_schema_uri(self, namespace): + ref = self.service_refs.get(namespace) + if ref is not None: + return ref[1] + else: + return None + + def check_tags(self): + """ + Perform some checks on the tags in the $metadata XML looking for unrecognized tags, + tags missing required attributes, etc. 
+ """ + try: + for tag in self.md_soup.find_all(bad_edm_tags): + tag_str = format_tag_string(tag) + self.bad_tags[tag_str] = self.bad_tags.get(tag_str, 0) + 1 + for tag in self.md_soup.find_all(bad_edmx_tags): + tag_str = format_tag_string(tag) + self.bad_tags[tag_str] = self.bad_tags.get(tag_str, 0) + 1 + for tag in self.md_soup.find_all(reference_missing_uri_attr): + tag_str = format_tag_string(tag) + self.refs_missing_uri[tag_str] = self.refs_missing_uri.get(tag_str, 0) + 1 + for tag in self.md_soup.find_all(include_missing_namespace_attr): + tag_str = format_tag_string(tag) + self.includes_missing_ns[tag_str] = self.includes_missing_ns.get(tag_str, 0) + 1 + for tag in self.md_soup.find_all(other_ns_tags): + tag_str = tag.name if tag.prefix is None else tag.prefix + ':' + tag.name + tag_ns = 'xmlns{}="{}"'.format(':' + tag.prefix if tag.prefix is not None else '', tag.namespace) + tag_str = tag_str + ' ' + tag_ns + self.bad_tag_ns[tag_str] = self.bad_tag_ns.get(tag_str, 0) + 1 + except Exception as e: + self.logger.warning('Metadata: Problem parsing $metadata document: {}'.format(e)) + + def check_namespaces_in_schemas(self): + """ + Check that all namespaces included from a schema URI are actually in that schema + """ + for k in self.uri_to_namespaces.keys(): + schema_uri = k + if '#' in schema_uri: + schema_uri, frag = k.split('#', 1) + schema_type = os.path.basename(os.path.normpath(k)).strip('.xml').strip('_v1') + success, soup, _ = rst.rfSchema.getSchemaDetails(schema_type, schema_uri) + if success: + for namespace in self.uri_to_namespaces[k]: + if soup.find('Schema', attrs={'Namespace': namespace}) is None: + msg = 'Namespace {} not found in schema {}'.format(namespace, k) + self.logger.debug('Metadata: {}'.format(msg)) + self.bad_namespace_include.add(msg) + else: + self.logger.error('Metadata: failure opening schema {} of type {}'.format(schema_uri, schema_type)) + self.bad_schema_uris.add(schema_uri) + + def get_counter(self): + """ + Create a Counter 
instance containing the counts of any errors found + """ + counter = OrderedCounter() + # informational counters + counter['metadataNamespaces'] = len(self.metadata_namespaces) + counter['serviceNamespaces'] = len(self.service_namespaces) + # error counters + counter['missingRedfishAlias'] = 0 if self.redfish_extensions_alias_ok else 1 + counter['missingNamespaces'] = len(self.get_missing_namespaces()) + counter['badTags'] = len(self.bad_tags) + counter['missingUriAttr'] = len(self.refs_missing_uri) + counter['missingNamespaceAttr'] = len(self.includes_missing_ns) + counter['badTagNamespaces'] = len(self.bad_tag_ns) + counter['badSchemaUris'] = len(self.bad_schema_uris) + counter['badNamespaceInclude'] = len(self.bad_namespace_include) + self.counter = counter + return self.counter + + def to_html(self): + """ + Convert the $metadata validation results to HTML + """ + time_str = 'response time {0:.6f}s'.format(self.elapsed_secs) + section_title = '{} ({})'.format(Metadata.metadata_uri, time_str) + + counter = self.get_counter() + + html_str = '' + html_str += '{}'\ + .format(section_title) + html_str += '' + html_str += ''.format(section_title) + html_str += ''\ + .format(Metadata.metadata_uri, Metadata.schema_type) + html_str += '' + html_str += '' + html_str += '
{}
\ +
Show results
\ +
Schema File: {}
Resource Type: {}
GET Success' if self.success_get else 'class="fail"> GET Failure') + '' + + errors_found = False + for count_type in counter.keys(): + style = 'class=log' + if 'bad' in count_type or 'missing' in count_type: + if counter[count_type] > 0: + errors_found = True + style = 'class="fail log"' + html_str += '
{p}: {q}
'.format( + p=count_type, q=counter.get(count_type, 0), style=style) + + html_str += '
' + html_str += '' + + if self.success_get and not errors_found: + html_str += '' + elif not self.success_get: + html_str += ''\ + .format(Metadata.metadata_uri) + else: + if not self.redfish_extensions_alias_ok: + html_str += '' + if len(self.get_missing_namespaces()) > 0: + html_str += '' + if len(self.bad_tags) > 0: + html_str += '' + if len(self.refs_missing_uri) > 0: + html_str += '' + if len(self.includes_missing_ns) > 0: + html_str += '' + if len(self.bad_tag_ns) > 0: + html_str += '' + if len(self.bad_schema_uris) > 0: + html_str += '' + if len(self.bad_namespace_include) > 0: + html_str += '' + html_str += '
$metadata validation results
Validation successful
ERROR - Unable to retrieve $metadata resource at {}
ERROR - $metadata does not include the required "RedfishExtensions.v1_0_0" namespace with an alias of "Redfish"
ERROR - The following namespaces are referenced by the service, but are not included in $metadata:
    ' + for ns in self.get_missing_namespaces(): + html_str += '
  • {}
  • '.format(ns) + html_str += '
ERROR - The following tag names in $metadata are unrecognized (check spelling or case):' + html_str += tag_list_html(self.bad_tags) + html_str += '
ERROR - The following Reference tags in $metadata are missing the expected Uri attribute (check spelling or case):' + html_str += tag_list_html(self.refs_missing_uri) + html_str += '
ERROR - The following Include tags in $metadata are missing the expected Namespace attribute (check spelling or case):' + html_str += tag_list_html(self.includes_missing_ns) + html_str += '
ERROR - The following tags in $metadata have an unexpected namespace:' + html_str += tag_list_html(self.bad_tag_ns) + html_str += '
ERROR - The following schema URIs referenced from $metadata could not be retrieved:' + html_str += list_html(self.bad_schema_uris) + html_str += '
ERROR - The following namespaces included in $metadata could not be found in the referenced schema URI:' + html_str += list_html(self.bad_namespace_include) + html_str += '
' + + return html_str + + +class OrderedCounter(Counter, OrderedDict): + """Counter that remembers the order elements are first encountered""" + + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, OrderedDict(self)) + + def __reduce__(self): + return self.__class__, (OrderedDict(self),) diff --git a/redfish.ico b/redfish.ico new file mode 100644 index 0000000..301b15d Binary files /dev/null and b/redfish.ico differ diff --git a/requirements.txt b/requirements.txt index 5a1f470..be8028c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -beautifulsoup4 -requests +beautifulsoup4>=4.6.0 lxml -jsonschema +requests diff --git a/rfSchema.py b/rfSchema.py new file mode 100644 index 0000000..d30dd3c --- /dev/null +++ b/rfSchema.py @@ -0,0 +1,330 @@ + +from collections import namedtuple +from bs4 import BeautifulSoup +from functools import lru_cache + +from commonRedfish import getType, getNamespace, isNonService, getNamespaceUnversioned, getVersion +import traverseService as rst + +config = [] + +@lru_cache(maxsize=64) +def getSchemaDetails(SchemaType, SchemaURI): + """ + Find Schema file for given Namespace. 
+ + param SchemaType: Schema Namespace, such as ServiceRoot + param SchemaURI: uri to grab schema, given LocalOnly is False + return: (success boolean, a Soup object, origin) + """ + rst.traverseLogger.debug('getting Schema of {} {}'.format(SchemaType, SchemaURI)) + currentService = rst.currentService + + if SchemaType is None: + return False, None, None + + if currentService is None: + return getSchemaDetailsLocal(SchemaType, SchemaURI) + + elif currentService.active and getNamespace(SchemaType) in currentService.metadata.schema_store: + result = currentService.metadata.schema_store[getNamespace(SchemaType)] + if result is not None: + return True, result.soup, result.origin + + config = currentService.config + LocalOnly, SchemaLocation, ServiceOnly = config['localonlymode'], config['metadatafilepath'], config['servicemode'] + + if (SchemaURI is not None and not LocalOnly) or (SchemaURI is not None and '/redfish/v1/$metadata' in SchemaURI): + # Get our expected Schema file here + # if success, generate Soup, then check for frags to parse + # start by parsing references, then check for the refLink + if '#' in SchemaURI: + base_schema_uri, frag = tuple(SchemaURI.rsplit('#', 1)) + else: + base_schema_uri, frag = SchemaURI, None + success, data, status, elapsed = rst.callResourceURI(base_schema_uri) + if success: + soup = BeautifulSoup(data, "xml") + # if frag, look inside xml for real target as a reference + if frag is not None: + # prefer type over frag, truncated down + # using frag, check references + frag = getNamespace(SchemaType) + frag = frag.split('.', 1)[0] + refType, refLink = getReferenceDetails( + soup, name=base_schema_uri).get(frag, (None, None)) + if refLink is not None: + success, linksoup, newlink = getSchemaDetails(refType, refLink) + if success: + return True, linksoup, newlink + else: + rst.traverseLogger.error( + "SchemaURI couldn't call reference link {} inside {}".format(frag, base_schema_uri)) + else: + rst.traverseLogger.error( + "SchemaURI 
missing reference link {} inside {}".format(frag, base_schema_uri)) + # error reported; assume likely schema uri to allow continued validation + uri = 'http://redfish.dmtf.org/schemas/v1/{}_v1.xml'.format(frag) + rst.traverseLogger.info("Continue assuming schema URI for {} is {}".format(SchemaType, uri)) + return getSchemaDetails(SchemaType, uri) + else: + return True, soup, base_schema_uri + if isNonService(base_schema_uri) and ServiceOnly: + rst.traverseLogger.info("Nonservice URI skipped: {}".format(base_schema_uri)) + else: + rst.traverseLogger.debug("SchemaURI called unsuccessfully: {}".format(base_schema_uri)) + if LocalOnly: + rst.traverseLogger.debug("This program is currently LOCAL ONLY") + if ServiceOnly: + rst.traverseLogger.debug("This program is currently SERVICE ONLY") + if not LocalOnly and not ServiceOnly and isNonService(SchemaURI): + rst.traverseLogger.warning("SchemaURI {} was unable to be called, defaulting to local storage in {}".format(SchemaURI, SchemaLocation)) + return getSchemaDetailsLocal(SchemaType, SchemaURI) + + +def getSchemaDetailsLocal(SchemaType, SchemaURI): + """ + Find Schema file for given Namespace, from local directory + + param SchemaType: Schema Namespace, such as ServiceRoot + param SchemaURI: uri to grab schem (generate information from it) + return: (success boolean, a Soup object, origin) + """ + Alias = getNamespaceUnversioned(SchemaType) + config = rst.config + SchemaLocation, SchemaSuffix = config['metadatafilepath'], config['schemasuffix'] + if SchemaURI is not None: + uriparse = SchemaURI.split('/')[-1].split('#') + xml = uriparse[0] + else: + rst.traverseLogger.warning("SchemaURI was empty, must generate xml name from type {}".format(SchemaType)), + return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix) + rst.traverseLogger.debug((SchemaType, SchemaURI, SchemaLocation + '/' + xml)) + filestring = Alias + SchemaSuffix if xml is None else xml + try: + # get file + with open(SchemaLocation + '/' + xml, "r") as 
filehandle: + data = filehandle.read() + + # get tags + soup = BeautifulSoup(data, "xml") + edmxTag = soup.find('edmx:Edmx', recursive=False) + parentTag = edmxTag.find('edmx:DataServices', recursive=False) + child = parentTag.find('Schema', recursive=False) + SchemaNamespace = child['Namespace'] + FoundAlias = SchemaNamespace.split(".")[0] + rst.traverseLogger.debug(FoundAlias) + + if '/redfish/v1/$metadata' in SchemaURI: + if len(uriparse) > 1: + frag = getNamespace(SchemaType) + frag = frag.split('.', 1)[0] + refType, refLink = getReferenceDetails( + soup, name=SchemaLocation + '/' + filestring).get(frag, (None, None)) + if refLink is not None: + rst.traverseLogger.debug('Entering {} inside {}, pulled from $metadata'.format(refType, refLink)) + return getSchemaDetails(refType, refLink) + else: + rst.traverseLogger.error('Could not find item in $metadata {}'.format(frag)) + return False, None, None + else: + return True, soup, "local" + SchemaLocation + '/' + filestring + + if FoundAlias in Alias: + return True, soup, "local" + SchemaLocation + '/' + filestring + + except FileNotFoundError: + # if we're looking for $metadata locally... 
ditch looking for it, go straight to file + if '/redfish/v1/$metadata' in SchemaURI and Alias != '$metadata': + rst.traverseLogger.warning("Unable to find a harddrive stored $metadata at {}, defaulting to {}".format(SchemaLocation, Alias + SchemaSuffix)) + return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix) + else: + rst.traverseLogger.warn + ( + "Schema file {} not found in {}".format(filestring, SchemaLocation)) + if Alias == '$metadata': + rst.traverseLogger.warning( + "If $metadata cannot be found, Annotations may be unverifiable") + except Exception as ex: + rst.traverseLogger.error("A problem when getting a local schema has occurred {}".format(SchemaURI)) + rst.traverseLogger.warning("output: ", exc_info=True) + return False, None, None + + +def check_redfish_extensions_alias(name, namespace, alias): + """ + Check that edmx:Include for Namespace RedfishExtensions has the expected 'Redfish' Alias attribute + :param name: the name of the resource + :param item: the edmx:Include item for RedfishExtensions + :return: bool + """ + if alias is None or alias != 'Redfish': + msg = ("In the resource {}, the {} namespace must have an alias of 'Redfish'. The alias is {}. 
" + + "This may cause properties of the form [PropertyName]@Redfish.TermName to be unrecognized.") + rst.traverseLogger.error(msg.format(name, namespace, + 'missing' if alias is None else "'" + str(alias) + "'")) + return False + return True + +def getReferenceDetails(soup, metadata_dict=None, name='xml'): + """ + Create a reference dictionary from a soup file + + param arg1: soup + param metadata_dict: dictionary of service metadata, compare with + return: dictionary + """ + includeTuple = namedtuple('include', ['Namespace', 'Uri']) + refDict = {} + + maintag = soup.find("edmx:Edmx", recursive=False) + reftags = maintag.find_all('edmx:Reference', recursive=False) + for ref in reftags: + includes = ref.find_all('edmx:Include', recursive=False) + for item in includes: + uri = ref.get('Uri') + ns, alias = (item.get(x) for x in ['Namespace', 'Alias']) + if ns is None or uri is None: + rst.traverseLogger.error("Reference incorrect for: {}".format(item)) + continue + if alias is None: + alias = ns + refDict[alias] = includeTuple(ns, uri) + # Check for proper Alias for RedfishExtensions + if name == '$metadata' and ns.startswith('RedfishExtensions.'): + check_bool = check_redfish_extensions_alias(name, ns, alias) + + cntref = len(refDict) + if metadata_dict is not None: + refDict.update(metadata_dict) + rst.traverseLogger.debug("References generated from {}: {} out of {}".format(name, cntref, len(refDict))) + return refDict + + +class rfSchema: + def __init__(self, soup, context, origin, metadata=None, name='xml'): + self.soup = soup + self.refs = getReferenceDetails(soup, metadata, name) + self.context = context + self.origin = origin + self.name = name + + def getSchemaFromReference(self, namespace): + """getSchemaFromReference + + Get SchemaObj from generated references + + :param namespace: Namespace of reference + """ + tup = self.refs.get(namespace) + tupVersionless = self.refs.get(getNamespace(namespace)) + if tup is None: + if tupVersionless is None: + 
rst.traverseLogger.warning('No such reference {} in {}'.format(namespace, self.origin)) + return None + else: + tup = tupVersionless + rst.traverseLogger.warning('No such reference {} in {}, using unversioned'.format(namespace, self.origin)) + typ, uri = tup + newSchemaObj = getSchemaObject(typ, uri) + return newSchemaObj + + def getTypeTagInSchema(self, currentType, tagType=['EntityType', 'ComplexType']): + """getTypeTagInSchema + + Get type tag in schema + + :param currentType: type string + :param tagType: Array or single string containing the xml tag name + """ + pnamespace, ptype = getNamespace(currentType), getType(currentType) + soup = self.soup + + currentSchema = soup.find( # BS4 line + 'Schema', attrs={'Namespace': pnamespace}) + + if currentSchema is None: + return None + + currentEntity = currentSchema.find(tagType, attrs={'Name': ptype}, recursive=False) # BS4 line + + return currentEntity + + def getParentType(self, currentType, tagType=['EntityType', 'ComplexType']): + """getParentType + + Get parent of this Entity/ComplexType + + :param currentType: type string + :param tagType: Array or single string containing the xml tag name + """ + currentType = currentType.replace('#', '') + typetag = self.getTypeTagInSchema(currentType, tagType) + if typetag is not None: + currentType = typetag.get('BaseType') + if currentType is None: + return False, None, None + typetag = self.getTypeTagInSchema(currentType, tagType) + if typetag is not None: + return True, self, currentType + else: + namespace = getNamespace(currentType) + schemaObj = self.getSchemaFromReference(namespace) + if schemaObj is None: + return False, None, None + propSchema = schemaObj.soup.find( + 'Schema', attrs={'Namespace': namespace}) + if propSchema is None: + return False, None, None + return True, schemaObj, currentType + else: + return False, None, None + + def getHighestType(self, acquiredtype: str, limit=None): + """getHighestType + + get Highest possible version for given type + + 
:param acquiredtype: Type available + :param limit: Version string limit (full namespace or just version 'v1_x_x') + """ + typelist = list() + + if limit is not None: + if getVersion(limit) is None: + rst.traverseLogger.warning('Limiting namespace has no version, erasing: {}'.format(limit)) + limit = None + else: + limit = getVersion(limit) + + for schema in self.soup.find_all('Schema'): + newNamespace = schema.get('Namespace') + if limit is not None: + if getVersion(newNamespace) is None: + continue + if getVersion(newNamespace) > limit: + continue + if schema.find(['EntityType', 'ComplexType'], attrs={'Name': getType(acquiredtype)}, recursive=False): + typelist.append(newNamespace) + + for ns in reversed(sorted(typelist)): + rst.traverseLogger.debug( + "{} {}".format(ns, getType(acquiredtype))) + acquiredtype = ns + '.' + getType(acquiredtype) + return acquiredtype + return acquiredtype + + +def getSchemaObject(typename, uri, metadata=None): + """getSchemaObject + + Wrapper for getting an rfSchema object + + :param typename: Type with namespace of schema + :param uri: Context/URI of metadata/schema containing reference to namespace + :param metadata: parent refs of service + """ + success, soup, origin = getSchemaDetails(typename, uri) + + return rfSchema(soup, uri, origin, metadata=metadata, name=typename) if success else None + diff --git a/rfSession.py b/rfSession.py index 3c8b38e..443e94a 100644 --- a/rfSession.py +++ b/rfSession.py @@ -8,69 +8,88 @@ import logging import sys from requests.packages.urllib3.exceptions import InsecureRequestWarning +from http.client import responses commonHeader = {'OData-Version': '4.0'} requests.packages.urllib3.disable_warnings(InsecureRequestWarning) -sessionLogger = logging.getLogger(__name__) -sessionLogger.setLevel(logging.DEBUG) -ch = logging.StreamHandler(sys.stdout) -ch.setLevel(logging.INFO) -sessionLogger.addHandler(ch) - - -def getLogger(): - """ - Grab logger for tools that might use this lib - """ - return 
sessionLogger - class rfSession: - def __init__(self): - self.user, self.pwd, self.server = None, None, None - self.key, self.loc = None, None - self.timeout, self.tick = 0, 0 - self.started, self.chkCert = False, False + def __init__(self, user, password, server, logger=None, chkCert=True, proxies=None): + self.user = user + self.pwd = password + self.server = server + self.chkCert = chkCert + self.proxies = {} if proxies is None else proxies + self.key = None + self.loc = None + self.timeout = None + self.tick = None + self.started = False + self.chkCert = False + + if logger is None: + self.logger = logging.getLogger(__name__) + self.logger.setLevel(logging.DEBUG) + ch = logging.StreamHandler(sys.stdout) + ch.setLevel(logging.INFO) + self.logger.addHandler(ch) + else: + self.logger = logger - def startSession(self, user, password, server, chkCert=True): + def startSession(self): payload = { - "UserName": user, - "Password": password + 'UserName': self.user, + 'Password': self.pwd } - sr = requests.get(server + '/redfish/v1/', verify=chkCert, headers=commonHeader) + sr = requests.get(self.server + '/redfish/v1/', verify=self.chkCert, headers=commonHeader, proxies=self.proxies) success = sr.status_code in [200, 204] and sr.json() is not None if not success: - sessionLogger.error("Could not retrieve serviceroot to start Session") + self.logger.error('Could not retrieve service root to start Session') return False links = sr.json().get('Links') if links is not None: sessionsObj = links.get('Sessions') if sessionsObj is None: sessionsURI = '/redfish/v1/SessionService/Sessions' - sessionLogger.info('using default URI', sessionsURI) + self.logger.info('using default URI', sessionsURI) else: sessionsURI = sessionsObj.get('@odata.id', '/redfish/v1/SessionService/Sessions') else: - sessionLogger.error("Could not retrieve serviceroot.links to start Session") + self.logger.error('Could not retrieve service root link to start Session') return False - response = 
requests.post(server + sessionsURI, json=payload, verify=chkCert, headers=commonHeader) + response = requests.post(self.server + sessionsURI, json=payload, verify=self.chkCert, + headers=commonHeader, proxies=self.proxies) statusCode = response.status_code - ourSessionKey = response.headers.get("X-Auth-Token") - ourSessionLocation = response.headers.get("Location", "/None") + ourSessionKey = response.headers.get('X-Auth-Token') + ourSessionLocation = response.headers.get('Location', '/None') + if ourSessionLocation.startswith('/'): + ourSessionLocation = self.server + ourSessionLocation success = statusCode in range(200, 204) and ourSessionKey is not None - self.user, self.pwd, self.server = user, None, server - self.key, self.loc = ourSessionKey, ourSessionLocation - self.timeout, self.tick = timedelta(minutes=30), datetime.now() - self.started, self.chkCert = success, chkCert + self.key = ourSessionKey + self.loc = ourSessionLocation + self.timeout = timedelta(minutes=30) + self.tick = datetime.now() + self.started = success if success: - sessionLogger.info("Session successfully started") + self.logger.info('Session successfully created') else: - sessionLogger.info("Session failed to start {}".format(statusCode)) + if response.text is not None: + self.logger.info('Response body from session creation:') + self.logger.info('{}'.format(response.text)) + self.logger.info('Headers: {}'.format(response.headers)) + if statusCode in [400, 401]: + self.logger.error('Error creating session. Status code "{} {}". Check supplied username and password.' + .format(statusCode, responses[statusCode])) + raise ValueError('Bad Username or Password') + else: + self.logger.error('Error creating session. Status code "{} {}".' 
+ .format(statusCode, responses[statusCode])) + raise ValueError('Bad response from service') return success @@ -79,17 +98,26 @@ def isSessionOld(self): def getSessionKey(self): if not self.started: - sessionLogger.error("This session is not started") + self.logger.error('This session is not started') return None if self.isSessionOld(): - sessionLogger.error("This session is old") + self.logger.warning('This session is old') self.tick = datetime.now() return self.key def killSession(self): if self.started and not self.isSessionOld(): - headers = {"X-Auth-Token": self.getSessionKey()} + headers = {'X-Auth-Token': self.getSessionKey()} headers.update(commonHeader) - response = requests.delete(str(self.server) + str(self.loc), verify=self.chkCert, headers=headers) + try: + requests.delete(self.loc, verify=self.chkCert, headers=headers, proxies=self.proxies) + except Exception as e: + self.logger.warning('Error deleting current session: {}'.format(e)) self.started = False return True + + def getLogger(self): + """ + Grab logger for tools that might use this lib + """ + return self.logger diff --git a/tests.py b/tests/interoptests.py similarity index 98% rename from tests.py rename to tests/interoptests.py index 1cae002..c8eeb76 100644 --- a/tests.py +++ b/tests/interoptests.py @@ -1,7 +1,7 @@ # Copyright Notice: # Copyright 2017 Distributed Management Task Force, Inc. All rights reserved. -# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md +# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Usecase-Checkers/LICENSE.md # # Unit tests for RedfishInteropValidator.py # diff --git a/tohtml.py b/tohtml.py new file mode 100644 index 0000000..03043c3 --- /dev/null +++ b/tohtml.py @@ -0,0 +1,235 @@ + +# Copyright Notice: +# Copyright 2016 Distributed Management Task Force, Inc. All rights reserved. +# License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md + +import traverseService as rst +from commonRedfish import * +import RedfishLogo as logo +import html + + +def wrapTag(string, tag='div', attr=None): + string = str(string) + ltag, rtag = '<{}>'.format(tag), ''.format(tag) + if attr is not None: + ltag = '<{} {}>'.format(tag, attr) + return ltag + string + rtag + + +# hack in tagnames into module namespace +for tagName in ['tr', 'td', 'th', 'div', 'b', 'table', 'body', 'head']: + globals()[tagName] = lambda string, attr=None, tag=tagName: wrapTag(string, tag=tag, attr=attr) + + +def infoBlock(strings, split='
', ffunc=None, sort=True): + if isinstance(strings, dict): + infos = [b('{}: '.format(y)) + str(x) for y,x in (sorted(strings.items()) if sort else strings.items())] + else: + infos = strings + return split.join([ffunc(*x) for x in enumerate(infos)] if ffunc is not None else infos) + + +def tableBlock(lines, titles, widths=None, ffunc=None): + widths = widths if widths is not None else [100 for x in range(len(titles))] + attrlist = ['style="width:{}%"'.format(str(x)) for x in widths] + tableHeader = tr(''.join([th(x,y) for x,y in zip(titles,attrlist)])) + for line in lines: + tableHeader += tr(''.join([ffunc(cnt, x) if ffunc is not None else td(x) for cnt, x in enumerate(line)])) + return table(tableHeader) + + +def applySuccessColor(num, entry): + if num < 4: + return wrapTag(entry, 'td') + success_col = str(entry) + if 'FAIL' in str(success_col).upper(): + entry = '' + str(success_col) + '' + elif 'DEPRECATED' in str(success_col).upper(): + entry = '' + str(success_col) + '' + elif 'PASS' in str(success_col).upper(): + entry = '' + str(success_col) + '' + else: + entry = '' + str(success_col) + '' + return entry + + +def applyInfoSuccessColor(num, entry): + if 'fail' in entry or 'exception' in entry: + style = 'class="fail"' + elif 'warn' in entry: + style = 'class="warn"' + else: + style = None + return div(entry, attr=style) + + +def renderHtml(results, finalCounts, tool_version, startTick, nowTick): + # Render html + config = rst.config + config_str = ', '.join(sorted(list(config.keys() - set(['systeminfo', 'targetip', 'password', 'description'])))) + rsvLogger = rst.getLogger() + sysDescription, ConfigURI = (config['systeminfo'], config['targetip']) + logpath = config['logpath'] + + # wrap html + htmlPage = '' + htmlStrTop = 'Conformance Test Summary\ + \ + ' + htmlStrBodyHeader = '' + # Logo and logname + infos = [wrapTag('##### Redfish Conformance Test Report #####', 'h2')] + infos.append(wrapTag('DMTF Redfish Logo', 'h4')) + infos.append('

' + 'https://github.com/DMTF/Redfish-Interop-Validator

') + infos.append('Tool Version: {}'.format(tool_version)) + infos.append(startTick.strftime('%c')) + infos.append('(Run time: {})'.format( + str(nowTick-startTick).rsplit('.', 1)[0])) + infos.append('

This tool is provided and maintained by the DMTF. ' + 'For feedback, please open issues
in the tool\'s Github repository: ' + '' + 'https://github.com/DMTF/Redfish-Interop-Validator/issues

') + + htmlStrBodyHeader += tr(th(infoBlock(infos))) + + infos = {'System': ConfigURI, 'Description': sysDescription} + htmlStrBodyHeader += tr(th(infoBlock(infos))) + + infos = {'Profile': config['profile'], 'Schema': config['schema']} + htmlStrBodyHeader += tr(th(infoBlock(infos))) + + infos = {x: config[x] for x in config if x not in ['systeminfo', 'targetip', 'password', 'description', 'profile', 'schema']} + block = tr(th(infoBlock(infos, '|||'))) + for num, block in enumerate(block.split('|||'), 1): + sep = '
' if num % 4 == 0 else ', ' + sep = '' if num == len(infos) else sep + htmlStrBodyHeader += block + sep + + htmlStrTotal = '
Final counts: ' + for countType in sorted(finalCounts.keys()): + if finalCounts.get(countType) == 0: + continue + htmlStrTotal += '{p}: {q}, '.format(p=countType, q=finalCounts.get(countType, 0)) + htmlStrTotal += '
Expand All
' + htmlStrTotal += '
Collapse All
' + + htmlStrBodyHeader += tr(td(htmlStrTotal)) + + htmlPage = rst.currentService.metadata.to_html() + for cnt, item in enumerate(results): + entry = [] + val = results[item] + rtime = '(response time: {})'.format(val['rtime']) + + if len(val['messages']) == 0: + continue + + # uri block + prop_type = val['fulltype'] + if prop_type is not None: + namespace = getNamespace(prop_type) + type_name = getType(prop_type) + + infos = [str(val.get(x)) for x in ['uri', 'samplemapped'] if val.get(x) not in ['',None]] + infos.append(rtime) + infos.append(type_name) + uriTag = tr(th(infoBlock(infos, ' '), 'class="titlerow bluebg"')) + entry.append(uriTag) + + # info block + infos = [str(val.get(x)) for x in ['uri'] if val.get(x) not in ['',None]] + infos.append(rtime) + infos.append(div('Show Results', attr='class="button warn" onClick="document.getElementById(\'resNum{}\').classList.toggle(\'resultsShow\');"'.format(cnt))) + buttonTag = td(infoBlock(infos), 'class="title" style="width:30%"') + + infos = [str(val.get(x)) for x in ['context', 'origin', 'fulltype']] + infos = {y: x for x,y in zip(infos, ['Context', 'File Origin', 'Resource Type'])} + infosTag = td(infoBlock(infos), 'class="titlesub log" style="width:40%"') + + success = val['success'] + if success: + getTag = td('GET Success', 'class="pass"') + else: + getTag = td('GET Failure', 'class="fail"') + + + countsTag = td(infoBlock(val['counts'], split='', ffunc=applyInfoSuccessColor), 'class="log"') + + rhead = ''.join([buttonTag, infosTag, getTag, countsTag]) + for x in [('tr',), ('table', 'class=titletable'), ('td', 'class=titlerow'), ('tr')]: + rhead = wrapTag(''.join(rhead), *x) + entry.append(rhead) + + htmlStr = '' + + # actual table + rows = [(i.name, i.entry, i.expected, i.actual, str(i.success.value)) for i in val['messages']] + titles = ['Property Name', 'Value', 'Expected', 'Actual', 'Result'] + widths = ['15','30','30','10','15'] + tableHeader = tableBlock(rows, titles, widths, ffunc=applySuccessColor) + + 
# lets wrap table and errors and warns into one single column table + tableHeader = tr(td((tableHeader))) + + # warns and errors + errors = val['errors'] + if len(errors) == 0: + errors = 'No errors' + infos = errors.split('\n') + errorTags = tr(td(infoBlock(infos), 'class="fail log"')) + + warns = val['warns'] + if len(warns) == 0: + warns = 'No warns' + infos = warns.split('\n') + warnTags = tr(td(infoBlock(infos), 'class="warn log"')) + + tableHeader += errorTags + tableHeader += warnTags + tableHeader = table(tableHeader) + tableHeader = td(tableHeader, 'class="results" id=\'resNum{}\''.format(cnt)) + + entry.append(tableHeader) + + # append + htmlPage += ''.join([tr(x) for x in entry]) + + return wrapTag(wrapTag(htmlStrTop + wrapTag(htmlStrBodyHeader + htmlPage, 'table'), 'body'), 'html') + + + +def writeHtml(string, path): + with open(path, 'w', encoding='utf-8') as f: + f.write(string) + diff --git a/traverseService.py b/traverseService.py index a9cdeed..99f8269 100644 --- a/traverseService.py +++ b/traverseService.py @@ -3,42 +3,46 @@ # Copyright 2016 Distributed Management Task Force, Inc. All rights reserved. # License: BSD 3-Clause License. 
For full text see link: https://github.com/DMTF/Redfish-Interop-Validator/blob/master/LICENSE.md -from bs4 import BeautifulSoup -import configparser import requests import sys import re import os import json +import random from collections import OrderedDict from functools import lru_cache import logging from rfSession import rfSession from requests.packages.urllib3.exceptions import InsecureRequestWarning +from http.client import responses +import copy +import configparser + +import metadata as md +from commonRedfish import * +import rfSchema traverseLogger = logging.getLogger(__name__) traverseLogger.setLevel(logging.DEBUG) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) -traverseLogger.addHandler(ch) # Printout FORMAT, consider allowing debug to be piped here -argparse2configparser = { - 'user': 'username', 'nochkcert': '!certificatecheck', 'ca_bundle': 'certificatebundle', 'schemamode': 'schemamode', - 'suffix': 'schemasuffix', 'dir': 'metadatafilepath', 'nossl': '!usessl', 'timeout': 'timeout', 'service': 'servicemode', - 'http_proxy': 'httpproxy', 'localonly': 'localonlymode', 'https_proxy': 'httpsproxy', 'passwd': 'password', - 'ip': 'targetip', 'logdir': 'logpath', 'desc': 'systeminfo', 'authtype': 'authtype', - 'payload': 'payloadmode+payloadfilepath', 'cache': 'cachemode+cachefilepath', 'warnrecommended':'warnrecommended'} -configpsr = configparser.ConfigParser() -config = { - 'logpath': './logs', 'schemasuffix': '_v1.xml', 'timeout': 30, 'authtype': 'basic', 'certificatebundle': "", - 'httpproxy': "", 'httpsproxy': "", 'configset': '0', 'cachemode': 'Off', 'payloadmode': 'Default', - 'cachefilepath': None, 'payloadfilepath': None} -commonHeader = {'OData-Version': '4.0'} -proxies = {'http': None, 'https': None} +traverseLogger.addHandler(ch) -currentSession = rfSession() +commonHeader = {'OData-Version': '4.0'} requests.packages.urllib3.disable_warnings(InsecureRequestWarning) +# dictionary to hold sampling notation strings for URIs 
+uri_sample_map = dict() + +currentService = None + + +class AuthenticationError(Exception): + """Exception used for failed basic auth or token auth""" + def __init__(self, msg=None): + super(AuthenticationError, self).__init__(msg) + def getLogger(): """ @@ -46,109 +50,182 @@ def getLogger(): """ return traverseLogger +# default config +argparse2configparser = { + 'user': 'username', 'nochkcert': '!certificatecheck', 'ca_bundle': 'certificatebundle', 'schemamode': 'schemamode', + 'suffix': 'schemasuffix', 'schemadir': 'metadatafilepath', 'nossl': '!usessl', 'timeout': 'timeout', 'service': 'servicemode', + 'http_proxy': 'httpproxy', 'localonly': 'localonlymode', 'https_proxy': 'httpsproxy', 'passwd': 'password', + 'ip': 'targetip', 'logdir': 'logpath', 'desc': 'systeminfo', 'authtype': 'authtype', + 'payload': 'payloadmode+payloadfilepath', 'cache': 'cachemode+cachefilepath', 'token': 'token', + 'linklimit': 'linklimit', 'sample': 'sample', 'nooemcheck': '!oemcheck' + } + +configset = { + "targetip": str, "username": str, "password": str, "authtype": str, "usessl": bool, "certificatecheck": bool, "certificatebundle": str, + "metadatafilepath": str, "cachemode": (bool, str), "cachefilepath": str, "schemasuffix": str, "timeout": int, "httpproxy": str, "httpsproxy": str, + "systeminfo": str, "localonlymode": bool, "servicemode": bool, "token": str, 'linklimit': dict, 'sample': int, 'extrajsonheaders': dict, 'extraxmlheaders': dict, "schema_pack": str, + "forceauth": bool, "oemcheck": bool + } + +defaultconfig = { + 'authtype': 'basic', 'username': "", 'password': "", 'token': '', 'oemcheck': True, + 'certificatecheck': True, 'certificatebundle': "", 'metadatafilepath': './SchemaFiles/metadata', + 'cachemode': 'Off', 'cachefilepath': './cache', 'schemasuffix': '_v1.xml', 'httpproxy': "", 'httpsproxy': "", + 'localonlymode': False, 'servicemode': False, 'linklimit': {'LogEntry': 20}, 'sample': 0, 'schema_pack': None, 'forceauth': False + } + +config = 
dict(defaultconfig) + +configSet = False + + +def startService(): + global currentService + if currentService is not None: + currentService.close() + currentService = rfService(config) + return currentService + +def convertConfigParserToDict(configpsr): + cdict = {} + for category in configpsr: + for option in configpsr[category]: + val = configpsr[category][option] + if option not in configset.keys() and category not in ['Information', 'Validator']: + traverseLogger.error('Config option {} in {} unsupported!'.format(option, category)) + if val in ['', None]: + continue + if val.isdigit(): + val = int(val) + elif option == 'linklimit': + val = re.findall('[A-Za-z_]+:[0-9]+', val) + elif str(val).lower() in ['on', 'true', 'yes']: + val = True + elif str(val).lower() in ['off', 'false', 'no']: + val = False + cdict[option] = val + return cdict + + +def setByArgparse(args): + ch.setLevel(args.verbose_checks) + if args.v: + ch.setLevel(logging.DEBUG) + if args.config is not None: + configpsr = configparser.ConfigParser() + configpsr.read(args.config) + cdict = convertConfigParserToDict(configpsr) + else: + cdict = {} + for param in args.__dict__: + if param in argparse2configparser: + if isinstance(args.__dict__[param], list): + for cnt, item in enumerate(argparse2configparser[param].split('+')): + cdict[item] = args.__dict__[param][cnt] + elif '+' not in argparse2configparser[param]: + if '!' 
in argparse2configparser[param]: + cdict[argparse2configparser[param].replace('!', '')] = not args.__dict__[param] + else: + cdict[argparse2configparser[param]] = args.__dict__[param] + else: + cdict[param] = args.__dict__[param] -def setConfigNamespace(args): - # both config functions should conflate no extra info to log, unless it errors out or defaultsi - # any printouts should go to RSV, it's responsible for most logging initiative to file - # consider this: traverse has its own logging, rsv has its own logging - # info: xxx - """ - Provided a namespace, modify args based on it - """ - innerconfig = dict() - for param in args.__dict__: - if param in argparse2configparser: - if isinstance(args.__dict__[param], list): - for cnt, item in enumerate(argparse2configparser[param].split('+')): - innerconfig[item] = args.__dict__[param][cnt] - elif '+' not in argparse2configparser[param]: - innerconfig[argparse2configparser[param]] = args.__dict__[param] - setConfig('', innerconfig) - -def setConfig(filename, cdict=None): + setConfig(cdict) + + +def setConfig(cdict): """ - Set config based on config file read from location filename + Set config based on configurable dictionary """ - midconfig = dict() - if cdict is None: - configpsr.read(filename) - for x in configpsr: - for y in configpsr[x]: - val = configpsr[x][y] - midconfig[y] = val - else: - midconfig.update(cdict) - for item in midconfig: - val = midconfig.get(item) - if val is None: - pass - elif item == 'timeout': - val = int(val) - elif str(val).lower() in ['on', 'true', 'yes']: - val = True - elif str(val).lower() in ['off', 'false', 'no']: - val = False - if '!' 
in item: - item = item.replace('!', '') - val = not val - config[item] = val - - User, Passwd, Ip, ChkCert, UseSSL = config['username'], config['password'], config['targetip'], config['certificatecheck'], config['usessl'] - - config['configuri'] = ('https' if UseSSL else 'http') + '://' + Ip - - config['certificatecheck'] = ChkCert and UseSSL - - chkcertbundle = config['certificatebundle'] - if chkcertbundle not in [None, ""] and config['certificatecheck']: - if not os.path.isfile(chkcertbundle): - chkcertbundle = None - traverseLogger.error('ChkCertBundle is not found, defaulting to None') - else: - config['certificatebundle'] = None + # Send config only with keys supported by program + linklimitdict = {} + if cdict.get('linklimit') is not None: + for item in cdict.get('linklimit'): + if re.match('[A-Za-z_]+:[0-9]+', item) is not None: + typename, count = tuple(item.split(':')[:2]) + if typename not in linklimitdict: + linklimitdict[typename] = int(count) + else: + traverseLogger.error('Limit already exists for {}'.format(typename)) + cdict['linklimit'] = linklimitdict + + for item in cdict: + if item not in configset: + traverseLogger.debug('Unsupported {}'.format(item)) + elif not isinstance(cdict[item], configset[item]): + traverseLogger.error('Unsupported {}, expected type {}'.format(item, configset[item])) + + global config + config = dict(defaultconfig) + + # set linklimit + defaultlinklimit = config['linklimit'] + + config.update(cdict) + + config['configuri'] = ('https' if config.get('usessl', True) else 'http') + '://' + config['targetip'] + config['certificatecheck'] = config.get('certificatecheck', True) and config.get('usessl', True) - httpprox = config['httpproxy'] - httpsprox = config['httpsproxy'] - proxies['http'] = httpprox if httpprox != "" else None - proxies['https'] = httpsprox if httpsprox != "" else None + defaultlinklimit.update(config['linklimit']) + config['linklimit'] = defaultlinklimit if config['cachemode'] not in ['Off', 'Fallback', 
'Prefer']: + if config['cachemode'] is not False: + traverseLogger.error('CacheMode or path invalid, defaulting to Off') config['cachemode'] = 'Off' - traverseLogger.error('CacheMode or path invalid, defaulting to Off') AuthType = config['authtype'] - if AuthType not in ['None', 'Basic', 'Session']: + if AuthType not in ['None', 'Basic', 'Session', 'Token']: config['authtype'] = 'Basic' traverseLogger.error('AuthType invalid, defaulting to Basic') - if AuthType == 'Session': - certVal = chkcertbundle if ChkCert and chkcertbundle is not None else ChkCert - success = currentSession.startSession(User, Passwd, config['configuri'], certVal, proxies) - if not success: - raise RuntimeError("Session could not start") - if 'description' in config: - del config['description'] - if 'updated' in config: - del config['updated'] - config['configset'] = '1' - - -def isConfigSet(): - """ - Check if the library is configured - """ - if config['configset'] == '1': - return True - else: - raise RuntimeError("Configuration is not set") - - -def isNonService(uri): - """ - Checks if a uri is within the service - """ - return 'http' in uri[:8] +class rfService(): + def __init__(self, config): + traverseLogger.info('Setting up service...') + global currentService + currentService = self + self.config = config + self.proxies = dict() + self.active = False + + config['configuri'] = ('https' if config.get('usessl', True) else 'http') + '://' + config['targetip'] + httpprox = config['httpproxy'] + httpsprox = config['httpsproxy'] + self.proxies['http'] = httpprox if httpprox != "" else None + self.proxies['https'] = httpsprox if httpsprox != "" else None + + # Convert list of strings to dict + self.chkcertbundle = config['certificatebundle'] + chkcertbundle = self.chkcertbundle + if chkcertbundle not in [None, ""] and config['certificatecheck']: + if not os.path.isfile(chkcertbundle) and not os.path.isdir(chkcertbundle): + self.chkcertbundle = None + traverseLogger.error('ChkCertBundle is not 
found, defaulting to None') + else: + config['certificatebundle'] = None + + ChkCert = config['certificatecheck'] + AuthType = config['authtype'] + + self.currentSession = None + if not config.get('usessl', True) and not config['forceauth']: + if config['username'] not in ['', None] or config['password'] not in ['', None]: + traverseLogger.warning('Attempting to authenticate on unchecked http/https protocol is insecure, if necessary please use ForceAuth option. Clearing auth credentials...') + config['username'] = '' + config['password'] = '' + if AuthType == 'Session': + certVal = chkcertbundle if ChkCert and chkcertbundle is not None else ChkCert + # no proxy for system under test + self.currentSession = rfSession(config['username'], config['password'], config['configuri'], None, certVal, self.proxies) + self.currentSession.startSession() + self.metadata = md.Metadata(traverseLogger) + self.active = True + + def close(self): + if self.currentSession is not None and self.currentSession.started: + self.currentSession.killSession() + self.active = False def navigateJsonFragment(decoded, URILink): if '#' in URILink: @@ -160,11 +237,14 @@ def navigateJsonFragment(decoded, URILink): if isinstance(decoded, dict): decoded = decoded.get(item) elif isinstance(decoded, list): - decoded = decoded[int(item)] if int( - item) < len(decoded) else None + if not item.isdigit(): + traverseLogger.error("This is an Array, but this is not an index, aborting: {} {}".format(URILink, item)) + return None + decoded = decoded[int(item)] if int(item) < len(decoded) else None if not isinstance(decoded, dict): - traverseLogger.warn( + traverseLogger.error( "Decoded object no longer a dictionary {}".format(URILink)) + return None return decoded @@ -179,23 +259,37 @@ def callResourceURI(URILink): # rs-assertions: 6.4.1, including accept, content-type and odata-versions # rs-assertion: handle redirects? 
and target permissions # rs-assertion: require no auth for serviceroot calls - ConfigURI, UseSSL, AuthType, ChkCert, ChkCertBundle, timeout = config['configuri'], config['usessl'], config['authtype'], \ - config['certificatecheck'], config['certificatebundle'], config['timeout'] - CacheMode, CacheDir = config['cachemode'], config['cachefilepath'] - if URILink is None: - traverseLogger.debug("This URI is empty!") + traverseLogger.warn("This URI is empty!") return False, None, -1, 0 + + if currentService is None: + traverseLogger.warn("The current service is not setup! Program must configure the service before contacting URIs") + + URILink = URILink.rstrip('/') + config = currentService.config + proxies = currentService.proxies + ConfigURI, UseSSL, AuthType, ChkCert, ChkCertBundle, timeout, Token = config['configuri'], config['usessl'], config['authtype'], \ + config['certificatecheck'], config['certificatebundle'], config['timeout'], config['token'] + CacheMode, CacheDir = config['cachemode'], config['cachefilepath'] + nonService = isNonService(URILink) - payload = None - statusCode = '' - elapsed = 0 + payload, statusCode, elapsed, auth, noauthchk = None, '', 0, None, True + + isXML = False + if "$metadata" in URILink or ".xml" in URILink: + isXML = True + traverseLogger.debug('Should be XML') + + ExtraHeaders = None + if 'extrajsonheaders' in config and not isXML: + ExtraHeaders = eval(config['extrajsonheaders']) + elif 'extraxmlheaders' in config and isXML: + ExtraHeaders = eval(config['extraxmlheaders']) # determine if we need to Auth... 
if not nonService: - noauthchk = \ - ('/redfish' in URILink and '/redfish/v1' not in URILink) or\ - URILink in ['/redfish/v1', '/redfish/v1/', '/redfish/v1/odata', 'redfish/v1/odata/'] or\ + noauthchk = URILink in ['/redfish', '/redfish/v1', '/redfish/v1/odata'] or\ '/redfish/v1/$metadata' in URILink if noauthchk: traverseLogger.debug('dont chkauth') @@ -215,31 +309,39 @@ def callResourceURI(URILink): payload = json.loads(f.read()) payload = navigateJsonFragment(payload, URILink) if nonService and config['servicemode']: - traverseLogger.debug('Disallowed out of service URI') + traverseLogger.warning('Disallowed out of service URI') return False, None, -1, 0 # rs-assertion: do not send auth over http # remove UseSSL if necessary if you require unsecure auth - if not UseSSL or nonService or AuthType != 'Basic': + if (not UseSSL and not config['forceauth']) or nonService or AuthType != 'Basic': auth = None # only send token when we're required to chkauth, during a Session, and on Service and Secure if UseSSL and not nonService and AuthType == 'Session' and not noauthchk: + currentSession = currentService.currentSession headers = {"X-Auth-Token": currentSession.getSessionKey()} headers.update(commonHeader) + elif UseSSL and not nonService and AuthType == 'Token' and not noauthchk: + headers = {"Authorization": "Bearer "+Token} + headers.update(commonHeader) else: - headers = commonHeader + headers = copy.copy(commonHeader) + + if ExtraHeaders is not None: + headers.update(ExtraHeaders) - certVal = ChkCertBundle if ChkCert and ChkCertBundle is not None else ChkCert + certVal = ChkCertBundle if ChkCert and ChkCertBundle not in [None, ""] else ChkCert # rs-assertion: must have application/json or application/xml - traverseLogger.debug('callingResourceURI{}with authtype {} and ssl {}: {}'.format( - ' out of service ' if nonService else ' ', AuthType, UseSSL, URILink)) + traverseLogger.debug('callingResourceURI{}with authtype {} and ssl {}: {} {}'.format( + ' out of service 
' if nonService else ' ', AuthType, UseSSL, URILink, headers)) try: if payload is not None and CacheMode == 'Prefer': return True, payload, -1, 0 response = requests.get(ConfigURI + URILink if not nonService else URILink, - headers=headers, auth=auth, verify=certVal, timeout=timeout, proxies=proxies) + headers=headers, auth=auth, verify=certVal, timeout=timeout, + proxies=proxies if nonService else None) # only proxy non-service expCode = [200] elapsed = response.elapsed.total_seconds() statusCode = response.status_code @@ -252,13 +354,27 @@ def callResourceURI(URILink): decoded = response.json(object_pairs_hook=OrderedDict) # navigate fragment decoded = navigateJsonFragment(decoded, URILink) + if decoded is None: + traverseLogger.error( + "The JSON pointer in the fragment of this URI is not constructed properly: {}".format(URILink)) elif contenttype is not None and 'application/xml' in contenttype: decoded = response.text + elif nonService and contenttype is not None and 'text/xml' in contenttype: + # non-service schemas can use "text/xml" Content-Type + decoded = response.text else: traverseLogger.error( "This URI did NOT return XML or Json, this is not a Redfish resource (is this redirected?): {}".format(URILink)) return False, response.text, statusCode, elapsed return decoded is not None, decoded, statusCode, elapsed + elif statusCode == 401: + if not nonService and AuthType in ['Basic', 'Token']: + if AuthType == 'Token': + cred_type = 'token' + else: + cred_type = 'username and password' + raise AuthenticationError('Error accessing URI {}. Status code "{} {}". Check {} supplied for "{}" authentication.' 
+ .format(URILink, statusCode, responses[statusCode], cred_type, AuthType)) except requests.exceptions.SSLError as e: traverseLogger.error("SSLError on {}".format(URILink)) @@ -271,347 +387,204 @@ def callResourceURI(URILink): traverseLogger.debug("output: ", exc_info=True) except requests.exceptions.RequestException as e: traverseLogger.error("Request has encounted a problem when getting resource {}".format(URILink)) - traverseLogger.warn("output: ", exc_info=True) - except Exception as ex: + traverseLogger.warning("output: ", exc_info=True) + except AuthenticationError as e: + raise e # re-raise exception + except Exception: traverseLogger.error("A problem when getting resource has occurred {}".format(URILink)) - traverseLogger.warn("output: ", exc_info=True) + traverseLogger.warning("output: ", exc_info=True) if payload is not None and CacheMode == 'Fallback': return True, payload, -1, 0 return False, None, statusCode, elapsed -# note: Use some sort of re expression to parse SchemaType -# ex: #Power.1.1.1.Power , #Power.v1_0_0.Power -def getNamespace(string): - return string.replace('#', '').rsplit('.', 1)[0] - - -def getType(string): - return string.replace('#', '').rsplit('.', 1)[-1] - - -@lru_cache(maxsize=64) -def getSchemaDetails(SchemaType, SchemaURI): +def createResourceObject(name, uri, jsondata=None, typename=None, context=None, parent=None, isComplex=False): """ - Find Schema file for given Namespace. 
- - param arg1: Schema Namespace, such as ServiceRoot - param SchemaURI: uri to grab schema, given LocalOnly is False - return: (success boolean, a Soup object) + Factory for resource object, move certain work here """ - traverseLogger.debug('getting Schema of {} {}'.format(SchemaType, SchemaURI)) + traverseLogger.debug( + 'Creating ResourceObject {} {} {}'.format(name, uri, typename)) - if SchemaType is None: - return False, None, None - - LocalOnly, SchemaLocation, ServiceOnly = config['localonlymode'], config['metadatafilepath'], config['servicemode'] - - if SchemaURI is not None and not LocalOnly: - # Get our expected Schema file here - # if success, generate Soup, then check for frags to parse - # start by parsing references, then check for the refLink - if '#' in SchemaURI: - SchemaURI, frag = tuple(SchemaURI.rsplit('#', 1)) - else: - frag = None - success, data, status, elapsed = callResourceURI(SchemaURI) - if success: - soup = BeautifulSoup(data, "xml") - # if frag, look inside xml for real target as a reference - if frag is not None: - # prefer type over frag, truncated down - # using frag, check references - frag = getNamespace(SchemaType) - frag = frag.split('.', 1)[0] - refType, refLink = getReferenceDetails( - soup, name=SchemaURI).get(frag, (None, None)) - if refLink is not None: - success, linksoup, newlink = getSchemaDetails(refType, refLink) - if success: - return True, linksoup, newlink - else: - traverseLogger.error( - "SchemaURI couldn't call reference link {} inside {}".format(frag, SchemaURI)) - else: - traverseLogger.error( - "SchemaURI missing reference link {} inside {}".format(frag, SchemaURI)) - else: - return True, soup, SchemaURI - if isNonService(SchemaURI) and ServiceOnly: - traverseLogger.info("Nonservice URI skipped: {}".format(SchemaURI)) - else: - traverseLogger.debug("SchemaURI called unsuccessfully: {}".format(SchemaURI)) - if LocalOnly: - traverseLogger.debug("This program is currently LOCAL ONLY") - if ServiceOnly: - 
traverseLogger.debug("This program is currently SERVICE ONLY") - if not LocalOnly and not ServiceOnly and isNonService(SchemaURI): - traverseLogger.warn("SchemaURI {} was unable to be called, defaulting to local storage in {}".format(SchemaURI, SchemaLocation)) - return getSchemaDetailsLocal(SchemaType, SchemaURI) - - -def getSchemaDetailsLocal(SchemaType, SchemaURI): - # Use local if no URI or LocalOnly - # What are we looking for? Parse from URI - # if we're not able to use URI to get suffix, work with option fallback - Alias = getNamespace(SchemaType).split('.')[0] - SchemaLocation, SchemaSuffix = config['metadatafilepath'], config['schemasuffix'] - if SchemaURI is not None: - uriparse = SchemaURI.split('/')[-1].split('#') - xml = uriparse[0] - else: - traverseLogger.warn("SchemaURI was empty, must generate xml name from type {}".format(SchemaType)), - return getSchemaDetailsLocal(SchemaType, SchemaType + SchemaSuffix) - traverseLogger.debug((SchemaType, SchemaURI, SchemaLocation + '/' + xml)) - pout = Alias + SchemaSuffix if xml is None else xml - try: - # get file - filehandle = open(SchemaLocation + '/' + xml, "r") - data = filehandle.read() - filehandle.close() - # get tags - soup = BeautifulSoup(data, "xml") - edmxTag = soup.find('edmx:Edmx', recursive=False) - parentTag = edmxTag.find('edmx:DataServices', recursive=False) - child = parentTag.find('Schema', recursive=False) - SchemaNamespace = child['Namespace'] - FoundAlias = SchemaNamespace.split(".")[0] - traverseLogger.debug(FoundAlias) - if '/redfish/v1/$metadata' in SchemaURI: - if len(uriparse) > 1: - frag = getNamespace(SchemaType) - frag = frag.split('.', 1)[0] - refType, refLink = getReferenceDetails( - soup, name=SchemaLocation+'/'+pout).get(frag, (None, None)) - if refLink is not None: - traverseLogger.debug('Entering {} inside {}, pulled from $metadata'.format(refType, refLink)) - return getSchemaDetails(refType, refLink) - else: - traverseLogger.error('Could not find item in $metadata 
{}'.format(frag)) - return False, None, None - else: - return True, soup, "local" + SchemaLocation + '/' + pout - if FoundAlias in Alias: - return True, soup, "local" + SchemaLocation + '/' + pout - except FileNotFoundError as ex: - # if we're looking for $metadata locally... ditch looking for it, go straight to file - if '/redfish/v1/$metadata' in SchemaURI and Alias != '$metadata': - traverseLogger.error("Unable to find a harddrive stored $metadata at {}, defaulting to {}".format(SchemaLocation, Alias + SchemaSuffix)) - return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix) - else: + # Create json from service or from given + if jsondata is None and not isComplex: + success, jsondata, status, rtime = callResourceURI(uri) + traverseLogger.debug('{}, {}, {}'.format(success, jsondata, status)) + if not success: traverseLogger.error( - "File not found in {} for {}: ".format(SchemaLocation, pout)) - if Alias == '$metadata': - traverseLogger.error( - "If $metadata cannot be found, Annotations may be unverifiable") - except Exception as ex: - traverseLogger.error("A problem when getting a local schema has occurred {}".format(SchemaURI)) - traverseLogger.warn("output: ", exc_info=True) - return False, None, None - - -def getReferenceDetails(soup, metadata_dict=None, name='xml'): - """ - Create a reference dictionary from a soup file - - param arg1: soup - param metadata_dict: dictionary of service metadata, compare with - return: dictionary - """ - refDict = {} - ServiceOnly = config['servicemode'] - - maintag = soup.find("edmx:Edmx", recursive=False) - refs = maintag.find_all('edmx:Reference', recursive=False) - for ref in refs: - includes = ref.find_all('edmx:Include', recursive=False) - for item in includes: - if item.get('Namespace') is None or ref.get('Uri') is None: - traverseLogger.error("Reference incorrect for: {}".format(item)) - continue - if item.get('Alias') is not None: - refDict[item['Alias']] = (item['Namespace'], ref['Uri']) - else: - 
refDict[item['Namespace']] = (item['Namespace'], ref['Uri']) - - cntref = len(refDict) - if metadata_dict is not None: - refDict.update(metadata_dict) - if len(refDict.keys()) > len(metadata_dict.keys()): - diff_keys = [key for key in refDict if key not in metadata_dict] - traverseLogger.log( - logging.ERROR if ServiceOnly else logging.DEBUG, - "Reference in a Schema {} not in metadata, this may not be compatible with ServiceMode".format(name)) - traverseLogger.log( - logging.ERROR if ServiceOnly else logging.DEBUG, - "References missing in metadata: {}".format(str(diff_keys))) - traverseLogger.debug("References generated from {}: {} out of {}".format(name, cntref, len(refDict))) - return refDict - - -def getParentType(soup, refs, currentType, tagType='EntityType'): - # overhauling needed: deprecated function that should be realigned with the current type function - # debug: what are we working towards? did we get it? it's fine if we didn't - # error: none, should lend that to whatever calls it - """ - Get parent type of given type. 
- - param arg1: soup - param arg2: refs - param arg3: current type - param tagType: the type of tag for inheritance, default 'EntityType' - return: success, associated soup, associated ref, new type - """ - pnamespace, ptype = getNamespace(currentType), getType(currentType) - - propSchema = soup.find( # BS4 line - 'Schema', attrs={'Namespace': pnamespace}) - - if propSchema is None: - return False, None, None, None - - propEntity = propSchema.find(tagType, attrs={'Name': ptype}, recursive=False) # BS4 line - - if propEntity is None: - return False, None, None, None - - currentType = propEntity.get('BaseType') - - if currentType is None: - return False, None, None, None + '{}: URI could not be acquired: {}'.format(uri, status)) + return None + else: + jsondata, rtime = jsondata, 0 - currentType = currentType.replace('#', '') - SchemaNamespace, SchemaType = getNamespace( - currentType), getType(currentType) - propSchema = soup.find('Schema', attrs={'Namespace': SchemaNamespace}) # BS4 line + if not isinstance(jsondata, dict): + if not isComplex: + traverseLogger.error("Resource no longer a dictionary...") + else: + traverseLogger.debug("ComplexType does not have val") + return None - if propSchema is None: - success, innerSoup, uri = getSchemaDetails( - *refs.get(SchemaNamespace, (None, None))) - if not success: - return False, None, None, None - innerRefs = getReferenceDetails(innerSoup, refs, uri) - propSchema = innerSoup.find( - 'Schema', attrs={'Namespace': SchemaNamespace}) - if propSchema is None: - return False, None, None, None - else: - innerSoup = soup - innerRefs = refs + newResource = ResourceObj(name, uri, jsondata, typename, context, parent, isComplex) + newResource.rtime = rtime - return True, innerSoup, innerRefs, currentType + return newResource class ResourceObj: robjcache = {} - - def __init__(self, name, uri, expectedType=None, expectedSchema=None, expectedJson=None, parent=None): + + def __init__(self, name: str, uri: str, jsondata: dict, 
typename: str, context: str, parent=None, isComplex=False): self.initiated = False self.parent = parent self.uri, self.name = uri, name self.rtime = 0 + self.isRegistry = False + self.errorindex = { + "badtype": 0 + + } + + # Check if this is a Registry resource + parent_type = parent.typeobj.stype if parent is not None and parent.typeobj is not None else None + if parent_type == 'MessageRegistryFile': + traverseLogger.debug('{} is a Registry resource'.format(self.uri)) + self.isRegistry = True + + # Check if we provide a valid json + self.jsondata = jsondata - # Check if we provide a json - if expectedJson is None: - success, self.jsondata, status, self.rtime = callResourceURI(self.uri) - traverseLogger.debug('{}, {}, {}'.format(success, self.jsondata, status)) - if not success: - traverseLogger.error( - '{}: URI could not be acquired: {}'.format(self.uri, status)) - return - else: - self.jsondata = expectedJson - traverseLogger.debug("payload: {}".format(json.dumps(self.jsondata, indent=4, sort_keys=True))) + if not isinstance(self.jsondata, dict): traverseLogger.error("Resource no longer a dictionary...") - return + raise ValueError('This Resource is no longer a Dictionary') - # Check if we provide a type besides json's - if expectedType is None: - fullType = self.jsondata.get('@odata.type') - if fullType is None: - traverseLogger.error( - '{}: Json does not contain @odata.type'.format(self.uri)) - return - else: - fullType = self.jsondata.get('@odata.type', expectedType) + # Check if this is a Registry resource + parent_type = parent.typeobj.stype if parent is not None and parent.typeobj is not None else None - # Provide a context for this - if expectedSchema is None: - self.context = self.jsondata.get('@odata.context') - expectedSchema = self.context - if expectedSchema is None: - traverseLogger.error( - '{}: Json does not contain @odata.context'.format(self.uri)) - else: - self.context = expectedSchema + # Check for @odata.id (todo: regex) + odata_id = 
self.jsondata.get('@odata.id') + if odata_id is None and not isComplex: + if self.isRegistry: + traverseLogger.debug('{}: @odata.id missing, but not required for Registry resource' + .format(self.uri)) + else: + traverseLogger.error('{}: Json does not contain @odata.id'.format(self.uri)) - success, typesoup, self.context = getSchemaDetails( - fullType, SchemaURI=self.context) + # Get our real type (check for version) + acquiredtype = jsondata.get('@odata.type', typename) + if acquiredtype is None: + traverseLogger.error( + '{}: Json does not contain @odata.type or NavType'.format(uri)) + raise ValueError + if acquiredtype is not typename and isComplex: + context = None + + if currentService: + if jsondata.get('@odata.type') is not None: + currentService.metadata.add_service_namespace(getNamespace(jsondata.get('@odata.type'))) + if jsondata.get('@odata.context') is not None: + # add the namespace to the set of namespaces referenced by this service + ns = getNamespace(jsondata.get('@odata.context').split('#')[-1]) + if '/' not in ns and not ns.endswith('$entity'): + currentService.metadata.add_service_namespace(ns) + + # Provide a context for this (todo: regex) + if context is None: + context = self.jsondata.get('@odata.context') + if context is None and not isComplex: + context = createContext(acquiredtype) + if self.isRegistry: + # If this is a Registry resource, @odata.context is not required; do our best to construct one + traverseLogger.debug('{}: @odata.context missing from Registry resource; constructed context {}' + .format(acquiredtype, context)) + else: + traverseLogger.error('{}: Json does not contain @odata.context'.format(uri)) + if isComplex: + context = createContext(acquiredtype) - if not success: - traverseLogger.error("validateURI: No schema XML for {}".format(fullType)) - return + self.context = context + + # Get Schema object + self.schemaObj = rfSchema.getSchemaObject(acquiredtype, self.context) + + if self.schemaObj is None: + 
traverseLogger.error("ResourceObject creation: No schema XML for {} {} {}".format(typename, acquiredtype, self.context)) + raise ValueError # Use string comprehension to get highest type - if fullType is expectedType: - typelist = list() - schlist = list() - for schema in typesoup.find_all('Schema'): - newNamespace = schema.get('Namespace') - typelist.append(newNamespace) - schlist.append(schema) - for item, schema in reversed(sorted(zip(typelist, schlist))): - traverseLogger.debug( - "{} {}".format(item, getType(fullType))) - if schema.find('EntityType', attrs={'Name': getType(fullType)}, recursive=False): - fullType = item + '.' + getType(fullType) - break - traverseLogger.warn( - 'No @odata.type present, assuming highest type {}'.format(fullType)) + if acquiredtype is typename: + acquiredtype = self.schemaObj.getHighestType(typename) + if not isComplex: + traverseLogger.warning( + 'No @odata.type present, assuming highest type {}'.format(typename)) + + # Check if we provide a valid type (todo: regex) + self.typename = acquiredtype + typename = self.typename - self.additionalList = [] self.initiated = True - idtag = (fullType, self.context) # 🔫 - serviceRefs = None - successService, serviceSchemaSoup, SchemaServiceURI = getSchemaDetails( - '$metadata', '/redfish/v1/$metadata') - if successService: - serviceRefs = getReferenceDetails(serviceSchemaSoup, name=SchemaServiceURI) - successService, additionalProps = getAnnotations( - serviceSchemaSoup, serviceRefs, self.jsondata) - for prop in additionalProps: - self.additionalList.append(prop) + # get our metadata + metadata = currentService.metadata if currentService else None - # if we've generated this type, use it, else generate type + idtag = (typename, context) if idtag in ResourceObj.robjcache: self.typeobj = ResourceObj.robjcache[idtag] else: - typerefs = getReferenceDetails(typesoup, serviceRefs, self.context) self.typeobj = PropType( - fullType, typesoup, typerefs, 'EntityType', 
topVersion=getNamespace(fullType)) - ResourceObj.robjcache[idtag] = self.typeobj + typename, self.schemaObj, topVersion=getNamespace(typename)) + + self.propertyList = self.typeobj.getProperties(self.jsondata) + propertyList = [prop.propChild for prop in self.propertyList] + + + # get additional + self.additionalList = [] + propTypeObj = self.typeobj + if propTypeObj.propPattern is not None and len(propTypeObj.propPattern) > 0: + prop_pattern = propTypeObj.propPattern.get('Pattern', '.*') + prop_type = propTypeObj.propPattern.get('Type','Resource.OemObject') + + regex = re.compile(prop_pattern) + for key in [k for k in self.jsondata if k not in propertyList and regex.match(k)]: + val = self.jsondata.get(key) + value_obj = PropItem(propTypeObj.schemaObj, propTypeObj.fulltype, key, val, customType=prop_type) + self.additionalList.append(value_obj) + + # get annotation + successService, annotationProps = getAnnotations(metadata, self.jsondata) + if successService: + self.additionalList.extend(annotationProps) + + # list illegitimate properties together + self.unknownProperties = [k for k in self.jsondata if k not in propertyList + + [prop.propChild for prop in self.additionalList] and '@odata' not in k] self.links = OrderedDict() - node = self.typeobj - while node is not None: - self.links.update(getAllLinks( - self.jsondata, node.propList, node.refs, context=expectedSchema)) - node = node.parent + oem = config.get('oemcheck', True) + sample = config.get('sample') + linklimits = config.get('linklimits', {}) + self.links.update(self.typeobj.getLinksFromType(self.jsondata, self.context, self.propertyList, oem, linklimits, sample)) + + self.links.update(getAllLinks( + self.jsondata, self.additionalList, self.schemaObj, context=context, linklimits=linklimits, + sample_size=sample, oemCheck=oem)) + + def getResourceProperties(self): + allprops = self.propertyList + self.additionalList[:min(len(self.additionalList), 100)] + return allprops class PropItem: - def 
__init__(self, soup, refs, propOwner, propChild, tagType, topVersion): + def __init__(self, schemaObj, propOwner, propChild, val, topVersion=None, customType=None): try: self.name = propOwner + ':' + propChild self.propOwner, self.propChild = propOwner, propChild self.propDict = getPropertyDetails( - soup, refs, propOwner, propChild, tagType, topVersion) + schemaObj, propOwner, propChild, val, topVersion, customType) self.attr = self.propDict['attrs'] - except Exception as ex: + except Exception: traverseLogger.exception("Something went wrong") traverseLogger.error( '{}:{} : Could not get details on this property'.format(str(propOwner),str(propChild))) @@ -619,44 +592,100 @@ def __init__(self, soup, refs, propOwner, propChild, tagType, topVersion): return pass +class PropAction: + def __init__(self, propOwner, propChild, act): + try: + self.name = '#{}.{}'.format(propOwner, propChild) + self.propOwner, self.propChild = propOwner, propChild + self.actTag = act + except Exception: + traverseLogger.exception("Something went wrong") + traverseLogger.error( + '{}:{} : Could not get details on this action'.format(str(propOwner),str(propChild))) + self.actTag = None + class PropType: - def __init__(self, fulltype, soup, refs, tagType, topVersion=None): + def __init__(self, typename, schemaObj, topVersion=None): + # if we've generated this type, use it, else generate type self.initiated = False - self.fulltype = fulltype - self.soup, self.refs = soup, refs + self.fulltype = typename + self.schemaObj = schemaObj self.snamespace, self.stype = getNamespace( self.fulltype), getType(self.fulltype) self.additional = False - self.tagType = tagType self.isNav = False self.propList = [] + self.actionList = [] self.parent = None + self.propPattern = None - propertyList = self.propList - success, baseSoup, baseRefs, baseType = True, self.soup, self.refs, self.fulltype + # get all properties and actions in Type chain + success, currentSchemaObj, baseType = True, self.schemaObj, 
self.fulltype try: - self.additional, newList = getTypeDetails( - baseSoup, baseRefs, baseType, self.tagType, topVersion) - propertyList.extend(newList) - success, baseSoup, baseRefs, baseType = getParentType( - baseSoup, baseRefs, baseType, self.tagType) + newPropList, newActionList, self.additional, self.propPattern = getTypeDetails( + currentSchemaObj, baseType, topVersion) + + self.propList.extend(newPropList) + self.actionList.extend(newActionList) + + success, currentSchemaObj, baseType = currentSchemaObj.getParentType(baseType) if success: self.parent = PropType( - baseType, baseSoup, baseRefs, self.tagType, topVersion=topVersion) + baseType, currentSchemaObj, topVersion=topVersion) if not self.additional: self.additional = self.parent.additional - self.initiated = True except Exception as ex: traverseLogger.exception("Something went wrong") traverseLogger.error( '{}: Getting type failed for {}'.format(str(self.fulltype), str(baseType))) - return + raise ex + + self.initiated = True + + def getTypeChain(self): + if self.fulltype is None: + raise StopIteration + else: + node = self + tlist = [] + while node is not None: + tlist.append(node.fulltype) + yield node.fulltype + node = node.parent + raise StopIteration + + def getLinksFromType(self, jsondata, context, propList=None, oemCheck=True, linklimits={}, sample=None): + node = self + links = OrderedDict() + while node is not None: + links.update(getAllLinks(jsondata, node.getProperties(jsondata) if propList is None else propList, node.schemaObj, context=context, linklimits=linklimits, sample_size=sample, oemCheck=oemCheck)) + node = node.parent + return links + + def getProperties(self, jsondata): + node = self + props = [] + while node is not None: + for prop in node.propList: + schemaObj, newPropOwner, newProp, topVersion = prop + val = jsondata.get(newProp) + props.append(PropItem(schemaObj, newPropOwner, newProp, val, topVersion=topVersion)) + node = node.parent + return props + def getActions(self): + 
node = self + while node is not None: + for prop in node.actionList: + yield prop + node = node.parent + raise StopIteration -def getTypeDetails(soup, refs, SchemaAlias, tagType, topVersion=None): - # spits out information on the type we have, prone to issues if references/soup is ungettable, this shouldn't be ran without it + +def getTypeDetails(schemaObj, SchemaAlias, topVersion=None): + # spits out information on the type we have, prone to issues if references/soup is ungettable, this shouldn't be ran without it # has been prone to a lot of confusing errors: rehaul information that user expects to know before this point is reached # info: works undercover, but maybe can point out what type was generated and how many properties were found, if additional props allowed... # debug: all typegen info @@ -666,29 +695,40 @@ def getTypeDetails(soup, refs, SchemaAlias, tagType, topVersion=None): Gets list of surface level properties for a given SchemaType, """ PropertyList = list() + ActionList = list() + PropertyPattern = None additional = False + soup, refs = schemaObj.soup, schemaObj.refs + SchemaNamespace, SchemaType = getNamespace( SchemaAlias), getType(SchemaAlias) - traverseLogger.debug("Generating type: {} of tagType {}".format(SchemaAlias, tagType)) + traverseLogger.debug("Generating type: {}".format(SchemaAlias)) traverseLogger.debug("Schema is {}, {}".format( SchemaType, SchemaNamespace)) innerschema = soup.find('Schema', attrs={'Namespace': SchemaNamespace}) if innerschema is None: - traverseLogger.error("Got XML, but expected schema doesn't exist...? {}, {}\n... we will be unable to generate properties".format( - SchemaNamespace, SchemaType)) - return False, PropertyList + uri = schemaObj.origin + traverseLogger.error('Schema namespace {} not found in schema file {}. Will not be able to gather type details.' 
+ .format(SchemaNamespace, uri)) + return PropertyList, ActionList, False, PropertyPattern + + element = innerschema.find(['EntityType', 'ComplexType'], attrs={'Name': SchemaType}, recursive=False) + + if element is None: + uri = schemaObj.origin + traverseLogger.error('Element {} not found in schema namespace {}. Will not be able to gather type details.' + .format(SchemaType, SchemaNamespace)) + return PropertyList, ActionList, False, PropertyPattern - element = innerschema.find(tagType, attrs={'Name': SchemaType}, recursive=False) traverseLogger.debug("___") - traverseLogger.debug(element['Name']) + traverseLogger.debug(element.get('Name')) traverseLogger.debug(element.attrs) traverseLogger.debug(element.get('BaseType')) - usableProperties = element.find_all(['NavigationProperty', 'Property'], recursive=False) additionalElement = element.find( 'Annotation', attrs={'Term': 'OData.AdditionalProperties'}) additionalElementOther = element.find( @@ -702,8 +742,26 @@ def getTypeDetails(soup, refs, SchemaAlias, tagType, topVersion=None): else: additional = False if additionalElementOther is not None: + # create PropertyPattern dict containing pattern and type for DynamicPropertyPatterns validation + traverseLogger.debug('getTypeDetails: Redfish.DynamicPropertyPatterns found, element = {}, SchemaAlias = {}' + .format(element, SchemaAlias)) + pattern_elem = additionalElementOther.find("PropertyValue", Property="Pattern") + pattern = prop_type = None + if pattern_elem is not None: + pattern = pattern_elem.get("String") + type_elem = additionalElementOther.find("PropertyValue", Property="Type") + if type_elem is not None: + prop_type = type_elem.get("String") + traverseLogger.debug('getTypeDetails: pattern = {}, type = {}'.format(pattern, prop_type)) + if pattern is not None and prop_type is not None: + PropertyPattern = dict() + PropertyPattern['Pattern'] = pattern + PropertyPattern['Type'] = prop_type additional = True + # get properties + usableProperties = 
element.find_all(['NavigationProperty', 'Property'], recursive=False) + for innerelement in usableProperties: traverseLogger.debug(innerelement['Name']) traverseLogger.debug(innerelement.get('Type')) @@ -711,111 +769,152 @@ def getTypeDetails(soup, refs, SchemaAlias, tagType, topVersion=None): newPropOwner = SchemaAlias if SchemaAlias is not None else 'SomeSchema' newProp = innerelement['Name'] traverseLogger.debug("ADDING :::: {}:{}".format(newPropOwner, newProp)) - if newProp not in PropertyList: - PropertyList.append( - PropItem(soup, refs, newPropOwner, newProp, tagType=tagType, topVersion=topVersion)) - - return additional, PropertyList - - -def getPropertyDetails(soup, refs, propOwner, propChild, tagType='EntityType', topVersion=None): - # gets an individual property's details, can be prone to problems if info does not exist in soup or is bad - # HOWEVER, this will rarely be the case: a property that does not exist in soup would never be expected to generate - # info: under the hood, too much info to be worth showing - # debug: however, individual property concerns can go here - # error: much like above function, what if we can't find the type we need? should not happen... - # if this happens, is it necessarily an error? could be an outbound referenced type that isn't needed or stored - # example-- if we have a type for StorageXxx but don't have it stored on our system, why bother? we don't use it - # the above is not technically error, pass it on? 
+ PropertyList.append( + (schemaObj, newPropOwner, newProp, topVersion)) + + # get actions + usableActions = innerschema.find_all(['Action'], recursive=False) + + for act in usableActions: + newPropOwner = getNamespace(SchemaAlias) if SchemaAlias is not None else 'SomeSchema' + newProp = act['Name'] + traverseLogger.debug("ADDING ACTION :::: {}:{}".format(newPropOwner, newProp)) + ActionList.append( + PropAction(newPropOwner, newProp, act)) + + return PropertyList, ActionList, additional, PropertyPattern + + +def getPropertyDetails(schemaObj, propertyOwner, propertyName, val, topVersion=None, customType=None): """ Get dictionary of tag attributes for properties given, including basetypes. param arg1: soup data param arg2: references - param arg3: a property string + ... """ propEntry = dict() + propEntry['val'] = val + OwnerNamespace, OwnerType = getNamespace(propertyOwner), getType(propertyOwner) + traverseLogger.debug('___') + traverseLogger.debug('{}, {}:{}'.format(OwnerNamespace, propertyOwner, propertyName)) + + soup, refs = schemaObj.soup, schemaObj.refs + + if customType is None: + # Get Schema of the Owner that owns this prop + ownerSchema = soup.find('Schema', attrs={'Namespace': OwnerNamespace}) + + if ownerSchema is None: + traverseLogger.warning( + "getPropertyDetails: Schema could not be acquired, {}".format(OwnerNamespace)) + return None + + # Get Entity of Owner, then the property of the Property we're targeting + ownerEntity = ownerSchema.find( + ['EntityType', 'ComplexType'], attrs={'Name': OwnerType}, recursive=False) # BS4 line + + # check if this property is a nav property + # Checks if this prop is an annotation + success, propertySoup, propertyRefs, propertyFullType = True, soup, refs, OwnerType + + if '@' not in propertyName: + propEntry['isTerm'] = False # not an @ annotation + propertyTag = ownerEntity.find( + ['NavigationProperty', 'Property'], attrs={'Name': propertyName}, recursive=False) # BS4 line + + # start adding attrs and props 
together + propertyInnerTags = propertyTag.find_all() # BS4 line + for tag in propertyInnerTags: + propEntry[tag['Term']] = tag.attrs + propertyFullType = propertyTag.get('Type') + else: + propEntry['isTerm'] = True + ownerEntity = ownerSchema.find( + ['Term'], attrs={'Name': OwnerType}, recursive=False) # BS4 line + if ownerEntity is None: + ownerEntity = ownerSchema.find( + ['EntityType', 'ComplexType'], attrs={'Name': OwnerType}, recursive=False) # BS4 line + propertyTag = ownerEntity + propertyFullType = propertyTag.get('Type', propertyOwner) - SchemaNamespace, SchemaType = getNamespace(propOwner), getType(propOwner) - traverseLogger.debug('___') - traverseLogger.debug('{}, {}:{}, {}'.format(SchemaNamespace, propOwner, propChild, tagType)) + propEntry['isNav'] = propertyTag.name == 'NavigationProperty' + propEntry['attrs'] = propertyTag.attrs + traverseLogger.debug(propEntry) - propSchema = soup.find('Schema', attrs={'Namespace': SchemaNamespace}) - if propSchema is None: - traverseLogger.warn( - "getPropertyDetails: Schema could not be acquired, {}".format(SchemaNamespace)) - return None + propEntry['realtype'] = 'none' - # get type tag and tag of property in type - propEntity = propSchema.find(tagType, attrs={'Name': SchemaType}, recursive=False) # BS4 line - propTag = propEntity.find(['NavigationProperty', 'Property'], attrs={'Name': propChild}, recursive=False) # BS4 line - - # check if this property is a nav property - # Checks if this prop is an annotation - success, typeSoup, typeRefs, propType = getParentType( - soup, refs, SchemaType, tagType) - if '@' not in propChild: - propEntry['isTerm'] = False - # start adding attrs and props together - propAll = propTag.find_all() # BS4 line - for tag in propAll: - propEntry[tag['Term']] = tag.attrs - propType = propTag.get('Type') else: - propEntry['isTerm'] = True - propTag = propEntity - propType = propTag.get('Type', propOwner) - - propEntry['isNav'] = propTag.name == 'NavigationProperty' - 
propEntry['attrs'] = propTag.attrs - traverseLogger.debug(propEntry) - - propEntry['realtype'] = 'none' + propertyFullType = customType + propEntry['realtype'] = 'none' + propEntry['attrs'] = dict() + propEntry['attrs']['Type'] = customType + metadata = currentService.metadata + serviceRefs = currentService.metadata.get_service_refs() + serviceSchemaSoup = currentService.metadata.get_soup() + success, propertySoup, propertyRefs, propertyFullType = True, serviceSchemaSoup, serviceRefs, customType # find the real type of this, by inheritance - while propType is not None: + while propertyFullType is not None: traverseLogger.debug("HASTYPE") - TypeNamespace, TypeSpec = getNamespace(propType), getType(propType) + PropertyNamespace, PropertyType = getNamespace(propertyFullType), getType(propertyFullType) + + traverseLogger.debug('{}, {}'.format(PropertyNamespace, propertyFullType)) - traverseLogger.debug('{}, {}'.format(TypeNamespace, propType)) # Type='Collection(Edm.String)' # If collection, check its inside type - if re.match('Collection\(.*\)', propType) is not None: - propType = propType.replace('Collection(', "").replace(')', "") - propEntry['isCollection'] = propType + if re.match('Collection\(.*\)', propertyFullType) is not None: + if val is not None and not isinstance(val, list): + raise TypeError('This collection is not a List') + propertyFullType = propertyFullType.replace('Collection(', "").replace(')', "") + propEntry['isCollection'] = propertyFullType continue - if 'Edm' in propType: - propEntry['realtype'] = propType + else: + if val is not None and isinstance(val, list) and propEntry.get('isCollection') is None : + raise TypeError('This item should not be a List') + + # If basic, just pass itself + if 'Edm' in propertyFullType: + propEntry['realtype'] = propertyFullType break - # get proper soup - if TypeNamespace.split('.')[0] != SchemaNamespace.split('.')[0]: - success, typeSoup, uri = getSchemaDetails( - *refs.get(TypeNamespace, (None, None))) + # get 
proper soup, check if this Namespace is the same as its Owner, otherwise find its SchemaXml + if PropertyNamespace.split('.')[0] != OwnerNamespace.split('.')[0]: + schemaObj = schemaObj.getSchemaFromReference(PropertyNamespace) + success = schemaObj is not None + if success: + propertySoup = schemaObj.soup + propertyRefs = schemaObj.refs else: - success, typeSoup, uri = True, soup, 'of parent' + success, propertySoup, uri = True, soup, 'of parent' if not success: - traverseLogger.error( - "getPropertyDetails: InnerType could not be acquired, {}".format(uri)) + traverseLogger.warning( + "getPropertyDetails: Could not acquire appropriate Schema for this item, {} {} {}".format(propertyOwner, PropertyNamespace, propertyName)) return propEntry # traverse tags to find the type - typeRefs = getReferenceDetails(typeSoup, refs, name=uri) - typeSchema = typeSoup.find( # BS4 line - 'Schema', attrs={'Namespace': TypeNamespace}) - typeTag = typeSchema.find( # BS4 line - ['EnumType', 'ComplexType', 'EntityType', 'TypeDefinition'], attrs={'Name': TypeSpec}, recursive=False) - nameOfTag = typeTag.name if typeTag is not None else 'None' + propertySchema = propertySoup.find( + 'Schema', attrs={'Namespace': PropertyNamespace}) + if propertySchema is None: + traverseLogger.warning('Schema element with Namespace attribute of {} not found in schema file {}' + .format(PropertyNamespace, uri)) + break + propertyTypeTag = propertySchema.find( + ['EnumType', 'ComplexType', 'EntityType', 'TypeDefinition'], attrs={'Name': PropertyType}, recursive=False) + nameOfTag = propertyTypeTag.name if propertyTypeTag is not None else 'None' + # perform more logic for each type - if nameOfTag == 'TypeDefinition': - propType = typeTag.get('UnderlyingType') + if nameOfTag == 'TypeDefinition': # Basic type # This piece of code is rather simple UNLESS this is an "enumeration" # this is a unique deprecated enum, labeled as Edm.String - isEnum = typeTag.find( # BS4 line + + propertyFullType = 
propertyTypeTag.get('UnderlyingType') + isEnum = propertyTypeTag.find( # BS4 line 'Annotation', attrs={'Term': 'Redfish.Enumeration'}, recursive=False) - if propType == 'Edm.String' and isEnum is not None: + + if propertyFullType == 'Edm.String' and isEnum is not None: propEntry['realtype'] = 'deprecatedEnum' propEntry['typeprops'] = list() memberList = isEnum.find( # BS4 line @@ -828,12 +927,29 @@ def getPropertyDetails(soup, refs, propOwner, propChild, tagType='EntityType', t else: continue - elif nameOfTag == 'ComplexType': + elif nameOfTag == 'ComplexType': # go deeper into this type traverseLogger.debug("go deeper in type") + # We need to find the highest existence of this type vs topVersion schema # not ideal, but works for this solution - success, baseSoup, baseRefs, baseType = True, typeSoup, typeRefs, propType - if topVersion is not None and topVersion != SchemaNamespace: + success, baseSoup, baseRefs, baseType = True, propertySoup, propertyRefs, propertyFullType + + # If we're outside of our normal Soup, then do something different, otherwise elif + if PropertyNamespace.split('.')[0] != OwnerNamespace.split('.')[0] and not customType: + typelist = [] + schlist = [] + for schema in baseSoup.find_all('Schema'): + if schema.find('ComplexType', attrs={'Name': PropertyType}) is None: + continue + newNamespace = schema.get('Namespace') + typelist.append(newNamespace) + schlist.append(schema) + for item, schema in reversed(sorted(zip(typelist, schlist))): + traverseLogger.debug( + "Working backwards: {} {}".format(item, getType(baseType))) + baseType = item + '.' 
+ getType(baseType) + break + elif topVersion is not None and (topVersion != OwnerNamespace): currentVersion = topVersion currentSchema = baseSoup.find( # BS4 line 'Schema', attrs={'Namespace': currentVersion}) @@ -842,56 +958,91 @@ def getPropertyDetails(soup, refs, propOwner, propChild, tagType='EntityType', t # if it does, use our new expectedType, else continue down parent types # until we exhaust all schematags in file while currentSchema is not None: - expectedType = currentVersion + '.' + getType(propType) + expectedType = currentVersion + '.' + PropertyType currentTypeTag = currentSchema.find( # BS4 line - 'ComplexType', attrs={'Name': getType(propType)}) + 'ComplexType', attrs={'Name': PropertyType}) if currentTypeTag is not None: baseType = expectedType traverseLogger.debug('new type: ' + baseType) # Printout FORMAT break else: nextEntity = currentSchema.find( # BS4 line - 'EntityType', attrs={'Name': SchemaType}) + ['EntityType', 'ComplexType'], attrs={'Name': OwnerType}) nextType = nextEntity.get('BaseType') currentVersion = getNamespace(nextType) currentSchema = baseSoup.find( # BS4 line 'Schema', attrs={'Namespace': currentVersion}) continue propEntry['realtype'] = 'complex' - propEntry['typeprops'] = PropType( - baseType, baseSoup, baseRefs, 'ComplexType') + if propEntry.get('isCollection') is None: + propEntry['typeprops'] = createResourceObject(propertyName, 'complex', val, context=schemaObj.context, typename=baseType, isComplex=True) + else: + val = val if val is not None else {} + propEntry['typeprops'] = [createResourceObject(propertyName, 'complex', item, context=schemaObj.context, typename=baseType, isComplex=True) for item in val] break - elif nameOfTag == 'EnumType': - # If enum, get all members + elif nameOfTag == 'EnumType': # If enum, get all members propEntry['realtype'] = 'enum' propEntry['typeprops'] = list() - for MemberName in typeTag.find_all('Member'): # BS4 line + for MemberName in propertyTypeTag.find_all('Member'): # BS4 line 
propEntry['typeprops'].append(MemberName['Name']) break - elif nameOfTag == 'EntityType': - # If entity, do nothing special (it's a reference link) + elif nameOfTag == 'EntityType': # If entity, do nothing special (it's a reference link) propEntry['realtype'] = 'entity' propEntry['typeprops'] = dict() - traverseLogger.debug("typeEntityTag found {}".format(propTag['Name'])) + traverseLogger.debug("typeEntityTag found {}".format(propertyTypeTag['Name'])) break else: - traverseLogger.error("type doesn't exist? {}".format(propType)) - raise Exception( - "getPropertyDetails: problem grabbing type: " + propType) + traverseLogger.error('Type {} not found under namespace {} in schema {}' + .format(PropertyType, PropertyNamespace, uri)) break return propEntry -def getAllLinks(jsonData, propList, refDict, prefix='', context=''): - # gets all links, this can miss something if it is not designated navigatable or properly autoextended, collections, etc - # info: works underneath, can maybe report how many links it has gotten or leave that to whatever calls it? - # debug: should be reported by what calls it? not much debug is neede besides what is already generated earlier, - # error: it really depends on what type generation has done: if done correctly, this should have no problem, if propList is empty, it does nothing - # cannot think of errors that would be neccesary to know +def enumerate_collection(items, cTypeName, linklimits, sample_size): + """ + Generator function to enumerate the items in a collection, applying the link limit or sample size if applicable. + If a link limit is specified for this cTypeName, return the first N items as specified by the limit value. + If a sample size greater than zero is specified, return a random sample of items specified by the sample_size. + In both the above cases, if the limit value or sample size is greater than or equal to the number of items in the + collection, return all the items. 
+ If a limit value for this cTypeName and a sample size are both provided, the limit value takes precedence. + :param items: the collection of items to enumerate + :param cTypeName: the type name of this collection + :param linklimits: a dictionary mapping type names to their limit values + :param sample_size: the number of items to sample from large collections + :return: enumeration of the items to be processed + """ + if cTypeName in linklimits: + # "link limit" case + limit = min(linklimits[cTypeName], len(items)) + traverseLogger.debug('Limiting "{}" to first {} links'.format(cTypeName, limit)) + for i in range(limit): + if linklimits[cTypeName] < len(items): + uri = items[i].get('@odata.id') + if uri is not None: + uri_sample_map[uri] = 'Collection limit {} of {}'.format(i + 1, limit) + yield i, items[i] + elif 0 < sample_size < len(items): + # "sample size" case + traverseLogger.debug('Limiting "{}" to sample of {} links'.format(cTypeName, sample_size)) + sample = 0 + for i in sorted(random.sample(range(len(items)), sample_size)): + sample += 1 + uri = items[i].get('@odata.id') + if uri is not None: + uri_sample_map[uri] = 'Collection sample {} of {}'.format(sample, sample_size) + yield i, items[i] + else: + # "all" case + traverseLogger.debug('Processing all links for "{}"'.format(cTypeName)) + yield from enumerate(items) + + +def getAllLinks(jsonData, propList, schemaObj, prefix='', context='', linklimits=None, sample_size=0, oemCheck=True): """ Function that returns all links provided in a given JSON response. This result will include a link to itself. 
@@ -904,6 +1055,8 @@ def getAllLinks(jsonData, propList, refDict, prefix='', context=''): :return: list of links """ linkList = OrderedDict() + if linklimits is None: + linklimits = {} # check keys in propertyDictionary # if it is a Nav property, check that it exists # if it is not a Nav Collection, add it to list @@ -912,23 +1065,33 @@ def getAllLinks(jsonData, propList, refDict, prefix='', context=''): # if it is, recurse on collection or individual item if not isinstance(jsonData, dict): traverseLogger.error("Generating links requires a dict") + refDict = schemaObj.refs try: for propx in propList: propDict = propx.propDict + if propDict is None: + continue + + isNav = propDict.get('isNav', False) key = propx.name item = getType(key).split(':')[-1] - if propDict['isNav']: - insideItem = jsonData.get(item) + + insideItem = jsonData.get(item) + autoExpand = propDict.get('OData.AutoExpand', None) is not None or\ + propDict.get('OData.AutoExpand'.lower(), None) is not None + cType = propDict.get('isCollection') + ownerNS = propx.propOwner.split('.')[0] + ownerType = propx.propOwner.split('.')[-1] + + if isNav: if insideItem is not None: - cType = propDict.get('isCollection') - autoExpand = propDict.get('OData.AutoExpand', None) is not None or\ - propDict.get('OData.AutoExpand'.lower(), None) is not None if cType is not None: + cTypeName = getType(cType) cSchema = refDict.get(getNamespace(cType), (None, None))[1] if cSchema is None: cSchema = context - for cnt, listItem in enumerate(insideItem): - linkList[prefix + str(item) + '.' + getType(propDict['isCollection']) + + for cnt, listItem in enumerate_collection(insideItem, cTypeName, linklimits, sample_size): + linkList[prefix + str(item) + '.' 
+ cTypeName + '#' + str(cnt)] = (listItem.get('@odata.id'), autoExpand, cType, cSchema, listItem) else: cType = propDict['attrs'].get('Type') @@ -937,29 +1100,74 @@ def getAllLinks(jsonData, propList, refDict, prefix='', context=''): cSchema = context linkList[prefix + str(item) + '.' + getType(propDict['attrs']['Name'])] = ( insideItem.get('@odata.id'), autoExpand, cType, cSchema, insideItem) + elif item == 'Uri' and ownerNS == 'MessageRegistryFile' and ownerType == 'Location': + # special handling for MessageRegistryFile Location Uri + if insideItem is not None and isinstance(insideItem, str) and len(insideItem) > 0: + uriItem = {'@odata.id': insideItem} + cType = ownerNS + '.' + ownerNS + cSchema = refDict.get(getNamespace(cType), (None, None))[1] + if cSchema is None: + cSchema = context + traverseLogger.debug('Registry Location Uri: resource = {}, type = {}, schema = {}' + .format(insideItem, cType, cSchema)) + linkList[prefix + str(item) + '.' + getType(propDict['attrs']['Name'])] = ( + uriItem.get('@odata.id'), autoExpand, cType, cSchema, uriItem) + elif item == 'Actions': + # special handling for @Redfish.ActionInfo payload annotations + if isinstance(insideItem, dict): + cType = 'ActionInfo.ActionInfo' + cSchema = refDict.get(getNamespace(cType), (None, None))[1] + for k, v in insideItem.items(): + if not isinstance(v, dict): + continue + uri = v.get('@Redfish.ActionInfo') + if isinstance(uri, str): + uriItem = {'@odata.id': uri} + traverseLogger.debug('{}{}: @Redfish.ActionInfo annotation uri = {}'.format(item, k, uri)) + linkList[prefix + str(item) + k + '.' 
+ cType] = ( + uriItem.get('@odata.id'), autoExpand, cType, cSchema, uriItem) + for propx in propList: propDict = propx.propDict + if propDict is None: + continue + propDict = propx.propDict key = propx.name item = getType(key).split(':')[-1] - if propDict['realtype'] == 'complex': - if jsonData.get(item) is not None: - if propDict.get('isCollection') is not None: - for listItem in jsonData[item]: - linkList.update(getAllLinks( - listItem, propDict['typeprops'].propList, refDict, prefix + item + '.', context)) + if 'Oem' in item and not oemCheck: + continue + cType = propDict.get('isCollection') + if propDict is None: + continue + elif propDict['realtype'] == 'complex': + tp = propDict['typeprops'] + if jsonData.get(item) is not None and tp is not None: + if cType is not None: + cTypeName = getType(cType) + for item in tp: + linkList.update(item.links) else: - linkList.update(getAllLinks( - jsonData[item], propDict['typeprops'].propList, refDict, prefix + item + '.', context)) + linkList.update(tp.links) traverseLogger.debug(str(linkList)) - except Exception as ex: + except Exception: traverseLogger.exception("Something went wrong") + # contents of Registries may be needed to validate other resources (like Bios), so move to front of linkList + if 'Registries.Registries' in linkList: + linkList.move_to_end('Registries.Registries', last=False) + traverseLogger.debug('getAllLinks: Moved Registries.Registries to front of list') return linkList -def getAnnotations(soup, refs, decoded, prefix=''): +def getAnnotations(metadata, decoded, prefix=''): """ Function to gather @ additional props in a payload """ + allowed_annotations = ['odata', 'Redfish', 'Privileges', 'Message'] + if metadata is not None: + schemaObj = metadata.schema_obj + else: + traverseLogger.warn("Cannot work on annotations without a service or metadata") + return False, [] additionalProps = list() # For every ...@ in decoded, check for its presence in refs # get the schema file for it @@ -969,22 
+1177,18 @@ def getAnnotations(soup, refs, decoded, prefix=''): annotationsFound += 1 splitKey = key.split('@', 1) fullItem = splitKey[1] - realType, refLink = refs.get(getNamespace(fullItem), (None, None)) - success, annotationSoup, uri = getSchemaDetails(realType, refLink) - traverseLogger.debug('{}, {}, {}, {}, {}'.format( - str(success), key, splitKey, decoded[key], realType)) - if success: - annotationRefs = getReferenceDetails(annotationSoup, refs, uri) - if isinstance(decoded[key], dict) and decoded[key].get('@odata.type') is not None: - payloadType = decoded[key].get('@odata.type').replace('#', '') - realType, refLink = annotationRefs.get(getNamespace(payloadType).split('.')[0], (None, None)) - success, annotationSoup, uri = getSchemaDetails(realType, refLink) - realItem = payloadType - tagtype = 'ComplexType' - else: - realItem = realType + '.' + fullItem.split('.', 1)[1] - tagtype = 'Term' + if getNamespace(fullItem) not in allowed_annotations: + traverseLogger.error("getAnnotations: {} is not an allowed annotation namespace, please check spelling/capitalization.".format(fullItem)) + continue + elif metadata is not None: + # add the namespace to the set of namespaces referenced by this service + metadata.add_service_namespace(getNamespace(fullItem)) + annotationSchemaObj = schemaObj.getSchemaFromReference(getNamespace(fullItem)) + traverseLogger.debug('{}, {}, {}'.format(key, splitKey, decoded[key])) + if annotationSchemaObj is not None: + realType = annotationSchemaObj.name + realItem = realType + '.' + fullItem.split('.', 1)[1] additionalProps.append( - PropItem(annotationSoup, annotationRefs, realItem, key, tagtype, None)) - traverseLogger.info("Annotations generated: {} out of {}".format(len(additionalProps), annotationsFound)) + PropItem(annotationSchemaObj, realItem, key, decoded[key])) + traverseLogger.debug("Annotations generated: {} out of {}".format(len(additionalProps), annotationsFound)) return True, additionalProps
'.format(cnt) + if results[item]['messages'] is not None: + for i in results[item]['messages']: + htmlStr += '' + htmlStr += '' + htmlStr += '' + htmlStr += '' + htmlStr += '' + htmlStr += ''.format(str(i.success.value).lower(), str(i.success.value)) + htmlStr += '' + htmlStr += '
Name Entry Value must be Service Value Success
' + str(i.name) + '' + str(i.entry) + '' + str(i.expected) + '' + str(i.actual) + '{}