MAPEX-68: Add Support for Groups #16

Merged · 26 commits · Nov 13, 2023
Changes from 24 commits
6,856 changes: 3,436 additions & 3,420 deletions mappings/cve/cve_attack-9.0.json → mappings/cve/cve-21.10.21_attack-9.0.json

Large diffs are not rendered by default.

18,786 changes: 9,400 additions & 9,386 deletions mappings/nist/10.1/r4/nist-800-r4_attack-10.1.json

Large diffs are not rendered by default.

19,039 changes: 9,527 additions & 9,512 deletions mappings/nist/10.1/r5/nist-800-r5_attack-10.1.json

Large diffs are not rendered by default.

19,479 changes: 9,747 additions & 9,732 deletions mappings/nist/12.1/r4/nist-800-r4_attack-12.1.json

Large diffs are not rendered by default.

19,736 changes: 9,876 additions & 9,860 deletions mappings/nist/12.1/r5/nist-800-r5_attack-12.1.json

Large diffs are not rendered by default.

16,494 changes: 8,254 additions & 8,240 deletions mappings/nist/8.2/r4/nist-800-r4_attack-8.2.json

Large diffs are not rendered by default.

16,783 changes: 8,399 additions & 8,384 deletions mappings/nist/8.2/r5/nist-800-r5_attack-8.2.json

Large diffs are not rendered by default.

17,329 changes: 8,671 additions & 8,658 deletions mappings/nist/9.0/r4/nist-800-r4_attack-9.0.json

Large diffs are not rendered by default.

17,531 changes: 8,773 additions & 8,758 deletions mappings/nist/9.0/r5/nist-800-r5_attack-9.0.json

Large diffs are not rendered by default.


14,966 changes: 0 additions & 14,966 deletions mappings/security_stack/Azure/Azure_attack-8.2.json

This file was deleted.

1 change: 1 addition & 0 deletions mappings/security_stack/GCP/GCP-22.06.28_attack-10.json

Large diffs are not rendered by default.

7,021 changes: 0 additions & 7,021 deletions mappings/security_stack/GCP/GCP_attack-10.json

This file was deleted.

5,500 changes: 2,757 additions & 2,743 deletions mappings/veris/1.3.5/veris-1.3.5_attack-9.0.json

Large diffs are not rendered by default.

6,573 changes: 3,293 additions & 3,280 deletions mappings/veris/1.3.7/veris-1.3.7_attack-12.1.json

Large diffs are not rendered by default.

98 changes: 51 additions & 47 deletions schema/mapex-unified-data-schema.json
@@ -3,11 +3,12 @@
"title": "ATT&CK Mappings Unified Schema",
"description": "Defines the data elements and properties of Mappings Explorer projects",
"type": "object",
"additionalProperties": false,
"properties": {
"additionalProperties": false,
"metadata": {
"description": "Properties that apply to all mappings in a project.",
"type": "object",
"additionalProperties": false,
"properties": {
"mapping_version": {
"description": "The version of the mapping project.",
@@ -20,9 +21,7 @@
"technology_domain": {
"description": "ATT&CK Technology Domain (Enterprise, Mobile, ICS).",
"type": "string",
"items": {
"enum": ["enterprise", "mobile", "ics"]
}
"enum": ["enterprise", "mobile", "ics"]
},
"creation_date": {
"description": "Creation date of this mapping file. Format: (MM/DD/YYYY) 1/21/2021",
@@ -37,20 +36,16 @@
"mapping_framework": {
"description": "The group of objects being mapped to ATT&CK.",
"type": "string",
"items": {
"enum": ["veris", "nist_800_53", "cve", "aws", "gcp", "azure"]
}
"enum": ["veris", "nist_800_53", "cve", "aws", "gcp", "azure"]
},
"mapping_framework_version": {
"description": "The Mapping Framework's version.",
"type": "string"
},
"mapping_framework_version_scheme": {
"mapping_framework_version_schema": {
"description": "How the Mapping Framework is versioned.",
"type": "array",
"items": {
"enum": ["framework", "date_accessed"]
}
"type": "string",
"enum": ["FRAMEWORK_VERSION", "ACCESS_DATE"]
},
"author": {
"description": "The author of this mapping file.",
@@ -86,6 +81,25 @@
}
}
}
},
"groups": {
"description": "The groups that the mappings can be organized in.",
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"required": ["id", "name"],
"properties": {
"id": {
"description": "The group's identifier.",
"type": "string"
},
"name": {
"description": "The group's name.",
"type": "string"
}
}
}
}
},
"required": [
@@ -95,14 +109,16 @@
"creation_date",
"mapping_framework",
"mapping_framework_version",
"mapping_types"
"mapping_types",
"groups"
]
},
"mapping_object": {
"mapping_objects": {
"description": "A single mapping between framework object and ATT&CK object.",
"type": "array",
"items": {
"type": "object",
"additionalProperties": false,
"properties": {
"attack_object_id": {
"description": "The unique identifier of the ATT&CK object being mapped. (T1648)",
@@ -126,47 +142,35 @@
},
"references": {
"description": "A list of links to documentation that provides additional detail in understanding the data contained in this mapping. (A link to the documentation for a security control.)",
"type": "string"
"type": "array",
"items": {
"type": "string"
}
},
"mapping_type": {
"description": "Mapping type has framework-specific values. This property describes how the capability_id and attack_object_id are related.",
"type": "string"
},
"if": {
"properties": {
"mapping_type": {
"const": "technique_scores"
}
}
"group": {
"description": "This property describes which group this mapping falls into.",
"type": "string"
},
"then": {
"properties": {
"score_category": {
"description": "Assessment of the effectiveness of the mitigations provided. (Protect, Detect, Respond)",
"type": "array",
"items": {
"enum": ["protect", "detect", "respond"]
}
},
"score_value": {
"description": "The score. (e.g. Minimal, Partial, Significant)",
"type": "array",
"items": {
"enum": ["minimal", "partial", "significant"]
}
},
"related_score": {
"description": "When the ATT&CK object ID is a sub-technique, related score contains the parent technique ID.",
"type": "string"
}
}
"score_category": {
"description": "Assessment of the effectiveness of the mitigations provided. (Protect, Detect, Respond)",
"type": "string",
"enum": ["protect", "detect", "respond"]
},
"else": {}
"score_value": {
"description": "The score. (e.g. Minimal, Partial, Significant)",
"type": "string",
"enum": ["minimal", "partial", "significant"]
},
"related_score": {
"description": "When the ATT&CK object ID is a sub-technique, related score contains the parent technique ID.",
"type": "string"
}
},
"required": ["mapping_type", "capability_id"],
"dependentRequired": {
"mapping_type": ["valid_mapping_types"]
}
"required": ["mapping_type", "capability_id"]
}
}
}
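
To make the new `groups` metadata property concrete, here is a minimal sketch that validates an illustrative groups array against the subschema added in this change. The group ids and names are hypothetical, and `jsonschema` is assumed to be available (it is already imported by `src/mapex/cli.py`).

```python
from jsonschema import validate

# Subschema for the new "groups" metadata property, copied from the diff above.
groups_subschema = {
    "description": "The groups that the mappings can be organized in.",
    "type": "array",
    "items": {
        "type": "object",
        "additionalProperties": False,
        "required": ["id", "name"],
        "properties": {
            "id": {"description": "The group's identifier.", "type": "string"},
            "name": {"description": "The group's name.", "type": "string"},
        },
    },
}

# Hypothetical group values for illustration only; real mapping files define
# their own framework-specific groups, and each mapping_object then points at
# one of these ids via its "group" property.
sample_groups = [
    {"id": "example_group_1", "name": "Example Group One"},
    {"id": "example_group_2", "name": "Example Group Two"},
]

validate(instance=sample_groups, schema=groups_subschema)  # raises ValidationError on failure
print("sample groups array conforms to the groups subschema")
```

Note that `groups` is also added to the metadata `required` list above, so every mapping file produced against this schema must declare at least an (possibly empty) groups array.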
50 changes: 23 additions & 27 deletions src/mapex/cli.py
@@ -2,6 +2,7 @@
import json
import os
import sys
from pathlib import Path

from jsonschema import validate
from mapex.write_parsed_mappings import (
@@ -11,53 +12,50 @@
write_parsed_mappings_yaml,
)

ROOT_DIR = os.path.abspath(os.curdir)
PARSED_MAPPINGS_DIR = f"{ROOT_DIR}/mappings"
MAPEX_DIR = f"{ROOT_DIR}/src/mapex"
ROOT_DIR = Path.cwd()
PARSED_MAPPINGS_DIR = ROOT_DIR / "mappings"
MAPEX_DIR = ROOT_DIR / "src" / "mapex"


def main():
"""Main entry point for `mapex` command line."""
args = _parse_args()
input_file = args.input_file
input_file_str = args.input_file
input_file_path = Path(args.input_file)
if args.command == "export":
output_file = args.output_file
output_file = Path(args.output_file)
file_type = args.file_type

# if input filepath is a file, export file
if os.path.isfile(input_file):
export_file(input_file, output_file, file_type)
if os.path.isfile(input_file_path):
export_file(input_file_path, output_file, file_type)

# if input filepath is a directory, walk through nested directories until file
# is found. Output files will go into the output filepath given within the
# nested directories it is in the input directory
elif os.path.isdir(input_file):
for dirpath, dirnames, filenames in os.walk(input_file):
for dir_name in dirnames:
nested_dirs = dirpath.replace(input_file, "")
if not os.path.exists(f"{output_file}{nested_dirs}/{dir_name}"):
os.makedirs(f"{output_file}{nested_dirs}/{dir_name}")

elif os.path.isdir(input_file_path):
for dirpath, _, filenames in os.walk(input_file_path):
if len(filenames):
for file in filenames:
input_filepath = f"{dirpath}/{file}"
nested_dirs = dirpath.replace(input_file, "")
output_filepath = f"{output_file}{nested_dirs}"
input_filepath = Path(dirpath) / file
nested_dirs = dirpath.replace(input_file_str + "/", "")
output_filepath = output_file / Path(nested_dirs)
output_filepath.mkdir(parents=True, exist_ok=True)
export_file(input_filepath, output_filepath, file_type)
else:
print("Input file must be a valid file or directory")
sys.exit(1)

elif args.command == "validate":
if os.path.isfile(input_file):
validation_errors = validate_file(input_file)
if os.path.isfile(input_file_path):
validation_errors = validate_file(input_file_path)
if validation_errors is not None:
sys.exit(1)

elif os.path.isdir(input_file):
for dirpath, _, filenames in os.walk(input_file):
elif os.path.isdir(input_file_path):
for dirpath, _, filenames in os.walk(input_file_path):
for file in filenames:
input_filepath = f"{dirpath}/{file}"
input_filepath = Path(dirpath) / file
validation_errors = validate_file(input_filepath)
if validation_errors is not None:
sys.exit(1)
@@ -96,10 +94,8 @@ def export_file(input_file, output_file, file_type):
parsed_mappings = read_json_file(input_file)

# assign output filename and filepath
filepath_parts = input_file.split("/")
input_filename = filepath_parts[-1]
output_filename = input_filename[0 : input_filename.rindex(".")]
output_filepath = f"{output_file}/{output_filename}"
output_filename = input_file.stem
output_filepath = output_file / output_filename

# export mappings
if file_type is None:
@@ -121,6 +117,6 @@

def validate_file(input_file):
parsed_mappings = read_json_file(input_file)
schema_filepath = f"{ROOT_DIR}/schema/mapex-unified-data-schema.json"
schema_filepath = ROOT_DIR / "schema" / "mapex-unified-data-schema.json"
schema = json.loads(open(schema_filepath, "r", encoding="UTF-8").read())
return validate(instance=parsed_mappings, schema=schema)
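
For the directory-walk refactor in the export path, a standalone sketch of the pathlib-based pattern may help: instead of stitching strings and pre-checking with `os.path.exists` before `os.makedirs`, the output tree is built with `Path` arithmetic and `mkdir(parents=True, exist_ok=True)`. This sketch computes the nested path with `Path.relative_to` (the PR uses an equivalent `str.replace`), and the directory names and the `print` stand-in for `export_file` are illustrative only.

```python
import os
from pathlib import Path


def export_tree(input_dir: Path, output_dir: Path) -> None:
    """Walk input_dir and mirror its nested directory layout under output_dir."""
    for dirpath, _, filenames in os.walk(input_dir):
        for filename in filenames:
            input_filepath = Path(dirpath) / filename
            # Recreate the file's nested location relative to the input root.
            nested_dirs = Path(dirpath).relative_to(input_dir)
            output_dirpath = output_dir / nested_dirs
            output_dirpath.mkdir(parents=True, exist_ok=True)
            # A real run would call export_file(input_filepath, output_dirpath, file_type).
            print(f"would export {input_filepath} -> {output_dirpath}")


# Hypothetical paths for illustration.
export_tree(Path("mappings"), Path("exported_mappings"))
```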