Skip to content

Commit

Permalink
fix typos
Browse files Browse the repository at this point in the history
  • Loading branch information
RainRat committed Feb 29, 2024
1 parent d63c6f1 commit 898d892
Show file tree
Hide file tree
Showing 25 changed files with 52 additions and 52 deletions.
2 changes: 1 addition & 1 deletion .github/CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ When we make a significant decision in how we maintain the project and what we c
we will document it in the [capa issues tracker](https://github.com/mandiant/capa/issues).
This is the best place to review our discussions about what/how/why we do things in the project.
If you have a question, check to see if it is documented there.
If it is *not* documented there, or you can't find an answer, please open a issue.
If it is *not* documented there, or you can't find an answer, please open an issue.
We'll link to existing issues when appropriate to keep discussions in one place.

## How Can I Contribute?
Expand Down
2 changes: 1 addition & 1 deletion .github/pyinstaller/hooks/hook-vivisect.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"pyqtwebengine",
# the above are imported by these viv modules.
# so really, we'd want to exclude these submodules of viv.
# but i dont think this works.
# but i don't think this works.
"vqt",
"vdb.qt",
"envi.qt",
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ function @ 0x4011C0
...
```

Additionally, capa also supports analyzing [CAPE](https://github.com/kevoreilly/CAPEv2) sandbox reports for dynamic capabilty extraction.
Additionally, capa also supports analyzing [CAPE](https://github.com/kevoreilly/CAPEv2) sandbox reports for dynamic capability extraction.
In order to use this, you first submit your sample to CAPE for analysis, and then run capa against the generated report (JSON).

Here's an example of running capa against a packed binary, and then running capa against the CAPE report of that binary:
Expand Down
6 changes: 3 additions & 3 deletions capa/capabilities/dynamic.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def find_thread_capabilities(
features: FeatureSet = collections.defaultdict(set)

# matches found at the call scope.
# might be found at different calls, thats ok.
# might be found at different calls, that's ok.
call_matches: MatchResults = collections.defaultdict(list)

for ch in extractor.get_calls(ph, th):
Expand Down Expand Up @@ -103,11 +103,11 @@ def find_process_capabilities(
process_features: FeatureSet = collections.defaultdict(set)

# matches found at the basic threads.
# might be found at different threads, thats ok.
# might be found at different threads, that's ok.
thread_matches: MatchResults = collections.defaultdict(list)

# matches found at the call scope.
# might be found at different calls, thats ok.
# might be found at different calls, that's ok.
call_matches: MatchResults = collections.defaultdict(list)

for th in extractor.get_threads(ph):
Expand Down
6 changes: 3 additions & 3 deletions capa/capabilities/static.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def find_basic_block_capabilities(
features: FeatureSet = collections.defaultdict(set)

# matches found at the instruction scope.
# might be found at different instructions, thats ok.
# might be found at different instructions, that's ok.
insn_matches: MatchResults = collections.defaultdict(list)

for insn in extractor.get_instructions(f, bb):
Expand Down Expand Up @@ -106,11 +106,11 @@ def find_code_capabilities(
function_features: FeatureSet = collections.defaultdict(set)

# matches found at the basic block scope.
# might be found at different basic blocks, thats ok.
# might be found at different basic blocks, that's ok.
bb_matches: MatchResults = collections.defaultdict(list)

# matches found at the instruction scope.
# might be found at different instructions, thats ok.
# might be found at different instructions, that's ok.
insn_matches: MatchResults = collections.defaultdict(list)

for bb in extractor.get_basic_blocks(fh):
Expand Down
2 changes: 1 addition & 1 deletion capa/features/address.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def __lt__(self, other):


class DynamicCallAddress(Address):
"""addesses a call in a dynamic execution trace"""
"""addresses a call in a dynamic execution trace"""

def __init__(self, thread: ThreadAddress, id: int):
assert id >= 0
Expand Down
4 changes: 2 additions & 2 deletions capa/features/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,7 +227,7 @@ def evaluate(self, ctx, short_circuit=True):
if self.value in feature.value:
matches[feature.value].update(locations)
if short_circuit:
# we found one matching string, thats sufficient to match.
# we found one matching string, that's sufficient to match.
# don't collect other matching strings in this mode.
break

Expand Down Expand Up @@ -322,7 +322,7 @@ def evaluate(self, ctx, short_circuit=True):
if self.re.search(feature.value):
matches[feature.value].update(locations)
if short_circuit:
# we found one matching string, thats sufficient to match.
# we found one matching string, that's sufficient to match.
# don't collect other matching strings in this mode.
break

Expand Down
2 changes: 1 addition & 1 deletion capa/features/extractors/base_extractor.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ class BBHandle:

@dataclass
class InsnHandle:
"""reference to a instruction recognized by a feature extractor.
"""reference to an instruction recognized by a feature extractor.
Attributes:
address: the address of the instruction.
Expand Down
6 changes: 3 additions & 3 deletions capa/features/extractors/cape/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ class FlexibleModel(BaseModel):


# use this type to indicate that we won't model this data.
# because its not relevant to our use in capa.
# because it's not relevant to our use in capa.
#
# while it's nice to have full coverage of the data shape,
# it can easily change and break our parsing.
Expand Down Expand Up @@ -356,8 +356,8 @@ class Behavior(ExactModel):
anomaly: List[str]
encryptedbuffers: List[EncryptedBuffer]
# these are small objects that describe atomic events,
# like file move, registery access.
# we'll detect the same with our API call analyis.
# like file move, registry access.
# we'll detect the same with our API call analysis.
enhanced: Skip = None


Expand Down
2 changes: 1 addition & 1 deletion capa/features/extractors/elf.py
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,7 @@ def _parse(self):
15: OS.AROS,
16: OS.FENIXOS,
17: OS.CLOUD,
# 53: "SORTFIX", # i can't find any reference to this OS, i dont think it exists
# 53: "SORTFIX", # i can't find any reference to this OS, i don't think it exists
# 64: "ARM_AEABI", # not an OS
# 97: "ARM", # not an OS
# 255: "STANDALONE", # not an OS
Expand Down
2 changes: 1 addition & 1 deletion capa/features/extractors/ghidra/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ def dereference_ptr(insn: ghidra.program.database.code.InstructionDB):
if thfunc and thfunc.isThunk():
return handle_thunk(to_deref)
else:
# if it doesn't poin to a thunk, it's usually a jmp to a label
# if it doesn't point to a thunk, it's usually a jmp to a label
return to_deref
if not dat:
return to_deref
Expand Down
4 changes: 2 additions & 2 deletions capa/features/extractors/viv/insn.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,7 +113,7 @@ def extract_insn_api_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Iterato
if f.vw.metadata["Format"] == "elf":
if "symtab" not in fh.ctx["cache"]:
# the symbol table gets stored as a function's attribute in order to avoid running
# this code everytime the call is made, thus preventing the computational overhead.
# this code every time the call is made, thus preventing the computational overhead.
try:
fh.ctx["cache"]["symtab"] = SymTab.from_viv(f.vw.parsedbin)
except Exception:
Expand Down Expand Up @@ -598,7 +598,7 @@ def extract_op_number_features(

if f.vw.probeMemory(v, 1, envi.memory.MM_READ):
# this is a valid address
# assume its not also a constant.
# assume it's not also a constant.
return

if insn.mnem == "add" and insn.opers[0].isReg() and insn.opers[0].reg == envi.archs.i386.regs.REG_ESP:
Expand Down
8 changes: 4 additions & 4 deletions capa/features/freeze/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -382,7 +382,7 @@ def dumps_static(extractor: StaticFeatureExtractor) -> str:
address=Address.from_capa(addr),
feature=feature_from_capa(feature),
) # type: ignore
# Mypy is unable to recognise `basic_block` as a argument due to alias
# Mypy is unable to recognise `basic_block` as an argument due to alias
for feature, addr in extractor.extract_basic_block_features(f, bb)
]

Expand Down Expand Up @@ -419,15 +419,15 @@ def dumps_static(extractor: StaticFeatureExtractor) -> str:
features=tuple(ffeatures),
basic_blocks=basic_blocks,
) # type: ignore
# Mypy is unable to recognise `basic_blocks` as a argument due to alias
# Mypy is unable to recognise `basic_blocks` as an argument due to alias
)

features = StaticFeatures(
global_=global_features,
file=tuple(file_features),
functions=tuple(function_features),
) # type: ignore
# Mypy is unable to recognise `global_` as a argument due to alias
# Mypy is unable to recognise `global_` as an argument due to alias

freeze = Freeze(
version=CURRENT_VERSION,
Expand All @@ -437,7 +437,7 @@ def dumps_static(extractor: StaticFeatureExtractor) -> str:
extractor=Extractor(name=extractor.__class__.__name__),
features=features,
) # type: ignore
# Mypy is unable to recognise `base_address` as a argument due to alias
# Mypy is unable to recognise `base_address` as an argument due to alias

return freeze.model_dump_json()

Expand Down
10 changes: 5 additions & 5 deletions capa/features/freeze/features.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
elif isinstance(f, capa.features.file.Import):
assert isinstance(f.value, str)
return ImportFeature(import_=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `import_` as a argument due to alias
# Mypy is unable to recognise `import_` as an argument due to alias

elif isinstance(f, capa.features.file.Section):
assert isinstance(f.value, str)
Expand All @@ -141,7 +141,7 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
elif isinstance(f, capa.features.file.FunctionName):
assert isinstance(f.value, str)
return FunctionNameFeature(function_name=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `function_name` as a argument due to alias
# Mypy is unable to recognise `function_name` as an argument due to alias

# must come before check for String due to inheritance
elif isinstance(f, capa.features.common.Substring):
Expand All @@ -160,7 +160,7 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
elif isinstance(f, capa.features.common.Class):
assert isinstance(f.value, str)
return ClassFeature(class_=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `class_` as a argument due to alias
# Mypy is unable to recognise `class_` as an argument due to alias

elif isinstance(f, capa.features.common.Namespace):
assert isinstance(f.value, str)
Expand Down Expand Up @@ -197,12 +197,12 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
elif isinstance(f, capa.features.insn.OperandNumber):
assert isinstance(f.value, int)
return OperandNumberFeature(index=f.index, operand_number=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `operand_number` as a argument due to alias
# Mypy is unable to recognise `operand_number` as an argument due to alias

elif isinstance(f, capa.features.insn.OperandOffset):
assert isinstance(f.value, int)
return OperandOffsetFeature(index=f.index, operand_offset=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `operand_offset` as a argument due to alias
# Mypy is unable to recognise `operand_offset` as an argument due to alias

else:
raise NotImplementedError(f"feature_from_capa({type(f)}) not implemented")
Expand Down
2 changes: 1 addition & 1 deletion capa/ghidra/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ Comments are added at the beginning of matched functions indicating matched capa

### Bookmarks

Bookmarks are added to functions that matched a capabilitiy that is mapped to a MITRE ATT&CK and/or Malware Behavior Catalog (MBC) technique. You can view these bookmarks in Ghidra's Bookmarks window.
Bookmarks are added to functions that matched a capability that is mapped to a MITRE ATT&CK and/or Malware Behavior Catalog (MBC) technique. You can view these bookmarks in Ghidra's Bookmarks window.
<div align="center">
<img src="https://github.com/mandiant/capa/assets/66766340/7f9a66a9-7be7-4223-91c6-4b8fc4651336" width=825>
</div>
Expand Down
2 changes: 1 addition & 1 deletion capa/ida/plugin/view.py
Original file line number Diff line number Diff line change
Expand Up @@ -764,7 +764,7 @@ def load_features_from_yaml(self, rule_text, update_preview=False):

node = self.make_child_node_from_feature(parent, parse_yaml_line(line.strip()))

# append our new node in case its a parent for another node
# append our new node in case it's a parent for another node
if node:
stack.append(node)

Expand Down
4 changes: 2 additions & 2 deletions capa/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,7 @@ def install_common_args(parser, wanted=None):

if "backend" in wanted:
backends = [
(BACKEND_AUTO, "(default) detect apppropriate backend automatically"),
(BACKEND_AUTO, "(default) detect appropriate backend automatically"),
(BACKEND_VIV, "vivisect"),
(BACKEND_PEFILE, "pefile (file features only)"),
(BACKEND_BINJA, "Binary Ninja"),
Expand Down Expand Up @@ -317,7 +317,7 @@ def install_common_args(parser, wanted=None):
# Library code should *not* call these functions.
#
# These main routines may raise `ShouldExitError` to indicate the program
# ...should exit. Its a tiny step away from doing `sys.exit()` directly.
# ...should exit. It's a tiny step away from doing `sys.exit()` directly.
# I'm not sure if we should just do that. In the meantime, programs should
# handle `ShouldExitError` and pass the status code to `sys.exit()`.
#
Expand Down
2 changes: 1 addition & 1 deletion capa/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def get_node_cost(node):

else:
# this should be all hash-lookup features.
# we give this a arbitrary weight of 1.
# we give this an arbitrary weight of 1.
# the only thing more "important" than this is checking OS/Arch/Format.
return 1

Expand Down
6 changes: 3 additions & 3 deletions capa/render/result_document.py
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ class Match(FrozenModel):
args:
success: did the node match?
node: the logic node or feature node.
children: any children of the logic node. not relevent for features, can be empty.
children: any children of the logic node. not relevant for features, can be empty.
locations: where the feature matched. not relevant for logic nodes (except range), can be empty.
captures: captured values from the string/regex feature, and the locations of those values.
"""
Expand Down Expand Up @@ -418,7 +418,7 @@ def from_capa(
# doc[locations] contains all matches for the given namespace.
# for example, the feature might be `match: anti-analysis/packer`
# which matches against "generic unpacker" and "UPX".
# in this case, doc[locations] contains locations for *both* of thse.
# in this case, doc[locations] contains locations for *both* of those.
#
# rule_matches contains the matches for the specific rule.
# this is a subset of doc[locations].
Expand Down Expand Up @@ -459,7 +459,7 @@ def to_capa(self, rules_by_name: Dict[str, capa.rules.Rule]) -> capa.engine.Resu
#
# children contains a single tree of results, corresponding to the logic of the matched rule.
# self.node.feature.match contains the name of the rule that was matched.
# so its all available to reconstruct, if necessary.
# so it's all available to reconstruct, if necessary.

return capa.features.common.Result(
success=self.success,
Expand Down
2 changes: 1 addition & 1 deletion capa/render/vverbose.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ def hanging_indent(s: str, indent: int) -> str:
def render_locations(ostream, layout: rd.Layout, locations: Iterable[frz.Address], indent: int):
import capa.render.verbose as v

# its possible to have an empty locations array here,
# it's possible to have an empty locations array here,
# such as when we're in MODE_FAILURE and showing the logic
# under a `not` statement (which will have no matched locations).
locations = sorted(locations)
Expand Down
18 changes: 9 additions & 9 deletions capa/rules/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -867,14 +867,14 @@ def rec(statement):
# we'll give precedence to namespaces, and then assume if that doesn't work,
# that it must be a rule name.
#
# we don't expect any collisions between namespaces and rule names, but its possible.
# we don't expect any collisions between namespaces and rule names, but it's possible.
# most likely would be collision between top level namespace (e.g. `host-interaction`) and rule name.
# but, namespaces tend to use `-` while rule names use ` `. so, unlikely, but possible.
if statement.value in namespaces:
# matches a namespace, so take precedence and don't even check rule names.
deps.update(r.name for r in namespaces[statement.value])
else:
# not a namespace, assume its a rule name.
# not a namespace, assume it's a rule name.
assert isinstance(statement.value, str)
deps.add(statement.value)

Expand Down Expand Up @@ -934,7 +934,7 @@ def _extract_subscope_rules_rec(self, statement):

# now recurse to other nodes in the logic tree.
# note: we cannot recurse into the subscope sub-tree,
# because its been replaced by a `match` statement.
# because it's been replaced by a `match` statement.
for child in statement.get_children():
yield from self._extract_subscope_rules_rec(child)

Expand Down Expand Up @@ -1224,7 +1224,7 @@ def get_rules_and_dependencies(rules: List[Rule], rule_name: str) -> Iterator[Ru
"""
from the given collection of rules, select a rule and its dependencies (transitively).
"""
# we evaluate `rules` multiple times, so if its a generator, realize it into a list.
# we evaluate `rules` multiple times, so if it's a generator, realize it into a list.
rules = list(rules)
namespaces = index_rules_by_namespace(rules)
rules_by_name = {rule.name: rule for rule in rules}
Expand Down Expand Up @@ -1257,7 +1257,7 @@ def ensure_rule_dependencies_are_met(rules: List[Rule]) -> None:
raises:
InvalidRule: if a dependency is not met.
"""
# we evaluate `rules` multiple times, so if its a generator, realize it into a list.
# we evaluate `rules` multiple times, so if it's a generator, realize it into a list.
rules = list(rules)
namespaces = index_rules_by_namespace(rules)
rules_by_name = {rule.name: rule for rule in rules}
Expand Down Expand Up @@ -1304,7 +1304,7 @@ def topologically_order_rules(rules: List[Rule]) -> List[Rule]:
assumes that the rule dependency graph is a DAG.
"""
# we evaluate `rules` multiple times, so if its a generator, realize it into a list.
# we evaluate `rules` multiple times, so if it's a generator, realize it into a list.
rules = list(rules)
namespaces = index_rules_by_namespace(rules)
rules_by_name = {rule.name: rule for rule in rules}
Expand Down Expand Up @@ -1465,7 +1465,7 @@ def rec(rule_name: str, node: Union[Feature, Statement]):
#
# they're global, so if they match at one location in a file,
# they'll match at every location in a file.
# so thats not helpful to decide how to downselect.
# so that's not helpful to decide how to downselect.
#
# and, a global rule will never be the sole selector in a rule.
pass
Expand Down Expand Up @@ -1535,10 +1535,10 @@ def rec(rule_name: str, node: Union[Feature, Statement]):
rec(rule_name, root)

# if a rule has a hard feature,
# dont consider it easy, and therefore,
# don't consider it easy, and therefore,
# don't index any of its features.
#
# otherwise, its an easy rule, and index its features
# otherwise, it's an easy rule, and index its features
for rules_with_feature in rules_by_feature.values():
rules_with_feature.difference_update(rules_with_hard_features)
easy_rules_by_feature = rules_by_feature
Expand Down
Loading

0 comments on commit 898d892

Please sign in to comment.