diff --git a/.github/workflows/build_and_test_run_fuzzer_benchmarks.py b/.github/workflows/build_and_test_run_fuzzer_benchmarks.py index 023ec75f2..a710d5588 100644 --- a/.github/workflows/build_and_test_run_fuzzer_benchmarks.py +++ b/.github/workflows/build_and_test_run_fuzzer_benchmarks.py @@ -92,7 +92,7 @@ def make_builds(benchmarks, fuzzer): # Sort benchmarks so that they get built in a deterministic order. fuzzer_benchmark_pairs = sorted(fuzzer_benchmark_pairs, key=lambda pair: pair[1]) - print('Building fuzzer-benchmark pairs: {}'.format(fuzzer_benchmark_pairs)) + print(f'Building fuzzer-benchmark pairs: {fuzzer_benchmark_pairs}') for _, benchmark in fuzzer_benchmark_pairs: make_target = get_make_target(fuzzer, benchmark) make_command = ['make', 'RUNNING_ON_CI=yes', '-j', make_target] @@ -116,7 +116,7 @@ def do_build(build_type, fuzzer, always_build): elif build_type == 'bug': benchmarks = benchmark_utils.get_bug_benchmarks() else: - raise Exception('Invalid build_type: %s' % build_type) + raise Exception(f'Invalid build_type: {build_type}') if always_build: # Always do a build if always_build is True. @@ -138,7 +138,7 @@ def do_build(build_type, fuzzer, always_build): def main(): """Build OSS-Fuzz or standard benchmarks with a fuzzer.""" if len(sys.argv) != 3: - print('Usage: %s ' % sys.argv[0]) + print(f'Usage: {sys.argv[0]} ') return 1 build_type = sys.argv[1] fuzzer = sys.argv[2] diff --git a/.github/workflows/fuzzers.yml b/.github/workflows/fuzzers.yml index accaef8c9..82da32c02 100644 --- a/.github/workflows/fuzzers.yml +++ b/.github/workflows/fuzzers.yml @@ -17,97 +17,101 @@ jobs: fail-fast: false matrix: fuzzer: + # Default fuzzers for general comparison evaluation. - afl - aflfast - - afl_um_prioritize - - afl_um_random - - afl_um_parallel - aflplusplus - - aflplusplus_optimal - - aflplusplus_tracepc - - aflplusplus_muttfuzz - - aflplusplus_um_prioritize - - aflplusplus_um_prioritize_75 - - aflplusplus_um_random - - aflplusplus_um_random_75 - - aflplusplus_um_random_3 - - aflplusplus_um_random_6 - - aflplusplus_um_parallel - aflsmart - centipede - - entropic + - eclipser - fairfuzz - honggfuzz - - honggfuzz_um_random - - honggfuzz_um_random_75 - - honggfuzz_um_prioritize - - honggfuzz_um_prioritize_75 - - honggfuzz_um_parallel - - lafintel - - klee + - libafl - libfuzzer - - libfuzzer_um_random - - libfuzzer_um_random_75 - - libfuzzer_um_prioritize - - libfuzzer_um_prioritize_75 - - libfuzzer_um_parallel - - libfuzzer_dataflow - - libfuzzer_dataflow_load - - libfuzzer_dataflow_store - - libfuzzer_dataflow_pre - mopt - - neuzz - - libafl - - libafl_text - - pythia_effect_bb - - pythia_bb - - fafuzz - - tortoisefuzz + # Greybox fuzzers. - wingfuzz - # Binary-only (greybox) fuzzers. - - eclipser - - eclipser_um_prioritize - - eclipser_um_prioritize_75 - - eclipser_um_random - - eclipser_um_random_75 - - eclipser_um_parallel - - afl_qemu - - honggfuzz_qemu - - weizz_qemu - - aflplusplus_qemu - - aflplusplus_frida - # Concolic execution - - fuzzolic_aflplusplus_z3 - - fuzzolic_aflplusplus_fuzzy - - eclipser_aflplusplus - - symqemu_aflplusplus - - symcc_aflplusplus - - symcc_aflplusplus_single - - symcc_afl - - symcc_afl_single - # Grammar fuzzers - - nautilus - - gramatron - - token_level - - grimoire + # - tortoisefuzz # To Be Fixed. + # Symbolic execution. + # - klee # To Be Fixed. + # Concolic execution. + # - symcc_aflplusplus # To Be Fixed. + # Grammar fuzzers. + # - nautilus # To Be Fixed. + # - gramatron # To Be Fixed. + # - token_level # To Be Fixed.
+ # - grimoire # To Be Fixed. # Temporary variants. - - aflplusplus_dict2file - - afl_2_52_b - - aflplusplus_cmplog - - aflplusplus_cmplog_r - - aflplusplus_cmplog_1587 - - aflplusplus_cmplog_1585 - - afl_random_favored - - entropic_execute_final - - libfuzzer_exeute_final - - introspector_driven_focus - - libfuzzer_fork_parallel - - centipede_function_filter + # - aflplusplus_um_prioritize + # - aflplusplus_um_prioritize_75 + # - aflplusplus_um_random + # - aflplusplus_um_random_3 + # - aflplusplus_um_random_6 + # - aflplusplus_um_random_75 + # To be Removed. + # - introspector_driven_focus + # - centipede_function_filter + # - aflplusplus_dict2file + # - afl_2_52_b + # - aflplusplus_cmplog + # - afl_random_favored + # - entropic_execute_final + # - libfuzzer_exeute_final + # - libfuzzer_fork_parallel + # - afl_um_prioritize + # - afl_um_random + # - afl_um_parallel + # - aflplusplus_optimal + # - aflplusplus_tracepc + # - aflplusplus_um_parallel + # - honggfuzz_um_random + # - honggfuzz_um_random_75 + # - honggfuzz_um_prioritize + # - honggfuzz_um_prioritize_75 + # - honggfuzz_um_parallel + # - libfuzzer_um_random + # - libfuzzer_um_random_75 + # - libfuzzer_um_prioritize + # - libfuzzer_um_prioritize_75 + # - libfuzzer_um_parallel + # - libfuzzer_dataflow + # - libfuzzer_dataflow_load + # - libfuzzer_dataflow_store + # - libfuzzer_dataflow_pre + # - libafl_text + # - pythia_effect_bb + ## Binary-only (greybox) fuzzers. + # - eclipser_um_prioritize + # - eclipser_um_prioritize_75 + # - eclipser_um_random + # - eclipser_um_random_75 + # - eclipser_um_parallel + ## Binary-only (greybox) fuzzers. + # - afl_qemu + # - honggfuzz_qemu + # - weizz_qemu + # - aflplusplus_qemu + # - aflplusplus_frida + ## Concolic fuzzers. + # - fuzzolic_aflplusplus_z3 + # - fuzzolic_aflplusplus_fuzzy + # - eclipser_aflplusplus + # - symqemu_aflplusplus + # - symcc_aflplusplus_single + # - symcc_afl + ## Concolic execution + # - symcc_afl_single + ## Deprecated. + # - entropic + # - lafintel + # - neuzz + # - pythia_bb + # - fafuzz benchmark_type: - oss-fuzz - standard - - bug + # - bug steps: - uses: actions/checkout@v2 @@ -126,7 +130,7 @@ jobs: - name: Setup Python environment uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.10.8 # Copied from: # https://docs.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions diff --git a/.github/workflows/presubmit.yml b/.github/workflows/presubmit.yml index 40c05e91f..68c293f99 100644 --- a/.github/workflows/presubmit.yml +++ b/.github/workflows/presubmit.yml @@ -21,7 +21,7 @@ jobs: - name: Setup Python environment uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: 3.10.8 # Copied from: # https://docs.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions diff --git a/.pylintrc b/.pylintrc index 5918b3f2d..f727d250f 100644 --- a/.pylintrc +++ b/.pylintrc @@ -1,28 +1,82 @@ # Generated using `.pylintrc. --generate-rcfile` and then modified. -[MASTER] +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. 
+#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) extension-pkg-whitelist= # Add files or directories to the blocklist. They should be base names, not # paths. -# -# Files under alembic are generated and are nonconforming. -ignore=alembic +ignore=CVS, + alembic, # Files under alembic are generated and are nonconforming. + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +ignore-paths=fuzzers/libfuzzer_focus_.*, + fuzzers/.*_um_.*, + fuzzers/.*_muttfuzz, + fuzzers/libafl_text, + fuzzers/.*_cmplog_.*, + -# Add files or directories matching the regex patterns to the blocklist. The -# regex matches against base names, not paths. -ignore-patterns= +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. jobs=0 # Control the amount of potential inferred values when inferring a single @@ -30,15 +84,19 @@ jobs=0 # complex, nested conditions. limit-inference-results=100 -# List of plugins (as comma separated values of python modules names) to load, +# List of plugins (as comma separated values of python module names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes -# Specify a configuration file. -#rcfile= +# Minimum Python version to use for version dependent checks. 
Will default to +# the version used to run pylint. +py-version=3.10 + +# Discover python modules and packages in the file system subtree. +recursive=no # When enabled, pylint would attempt to guess common misconfiguration and emit # user-friendly hints instead of false-positive error messages. @@ -48,32 +106,56 @@ suggestion-mode=yes # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if +# disable everything first and then re-enable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use "--disable=all --enable=classes # --disable=W". 
-disable=parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, +disable=raw-checker-failed, bad-inline-option, locally-disabled, file-ignored, @@ -81,67 +163,7 @@ disable=parameter-unpacking, useless-suppression, deprecated-pragma, use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, - deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape, + raw-checker-failed, fixme, too-few-public-methods, duplicate-code, # Too many false positives. @@ -154,206 +176,43 @@ disable=parameter-unpacking, enable=c-extension-no-member -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=optparse.Values,sys.exit - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. 
-attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - q, - ex, - Run, - logger, - _, - df, - fs - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[FORMAT] +[IMPORTS] -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=\s*(# )?? +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no -# Number of spaces of indent required inside a hanging or continued line. 
-indent-after-paren=4 +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=regsub, + TERMIOS, + Bastion, + rexec -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= -# Maximum number of characters on a single line. -max-line-length=80 +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= -# Maximum number of lines in a module. -max-module-lines=1000 +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= [MISCELLANEOUS] @@ -363,6 +222,9 @@ notes=FIXME, XXX, TODO +# Regular expression of note tags to take in consideration. +notes-rgx= + [TYPECHECK] @@ -374,11 +236,7 @@ contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. -generated-members=experiment_df.fuzzer,session - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes +generated-members= # Tells whether to warn about missing members when the owner of the attribute # is inferred to be None. @@ -392,16 +250,16 @@ ignore-none=yes # the rest of the inferred objects. ignore-on-opaque-inference=yes +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace # Show a hint with possible names when a member name was not found. 
The aspect # of finding the hint is based on edit distance. @@ -415,25 +273,22 @@ missing-member-hint-distance=1 # showing a hint for a missing member. missing-member-max-choices=1 +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 +# List of decorators that change the signature of a decorated function. +signature-mutators= -# Spelling dictionary name. Available dictionaries: none. To make it working -# install python-enchant package.. -spelling-dict= -# List of comma separated words that should not be checked. -spelling-ignore-words= +[LOGGING] -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging [VARIABLES] @@ -445,6 +300,9 @@ additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes +# List of names allowed to shadow builtins +allowed-redefined-builtins= + # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_, @@ -454,8 +312,7 @@ callbacks=cb_, # not be used). dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. +# Argument names that match this expression will be ignored. ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. @@ -463,87 +320,60 @@ init-import=no # List of qualified module names which can have objects that can redefine # builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,io,builtins - - -[LOGGING] - -# Format style used to check logging format string. `old` means using % -# formatting, while `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[STRING] +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io -# This flag controls whether the implicit-str-concat-in-sequence should -# generate a warning on implicit string concatenation in sequences defined over -# several lines. -check-str-concat-over-line-jumps=no +[SPELLING] -[IMPORTS] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the 'python-enchant' package. +spelling-dict= -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. 
-analyse-fallback-blocks=no +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=regsub, - TERMIOS, - Bastion, - rexec +# List of comma separated words that should not be checked. +spelling-ignore-words= -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= +[STRING] -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=yes +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no -[CLASSES] -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp +[EXCEPTIONS] -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=BaseException, + Exception -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs +[DESIGN] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= -[DESIGN] +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= # Maximum number of arguments for function / method. max-args=5 @@ -551,7 +381,7 @@ max-args=5 # Maximum number of attributes for a class (see R0902). max-attributes=7 -# Maximum number of boolean expressions in an if statement. +# Maximum number of boolean expressions in an if statement (see R0916). max-bool-expr=5 # Maximum number of branch for function / method body. @@ -576,8 +406,234 @@ max-statements=50 min-public-methods=2 -[EXCEPTIONS] +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). 
+indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=80 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=no + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + q, + ex, + Run, + logger, + _, + df, + fs + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. 
+include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[METHOD_ARGS] -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=Exception +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 
'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request diff --git a/Makefile b/Makefile index 62f2c0ee7..50bf99277 100644 --- a/Makefile +++ b/Makefile @@ -48,7 +48,7 @@ SHELL := /bin/bash VENV_ACTIVATE := .venv/bin/activate ${VENV_ACTIVATE}: requirements.txt - python3.9 -m venv .venv || python3 -m venv .venv + python3.10 -m venv .venv || python3 -m venv .venv source ${VENV_ACTIVATE} && python3 -m pip install --upgrade pip setuptools && python3 -m pip install -r requirements.txt install-dependencies: ${VENV_ACTIVATE} diff --git a/analysis/benchmark_results.py b/analysis/benchmark_results.py index 178723fdb..a9dd8f918 100644 --- a/analysis/benchmark_results.py +++ b/analysis/benchmark_results.py @@ -73,10 +73,7 @@ def _relevant_column(self): the analysis (e.g., 'edges_covered', or 'bugs_covered').""" return 'edges_covered' if self.type == 'code' else 'bugs_covered' - @property - @functools.lru_cache() - # TODO(lszekeres): With python3.8+, replace above two decorators with: - # @functools.cached_property + @functools.cached_property def _benchmark_df(self): exp_df = self._experiment_df return exp_df[exp_df.benchmark == self.name] diff --git a/analysis/coverage_data_utils.py b/analysis/coverage_data_utils.py index 90cb22d7c..30a7d2a40 100644 --- a/analysis/coverage_data_utils.py +++ b/analysis/coverage_data_utils.py @@ -107,7 +107,7 @@ def get_fuzzer_covered_branches(fuzzer: str, benchmark: str, filestore: str): logger.warning( 'covered_branches.json file: %s could not be copied.', src_file) return {} - with open(dst_file.name) as json_file: + with open(dst_file.name, encoding='utf-8') as json_file: return json.load(json_file) diff --git a/analysis/data_utils.py b/analysis/data_utils.py index c437d8c95..4fb2dfaf4 100644 --- a/analysis/data_utils.py +++ b/analysis/data_utils.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. """Utility functions for data (frame) transformations.""" +import pandas as pd + from analysis import stat_tests from common import benchmark_utils from common import environment @@ -47,7 +49,7 @@ def validate_data(experiment_df): missing_columns = expected_columns.difference(experiment_df.columns) if missing_columns: raise ValueError( - 'Missing columns in experiment data: {}'.format(missing_columns)) + f'Missing columns in experiment data: {missing_columns}') def drop_uninteresting_columns(experiment_df): @@ -89,7 +91,7 @@ def clobber_experiments_data(df, experiments): experiment_pairs = experiment_data[['benchmark', 'fuzzer']].apply(tuple, axis=1) to_include = experiment_data[~experiment_pairs.isin(covered_pairs)] - result = result.append(to_include) + result = pd.concat([result, to_include]) return result @@ -232,7 +234,7 @@ def experiment_summary(experiment_snapshots_df): def benchmark_rank_by_mean(benchmark_snapshot_df, key='edges_covered'): """Returns ranking of fuzzers based on mean coverage.""" assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!'
- means = benchmark_snapshot_df.groupby('fuzzer')[key].mean() + means = benchmark_snapshot_df.groupby('fuzzer')[key].mean().astype(int) means.rename('mean cov', inplace=True) return means.sort_values(ascending=False) @@ -240,7 +242,7 @@ def benchmark_rank_by_mean(benchmark_snapshot_df, key='edges_covered'): def benchmark_rank_by_median(benchmark_snapshot_df, key='edges_covered'): """Returns ranking of fuzzers based on median coverage.""" assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!' - medians = benchmark_snapshot_df.groupby('fuzzer')[key].median() + medians = benchmark_snapshot_df.groupby('fuzzer')[key].median().astype(int) medians.rename('median cov', inplace=True) return medians.sort_values(ascending=False) @@ -248,8 +250,9 @@ def benchmark_rank_by_median(benchmark_snapshot_df, key='edges_covered'): def benchmark_rank_by_percent(benchmark_snapshot_df, key='edges_covered'): """Returns ranking of fuzzers based on median (normalized/%) coverage.""" assert benchmark_snapshot_df.time.nunique() == 1, 'Not a snapshot!' - max_key = "{}_percent_max".format(key) - medians = benchmark_snapshot_df.groupby('fuzzer')[max_key].median() + max_key = f'{key}_percent_max' + medians = benchmark_snapshot_df.groupby('fuzzer')[max_key].median().astype( + int) return medians.sort_values(ascending=False) @@ -378,11 +381,11 @@ def add_relative_columns(experiment_df): for key in ['edges_covered', 'bugs_covered']: if key not in df.columns: continue - new_col = "{}_percent_max".format(key) + new_col = f'{key}_percent_max' df[new_col] = df[key] / df.groupby('benchmark')[key].transform( 'max') * 100.0 - new_col = "{}_percent_fmax".format(key) + new_col = f'{key}_percent_fmax' df[new_col] = df[key] / df.groupby(['benchmark', 'fuzzer' ])[key].transform('max') * 100 return df diff --git a/analysis/experiment_results.py b/analysis/experiment_results.py index 9c123f3db..a60d6d9b5 100644 --- a/analysis/experiment_results.py +++ b/analysis/experiment_results.py @@ -114,10 +114,7 @@ def description_link(commit, fuzzer): df.index = df.index.map(lambda fuzzer: description_link(commit, fuzzer)) return df - @property - @functools.lru_cache() - # TODO(lszekeres): With python3.8+, replace above two decorators with: - # @functools.cached_property + @functools.cached_property def _experiment_snapshots_df(self): """Data frame containing only the time snapshots, for each benchmark, based on which we do further analysis, i.e., statistical tests and @@ -197,7 +194,7 @@ def _relative_summary_table(self, key_column='edges_covered'): pivot = pivot.style\ .background_gradient(axis=1, cmap=whbl, vmin=95, vmax=100)\ .highlight_max(axis=1, color='lightgreen')\ - .format("{:.2f}")\ + .format('{:.2f}')\ .apply(data_utils.underline_row, axis=1, subset=idx)\ .set_table_styles(self._SUMMARY_TABLE_STYLE) return pivot @@ -223,7 +220,7 @@ def found_bugs_summary_table(self): groups = groups.reset_index() pivot = groups.pivot(index='benchmark', columns='fuzzer', - values="crash_key") + values='crash_key') # save fuzzer names fuzzer_names = pivot.columns pivot['Total'] = self._full_experiment_df.groupby( @@ -251,7 +248,7 @@ def highlight_max(row): # Sort fuzzers left to right by FuzzerSum pivot = pivot.sort_values(by='FuzzerSum', axis=1, ascending=False) pivot = pivot.style\ - .format("{:.0f}")\ + .format('{:.0f}')\ .apply(highlight_max, axis=1, subset=fuzzer_names)\ .apply(data_utils.underline_row, axis=1, subset=idx)\ .set_table_styles(self._SUMMARY_TABLE_STYLE) diff --git a/analysis/plotting.py b/analysis/plotting.py index 
523d1224f..ce2ea0d8f 100644 --- a/analysis/plotting.py +++ b/analysis/plotting.py @@ -13,12 +13,12 @@ # limitations under the License. """Plotting functions.""" -import matplotlib.pyplot as plt -import matplotlib.colors as colors import numpy as np import Orange import seaborn as sns +from matplotlib import colors +from matplotlib import pyplot as plt from analysis import data_utils from common import experiment_utils @@ -36,11 +36,11 @@ def _formatted_hour_min(seconds): hours = int(seconds / 60 / 60) minutes = int(seconds / 60) % 60 if hours: - time_string += '%dh' % hours + time_string += f'{hours}h' if minutes: if hours: time_string += ':' - time_string += '%dm' % minutes + time_string += f'{minutes}m' return time_string @@ -54,7 +54,7 @@ def _formatted_title(benchmark_snapshot_df): stats_string += _formatted_hour_min(snapshot_time) trial_count = benchmark_snapshot_df.fuzzer.value_counts().min() - stats_string += ', %d trials/fuzzer' % trial_count + stats_string += f', {trial_count} trials/fuzzer' stats_string += ')' return stats_string @@ -109,7 +109,6 @@ def __init__(self, fuzzers, quick=False, logscale=False): self._quick = quick self._logscale = logscale - # pylint: disable=no-self-use def _write_plot_to_image(self, plot_function, data, @@ -126,7 +125,7 @@ def _write_plot_to_image(self, fig, axes = plt.subplots(figsize=figsize) try: plot_function(data, axes=axes, **kwargs) - fig.savefig(image_path, bbox_inches="tight") + fig.savefig(image_path, bbox_inches='tight') finally: plt.close(fig) @@ -257,10 +256,10 @@ def box_or_violin_plot(self, showmeans=True, meanprops=mean_props) - sns.stripplot(**common_args, size=3, color="black", alpha=0.6) + sns.stripplot(**common_args, size=3, color='black', alpha=0.6) axes.set_title(_formatted_title(benchmark_snapshot_df)) - ylabel = 'Reached {} coverage'.format('bug' if bugs else 'branch') + ylabel = f'Reached {"bug" if bugs else "branch"} coverage' axes.set(ylabel=ylabel) axes.set(xlabel='Fuzzer (highest median coverage on the left)') axes.set_xticklabels(axes.get_xticklabels(), @@ -339,7 +338,7 @@ def ranking_plot(self, benchmark_snapshot_df, axes=None, bugs=False): ax=axes) axes.set_title(_formatted_title(benchmark_snapshot_df)) - ylabel = 'Reached {} coverage'.format('bug' if bugs else 'branch') + ylabel = f'Reached {"bug" if bugs else "branch"} coverage' axes.set(ylabel=ylabel) axes.set(xlabel='Fuzzer (highest median coverage on the left)') axes.set_xticklabels(axes.get_xticklabels(), @@ -387,8 +386,8 @@ def _generic_heatmap_plot(values, axes, args, shrink_cbar=0.2): args['annot'] = False axis = sns.heatmap(values, ax=axes, **args) - axis.set_ylabel("") - axis.set_xlabel("") + axis.set_ylabel('') + axis.set_xlabel('') label_args = {'rotation': 0, 'horizontalalignment': 'right'} axis.set_yticklabels(axis.get_yticklabels(), **label_args) label_args = {'rotation': 270, 'horizontalalignment': 'right'} @@ -424,7 +423,7 @@ def _pvalue_heatmap_plot(self, p_values, axes=None, symmetric=False): heatmap_args = { 'cmap': cmap, 'mask': mask if symmetric else None, - 'fmt': ".3f", + 'fmt': '.3f', 'norm': norm } @@ -464,7 +463,7 @@ def _a12_heatmap_plot(self, a12_values, axes=None): 'vmax': 1.0, 'square': True, 'annot': True, - 'fmt': ".2f" + 'fmt': '.2f' } return self._generic_heatmap_plot(a12_values, axes, @@ -486,7 +485,7 @@ def write_critical_difference_plot(self, average_ranks, num_of_benchmarks, critical_difference) fig = plt.gcf() try: - fig.savefig(image_path, bbox_inches="tight") + fig.savefig(image_path, bbox_inches='tight') finally: 
plt.close(fig) diff --git a/analysis/stat_tests.py b/analysis/stat_tests.py index 4b15ea4a9..f6a2fbe8d 100644 --- a/analysis/stat_tests.py +++ b/analysis/stat_tests.py @@ -198,7 +198,8 @@ def a12(measurements_x, measurements_y): rank_x_sum = rank_x.sum() # A = (R1/n1 - (n1+1)/2)/n2 # formula (14) in Vargha and Delaney, 2000 # The formula to compute A has been transformed to minimize accuracy errors. - # See: http://mtorchiano.wordpress.com/2014/05/19/effect-size-of-r-precision/ + # See: + # http://mtorchiano.wordpress.com/2014/05/19/effect-size-of-r-precision/ a12_measure = (2 * rank_x_sum - x_size * (x_size + 1)) / ( 2 * y_size * x_size) # equivalent formula to avoid accuracy errors diff --git a/analysis/test_coverage_data_utils.py b/analysis/test_coverage_data_utils.py index 18b2b1c65..533d9b830 100644 --- a/analysis/test_coverage_data_utils.py +++ b/analysis/test_coverage_data_utils.py @@ -88,8 +88,8 @@ def test_get_benchmark_cov_dict(): benchmark_cov_dict = coverage_data_utils.get_benchmark_cov_dict( coverage_dict, benchmark) expected_cov_dict = { - "afl": {(0, 0, 3, 3), (0, 0, 2, 2), (0, 0, 1, 1)}, - "libfuzzer": {(0, 0, 4, 4), (0, 0, 3, 3), (0, 0, 2, 3), (0, 0, 1, 1)} + 'afl': {(0, 0, 3, 3), (0, 0, 2, 2), (0, 0, 1, 1)}, + 'libfuzzer': {(0, 0, 4, 4), (0, 0, 3, 3), (0, 0, 2, 3), (0, 0, 1, 1)} } assert expected_cov_dict == benchmark_cov_dict diff --git a/analysis/test_data/pairwise_unique_coverage_heatmap-failed-diff.png b/analysis/test_data/pairwise_unique_coverage_heatmap-failed-diff.png index c38ed38e2..5dc618e44 100644 Binary files a/analysis/test_data/pairwise_unique_coverage_heatmap-failed-diff.png and b/analysis/test_data/pairwise_unique_coverage_heatmap-failed-diff.png differ diff --git a/analysis/test_data_utils.py b/analysis/test_data_utils.py index 967a39a8e..76a7e5074 100644 --- a/analysis/test_data_utils.py +++ b/analysis/test_data_utils.py @@ -67,14 +67,14 @@ def create_experiment_data(experiment='test_experiment', def test_validate_data_empty(): experiment_df = pd.DataFrame() - with pytest.raises(ValueError, match="Empty"): + with pytest.raises(ValueError, match='Empty'): data_utils.validate_data(experiment_df) def test_validate_data_missing_columns(): experiment_df = create_experiment_data() experiment_df.drop(columns=['trial_id', 'time'], inplace=True) - with pytest.raises(ValueError, match="Missing columns.*trial_id"): + with pytest.raises(ValueError, match='Missing columns.*trial_id'): data_utils.validate_data(experiment_df) @@ -89,7 +89,7 @@ def test_clobber_experiments_data(): """Tests that clobber experiments data clobbers stale snapshots from earlier experiments.""" df = pd.concat( - create_experiment_data('experiment-%d' % experiment_num) + create_experiment_data(f'experiment-{experiment_num}') for experiment_num in range(3)) df.reset_index(inplace=True) @@ -146,7 +146,7 @@ def test_filter_max_time(): assert filtered_df.time.unique().tolist() == list(expected_times) -@pytest.mark.parametrize("threshold", [0.3, 0.8, 1.0]) +@pytest.mark.parametrize('threshold', [0.3, 0.8, 1.0]) def test_benchmark_snapshot_complete(threshold): """Tests that the snapshot data contains only the latest timestamp for all trials, in case all trials have the same lengths. 
This should happen @@ -168,7 +168,7 @@ def test_benchmark_snapshot_complete(threshold): @pytest.mark.parametrize( - "threshold, expected_snapshot_time, expected_trials_left", [ + 'threshold, expected_snapshot_time, expected_trials_left', [ (1.0, 5, 4), (0.8, 5, 4), (0.7, 7, 3), @@ -355,4 +355,4 @@ def test_experiment_rank_by_average_normalized_score(): pd_test.assert_series_equal(ranking, expected_ranking, check_names=False, - check_less_precise=True) + rtol=10**-3) diff --git a/benchmarks/arrow_parquet-arrow-fuzz/Dockerfile b/benchmarks/arrow_parquet-arrow-fuzz/Dockerfile index 321e76b83..ed0ea59ec 100644 --- a/benchmarks/arrow_parquet-arrow-fuzz/Dockerfile +++ b/benchmarks/arrow_parquet-arrow-fuzz/Dockerfile @@ -14,11 +14,10 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c ENV DEBIAN_FRONTEND noninteractive -RUN apt-get update -y -q && \ - apt-get upgrade -y -q && \ +RUN apt-get update && \ apt-get install -y -q --no-install-recommends \ bison \ build-essential \ @@ -28,6 +27,11 @@ RUN apt-get update -y -q && \ ninja-build \ python3 -RUN git clone --depth=1 https://github.com/apache/arrow.git $SRC/arrow +RUN git clone \ + --depth=1 \ + --branch apache-arrow-10.0.0 \ + --recurse-submodules \ + https://github.com/apache/arrow.git \ + $SRC/arrow COPY build.sh thrift.patch $SRC/ diff --git a/benchmarks/arrow_parquet-arrow-fuzz/benchmark.yaml b/benchmarks/arrow_parquet-arrow-fuzz/benchmark.yaml index 3509f6ab3..dd902d883 100644 --- a/benchmarks/arrow_parquet-arrow-fuzz/benchmark.yaml +++ b/benchmarks/arrow_parquet-arrow-fuzz/benchmark.yaml @@ -1,9 +1,8 @@ -commit: fb8868d25570234f0f18e8bcdb6ccb0c3b63d0f0 -commit_date: 2020-02-25 02:36:00+00:00 fuzz_target: parquet-arrow-fuzz project: arrow -type: bug unsupported_fuzzers: + - honggfuzz # To Be Fixed. + - libafl # To Be Fixed. 
- aflcc - afl_qemu - aflplusplus_qemu diff --git a/benchmarks/arrow_parquet-arrow-fuzz/build.sh b/benchmarks/arrow_parquet-arrow-fuzz/build.sh index 8c377a604..31e12b625 100755 --- a/benchmarks/arrow_parquet-arrow-fuzz/build.sh +++ b/benchmarks/arrow_parquet-arrow-fuzz/build.sh @@ -53,7 +53,7 @@ cmake ${ARROW} -GNinja \ -DPARQUET_BUILD_EXAMPLES=off \ -DPARQUET_BUILD_EXECUTABLES=off \ -DPARQUET_REQUIRE_ENCRYPTION=off \ - -DARROW_WITH_BROTLI=off \ + -DARROW_WITH_BROTLI=on \ -DARROW_WITH_BZ2=off \ -DARROW_WITH_LZ4=off \ -DARROW_WITH_SNAPPY=off \ diff --git a/benchmarks/aspell_aspell_fuzzer/Dockerfile b/benchmarks/aspell_aspell_fuzzer/Dockerfile index 44327831c..75f40119d 100644 --- a/benchmarks/aspell_aspell_fuzzer/Dockerfile +++ b/benchmarks/aspell_aspell_fuzzer/Dockerfile @@ -14,12 +14,19 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get upgrade -y && apt-get install -y pkg-config wget -RUN git clone https://github.com/gnuaspell/aspell.git $SRC/aspell -RUN git clone --depth 1 -b master https://github.com/gnuaspell/aspell-fuzz.git $SRC/aspell-fuzz +RUN git clone \ + --depth 1 \ + --branch rel-0.60.8 \ + https://github.com/gnuaspell/aspell.git \ + $SRC/aspell + +RUN git clone \ + https://github.com/gnuaspell/aspell-fuzz.git \ + $SRC/aspell-fuzz # Suppress an immediate UBSan violation that prevents fuzzing RUN wget https://github.com/GNUAspell/aspell/commit/a2cd7ffd25e6213f36139cda4a911e2e03ed417c.patch -O $SRC/aspell/fix_aspell_ub.patch diff --git a/benchmarks/aspell_aspell_fuzzer/benchmark.yaml b/benchmarks/aspell_aspell_fuzzer/benchmark.yaml index ba25ed4f8..ce42d61d8 100644 --- a/benchmarks/aspell_aspell_fuzzer/benchmark.yaml +++ b/benchmarks/aspell_aspell_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: b503ec3e3e134dbc28bf129c012e5d6245a39472 -commit_date: 2019-08-05 23:54:02+00:00 fuzz_target: aspell_fuzzer project: aspell -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/bloaty_fuzz_target/Dockerfile b/benchmarks/bloaty_fuzz_target/Dockerfile index 30b627deb..f64e085d8 100644 --- a/benchmarks/bloaty_fuzz_target/Dockerfile +++ b/benchmarks/bloaty_fuzz_target/Dockerfile @@ -14,8 +14,19 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a -RUN apt-get update && apt-get upgrade -y && apt-get install -y cmake ninja-build g++ -RUN git clone --depth 1 https://github.com/google/bloaty.git bloaty +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN apt-get update && \ + apt-get install -y \ + cmake \ + ninja-build \ + g++ \ + libz-dev + +RUN git clone \ + --depth 1 \ + --branch v1.1\ + https://github.com/google/bloaty.git + WORKDIR bloaty COPY build.sh $SRC/ diff --git a/benchmarks/curl_curl_fuzzer_http/Dockerfile b/benchmarks/curl_curl_fuzzer_http/Dockerfile index 5cfeb382b..499528599 100644 --- a/benchmarks/curl_curl_fuzzer_http/Dockerfile +++ b/benchmarks/curl_curl_fuzzer_http/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM 
gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c # Curl will be checked out to the commit hash specified in benchmark.yaml. RUN git clone https://github.com/curl/curl-fuzzer /src/curl_fuzzer diff --git a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/Dockerfile b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/Dockerfile index 6f519d7b1..1148f8968 100644 --- a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/Dockerfile +++ b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/Dockerfile @@ -14,37 +14,98 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:c0eeba3437a2173c6a7115cf43062b351ed48cc2b54f54f895423d6a5af1dc3e -ADD bionic.list /etc/apt/sources.list.d/bionic.list -ADD nasm_apt.pin /etc/apt/preferences -RUN apt-get update && apt-get upgrade -y && apt-get install -y make autoconf automake libtool build-essential \ - libass-dev libfreetype6-dev libsdl1.2-dev \ - libvdpau-dev libxcb1-dev libxcb-shm0-dev \ - pkg-config texinfo libbz2-dev zlib1g-dev yasm cmake mercurial wget \ - xutils-dev libpciaccess-dev nasm - -RUN git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg - -RUN wget https://www.alsa-project.org/files/pub/lib/alsa-lib-1.1.0.tar.bz2 -RUN git clone -n https://gitlab.freedesktop.org/mesa/drm.git -RUN cd drm; git checkout 5db0f7692d1fdf05f9f6c0c02ffa5a5f4379c1f3 -RUN git clone --depth 1 https://github.com/mstorsjo/fdk-aac.git -ADD https://sourceforge.net/projects/lame/files/latest/download lame.tar.gz -RUN git clone git://anongit.freedesktop.org/xorg/lib/libXext && \ - cd /src/libXext && \ - git checkout d965a1a8ce9331d2aaf1c697a29455ad55171b36 - -RUN git clone -n git://anongit.freedesktop.org/git/xorg/lib/libXfixes -RUN cd libXfixes; git checkout 174a94975af710247719310cfc53bd13e1f3b44d -RUN git clone --depth 1 https://github.com/intel/libva -RUN git clone --depth 1 -b libvdpau-1.2 git://people.freedesktop.org/~aplattner/libvdpau -RUN git clone --depth 1 https://chromium.googlesource.com/webm/libvpx -RUN git clone --depth 1 https://github.com/xiph/ogg -RUN git clone --depth 1 https://github.com/xiph/opus -RUN git clone --depth 1 https://github.com/xiph/theora -RUN git clone --depth 1 https://github.com/xiph/vorbis -RUN git clone --depth 1 https://code.videolan.org/videolan/x264.git -RUN git clone --depth 1 https://bitbucket.org/multicoreware/x265_git.git -RUN mv x265_git x265 +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN apt-get update && \ + apt-get install -y \ + make \ + autoconf \ + libtool \ + build-essential \ + libass-dev \ + libfreetype6-dev \ + libsdl1.2-dev \ + libvdpau-dev \ + libxcb1-dev \ + libxcb-shm0-dev \ + libdrm-dev \ + pkg-config \ + texinfo \ + libbz2-dev \ + zlib1g-dev \ + yasm \ + cmake \ + mercurial \ + wget \ + xutils-dev \ + libpciaccess-dev \ + nasm \ + meson \ + rsync && \ + curl \ + -LO \ + http://mirrors.kernel.org/ubuntu/pool/main/a/automake-1.16/automake_1.16.5-1.3_all.deb && \ + apt install ./automake_1.16.5-1.3_all.deb && \ + rm automake_1.16.5-1.3_all.deb + +RUN git clone \ + --branch v1.2.8 \ + --depth 1 \ + https://github.com/alsa-project/alsa-lib.git + +RUN git clone \ + --branch v2.0.2 \ + --depth 1 \ + https://github.com/mstorsjo/fdk-aac.git + +RUN git clone \ + --branch libXext-1.3.5 \ + --depth 1 \ + https://gitlab.freedesktop.org/xorg/lib/libxext.git + 
+RUN git clone \ + --depth 1 \ + --branch 2.16.0 \ + https://github.com/intel/libva + +RUN git clone \ + --depth 1 \ + --branch libvdpau-1.2 \ + https://gitlab.freedesktop.org/vdpau/libvdpau.git + +RUN git clone \ + --depth 1 \ + --branch v1.12.0 \ + https://chromium.googlesource.com/webm/libvpx + +RUN git clone \ + --depth 1 \ + --branch v1.3.5 \ + https://github.com/xiph/ogg + +RUN git clone \ + --depth 1 \ + --branch v1.3.1 \ + https://github.com/xiph/opus + +RUN git clone \ + --depth 1 \ + --branch v1.1.1 \ + https://github.com/xiph/theora + +RUN git clone \ + --depth 1 \ + --branch v1.3.7 \ + https://github.com/xiph/vorbis + +RUN git clone \ + --depth 1 \ + --branch v2.10.3 \ + https://gitlab.gnome.org/GNOME/libxml2.git + +RUN git clone \ + --branch n5.1.2 \ + --depth 1 \ + https://git.ffmpeg.org/ffmpeg.git COPY build.sh group_seed_corpus.py $SRC/ diff --git a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/benchmark.yaml b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/benchmark.yaml index 5c52edf4c..3040df087 100644 --- a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/benchmark.yaml +++ b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/benchmark.yaml @@ -1,9 +1,7 @@ -commit: 6473a5d35c16b1e673c07f0927ec5d2e10433e79 -commit_date: 2019-06-14 01:51:00+00:00 fuzz_target: ffmpeg_DEMUXER_fuzzer project: ffmpeg -type: bug unsupported_fuzzers: + - libafl - aflcc - afl_qemu - aflplusplus_qemu diff --git a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/build.sh b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/build.sh index 9619d7bfd..d45d543c2 100755 --- a/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/build.sh +++ b/benchmarks/ffmpeg_ffmpeg_demuxer_fuzzer/build.sh @@ -20,97 +20,74 @@ export CFLAGS="$CFLAGS -fno-sanitize=vptr" export CXXFLAGS="$CXXFLAGS -fno-sanitize=vptr" # Build dependencies. -export FFMPEG_DEPS_PATH=$SRC/ffmpeg_deps +export FFMPEG_DEPS_PATH="$SRC/ffmpeg_deps" mkdir -p $FFMPEG_DEPS_PATH export PATH="$FFMPEG_DEPS_PATH/bin:$PATH" export LD_LIBRARY_PATH="$FFMPEG_DEPS_PATH/lib" +export PKG_CONFIG_PATH="$LD_LIBRARY_PATH/pkgconfig:$LD_LIBRARY_PATH/x86_64-linux-gnu/pkgconfig" -cd $SRC -bzip2 -f -d alsa-lib-* -tar xf alsa-lib-* -cd alsa-lib-* +(cd $SRC/alsa-lib +./gitcompile ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static --disable-shared make clean make -j$(nproc) all -make install +make install) -cd $SRC/drm -# Requires xutils-dev libpciaccess-dev -./autogen.sh -./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static -make clean -make -j$(nproc) -make install - -cd $SRC/fdk-aac +(cd $SRC/fdk-aac autoreconf -fiv CXXFLAGS="$CXXFLAGS -fno-sanitize=shift-base" \ ./configure --prefix="$FFMPEG_DEPS_PATH" --disable-shared make clean make -j$(nproc) all -make install - -cd $SRC -tar xzf lame.tar.gz -cd lame-* -./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static -make clean -make -j$(nproc) -make install - -cd $SRC/libXext -./autogen.sh -./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static -make clean -make -j$(nproc) -make install +make install) -cd $SRC/libXfixes +(cd $SRC/libxext ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static make clean make -j$(nproc) -make install +make install) -cd $SRC/libva +(cd $SRC/libva ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static --disable-shared make clean make -j$(nproc) all -make install +make install) -cd $SRC/libvdpau +(cd $SRC/libvdpau +# Requires libpciaccess-dev ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static --disable-shared make clean make -j$(nproc) all -make install +make install) -cd $SRC/libvpx +(cd 
$SRC/libvpx LDFLAGS="$CXXFLAGS" ./configure --prefix="$FFMPEG_DEPS_PATH" \ --disable-examples --disable-unit-tests \ --size-limit=12288x12288 \ --extra-cflags="-DVPX_MAX_ALLOCABLE_MEMORY=1073741824" make clean make -j$(nproc) all -make install +make install) -cd $SRC/ogg +(cd $SRC/ogg ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static --disable-crc make clean make -j$(nproc) -make install +make install) -cd $SRC/opus +(cd $SRC/opus ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static make clean make -j$(nproc) all -make install +make install) -cd $SRC/theora +(cd $SRC/theora # theora requires ogg, need to pass its location to the "configure" script. CFLAGS="$CFLAGS -fPIC" LDFLAGS="-L$FFMPEG_DEPS_PATH/lib/" \ CPPFLAGS="$CXXFLAGS -I$FFMPEG_DEPS_PATH/include/" \ @@ -120,31 +97,22 @@ CFLAGS="$CFLAGS -fPIC" LDFLAGS="-L$FFMPEG_DEPS_PATH/lib/" \ --enable-static --disable-examples make clean make -j$(nproc) -make install +make install) -cd $SRC/vorbis +(cd $SRC/vorbis ./autogen.sh ./configure --prefix="$FFMPEG_DEPS_PATH" --enable-static make clean make -j$(nproc) -make install +make install) -cd $SRC/x264 -LDFLAGS="$CXXFLAGS" ./configure --prefix="$FFMPEG_DEPS_PATH" \ - --enable-static +(cd $SRC/libxml2 +./autogen.sh --prefix="$FFMPEG_DEPS_PATH" --enable-static \ + --without-debug --without-ftp --without-http \ + --without-legacy --without-python make clean make -j$(nproc) -make install - -cd $SRC/x265/build/linux -cmake -G "Unix Makefiles" \ - -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX \ - -DCMAKE_C_FLAGS="$CFLAGS" -DCMAKE_CXX_FLAGS="$CXXFLAGS" \ - -DCMAKE_INSTALL_PREFIX="$FFMPEG_DEPS_PATH" -DENABLE_SHARED:bool=off \ - ../../source -make clean -make -j$(nproc) x265-static -make install +make install) # Remove shared libraries to avoid accidental linking against them. 
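# Assuming each dependency above also installed a static .a archive, removing
# the .so files leaves only static libraries in $FFMPEG_DEPS_PATH/lib for
# ffmpeg's configure step to link against. Illustrative sanity check:
#   ls "$FFMPEG_DEPS_PATH"/lib/*.a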
rm $FFMPEG_DEPS_PATH/lib/*.so @@ -165,14 +133,16 @@ PKG_CONFIG_PATH="$FFMPEG_DEPS_PATH/lib/pkgconfig" ./configure \ --enable-libass \ --enable-libfdk-aac \ --enable-libfreetype \ - --enable-libmp3lame \ --enable-libopus \ --enable-libtheora \ --enable-libvorbis \ --enable-libvpx \ - --enable-libx264 \ - --enable-libx265 \ + --enable-libxml2 \ --enable-nonfree \ + --disable-muxers \ + --disable-protocols \ + --disable-demuxer=rtp,rtsp,sdp \ + --disable-devices \ --disable-shared make clean make -j$(nproc) install diff --git a/benchmarks/file_magic_fuzzer/Dockerfile b/benchmarks/file_magic_fuzzer/Dockerfile index dc12ea768..d05b3852e 100644 --- a/benchmarks/file_magic_fuzzer/Dockerfile +++ b/benchmarks/file_magic_fuzzer/Dockerfile @@ -14,9 +14,12 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER mike.aizatsky@gmail.com RUN apt-get update && apt-get install -y make autoconf automake libtool shtool zlib1g-dev -RUN git clone --depth 1 https://github.com/file/file.git +RUN git clone \ + --depth 1 \ + --branch FILE5_43 \ + https://github.com/file/file.git WORKDIR file COPY build.sh magic_fuzzer.cc $SRC/ diff --git a/benchmarks/file_magic_fuzzer/benchmark.yaml b/benchmarks/file_magic_fuzzer/benchmark.yaml index 8b70c8548..aa1f8eeb9 100644 --- a/benchmarks/file_magic_fuzzer/benchmark.yaml +++ b/benchmarks/file_magic_fuzzer/benchmark.yaml @@ -1,9 +1,7 @@ -commit: d1ff3af7a2c6b38bdbdde7af26b59e3c50a48fff -commit_date: 2018-10-18 23:35:42+00:00 fuzz_target: magic_fuzzer project: file -type: bug unsupported_fuzzers: - aflcc - klee - lafintel + - honggfuzz diff --git a/benchmarks/freetype2-2017/Dockerfile b/benchmarks/freetype2-2017/Dockerfile index 66ca5a8f8..fc92d83a1 100644 --- a/benchmarks/freetype2-2017/Dockerfile +++ b/benchmarks/freetype2-2017/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get upgrade -y && \ diff --git a/benchmarks/grok_grk_decompress_fuzzer/Dockerfile b/benchmarks/grok_grk_decompress_fuzzer/Dockerfile index 60479478b..7d66be178 100644 --- a/benchmarks/grok_grk_decompress_fuzzer/Dockerfile +++ b/benchmarks/grok_grk_decompress_fuzzer/Dockerfile @@ -14,10 +14,16 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN git clone \ + --depth 1 \ + --branch v10.0.4 \ + https://github.com/GrokImageCompression/grok.git \ + grok +RUN git clone https://github.com/GrokImageCompression/grok-test-data.git \ + grok-data -RUN git clone https://github.com/GrokImageCompression/grok.git grok -RUN git clone https://github.com/GrokImageCompression/grok-test-data.git grok/data WORKDIR grok COPY build.sh $SRC/ diff --git a/benchmarks/grok_grk_decompress_fuzzer/benchmark.yaml b/benchmarks/grok_grk_decompress_fuzzer/benchmark.yaml index 154fc829b..c3cb521b9 100644 --- 
a/benchmarks/grok_grk_decompress_fuzzer/benchmark.yaml +++ b/benchmarks/grok_grk_decompress_fuzzer/benchmark.yaml @@ -1,9 +1,7 @@ -commit: c007abeb226caef9c23bd786a36614b94703ff87 -commit_date: 2020-11-09 02:44:57+00:00 fuzz_target: grk_decompress_fuzzer project: grok -type: bug unsupported_fuzzers: + - centipede - aflcc - afl_qemu - aflplusplus_qemu diff --git a/benchmarks/grok_grk_decompress_fuzzer/build.sh b/benchmarks/grok_grk_decompress_fuzzer/build.sh index 4ec01fbf3..21ee2269b 100755 --- a/benchmarks/grok_grk_decompress_fuzzer/build.sh +++ b/benchmarks/grok_grk_decompress_fuzzer/build.sh @@ -17,7 +17,7 @@ mkdir build cd build -cmake .. +cmake .. -DGRK_BUILD_CODEC=OFF -DBUILD_SHARED_LIBS=OFF -DGRK_BUILD_THIRDPARY=ON make clean -s make -j$(nproc) -s cd .. diff --git a/benchmarks/harfbuzz-1.3.2/Dockerfile b/benchmarks/harfbuzz-1.3.2/Dockerfile index 2c9430ab3..4063700ac 100644 --- a/benchmarks/harfbuzz-1.3.2/Dockerfile +++ b/benchmarks/harfbuzz-1.3.2/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/jsoncpp_jsoncpp_fuzzer/Dockerfile b/benchmarks/jsoncpp_jsoncpp_fuzzer/Dockerfile index 865673826..8d953abbb 100644 --- a/benchmarks/jsoncpp_jsoncpp_fuzzer/Dockerfile +++ b/benchmarks/jsoncpp_jsoncpp_fuzzer/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y build-essential make curl wget # Install latest cmake. diff --git a/benchmarks/lcms-2017-03-21/Dockerfile b/benchmarks/lcms-2017-03-21/Dockerfile index ea17bafb6..25d53a9c3 100644 --- a/benchmarks/lcms-2017-03-21/Dockerfile +++ b/benchmarks/lcms-2017-03-21/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/libarchive_libarchive_fuzzer/Dockerfile b/benchmarks/libarchive_libarchive_fuzzer/Dockerfile index a43ecdbf7..6f0990e99 100644 --- a/benchmarks/libarchive_libarchive_fuzzer/Dockerfile +++ b/benchmarks/libarchive_libarchive_fuzzer/Dockerfile @@ -14,13 +14,16 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c # Installing optional libraries can utilize more code path and/or improve # performance (avoid calling external programs). 
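# The extra -dev packages below enable libarchive's optional code paths
# (bzip2, lzo, lzma, lz4 and zlib filters, libxml2 for xar, OpenSSL digests,
# ACL/xattr handling); build.sh links most of them statically into the fuzzer.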
RUN apt-get update && apt-get install -y make autoconf automake libtool pkg-config \ libbz2-dev liblzo2-dev liblzma-dev liblz4-dev libz-dev \ libxml2-dev libssl-dev libacl1-dev libattr1-dev -RUN git clone --depth 1 https://github.com/libarchive/libarchive.git +RUN git clone \ + --depth 1 \ + --branch v3.6.1 \ + https://github.com/libarchive/libarchive.git WORKDIR libarchive COPY build.sh libarchive_fuzzer.cc $SRC/ diff --git a/benchmarks/libarchive_libarchive_fuzzer/benchmark.yaml b/benchmarks/libarchive_libarchive_fuzzer/benchmark.yaml index 67fc11dc6..bb2852f70 100644 --- a/benchmarks/libarchive_libarchive_fuzzer/benchmark.yaml +++ b/benchmarks/libarchive_libarchive_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 94ca3f0734f71a0d9389ceaa237ce5a4ed8a21cd -commit_date: 2019-05-03 02:15:00+00:00 fuzz_target: libarchive_fuzzer project: libarchive -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/libarchive_libarchive_fuzzer/build.sh b/benchmarks/libarchive_libarchive_fuzzer/build.sh index 84c5f9c13..149a74384 100755 --- a/benchmarks/libarchive_libarchive_fuzzer/build.sh +++ b/benchmarks/libarchive_libarchive_fuzzer/build.sh @@ -23,6 +23,7 @@ make -j$(nproc) all # build fuzzer(s) $CXX $CXXFLAGS -Ilibarchive \ $SRC/libarchive_fuzzer.cc -o $OUT/libarchive_fuzzer \ + -stdlib=libstdc++ \ $LIB_FUZZING_ENGINE .libs/libarchive.a \ -Wl,-Bstatic -lbz2 -llzo2 -lxml2 -llzma -lz -lcrypto -llz4 -licuuc \ - -licudata -Wl,-Bdynamic -ldl + -licudata -Wl,-Bdynamic -ldl -lc++ diff --git a/benchmarks/libgit2_objects_fuzzer/Dockerfile b/benchmarks/libgit2_objects_fuzzer/Dockerfile index 010ed81ee..e96cc53f7 100644 --- a/benchmarks/libgit2_objects_fuzzer/Dockerfile +++ b/benchmarks/libgit2_objects_fuzzer/Dockerfile @@ -14,10 +14,22 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN apt-get update && \ + apt-get install -y \ + make \ + autoconf \ + automake \ + libtool \ + cmake + +RUN git clone \ + --depth 1 \ + --branch v1.5.0 \ + https://github.com/libgit2/libgit2 \ + libgit2 -RUN apt-get update && apt-get install -y make autoconf automake libtool cmake -RUN git clone https://github.com/libgit2/libgit2 libgit2 WORKDIR libgit2 COPY build.sh $SRC/ diff --git a/benchmarks/libgit2_objects_fuzzer/benchmark.yaml b/benchmarks/libgit2_objects_fuzzer/benchmark.yaml index dc849c1f7..50a72ed86 100644 --- a/benchmarks/libgit2_objects_fuzzer/benchmark.yaml +++ b/benchmarks/libgit2_objects_fuzzer/benchmark.yaml @@ -1,9 +1,7 @@ -commit: 20cb30b6b8e269d2ce3474523562b2739a8efea2 -commit_date: 2018-11-13 12:40:17+00:00 fuzz_target: objects_fuzzer project: libgit2 -type: bug unsupported_fuzzers: + - libafl - aflcc - afl_qemu - aflplusplus_qemu diff --git a/benchmarks/libgit2_objects_fuzzer/build.sh b/benchmarks/libgit2_objects_fuzzer/build.sh index 87067ec3f..9e72ca70e 100755 --- a/benchmarks/libgit2_objects_fuzzer/build.sh +++ b/benchmarks/libgit2_objects_fuzzer/build.sh @@ -27,17 +27,14 @@ cmake .. 
-DCMAKE_INSTALL_PREFIX="$WORK" \ make -j$(nproc) make install - -for fuzzer in "../fuzzers/objects_fuzzer.c" +for fuzzer in ../fuzzers/*_fuzzer.c do fuzzer_name=$(basename "${fuzzer%.c}") - $CC $CFLAGS -c -I"$WORK/include" -I"$SRC/libgit2/src" \ - -DLIBGIT2_NO_FEATURES_H \ + $CC $CFLAGS -c -I./src -I../src/libgit2 -I../src/util -I../include \ "$fuzzer" -o "$WORK/$fuzzer_name.o" $CXX $CXXFLAGS -std=c++11 -o "$OUT/$fuzzer_name" \ - -lFuzzingEngine "$WORK/$fuzzer_name.o" "$WORK/lib/libgit2.a" \ - -pthread -ldl + $LIB_FUZZING_ENGINE "$WORK/$fuzzer_name.o" "$WORK/lib/libgit2.a" zip -j "$OUT/${fuzzer_name}_seed_corpus.zip" \ ../fuzzers/corpora/${fuzzer_name%_fuzzer}/* diff --git a/benchmarks/libhevc_hevc_dec_fuzzer/Dockerfile b/benchmarks/libhevc_hevc_dec_fuzzer/Dockerfile index 95710ef4e..491ed22a4 100644 --- a/benchmarks/libhevc_hevc_dec_fuzzer/Dockerfile +++ b/benchmarks/libhevc_hevc_dec_fuzzer/Dockerfile @@ -14,9 +14,12 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER harish.mahendrakar@ittiam.com RUN apt-get update && apt-get install -y wget cmake -RUN git clone https://android.googlesource.com/platform/external/libhevc +RUN git clone \ + --depth 1 \ + --branch android-vts-13.0_r2 \ + https://android.googlesource.com/platform/external/libhevc COPY build.sh $SRC/ WORKDIR libhevc diff --git a/benchmarks/libhevc_hevc_dec_fuzzer/benchmark.yaml b/benchmarks/libhevc_hevc_dec_fuzzer/benchmark.yaml index 74779f9c6..746f9be46 100644 --- a/benchmarks/libhevc_hevc_dec_fuzzer/benchmark.yaml +++ b/benchmarks/libhevc_hevc_dec_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: d28f2210ee8e65afdfb07a1fd6582285d3d178e0 -commit_date: 2019-09-06 01:29:00+00:00 fuzz_target: hevc_dec_fuzzer project: libhevc -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/libhtp_fuzz_htp/Dockerfile b/benchmarks/libhtp_fuzz_htp/Dockerfile index 77fd1487e..5ca143447 100644 --- a/benchmarks/libhtp_fuzz_htp/Dockerfile +++ b/benchmarks/libhtp_fuzz_htp/Dockerfile @@ -14,10 +14,15 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y make autoconf automake libtool zlib1g-dev liblzma-dev -RUN git clone https://github.com/OISF/libhtp.git libhtp +RUN git clone \ + --depth 1 \ + --branch 0.5.41 \ + https://github.com/OISF/libhtp.git \ + libhtp + WORKDIR $SRC COPY build.sh $SRC/ diff --git a/benchmarks/libhtp_fuzz_htp/benchmark.yaml b/benchmarks/libhtp_fuzz_htp/benchmark.yaml index de933dc10..adc08c159 100644 --- a/benchmarks/libhtp_fuzz_htp/benchmark.yaml +++ b/benchmarks/libhtp_fuzz_htp/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 75cbbbd405695e97567931655fd5a441f86e5836 -commit_date: 2019-09-14 08:28:41+00:00 fuzz_target: fuzz_htp project: libhtp -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/libjpeg-turbo-07-2017/Dockerfile b/benchmarks/libjpeg-turbo-07-2017/Dockerfile index c0726938a..6d4593e46 100644 --- a/benchmarks/libjpeg-turbo-07-2017/Dockerfile +++ b/benchmarks/libjpeg-turbo-07-2017/Dockerfile @@ -14,7 +14,7 @@ # 
################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/libpcap_fuzz_both/Dockerfile b/benchmarks/libpcap_fuzz_both/Dockerfile index 6be82c520..61fe44522 100644 --- a/benchmarks/libpcap_fuzz_both/Dockerfile +++ b/benchmarks/libpcap_fuzz_both/Dockerfile @@ -14,11 +14,21 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a -RUN apt-get update && apt-get install -y make cmake flex bison -RUN git clone --depth 1 https://github.com/the-tcpdump-group/libpcap.git libpcap -# for corpus as wireshark -RUN git clone --depth=1 https://github.com/the-tcpdump-group/tcpdump.git tcpdump +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c +RUN apt-get update && \ + apt-get install -y make cmake flex bison + +RUN git clone \ + --depth 1 \ + --branch libpcap-1.10.1 \ + https://github.com/the-tcpdump-group/libpcap.git libpcap + +# For corpus as wireshark. +RUN git clone \ + --depth=1 \ + --branch tcpdump-4.99.1 \ + https://github.com/the-tcpdump-group/tcpdump.git tcpdump + WORKDIR $SRC COPY build.sh $SRC/ COPY patch.diff $SRC/ diff --git a/benchmarks/libpcap_fuzz_both/build.sh b/benchmarks/libpcap_fuzz_both/build.sh index e44f8b83f..694833b79 100755 --- a/benchmarks/libpcap_fuzz_both/build.sh +++ b/benchmarks/libpcap_fuzz_both/build.sh @@ -23,12 +23,12 @@ cd build cmake .. make - # build fuzz targets for target in pcap filter both do $CC $CFLAGS -I.. -c ../testprogs/fuzz/fuzz_$target.c -o fuzz_$target.o - $CXX $CXXFLAGS fuzz_$target.o -o $OUT/fuzz_$target libpcap.a $LIB_FUZZING_ENGINE + $CXX $CXXFLAGS fuzz_$target.o -o $OUT/fuzz_$target \ + libpcap.a $LIB_FUZZING_ENGINE $EXTRA_LIBS done # export other associated stuff diff --git a/benchmarks/libpng-1.2.56/build.sh b/benchmarks/libpng-1.2.56/build.sh deleted file mode 100755 index 533226b4c..000000000 --- a/benchmarks/libpng-1.2.56/build.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -ex -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -tar xf libpng-1.2.56.tar.gz - -cd libpng-1.2.56 -./configure -make -j $(nproc) - -$CXX $CXXFLAGS -std=c++11 $SRC/target.cc .libs/libpng12.a $FUZZER_LIB -I . 
-lz \ - -o $OUT/libpng_read_fuzzer -cp -r /opt/seeds $OUT/ diff --git a/benchmarks/libpng-1.2.56/png_mutator.h b/benchmarks/libpng-1.2.56/png_mutator.h deleted file mode 100644 index a16d28270..000000000 --- a/benchmarks/libpng-1.2.56/png_mutator.h +++ /dev/null @@ -1,331 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include -#include -#include -#include -#include -#include -#include -#include - -#include - -// A simple class for parsing, serializing, and mutating an PNG file. -// https://en.wikipedia.org/wiki/Portable_Network_Graphics -// It is an example of a custom mutator for libFuzzer -// (https://llvm.org/docs/LibFuzzer.html) used for -// "structure-aware coverage-guided fuzzing". -// -// If you have a non structure-aware fuzz target for any API that handles -// PNG inputs, you can turn that fuzz target into a structure-aware one -// by defining PNG_MUTATOR_DEFINE_LIBFUZZER_CUSTOM_MUTATOR and then -// including this file. -class PngMutator { - using V = std::vector; - - public: - - // Parse the input stream as a PNG file, - // put every chunk into its own vector, - // uncompress chunk data when needed, - // merge the IDAT chunks into one vector. - PngMutator(std::istream &in) { - ihdr_.resize(13); - Read4(in); - Read4(in); // Skip the 8-byte magic value. - // read IHDR. - if (ReadInteger(in) != 13) return; - if (Read4(in) != Type("IHDR")) return; - // Read 13 values. - in.read((char*)ihdr_.data(), ihdr_.size()); - Read4(in); // ignore CRC - ssize_t idat_idx = -1; - - while (in) { - uint32_t len = ReadInteger(in); - uint32_t type = Read4(in); - if (type == Type("IEND")) break; // do nothing - char chunk_name[5]; - memcpy(chunk_name, &type, 4); - chunk_name[4] = 0; - if (len > (1 << 20)) return; - V v(len); - in.read((char *)v.data(), len); - Read4(in); // ignore CRC - - if (type == Type("IDAT")) { - if (idat_idx != -1) - Append(&chunks_[idat_idx].v, v); - else { - idat_idx = chunks_.size(); - chunks_.push_back({type, v}); - } - } else if (type == Type("iCCP")) { - auto it = v.begin(); - while (it < v.end() && isprint(*it)) it++; - if (it < v.end() && !*it) it++; - if (it < v.end() && !*it) it++; - v = V(it, v.end()); - auto uncompressed = Uncompress(v); - chunks_.push_back({type, uncompressed}); - auto compressed = Compress(uncompressed); - } else { - chunks_.push_back({type, v}); - } - // std::cerr << "CHUNK: " << chunk_name << std::endl; - } - if (idat_idx != -1) - chunks_[idat_idx].v = Uncompress(chunks_[idat_idx].v); - } - - // Write back the PNG file. - void Serialize(std::ostream &out) { - const unsigned char header[] = {0x89, 0x50, 0x4e, 0x47, - 0x0d, 0x0a, 0x1a, 0x0a}; - out.write((const char*)header, sizeof(header)); - WriteChunk(out, "IHDR", ihdr_); - for (auto &ch : chunks_) { - if (ch.type == Type("iCCP")) { - V v; - v.push_back('x'); // assuming the iCCP name doesn't matter. 
- v.push_back(0); - v.push_back(0); - auto compressed = Compress(ch.v); - Append(&v, compressed); - WriteChunk(out, ch.type, v); - } else { - WriteChunk(out, ch.type, ch.v); - } - } - - WriteChunk(out, "IEND", {}); - } - - // Raw byte array mutator, like that provided by libFuzzer. - using Mutator = size_t (*)(uint8_t *Data, size_t Size, size_t MaxSize); - - // Mutate the in-memory representation of a PNG file. - // Given the same Seed, the same mutation is performed. - void Mutate(Mutator m, unsigned int Seed) { - std::minstd_rand rnd(Seed); - auto M = [&](V *v) { - if (v->empty()) - v->resize(v->size() + 1 + rnd() % 256); - v->resize(m(v->data(), v->size(), v->size())); - }; - switch (rnd() % 6) { - // Mutate IHDR. - case 0: - m(ihdr_.data(), ihdr_.size(), ihdr_.size()); - break; - // Mutate some other chunk. - case 1: - if (!chunks_.empty()) M(&chunks_[rnd() % chunks_.size()].v); - break; - // Shuffle the chunks. - case 2: - std::shuffle(chunks_.begin(), chunks_.end(), rnd); - break; - // Delete a random chunk. - case 3: - if (!chunks_.empty()) - chunks_.erase(chunks_.begin() + rnd() % chunks_.size()); - break; - // Insert a random chunk with one of the known types, or a random type. - case 4: { - static const char *types[] = { - "IATx", "sTER", "hIST", "sPLT", "mkBF", "mkBS", "mkTS", "prVW", - "oFFs", "iDOT", "zTXt", "mkBT", "acTL", "iTXt", "sBIT", "tIME", - "iCCP", "vpAg", "tRNS", "cHRM", "PLTE", "bKGD", "gAMA", "sRGB", - "pHYs", "fdAT", "fcTL", "tEXt", "IDAT", - "pCAL", "sCAL", "eXIf", - "fUZz", // special chunk for extra fuzzing hints. - }; - static const size_t n_types = sizeof(types) / sizeof(types[0]); - uint32_t type = - (rnd() % 10 <= 8) ? Type(types[rnd() % n_types]) : (uint32_t)rnd(); - size_t len = rnd() % 256; - if (type == Type("fUZz")) - len = 16; - V v(len); - for (auto &b : v) b = rnd(); - size_t pos = rnd() % (chunks_.size() + 1); - chunks_.insert(chunks_.begin() + pos, {type, v}); - } break; - // Any more interesting mutations with a PNG file? - case 5: { - auto it = std::find_if( - chunks_.begin(), chunks_.end(), - [](const Chunk &ch) { return ch.type == Type("fUZz"); }); - if (it != chunks_.end()) - m(it->v.data(), it->v.size(), it->v.size()); - } - - } - } - - // Takes a random chunk from p and inserts into *this. 
- void CrossOver(const PngMutator &p, unsigned int Seed) { - if (p.chunks_.empty()) return; - std::minstd_rand rnd(Seed); - size_t idx = rnd() % p.chunks_.size(); - auto &ch = p.chunks_[idx]; - size_t pos = rnd() % (chunks_.size() + 1); - chunks_.insert(chunks_.begin() + pos, ch); - } - - private: - void Append(V *to, const V &from) { - to->insert(to->end(), from.begin(), from.end()); - } - - uint32_t Read4(std::istream &in) { - uint32_t res = 0; - in.read((char *)&res, sizeof(res)); - return res; - } - uint32_t ReadInteger(std::istream &in) { - return __builtin_bswap32(Read4(in)); - } - static uint32_t Type(const char *tagname) { - uint32_t res; - assert(strlen(tagname) == 4); - memcpy(&res, tagname, 4); - return res; - } - - void WriteInt(std::ostream &out, uint32_t x) { - x = __builtin_bswap32(x); - out.write((char *)&x, sizeof(x)); - } - - // Chunk is written as: - // * 4-byte length - // * 4-byte type - // * the data itself - // * 4-byte crc (of type and data) - void WriteChunk(std::ostream &out, const char *type, const V &chunk, - bool compress = false) { - V compressed; - const V *v = &chunk; - if (compress) { - compressed = Compress(chunk); - v = &compressed; - } - uint32_t len = v->size(); - uint32_t crc = crc32(0, (const unsigned char *)type, 4); - if (v->size()) - crc = crc32(crc, (const unsigned char *)v->data(), v->size()); - WriteInt(out, len); - out.write(type, 4); - out.write((const char*)v->data(), v->size()); - WriteInt(out, crc); - } - - void WriteChunk(std::ostream &out, uint32_t type, const V &chunk) { - char type_s[5]; - memcpy(type_s, &type, 4); - type_s[4] = 0; - WriteChunk(out, type_s, chunk); - } - - V Uncompress(const V &compressed) { - V v; - static const size_t kMaxBuffer = 1 << 28; - for (size_t sz = compressed.size() * 4; sz < kMaxBuffer; sz *= 2) { - v.resize(sz); - size_t len = sz; - auto res = - uncompress(v.data(), &len, compressed.data(), compressed.size()); - if (res == Z_BUF_ERROR) continue; - if (res != Z_OK) return {}; - v.resize(len); - break; - } - return v; - } - - V Compress(const V &uncompressed) { - V v; - static const size_t kMaxBuffer = 1 << 28; - for (size_t sz = uncompressed.size(); sz < kMaxBuffer; sz *= 2) { - v.resize(sz); - size_t len = sz; - auto res = - compress(v.data(), &len, uncompressed.data(), uncompressed.size()); - if (res == Z_BUF_ERROR) continue; - if (res != Z_OK) return {}; - v.resize(len); - break; - } - return v; - } - - void PrintHex(const V &v, size_t max_n) { - for (size_t i = 0; i < max_n && i < v.size(); i++) { - std::cerr << "0x" << std::hex << (unsigned)v[i] << " " << std::dec; - } - std::cerr << std::endl; - } - - V ihdr_; - - struct Chunk { - uint32_t type; - V v; - }; - std::vector chunks_; -}; - - -#ifdef PNG_MUTATOR_DEFINE_LIBFUZZER_CUSTOM_MUTATOR - -extern "C" size_t LLVMFuzzerMutate(uint8_t *Data, size_t Size, size_t MaxSize); - -extern "C" size_t LLVMFuzzerCustomMutator(uint8_t *Data, size_t Size, - size_t MaxSize, unsigned int Seed) { - std::string s(reinterpret_cast(Data), Size); - std::stringstream in(s); - std::stringstream out; - PngMutator p(in); - p.Mutate(LLVMFuzzerMutate, Seed); - p.Serialize(out); - const auto &str = out.str(); - if (str.size() > MaxSize) return Size; - memcpy(Data, str.data(), str.size()); - return str.size(); -} - -extern "C" size_t LLVMFuzzerCustomCrossOver(const uint8_t *Data1, size_t Size1, - const uint8_t *Data2, size_t Size2, - uint8_t *Out, size_t MaxOutSize, - unsigned int Seed) { - std::stringstream in1( - std::string(reinterpret_cast(Data1), Size1)); - std::stringstream 
in2( - std::string(reinterpret_cast(Data2), Size2)); - PngMutator p1(in1); - PngMutator p2(in2); - p1.CrossOver(p2, Seed); - std::stringstream out; - p1.Serialize(out); - const auto &str = out.str(); - if (str.size() > MaxOutSize) return 0; - memcpy(Out, str.data(), str.size()); - return str.size(); -} - -#endif // PNG_MUTATOR_DEFINE_LIBFUZZER_CUSTOM_MUTATOR diff --git a/benchmarks/libpng-1.2.56/target.cc b/benchmarks/libpng-1.2.56/target.cc deleted file mode 100644 index 95867c62a..000000000 --- a/benchmarks/libpng-1.2.56/target.cc +++ /dev/null @@ -1,143 +0,0 @@ -// Copyright 2020 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include -#include -#include -#include - -#define PNG_INTERNAL // For PNG_FLAG_CRC_CRITICAL_MASK, etc. -#include "png.h" - -struct BufState { - const uint8_t* data; - size_t bytes_left; -}; - -void user_read_data(png_structp png_ptr, png_bytep data, png_size_t length) { - BufState* buf_state = static_cast(png_get_io_ptr(png_ptr)); - if (length > buf_state->bytes_left) { - png_error(png_ptr, "read error"); - } - memcpy(data, buf_state->data, length); - buf_state->bytes_left -= length; - buf_state->data += length; -} - -static const int kPngHeaderSize = 8; - -struct ScopedPngObject { - ~ScopedPngObject() { - if (row && png_ptr) { - png_free(png_ptr, row); - } - if (png_ptr && info_ptr) { - png_destroy_read_struct(&png_ptr, &info_ptr, nullptr); - } - delete buf_state; - } - png_infop info_ptr = nullptr; - png_voidp row = 0; - png_structp png_ptr = nullptr; - BufState *buf_state = nullptr; -}; - -bool DetectLargeSize(const uint8_t *data, size_t size) { - uint8_t *ihdr = reinterpret_cast(memmem(data, size, "IHDR", 4)); - if (!ihdr) return false; - if (ihdr + 12 > data + size) return false; - uint32_t W = *(uint32_t*)(ihdr + 4); - uint32_t H = *(uint32_t*)(ihdr + 8); - W = __builtin_bswap32(W); - H = __builtin_bswap32(H); - uint64_t WxH = static_cast(W) * H; - if (WxH > 100000ULL) { - // fprintf(stderr, "ZZZ %zu %u %u\n", WxH, W, H); - return true; - } - return false; -} - -// Fuzzing entry point. Roughly follows the libpng book example: -// http://www.libpng.org/pub/png/book/chapter13.html -extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) { - if (size < kPngHeaderSize) { - return 0; - } - ScopedPngObject O; - if (png_sig_cmp(const_cast(data), 0, kPngHeaderSize)) { - // not a PNG. - return 0; - } - - // if (DetectLargeSize(data, size)) return 0; - - auto &png_ptr = O.png_ptr; - png_ptr = png_create_read_struct - (PNG_LIBPNG_VER_STRING, nullptr, nullptr, nullptr); - assert(png_ptr); - - png_ptr->flags &= ~PNG_FLAG_CRC_CRITICAL_MASK; - png_ptr->flags |= PNG_FLAG_CRC_CRITICAL_IGNORE; - - png_ptr->flags &= ~PNG_FLAG_CRC_ANCILLARY_MASK; - png_ptr->flags |= PNG_FLAG_CRC_ANCILLARY_NOWARN; - - auto &info_ptr = O.info_ptr; - info_ptr = png_create_info_struct(png_ptr); - assert(info_ptr); - - // Setting up reading from buffer. 
- auto &buf_state = O.buf_state; - buf_state = new BufState(); - buf_state->data = data + kPngHeaderSize; - buf_state->bytes_left = size - kPngHeaderSize; - png_set_read_fn(png_ptr, buf_state, user_read_data); - png_set_sig_bytes(png_ptr, kPngHeaderSize); - int passes = 0; - - // libpng error handling. - if (setjmp(png_ptr->jmpbuf)) { - return 0; - } - - // png_ptr->mode & PNG_HAVE_IDAT - // Reading - png_read_info(png_ptr, info_ptr); - - png_uint_32 width, height; - int bit_depth, color_type, interlace_type, compression_type; - int filter_type; - - if (!png_get_IHDR(png_ptr, info_ptr, &width, &height, - &bit_depth, &color_type, &interlace_type, - &compression_type, &filter_type)) { - return 0; - } - - if (height * width > 2000000) return 0; // This is going to be too slow. - - - passes = png_set_interlace_handling(png_ptr); - png_start_read_image(png_ptr); - - O.row = png_malloc(png_ptr, png_get_rowbytes(png_ptr, info_ptr)); - - for (int pass = 0; pass < passes; ++pass) { - for (png_uint_32 y = 0; y < height; ++y) { - png_read_row(png_ptr, static_cast(O.row), NULL); - } - } - return 0; -} diff --git a/benchmarks/libpng-1.2.56/Dockerfile b/benchmarks/libpng-1.6.38/Dockerfile similarity index 71% rename from benchmarks/libpng-1.2.56/Dockerfile rename to benchmarks/libpng-1.6.38/Dockerfile index 26c463c56..b655ee6be 100644 --- a/benchmarks/libpng-1.2.56/Dockerfile +++ b/benchmarks/libpng-1.6.38/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ @@ -22,13 +22,23 @@ RUN apt-get update && \ autoconf \ automake \ libtool \ - wget \ zlib1g-dev -RUN wget --no-check-certificate \ - https://downloads.sourceforge.net/project/libpng/libpng12/older-releases/1.2.56/libpng-1.2.56.tar.gz +RUN git clone \ + --depth 1 \ + --branch v1.2.13 \ + https://github.com/madler/zlib.git + +RUN git clone \ + --depth 1 \ + --branch v1.6.38 \ + https://github.com/glennrp/libpng.git +RUN cp libpng/contrib/oss-fuzz/build.sh $SRC + +WORKDIR libpng RUN wget --no-check-certificate -qO $OUT/libpng_read_fuzzer.dict \ https://raw.githubusercontent.com/google/fuzzing/master/dictionaries/png.dict + ADD seeds /opt/seeds COPY * $SRC/ diff --git a/benchmarks/libpng-1.2.56/benchmark.yaml b/benchmarks/libpng-1.6.38/benchmark.yaml similarity index 100% rename from benchmarks/libpng-1.2.56/benchmark.yaml rename to benchmarks/libpng-1.6.38/benchmark.yaml diff --git a/benchmarks/libpng-1.2.56/seeds/seed.png b/benchmarks/libpng-1.6.38/seeds/seed.png similarity index 100% rename from benchmarks/libpng-1.2.56/seeds/seed.png rename to benchmarks/libpng-1.6.38/seeds/seed.png diff --git a/benchmarks/libxml2-v2.9.2/Dockerfile b/benchmarks/libxml2-v2.9.2/Dockerfile index 27566d64f..6af065776 100644 --- a/benchmarks/libxml2-v2.9.2/Dockerfile +++ b/benchmarks/libxml2-v2.9.2/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get upgrade -y ca-certificates && \ diff --git 
a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/Dockerfile b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/Dockerfile index 60a457081..bf425a005 100644 --- a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/Dockerfile +++ b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/Dockerfile @@ -14,14 +14,23 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c # Upgrade to avoid certs errors -RUN apt-get update && apt-get upgrade -y && \ - apt-get install -y make autoconf automake libtool pkg-config python-dev python3-dev +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + make autoconf libtool pkg-config \ + zlib1g-dev zlib1g-dev:i386 liblzma-dev liblzma-dev:i386 -RUN git clone https://gitlab.gnome.org/GNOME/libxml2.git +# Build requires automake 1.16.3 +RUN curl -LO \ + http://mirrors.kernel.org/ubuntu/pool/main/a/automake-1.16/automake_1.16.5-1.3_all.deb && \ + apt install ./automake_1.16.5-1.3_all.deb + +RUN git clone \ + --depth 1 \ + --branch v2.10.3 \ + https://gitlab.gnome.org/GNOME/libxml2.git WORKDIR libxml2 COPY build.sh $SRC/ -COPY *.cc *.h *.dict $SRC/ diff --git a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/benchmark.yaml b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/benchmark.yaml index 988efed5b..774302df6 100644 --- a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/benchmark.yaml +++ b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 99a864a1f7a9cb59865f803770d7d62fb47cad69 -commit_date: 2019-09-25 13:27:45+00:00 fuzz_target: libxml2_xml_reader_for_file_fuzzer project: libxml2 -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/build.sh b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/build.sh index 771f30d1c..efe429e38 100755 --- a/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/build.sh +++ b/benchmarks/libxml2_libxml2_xml_reader_for_file_fuzzer/build.sh @@ -16,15 +16,37 @@ # ################################################################################ -./autogen.sh -./configure --without-python --with-threads=no --with-zlib=no --with-lzma=no -make -j$(nproc) clean -make -j$(nproc) all +if [ "$SANITIZER" = undefined ]; then + export CFLAGS="$CFLAGS -fsanitize=unsigned-integer-overflow -fno-sanitize-recover=unsigned-integer-overflow" + export CXXFLAGS="$CXXFLAGS -fsanitize=unsigned-integer-overflow -fno-sanitize-recover=unsigned-integer-overflow" +fi -$CXX $CXXFLAGS -std=c++11 -Iinclude/ \ - $SRC/libxml2_xml_reader_for_file_fuzzer.cc \ - -o $OUT/libxml2_xml_reader_for_file_fuzzer \ - $LIB_FUZZING_ENGINE .libs/libxml2.a +export V=1 -cp $SRC/xml.dict $OUT/libxml2_xml_reader_for_file_fuzzer.dict -zip -r $OUT/libxml2_xml_reader_for_file_fuzzer_seed_corpus.zip $SRC/libxml2/test +./autogen.sh \ + --disable-shared \ + --without-debug \ + --without-ftp \ + --without-http \ + --without-legacy \ + --without-python +make -j$(nproc) + +cd fuzz +make clean-corpus +make fuzz.o + +for fuzzer in html regexp schema uri xml xpath; do + make $fuzzer.o + # Link with $CXX + $CXX $CXXFLAGS \ + $fuzzer.o fuzz.o \ + -o $OUT/libxml2_xml_reader_for_file_fuzzer \ + $LIB_FUZZING_ENGINE \ + ../.libs/libxml2.a -Wl,-Bstatic -lz -llzma -Wl,-Bdynamic + + [ -e 
seed/$fuzzer ] || make seed/$fuzzer.stamp + zip -j $OUT/${fuzzer}_seed_corpus.zip seed/$fuzzer/* +done + +cp *.dict *.options $OUT/ diff --git a/benchmarks/libxslt_xpath/Dockerfile b/benchmarks/libxslt_xpath/Dockerfile index 79aedcd0d..6e146b548 100644 --- a/benchmarks/libxslt_xpath/Dockerfile +++ b/benchmarks/libxslt_xpath/Dockerfile @@ -14,14 +14,59 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c # Note that we don't use the system libxml2 but a custom instrumented build. # libgcrypt is required for the crypto extensions of libexslt. -RUN apt-get update && apt-get install -y --no-install-recommends \ - make autoconf automake libtool pkg-config \ - libgcrypt-dev -RUN git clone --depth 1 https://gitlab.gnome.org/GNOME/libxml2.git -RUN git clone --depth 1 https://gitlab.gnome.org/GNOME/libxslt.git +# Need to build M4-1.4.19, autoconf-2.71, automake-1.16.3 manually in Ubuntu:20. +ENV M4_VERSION=1.4.19 +ENV AUTOCONF_VERSION=2.71 +ENV AUTOMAKE_VERSION=1.16.5 +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + make \ + libtool \ + pkg-config \ + libgcrypt-dev && \ + wget https://ftp.gnu.org/gnu/m4/m4-$M4_VERSION.tar.gz && \ + tar xf m4-$M4_VERSION.tar.gz && \ + rm m4-$M4_VERSION.tar.gz && \ + ( \ + cd m4-$M4_VERSION/ && \ + ./configure && \ + make && \ + make install \ + ) && \ + wget https://ftp.gnu.org/gnu/autoconf/autoconf-$AUTOCONF_VERSION.tar.gz && \ + tar xf autoconf-$AUTOCONF_VERSION.tar.gz && \ + rm autoconf-$AUTOCONF_VERSION.tar.gz && \ + ( \ + cd autoconf-$AUTOCONF_VERSION/ && \ + ./configure && \ + make && \ + make install \ + ) && \ + wget https://ftp.gnu.org/gnu/automake/automake-$AUTOMAKE_VERSION.tar.gz && \ + tar xf automake-$AUTOMAKE_VERSION.tar.gz && \ + rm automake-$AUTOMAKE_VERSION.tar.gz && \ + ( \ + cd automake-$AUTOMAKE_VERSION && \ + ./configure && \ + make && \ + make install \ + ) + + +RUN git clone \ + --branch v2.10.3 \ + --depth 1 \ + https://gitlab.gnome.org/GNOME/libxml2.git && \ + git clone \ + --branch v1.1.37 \ + --depth 1 \ + https://gitlab.gnome.org/GNOME/libxslt.git + +ENV ACLOCAL_PATH='/usr/share/aclocal/' + WORKDIR libxslt COPY build.sh $SRC/ diff --git a/benchmarks/libxslt_xpath/benchmark.yaml b/benchmarks/libxslt_xpath/benchmark.yaml index c09f4ae14..c40124433 100644 --- a/benchmarks/libxslt_xpath/benchmark.yaml +++ b/benchmarks/libxslt_xpath/benchmark.yaml @@ -1,5 +1,5 @@ -commit: 2c20c70cd81e5ba51dc8e160fbd1c855eb97f065 -commit_date: 2020-09-20 15:01:51+00:00 +commit: 7bcc8dca27041e2b03855508fe54d435d816a78f +commit_date: 2022-09-22 10:59:13+00:00 fuzz_target: xpath project: libxslt unsupported_fuzzers: diff --git a/benchmarks/libxslt_xpath/build.sh b/benchmarks/libxslt_xpath/build.sh index fcfa95acf..bdf0e00ec 100755 --- a/benchmarks/libxslt_xpath/build.sh +++ b/benchmarks/libxslt_xpath/build.sh @@ -16,9 +16,19 @@ # ################################################################################ -# This would require an instrumented libgcrypt build. 
-CRYPTO_CONF=--with-crypto -CRYPTO_LIBS=-lgcrypt +if [ "$SANITIZER" = undefined ]; then + export CFLAGS="$CFLAGS -fsanitize=unsigned-integer-overflow -fno-sanitize-recover=unsigned-integer-overflow" + export CXXFLAGS="$CXXFLAGS -fsanitize=unsigned-integer-overflow -fno-sanitize-recover=unsigned-integer-overflow" +fi + +if [ "$SANITIZER" = memory ]; then + # This would require an instrumented libgcrypt build. + CRYPTO_CONF=--without-crypto + CRYPTO_LIBS= +else + CRYPTO_CONF=--with-crypto + CRYPTO_LIBS=-lgcrypt +fi cd ../libxml2 ./autogen.sh \ @@ -49,7 +59,7 @@ cd ../libxslt --without-profiler make -j$(nproc) V=1 -for file in xpath fuzz; do +for file in xpath xslt fuzz; do # Compile as C $CC $CFLAGS \ -I. -I../libxml2/include \ @@ -57,7 +67,7 @@ for file in xpath fuzz; do -o tests/fuzz/$file.o done -for fuzzer in xpath; do +for fuzzer in xpath xslt; do # Link with $CXX $CXX $CXXFLAGS \ tests/fuzz/$fuzzer.o tests/fuzz/fuzz.o \ @@ -70,4 +80,4 @@ for fuzzer in xpath; do zip -j $OUT/${fuzzer}_seed_corpus.zip tests/fuzz/seed/$fuzzer/* done -cp tests/fuzz/xpath.dict tests/fuzz/xpath.xml $OUT/ +cp tests/fuzz/*.dict tests/fuzz/*.xml $OUT/ diff --git a/benchmarks/matio_matio_fuzzer/Dockerfile b/benchmarks/matio_matio_fuzzer/Dockerfile index ad912ea56..31e495409 100644 --- a/benchmarks/matio_matio_fuzzer/Dockerfile +++ b/benchmarks/matio_matio_fuzzer/Dockerfile @@ -14,11 +14,22 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER t-beu@users.sourceforge.net -RUN apt-get update && apt-get install -y make autoconf automake libhdf5-dev libtool zlib1g-dev -ENV HDF5_DIR /usr/lib/x86_64-linux-gnu/hdf5/serial -RUN git clone --depth 1 git://git.code.sf.net/p/matio/matio matio +RUN apt-get update && \ + apt-get install -y \ + make autoconf automake libtool + +RUN git clone \ + --depth 1 \ + --branch v1.2.13 \ + https://github.com/madler/zlib +RUN git clone \ + --depth 1 \ + --branch v1.5.23 \ + https://github.com/tbeu/matio.git matio +ADD https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.12/hdf5-1.12.1/src/hdf5-1.12.1.tar.gz \ + hdf5-1.12.1.tar.gz + WORKDIR matio -ADD https://support.hdfgroup.org/ftp/lib-external/szip/2.1.1/src/szip-2.1.1.tar.gz szip.tar.gz COPY build.sh $SRC/ diff --git a/benchmarks/matio_matio_fuzzer/benchmark.yaml b/benchmarks/matio_matio_fuzzer/benchmark.yaml index 88ec4e66a..83388cc6f 100644 --- a/benchmarks/matio_matio_fuzzer/benchmark.yaml +++ b/benchmarks/matio_matio_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: aefd09e9d20aebb4a0b0a5e5c619f46038dba3bc -commit_date: 2019-09-11 02:28:00+00:00 fuzz_target: matio_fuzzer project: matio -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/mbedtls_fuzz_dtlsclient/Dockerfile b/benchmarks/mbedtls_fuzz_dtlsclient/Dockerfile index d53911da9..f3f699992 100644 --- a/benchmarks/mbedtls_fuzz_dtlsclient/Dockerfile +++ b/benchmarks/mbedtls_fuzz_dtlsclient/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y make cmake RUN git clone --recursive 
--depth 1 https://github.com/ARMmbed/mbedtls.git mbedtls RUN git clone --depth 1 https://github.com/google/boringssl.git boringssl diff --git a/benchmarks/mruby-2018-05-23/Dockerfile b/benchmarks/mruby-2018-05-23/Dockerfile index 520bf30e2..85c0ed04b 100644 --- a/benchmarks/mruby-2018-05-23/Dockerfile +++ b/benchmarks/mruby-2018-05-23/Dockerfile @@ -14,10 +14,13 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y build-essential ruby bison ninja-build \ cmake zlib1g-dev libbz2-dev liblzma-dev -RUN git clone --depth 1 https://github.com/mruby/mruby mruby +RUN git clone \ + --depth 1 \ + --branch 3.1.0 \ + https://github.com/mruby/mruby RUN git clone --depth 1 https://github.com/bshastry/mruby_seeds.git mruby_seeds WORKDIR mruby COPY build.sh *.c *.options *.dict $SRC/ diff --git a/benchmarks/mruby-2018-05-23/benchmark.yaml b/benchmarks/mruby-2018-05-23/benchmark.yaml index ef3d80de9..a9f18ff06 100644 --- a/benchmarks/mruby-2018-05-23/benchmark.yaml +++ b/benchmarks/mruby-2018-05-23/benchmark.yaml @@ -1,6 +1,4 @@ -commit: 14c21793a01e4caa802e600540628fe9ae622d54 -commit_date: 2018-05-23 08:30:50+09:00 fuzz_target: mruby_fuzzer project: mruby -type: bug unsupported_fuzzers: + - libafl diff --git a/benchmarks/mruby-2018-05-23/build.sh b/benchmarks/mruby-2018-05-23/build.sh index 0f4ee4fc7..dfaabe407 100755 --- a/benchmarks/mruby-2018-05-23/build.sh +++ b/benchmarks/mruby-2018-05-23/build.sh @@ -19,8 +19,9 @@ ( cd $SRC/mruby export LD=$CC -export LDFLAGS="$CFLAGS" -rake -m || true +export LDFLAGS="$CFLAGS -fPIE" + +LD=/usr/local/bin/clang CC=/usr/local/bin/clang CXX=/usr/local/bin/clang++ rake -m || true test -f $SRC/mruby/build/host/lib/libmruby.a diff --git a/benchmarks/muparser_set_eval_fuzzer/Dockerfile b/benchmarks/muparser_set_eval_fuzzer/Dockerfile index 2cc4c4048..a848b9a58 100644 --- a/benchmarks/muparser_set_eval_fuzzer/Dockerfile +++ b/benchmarks/muparser_set_eval_fuzzer/Dockerfile @@ -14,9 +14,13 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y make autoconf automake libtool RUN apt-get install -y build-essential cmake pkg-config -RUN git clone https://github.com/beltoforion/muparser.git muparser +RUN git clone \ + --depth 1 \ + --branch v2.3.4 \ + https://github.com/beltoforion/muparser.git \ + muparser WORKDIR muparser COPY build.sh set_eval_fuzzer.cc $SRC/ diff --git a/benchmarks/muparser_set_eval_fuzzer/benchmark.yaml b/benchmarks/muparser_set_eval_fuzzer/benchmark.yaml index 8d8b9d0c0..c7c0bc466 100644 --- a/benchmarks/muparser_set_eval_fuzzer/benchmark.yaml +++ b/benchmarks/muparser_set_eval_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: dd0efc8aee586eb3370025677f6ec9dee1da4729 -commit_date: 2020-08-19 20:10:49+00:00 fuzz_target: set_eval_fuzzer project: muparser -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/njs_njs_process_script_fuzzer/Dockerfile b/benchmarks/njs_njs_process_script_fuzzer/Dockerfile index a3a2fe366..3c980dabd 100644 --- 
a/benchmarks/njs_njs_process_script_fuzzer/Dockerfile +++ b/benchmarks/njs_njs_process_script_fuzzer/Dockerfile @@ -14,11 +14,18 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a -RUN apt-get update && apt-get install -y make autoconf automake libtool \ - mercurial libpcre3-dev subversion -RUN hg clone http://hg.nginx.org/njs -RUN svn co svn://vcs.exim.org/pcre/code/trunk pcre +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c +RUN apt-get update && \ + apt-get install -y \ + make autoconf automake libtool \ + mercurial libpcre2-dev +RUN hg clone \ + --updaterev 0.7.8 \ + http://hg.nginx.org/njs +RUN git clone \ + --depth 1 \ + --branch pcre2-10.40 \ + https://github.com/PCRE2Project/pcre2 pcre WORKDIR njs COPY build.sh $SRC/ diff --git a/benchmarks/njs_njs_process_script_fuzzer/benchmark.yaml b/benchmarks/njs_njs_process_script_fuzzer/benchmark.yaml index 2896128b0..7bbaf9e12 100644 --- a/benchmarks/njs_njs_process_script_fuzzer/benchmark.yaml +++ b/benchmarks/njs_njs_process_script_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: d2877d602d3923cb2ce84a04ab14f1b48567426e -commit_date: 2020-02-18 04:02:00+00:00 fuzz_target: njs_process_script_fuzzer project: njs -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/njs_njs_process_script_fuzzer/build.sh b/benchmarks/njs_njs_process_script_fuzzer/build.sh index 229a35385..ffb1c550b 100755 --- a/benchmarks/njs_njs_process_script_fuzzer/build.sh +++ b/benchmarks/njs_njs_process_script_fuzzer/build.sh @@ -21,6 +21,8 @@ pushd $SRC/pcre CFLAGS="$CFLAGS -fno-use-cxa-atexit" CXXFLAGS="$CXXFLAGS -fno-use-cxa-atexit" ./configure make -j$(nproc) clean make -j$(nproc) all +make install +sed -i "s/\$libS\$libR \(-lpcre2-8$\)/\$libS\$libR -Wl,-Bstatic \1 -Wl,-Bdynamic/" /usr/local/bin/pcre2-config popd # build project @@ -35,14 +37,17 @@ SEED_CORPUS_PATH=$OUT/njs_process_script_fuzzer_seed_corpus mkdir -p $SEED_CORPUS_PATH set +x -cat src/test/njs_interactive_test.c src/test/njs_unit_test.c \ +cat src/test/njs_unit_test.c \ | egrep -o '".*"' | awk '{print substr($0,2,length($0)-2)}' | sort | uniq \ | while IFS= read -r line; do echo $line > $SEED_CORPUS_PATH/$(echo $line | sha1sum | awk '{ print $1 }'); done -set -x -cp -r test/fs test/module $SEED_CORPUS_PATH +find test/ -name *.t.js \ + | while IFS= read -r testname; do + cp $testname $SEED_CORPUS_PATH/$(echo $testname | sha1sum | awk '{ print $1 }'); + done +set -x zip -q $SEED_CORPUS_PATH.zip $SEED_CORPUS_PATH rm -rf $SEED_CORPUS_PATH diff --git a/benchmarks/openh264_decoder_fuzzer/Dockerfile b/benchmarks/openh264_decoder_fuzzer/Dockerfile index 29c9a69ce..d124f7b43 100644 --- a/benchmarks/openh264_decoder_fuzzer/Dockerfile +++ b/benchmarks/openh264_decoder_fuzzer/Dockerfile @@ -14,11 +14,18 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER twsmith@mozilla.com + RUN dpkg --add-architecture i386 && \ apt-get update && \ - apt-get install -y libstdc++-5-dev libstdc++-5-dev:i386 nasm subversion -RUN git clone --depth 1 https://github.com/cisco/openh264.git openh264 + apt-get install -y \ + libstdc++-9-dev 
libstdc++-9-dev:i386 nasm subversion + +RUN git clone \ + --branch v2.3.1 \ + --depth 1 \ + https://github.com/cisco/openh264.git + WORKDIR openh264 COPY build.sh decoder_fuzzer.cpp $SRC/ diff --git a/benchmarks/openh264_decoder_fuzzer/benchmark.yaml b/benchmarks/openh264_decoder_fuzzer/benchmark.yaml index 2e157ab59..3593ff4b1 100644 --- a/benchmarks/openh264_decoder_fuzzer/benchmark.yaml +++ b/benchmarks/openh264_decoder_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: c185ac351eff0ae277bfe14a3b2ad52e9ed8ab81 -commit_date: 2019-10-22 04:51:00+00:00 fuzz_target: decoder_fuzzer project: openh264 -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/openh264_decoder_fuzzer/build.sh b/benchmarks/openh264_decoder_fuzzer/build.sh index b0352aecd..814113860 100755 --- a/benchmarks/openh264_decoder_fuzzer/build.sh +++ b/benchmarks/openh264_decoder_fuzzer/build.sh @@ -20,11 +20,11 @@ svn export https://github.com/mozillasecurity/fuzzdata.git/trunk/samples/h264 co mv ./res/*.264 ./corpus/ zip -j0r ${OUT}/decoder_fuzzer_seed_corpus.zip ./corpus/ -# build +# build if [[ $CXXFLAGS = *sanitize=memory* ]]; then ASM_BUILD=No else ASM_BUILD=Yes fi make -j$(nproc) ARCH=$ARCHITECTURE USE_ASM=$ASM_BUILD BUILDTYPE=Debug libraries -$CXX $CXXFLAGS -o $OUT/decoder_fuzzer -I./codec/api/svc -I./codec/console/common/inc -I./codec/common/inc -L. $LIB_FUZZING_ENGINE $SRC/decoder_fuzzer.cpp libopenh264.a +$CXX $CXXFLAGS -o $OUT/decoder_fuzzer -I./codec/api/wels -I./codec/console/common/inc -I./codec/common/inc -L. $LIB_FUZZING_ENGINE $SRC/decoder_fuzzer.cpp libopenh264.a diff --git a/benchmarks/openssl_x509/Dockerfile b/benchmarks/openssl_x509/Dockerfile index 64b5e2b45..01937f43b 100644 --- a/benchmarks/openssl_x509/Dockerfile +++ b/benchmarks/openssl_x509/Dockerfile @@ -14,8 +14,11 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y make -RUN git clone --depth 1 https://github.com/openssl/openssl.git +RUN git clone \ + --depth 1 \ + --branch openssl-3.0.7 \ + https://github.com/openssl/openssl.git WORKDIR openssl COPY build.sh $SRC/ diff --git a/benchmarks/openthread-2019-12-23/Dockerfile b/benchmarks/openthread-2019-12-23/Dockerfile index 6f2a3e955..020b0552f 100644 --- a/benchmarks/openthread-2019-12-23/Dockerfile +++ b/benchmarks/openthread-2019-12-23/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/oss_fuzz_benchmark_integration.py b/benchmarks/oss_fuzz_benchmark_integration.py index 8c6b69d63..ea3bd82fb 100755 --- a/benchmarks/oss_fuzz_benchmark_integration.py +++ b/benchmarks/oss_fuzz_benchmark_integration.py @@ -165,7 +165,11 @@ def replace_base_builder(benchmark_dir, commit_date): base_builder_name = _get_base_builder(dockerfile_path) base_builder_repo = _load_docker_repo(base_builder_name) if base_builder_repo: - base_builder_digest = base_builder_repo.find_digest(commit_date) + # base_builder_digest = base_builder_repo.find_digest(commit_date) 
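        # Temporary workaround: pin one fixed base-builder digest instead of
        # resolving it from commit_date, so all benchmarks build on this image.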
+ base_builder_digest = ('sha256:fb1a9a49752c9e504687448d1f1a048ec1e0' + '62e2e40f7e8a23e86b63ff3dad7c') + print(f'Using image {base_builder_digest}. ' + 'See https://github.com/google/oss-fuzz/issues/8625') logs.info('Using base-builder with digest %s.', base_builder_digest) _replace_base_builder_digest( dockerfile_path, base_builder_name, base_builder_digest) @@ -196,9 +200,9 @@ def integrate_benchmark(project, fuzz_target, benchmark_name, commit, # work on arbitrary iso format strings. commit_date = datetime.datetime.fromisoformat(commit_date).astimezone( datetime.timezone.utc) - if commit_date >= OSS_FUZZ_IMAGE_UPGRADE_DATE: + if commit_date <= OSS_FUZZ_IMAGE_UPGRADE_DATE: raise ValueError( - f'Cannot integrate benchmark after {OSS_FUZZ_IMAGE_UPGRADE_DATE}. ' + f'Cannot integrate benchmark before {OSS_FUZZ_IMAGE_UPGRADE_DATE}. ' 'See https://github.com/google/fuzzbench/issues/1353') copy_oss_fuzz_files(project, commit_date, benchmark_dir) replace_base_builder(benchmark_dir, commit_date) @@ -225,14 +229,19 @@ def main(): '--benchmark-name', help='Benchmark name. Defaults to _', required=False) - parser.add_argument('-c', '--commit', help='Project commit hash.') + parser.add_argument('-c', '--commit', help='Project commit hash.', + required=True) parser.add_argument( '-d', '--date', - help='Date of the commit. Example: 2019-10-19T09:07:25+01:00') + help='Date of the commit. Example: 2019-10-19T09:07:25+01:00', + required=True) logs.initialize() args = parser.parse_args() + if args.date is None and args.commit is None: + args.date = str(datetime.datetime.utcnow()) + print('Neither date nor commit specified, using time now: ', args.date) benchmark = integrate_benchmark( args.project, args.fuzz_target, args.benchmark_name, args.commit, args.date) diff --git a/benchmarks/php_php-fuzz-execute/Dockerfile b/benchmarks/php_php-fuzz-execute/Dockerfile index abf51f5b5..8945430b5 100644 --- a/benchmarks/php_php-fuzz-execute/Dockerfile +++ b/benchmarks/php_php-fuzz-execute/Dockerfile @@ -14,10 +14,16 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + RUN apt-get update && \ - apt-get install -y autoconf automake libtool bison re2c pkg-config -RUN git clone --depth 1 --branch master https://github.com/php/php-src.git php-src -RUN git clone https://github.com/kkos/oniguruma.git php-src/oniguruma + apt-get install -y \ + autoconf automake libtool bison re2c pkg-config + +RUN git clone \ + --depth 1 \ + --branch php-8.2.0RC6 \ + https://github.com/php/php-src.git + WORKDIR php-src COPY build.sh *.options $SRC/ diff --git a/benchmarks/php_php-fuzz-execute/benchmark.yaml b/benchmarks/php_php-fuzz-execute/benchmark.yaml index 2e94e9664..b1193628c 100644 --- a/benchmarks/php_php-fuzz-execute/benchmark.yaml +++ b/benchmarks/php_php-fuzz-execute/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 1902f730ee2bda60552f34c0643e2d7b47e4fb64 -commit_date: 2020-08-29 05:28:00+00:00 fuzz_target: php-fuzz-execute project: php -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/php_php-fuzz-execute/build.sh b/benchmarks/php_php-fuzz-execute/build.sh index c85f9b106..bb3046bb2 100755 --- a/benchmarks/php_php-fuzz-execute/build.sh +++ b/benchmarks/php_php-fuzz-execute/build.sh @@ -15,28 +15,28 @@ # 
################################################################################ -# build oniguruma and link statically -pushd oniguruma -autoreconf -vfi -./configure -make -j$(nproc) -popd -export ONIG_CFLAGS="-I$PWD/oniguruma/src" -export ONIG_LIBS="-L$PWD/oniguruma/src/.libs -l:libonig.a" - # PHP's zend_function union is incompatible with the object-size sanitizer export CFLAGS="$CFLAGS -fno-sanitize=object-size" export CXXFLAGS="$CXXFLAGS -fno-sanitize=object-size" +# Disable JIT profitability checks. +export CFLAGS="$CFLAGS -DPROFITABILITY_CHECKS=0" + +# Make sure the right assembly files are picked +BUILD_FLAG="" +if [ "$ARCHITECTURE" = "i386" ]; then + BUILD_FLAG="--build=i686-pc-linux-gnu" +fi + # build project -./buildconf -./configure \ +./buildconf --force +./configure $BUILD_FLAG \ --disable-all \ --enable-debug-assertions \ --enable-option-checking=fatal \ --enable-fuzzer \ --enable-exif \ - --enable-mbstring \ + --enable-opcache \ --without-pcre-jit \ --disable-phpdbg \ --disable-cgi \ @@ -46,12 +46,34 @@ make -j$(nproc) # Generate corpuses and dictionaries. sapi/cli/php sapi/fuzzer/generate_all.php -FUZZERS="php-fuzz-execute" +# Copy dictionaries to expected locations. +cp sapi/fuzzer/dict/unserialize $OUT/php-fuzz-unserialize.dict +cp sapi/fuzzer/dict/parser $OUT/php-fuzz-parser.dict +cp sapi/fuzzer/json.dict $OUT/php-fuzz-json.dict + +FUZZERS="php-fuzz-json +php-fuzz-exif +php-fuzz-unserialize +php-fuzz-unserializehash +php-fuzz-parser +php-fuzz-execute" for fuzzerName in $FUZZERS; do - cp sapi/fuzzer/$fuzzerName $OUT/ + cp sapi/fuzzer/$fuzzerName $OUT/ done + +# The JIT fuzzer is fundamentally incompatible with memory sanitizer, +# as that would require the JIT to emit msan instrumentation itself. +# In practice it is currently also incompatible with ubsan. +if [ "$SANITIZER" != "memory" ] && [ "$SANITIZER" != "undefined" ]; then + cp sapi/fuzzer/php-fuzz-function-jit $OUT/ + cp sapi/fuzzer/php-fuzz-tracing-jit $OUT/ + + # Copy opcache.so extension, which does not support static linking. 
+ mkdir -p $OUT/modules + cp modules/opcache.so $OUT/modules +fi + # copy corpora from source for fuzzerName in `ls sapi/fuzzer/corpus`; do - zip -j $OUT/php-fuzz-${fuzzerName}_seed_corpus.zip sapi/fuzzer/corpus/${fuzzerName}/* + zip -j $OUT/php-fuzz-${fuzzerName}_seed_corpus.zip sapi/fuzzer/corpus/${fuzzerName}/* done - diff --git a/benchmarks/php_php-fuzz-parser-2020-07-25/Dockerfile b/benchmarks/php_php-fuzz-parser-2020-07-25/Dockerfile index abf51f5b5..8945430b5 100644 --- a/benchmarks/php_php-fuzz-parser-2020-07-25/Dockerfile +++ b/benchmarks/php_php-fuzz-parser-2020-07-25/Dockerfile @@ -14,10 +14,16 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + RUN apt-get update && \ - apt-get install -y autoconf automake libtool bison re2c pkg-config -RUN git clone --depth 1 --branch master https://github.com/php/php-src.git php-src -RUN git clone https://github.com/kkos/oniguruma.git php-src/oniguruma + apt-get install -y \ + autoconf automake libtool bison re2c pkg-config + +RUN git clone \ + --depth 1 \ + --branch php-8.2.0RC6 \ + https://github.com/php/php-src.git + WORKDIR php-src COPY build.sh *.options $SRC/ diff --git a/benchmarks/php_php-fuzz-parser-2020-07-25/benchmark.yaml b/benchmarks/php_php-fuzz-parser-2020-07-25/benchmark.yaml index 875800592..5e0d12b93 100644 --- a/benchmarks/php_php-fuzz-parser-2020-07-25/benchmark.yaml +++ b/benchmarks/php_php-fuzz-parser-2020-07-25/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 8664ff7ae174c610769c36a712eeea80cc3ad933 -commit_date: 2020-07-25 01:16:00+00:00 fuzz_target: php-fuzz-parser project: php -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/php_php-fuzz-parser-2020-07-25/build.sh b/benchmarks/php_php-fuzz-parser-2020-07-25/build.sh index fd11356a1..bb3046bb2 100755 --- a/benchmarks/php_php-fuzz-parser-2020-07-25/build.sh +++ b/benchmarks/php_php-fuzz-parser-2020-07-25/build.sh @@ -15,27 +15,28 @@ # ################################################################################ -# build oniguruma and link statically -pushd oniguruma -autoreconf -vfi -./configure -make -j$(nproc) -popd -export ONIG_CFLAGS="-I$PWD/oniguruma/src" -export ONIG_LIBS="-L$PWD/oniguruma/src/.libs -l:libonig.a" - # PHP's zend_function union is incompatible with the object-size sanitizer export CFLAGS="$CFLAGS -fno-sanitize=object-size" export CXXFLAGS="$CXXFLAGS -fno-sanitize=object-size" +# Disable JIT profitability checks. 
+export CFLAGS="$CFLAGS -DPROFITABILITY_CHECKS=0" + +# Make sure the right assembly files are picked +BUILD_FLAG="" +if [ "$ARCHITECTURE" = "i386" ]; then + BUILD_FLAG="--build=i686-pc-linux-gnu" +fi + # build project -./buildconf -./configure \ +./buildconf --force +./configure $BUILD_FLAG \ --disable-all \ + --enable-debug-assertions \ --enable-option-checking=fatal \ --enable-fuzzer \ --enable-exif \ - --enable-mbstring \ + --enable-opcache \ --without-pcre-jit \ --disable-phpdbg \ --disable-cgi \ @@ -52,15 +53,27 @@ cp sapi/fuzzer/json.dict $OUT/php-fuzz-json.dict FUZZERS="php-fuzz-json php-fuzz-exif -php-fuzz-mbstring php-fuzz-unserialize php-fuzz-unserializehash -php-fuzz-parser" +php-fuzz-parser +php-fuzz-execute" for fuzzerName in $FUZZERS; do - cp sapi/fuzzer/$fuzzerName $OUT/ + cp sapi/fuzzer/$fuzzerName $OUT/ done + +# The JIT fuzzer is fundamentally incompatible with memory sanitizer, +# as that would require the JIT to emit msan instrumentation itself. +# In practice it is currently also incompatible with ubsan. +if [ "$SANITIZER" != "memory" ] && [ "$SANITIZER" != "undefined" ]; then + cp sapi/fuzzer/php-fuzz-function-jit $OUT/ + cp sapi/fuzzer/php-fuzz-tracing-jit $OUT/ + + # Copy opcache.so extension, which does not support static linking. + mkdir -p $OUT/modules + cp modules/opcache.so $OUT/modules +fi + # copy corpora from source for fuzzerName in `ls sapi/fuzzer/corpus`; do - zip -j $OUT/php-fuzz-${fuzzerName}_seed_corpus.zip sapi/fuzzer/corpus/${fuzzerName}/* + zip -j $OUT/php-fuzz-${fuzzerName}_seed_corpus.zip sapi/fuzzer/corpus/${fuzzerName}/* done - diff --git a/benchmarks/php_php-fuzz-parser/Dockerfile b/benchmarks/php_php-fuzz-parser/Dockerfile index 18e18c4d5..7d8934275 100644 --- a/benchmarks/php_php-fuzz-parser/Dockerfile +++ b/benchmarks/php_php-fuzz-parser/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y autoconf automake libtool bison re2c pkg-config diff --git a/benchmarks/poppler_pdf_fuzzer/Dockerfile b/benchmarks/poppler_pdf_fuzzer/Dockerfile index 484fb42fc..bf1ab0990 100644 --- a/benchmarks/poppler_pdf_fuzzer/Dockerfile +++ b/benchmarks/poppler_pdf_fuzzer/Dockerfile @@ -14,15 +14,69 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER jonathan@titanous.com -RUN apt-get update && apt-get upgrade -y && apt-get install -y make autoconf libz-dev lib32z1-dev zlib1g-dev automake libtool pkg-config cmake -RUN git clone --depth 1 https://anongit.freedesktop.org/git/poppler/poppler.git -RUN git clone --depth 1 git://git.sv.nongnu.org/freetype/freetype2.git -RUN git clone --depth 1 https://github.com/mozilla/pdf.js pdf.js && \ + +RUN apt-get update && \ + apt-get install -y \ + wget autoconf automake libtool pkg-config gperf +RUN pip3 install \ + meson ninja gyp-next + +RUN git clone \ + --depth 1 \ + --branch v1.2.13 \ + https://github.com/madler/zlib.git +RUN git clone \ + --depth 1 \ + --branch VER-2-12-1 \ + https://gitlab.freedesktop.org/freetype/freetype.git +RUN git 
clone \ + --depth 1 \ + --branch lcms2.14 \ + https://github.com/mm2/Little-CMS.git +RUN git clone \ + --depth 1 \ + --branch v2.5.0 \ + https://github.com/uclouvain/openjpeg +RUN git clone \ + --depth 1 \ + --branch v1.6.38 \ + https://github.com/glennrp/libpng.git +RUN git clone \ + --depth 1 \ + --branch 2.14.1 \ + https://gitlab.freedesktop.org/fontconfig/fontconfig.git +RUN git clone \ + --depth 1 \ + --branch 1.17.6 \ + https://gitlab.freedesktop.org/cairo/cairo.git +RUN git clone \ + --depth 1 \ + --branch=5.15 \ + git://code.qt.io/qt/qtbase.git +RUN git clone \ + --depth 1 \ + --branch 1.50.11 \ + https://gitlab.gnome.org/GNOME/pango.git + +ADD https://ftp.gnome.org/pub/gnome/sources/glib/2.70/glib-2.70.0.tar.xz $SRC +RUN tar xvJf $SRC/glib-2.70.0.tar.xz +RUN wget https://boostorg.jfrog.io/artifactory/main/release/1.76.0/source/boost_1_76_0.tar.bz2 +RUN wget https://ftp.mozilla.org/pub/security/nss/releases/NSS_3_75_RTM/src/nss-3.75-with-nspr-4.32.tar.gz + +RUN git clone \ + --depth 1 \ + --branch poppler-22.02.0 \ + https://github.com/freedesktop/poppler.git + +RUN git clone \ + --depth 1 \ + --branch v3.0.279 \ + https://github.com/mozilla/pdf.js pdf.js && \ zip -q $SRC/pdf_fuzzer_seed_corpus.zip pdf.js/test/pdfs/*.pdf && \ rm -rf pdf.js -ADD https://raw.githubusercontent.com/rc0r/afl-fuzz/master/dictionaries/pdf.dict $SRC/pdf_fuzzer.dict -WORKDIR $SRC -COPY *.cc poppler/fuzz/ + +ADD https://raw.githubusercontent.com/google/fuzzing/master/dictionaries/pdf.dict $SRC/poppler.dict +WORKDIR $SRC/poppler COPY build.sh $SRC/ diff --git a/benchmarks/poppler_pdf_fuzzer/benchmark.yaml b/benchmarks/poppler_pdf_fuzzer/benchmark.yaml index b9d34d7da..4b8c7b1ac 100644 --- a/benchmarks/poppler_pdf_fuzzer/benchmark.yaml +++ b/benchmarks/poppler_pdf_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 987bbb684e688651e3d06502e2ff9b6f08130538 -commit_date: 2018-08-31 03:01:00+00:00 fuzz_target: pdf_fuzzer project: poppler -type: bug unsupported_fuzzers: - aflcc - afl_qemu @@ -24,3 +21,4 @@ unsupported_fuzzers: - fuzzolic_aflplusplus_z3dict - aflplusplus_gcc - tortoisefuzz + - wingfuzz diff --git a/benchmarks/poppler_pdf_fuzzer/build.sh b/benchmarks/poppler_pdf_fuzzer/build.sh index fa4f5b4ac..4863f3820 100755 --- a/benchmarks/poppler_pdf_fuzzer/build.sh +++ b/benchmarks/poppler_pdf_fuzzer/build.sh @@ -15,50 +15,278 @@ # ################################################################################ -pushd $SRC/freetype2 +PREFIX=$WORK/prefix +mkdir -p $PREFIX + +export PKG_CONFIG="`which pkg-config` --static" +export PKG_CONFIG_PATH=$PREFIX/lib/pkgconfig +export PATH=$PREFIX/bin:$PATH + +BUILD=$WORK/build + +rm -rf $WORK/* +rm -rf $BUILD +mkdir -p $BUILD + +# Install Boost headers +cd $SRC/ +tar jxf boost_1_76_0.tar.bz2 +cd boost_1_76_0/ +CFLAGS="" CXXFLAGS="" ./bootstrap.sh +CFLAGS="" CXXFLAGS="" ./b2 headers +cp -R boost/ /usr/include/ + +pushd $SRC/zlib +CFLAGS=-fPIC ./configure --static --prefix=$PREFIX +make install -j$(nproc) + +pushd $SRC +tar zxf nss-3.75-with-nspr-4.32.tar.gz +cd nss-3.75 +nss_flag="" +SAVE_CFLAGS="$CFLAGS" +SAVE_CXXFLAGS="$CXXFLAGS" +if [ "$SANITIZER" = "memory" ]; then + nss_flag="--msan" +elif [ "$SANITIZER" = "address" ]; then + nss_flag="--asan" +elif [ "$SANITIZER" = "undefined" ]; then + nss_flag="--ubsan" +elif [ "$SANITIZER" = "coverage" ]; then + # some parts of nss don't like -fcoverage-mapping nor -fprofile-instr-generate :/ + CFLAGS="${CFLAGS/"-fcoverage-mapping"/" "}" + CFLAGS="${CFLAGS/"-fprofile-instr-generate"/" "}" + 
CXXFLAGS="${CXXFLAGS/"-fcoverage-mapping"/" "}" + CXXFLAGS="${CXXFLAGS/"-fprofile-instr-generate"/" "}" +fi + +./nss/build.sh $nss_flag --disable-tests --static -v -Dmozilla_client=1 -Dzlib_libs=$PREFIX/lib/libz.a + +CFLAGS="$SAVE_CFLAGS" +CXXFLAGS="$SAVE_CXXFLAGS" + +# NSS has a .pc.in file but doesn't do anything with it +cp nss/pkg/pkg-config/nss.pc.in $PREFIX/lib/pkgconfig/nss.pc +sed -i "s#\${libdir}#${SRC}/nss-3.75/dist/Debug/lib#g" $PREFIX/lib/pkgconfig/nss.pc +sed -i "s#\${includedir}#${SRC}/nss-3.75/dist/public/nss#g" $PREFIX/lib/pkgconfig/nss.pc +sed -i "s#%NSS_VERSION%#3.75#g" $PREFIX/lib/pkgconfig/nss.pc +cp dist/Debug/lib/pkgconfig/nspr.pc $PREFIX/lib/pkgconfig/ + +pushd $SRC/freetype ./autogen.sh -./configure --prefix="$WORK" --disable-shared PKG_CONFIG_PATH="$WORK/lib/pkgconfig" +./configure --prefix="$PREFIX" --disable-shared PKG_CONFIG_PATH="$PKG_CONFIG_PATH" +make -j$(nproc) +make install + +pushd $SRC/Little-CMS +./autogen.sh --prefix="$PREFIX" --disable-shared PKG_CONFIG_PATH="$PKG_CONFIG_PATH" +make -j$(nproc) +make install + +mkdir -p $SRC/openjpeg/build +pushd $SRC/openjpeg/build +cmake .. -DBUILD_SHARED_LIBS=OFF -DCMAKE_INSTALL_PREFIX=$PREFIX +make -j$(nproc) install + +if [ "$SANITIZER" != "memory" ]; then + + pushd $SRC/fontconfig + meson \ + --prefix=$PREFIX \ + --libdir=lib \ + --default-library=static \ + _builddir + ninja -C _builddir + ninja -C _builddir install + popd + + pushd $SRC/glib-2.70.0 + # remove once there's a released glib that contains https://gitlab.gnome.org/GNOME/glib/-/merge_requests/2324 + sed -i s#https://ftp.pcre.org/pub/pcre/pcre-8.37.tar.bz2#https://sourceforge.net/projects/pcre/files/pcre/8.37/pcre-8.37.tar.bz2# subprojects/libpcre.wrap + meson \ + --prefix=$PREFIX \ + --libdir=lib \ + --default-library=static \ + -Db_lundef=false \ + -Doss_fuzz=enabled \ + -Dlibmount=disabled \ + _builddir + ninja -C _builddir + ninja -C _builddir install + popd + + pushd $SRC/libpng + autoreconf -fi + CPPFLAGS=-I$PREFIX/include LDFLAGS=-L$PREFIX/lib ./configure --prefix="$PREFIX" --disable-shared --disable-dependency-tracking + make -j$(nproc) + make install + + pushd $SRC/cairo + meson \ + --prefix=$PREFIX \ + --libdir=lib \ + --default-library=static \ + _builddir + ninja -C _builddir + ninja -C _builddir install + popd + + pushd $SRC/pango + meson \ + -Ddefault_library=static \ + --prefix=$PREFIX \ + --libdir=lib \ + _builddir + sed -i -e 's/ -Werror=implicit-fallthrough//g' _builddir/build.ninja + ninja -C _builddir + ninja -C _builddir install + popd +fi + +pushd $SRC/qtbase +# add the flags to Qt build too +sed -i -e "s/QMAKE_CXXFLAGS += -stdlib=libc++/QMAKE_CXXFLAGS += -stdlib=libc++ $CXXFLAGS\nQMAKE_CFLAGS += $CFLAGS/g" mkspecs/linux-clang-libc++/qmake.conf +sed -i -e "s/QMAKE_LFLAGS += -stdlib=libc++/QMAKE_LFLAGS += -stdlib=libc++ -lpthread $CXXFLAGS/g" mkspecs/linux-clang-libc++/qmake.conf +# disable sanitize=vptr for harfbuzz since it compiles without rtti +sed -i -e "s/TARGET = qtharfbuzz/TARGET = qtharfbuzz\nQMAKE_CXXFLAGS += -fno-sanitize=vptr/g" src/3rdparty/harfbuzz-ng/harfbuzz-ng.pro +# make qmake compile faster +sed -i -e "s/MAKE\")/MAKE\" -j$(nproc))/g" configure +./configure --glib=no --libpng=qt -opensource -confirm-license -static -no-opengl -no-icu -no-pkg-config -platform linux-clang-libc++ -nomake tests -nomake examples -prefix $PREFIX -D QT_NO_DEPRECATED_WARNINGS make -j$(nproc) make install +popd + +# Poppler complains when PKG_CONFIG is set to `which pkg-config --static` so +# temporarily removing it +export 
PKG_CONFIG="`which pkg-config`" -mkdir -p $WORK/poppler -pushd $WORK/poppler -cmake $SRC/poppler \ +if [ "$SANITIZER" != "memory" ]; then + POPPLER_ENABLE_GLIB=ON + POPPLER_FONT_CONFIGURATION=fontconfig +else + POPPLER_ENABLE_GLIB=OFF + POPPLER_FONT_CONFIGURATION=generic +fi + +mkdir -p $SRC/poppler/build +pushd $SRC/poppler/build +cmake .. \ -DCMAKE_BUILD_TYPE=debug \ -DBUILD_SHARED_LIBS=OFF \ - -DFONT_CONFIGURATION=generic \ + -DENABLE_FUZZER=OFF \ + -DFONT_CONFIGURATION=$POPPLER_FONT_CONFIGURATION \ -DENABLE_DCTDECODER=none \ - -DENABLE_LIBOPENJPEG=none \ - -DENABLE_CMS=none \ + -DENABLE_GOBJECT_INTROSPECTION=OFF \ -DENABLE_LIBPNG=OFF \ -DENABLE_ZLIB=OFF \ -DENABLE_LIBTIFF=OFF \ -DENABLE_LIBJPEG=OFF \ - -DENABLE_GLIB=OFF \ + -DENABLE_GLIB=$POPPLER_ENABLE_GLIB \ -DENABLE_LIBCURL=OFF \ - -DENABLE_QT5=OFF \ + -DENABLE_QT5=ON \ -DENABLE_UTILS=OFF \ - -DWITH_Cairo=OFF \ - -DWITH_NSS3=OFF \ - -DFREETYPE_INCLUDE_DIRS=$WORK/include/freetype2 \ - -DFREETYPE_LIBRARY=$WORK/lib -make -j$(nproc) poppler poppler-cpp + -DWITH_Cairo=$POPPLER_ENABLE_GLIB \ + -DCMAKE_INSTALL_PREFIX=$PREFIX + +export PKG_CONFIG="`which pkg-config` --static" +make -j$(nproc) poppler poppler-cpp poppler-qt5 +if [ "$SANITIZER" != "memory" ]; then + make -j$(nproc) poppler-glib +fi + +PREDEPS_LDFLAGS="-Wl,-Bdynamic -ldl -lm -lc -lz -pthread -lrt -lpthread" +DEPS="freetype2 lcms2 libopenjp2" +if [ "$SANITIZER" != "memory" ]; then + DEPS="$DEPS fontconfig libpng" +fi +BUILD_CFLAGS="$CFLAGS `pkg-config --static --cflags $DEPS`" +BUILD_LDFLAGS="-Wl,-static `pkg-config --static --libs $DEPS`" +# static linking is hard ^_^ +NSS_STATIC_LIBS=`ls $SRC/nss-3.75/dist/Debug/lib/lib*.a` +NSS_STATIC_LIBS="$NSS_STATIC_LIBS $NSS_STATIC_LIBS $NSS_STATIC_LIBS" +BUILD_LDFLAGS="$BUILD_LDFLAGS $NSS_STATIC_LIBS" -fuzz_target=pdf_fuzzer +fuzzers=$(find $SRC/poppler/cpp/tests/fuzzing/ -name "*_fuzzer.cc") -pushd $SRC/poppler -$CXX $CXXFLAGS -std=c++11 -Icpp \ - fuzz/pdf_fuzzer.cc -lz -o $OUT/$fuzz_target \ - -lFuzzingEngine $WORK/poppler/cpp/libpoppler-cpp.a $WORK/poppler/libpoppler.a $WORK/lib/libfreetype.a +for f in $fuzzers; do + fuzzer_name=$(basename $f .cc) + + $CXX $CXXFLAGS -std=c++11 -I$SRC/poppler/cpp -I$SRC/poppler/build/cpp \ + $BUILD_CFLAGS \ + $f -o $OUT/$fuzzer_name \ + $PREDEPS_LDFLAGS \ + $SRC/poppler/build/cpp/libpoppler-cpp.a \ + $SRC/poppler/build/libpoppler.a \ + $BUILD_LDFLAGS \ + $LIB_FUZZING_ENGINE \ + $LIB_FUZZING_ENGINE \ + -Wl,-Bdynamic +done + +if [ "$SANITIZER" != "memory" ]; then + DEPS="gmodule-2.0 glib-2.0 gio-2.0 gobject-2.0 freetype2 lcms2 libopenjp2 cairo cairo-gobject pango fontconfig libpng" + BUILD_CFLAGS="$CFLAGS `pkg-config --static --cflags $DEPS`" + BUILD_LDFLAGS="-Wl,-static `pkg-config --static --libs $DEPS`" + BUILD_LDFLAGS="$BUILD_LDFLAGS $NSS_STATIC_LIBS" + + fuzzers=$(find $SRC/poppler/glib/tests/fuzzing/ -name "*_fuzzer.cc") + for f in $fuzzers; do + fuzzer_name=$(basename $f .cc) + + $CXX $CXXFLAGS -std=c++11 -I$SRC/poppler/glib -I$SRC/poppler/build/glib \ + $BUILD_CFLAGS \ + $f -o $OUT/$fuzzer_name \ + $PREDEPS_LDFLAGS \ + $SRC/poppler/build/glib/libpoppler-glib.a \ + $SRC/poppler/build/cpp/libpoppler-cpp.a \ + $SRC/poppler/build/libpoppler.a \ + $BUILD_LDFLAGS \ + $LIB_FUZZING_ENGINE \ + -Wl,-Bdynamic + done +fi + +PREDEPS_LDFLAGS="-Wl,-Bdynamic -ldl -lm -lc -lz -pthread -lrt -lpthread" +DEPS="freetype2 lcms2 libopenjp2 Qt5Core Qt5Gui Qt5Xml" +if [ "$SANITIZER" != "memory" ]; then + DEPS="$DEPS fontconfig libpng" +fi +BUILD_CFLAGS="$CFLAGS `pkg-config --static --cflags $DEPS`" 
+BUILD_LDFLAGS="-Wl,-static `pkg-config --static --libs $DEPS`" +BUILD_LDFLAGS="$BUILD_LDFLAGS $NSS_STATIC_LIBS" + +fuzzers=$(find $SRC/poppler/qt5/tests/fuzzing/ -name "*_fuzzer.cc") +for f in $fuzzers; do + fuzzer_name=$(basename $f .cc) + + $CXX $CXXFLAGS -std=c++11 -fPIC \ + -I$SRC/poppler/qt5/src -I$SRC/poppler/build/qt5/src \ + $BUILD_CFLAGS \ + $f -o $OUT/$fuzzer_name \ + $PREDEPS_LDFLAGS \ + $SRC/poppler/build/qt5/src/libpoppler-qt5.a \ + $SRC/poppler/build/cpp/libpoppler-cpp.a \ + $SRC/poppler/build/libpoppler.a \ + $BUILD_LDFLAGS \ + $LIB_FUZZING_ENGINE \ + -Wl,-Bdynamic +done mv $SRC/{*.zip,*.dict} $OUT -if [ ! -f "${OUT}/${fuzz_target}_seed_corpus.zip" ]; then - echo "missing seed corpus" - exit 1 +if [ ! -f "${OUT}/poppler_seed_corpus.zip" ]; then + echo "missing seed corpus" + exit 1 fi -if [ ! -f "${OUT}/${fuzz_target}.dict" ]; then - echo "missing dictionary" - exit 1 +if [ ! -f "${OUT}/poppler.dict" ]; then + echo "missing dictionary" + exit 1 fi + +fuzzers=$(find $OUT -name "*_fuzzer") +for f in $fuzzers; do + fuzzer_name=$(basename $f) + ln -sf $OUT/poppler_seed_corpus.zip $OUT/${fuzzer_name}_seed_corpus.zip + ln -sf $OUT/poppler.dict $OUT/${fuzzer_name}.dict +done diff --git a/benchmarks/proj4-2017-08-14/Dockerfile b/benchmarks/proj4-2017-08-14/Dockerfile index 44e51f680..e8bbe1b2a 100644 --- a/benchmarks/proj4-2017-08-14/Dockerfile +++ b/benchmarks/proj4-2017-08-14/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/proj4_standard_fuzzer/Dockerfile b/benchmarks/proj4_standard_fuzzer/Dockerfile index 8da1d80e6..701d0c298 100644 --- a/benchmarks/proj4_standard_fuzzer/Dockerfile +++ b/benchmarks/proj4_standard_fuzzer/Dockerfile @@ -14,8 +14,35 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a -RUN apt-get update && apt-get install -y make autoconf automake libtool g++ sqlite3 libsqlite3-dev pkg-config -RUN git clone --depth 1 https://github.com/OSGeo/proj.4 proj.4 -WORKDIR proj.4 -COPY build.sh $SRC/ +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN apt-get update && \ + apt-get install -y \ + make autoconf automake libtool g++ sqlite3 pkg-config wget + +RUN git clone \ + --depth 1 \ + --branch 9.1.0 \ + https://github.com/OSGeo/PROJ \ + PROJ +RUN git clone \ + --depth 1 \ + --branch curl-7_86_0 \ + https://github.com/curl/curl.git \ + PROJ/curl +# Use a commit hash instead of the latest release of libtiff because the latter +# does not use autogen.sh, which is required in the latest build.sh. +# Feel free to change back to a release tag in the next update.
+RUN git clone \ + https://gitlab.com/libtiff/libtiff.git \ + PROJ/libtiff && \ + (cd PROJ/libtiff && \ + git checkout 31d9e9f73dc8b6fa599c931d9ff058d8ea32795c) + +WORKDIR PROJ + +RUN cp test/fuzzers/build.sh $SRC/ +# This is to fix Fuzz Introspector build by using LLVM old pass manager +# re https://github.com/ossf/fuzz-introspector/issues/305 +ENV OLD_LLVMPASS 1 +ENV fuzzerName=standard_fuzzer diff --git a/benchmarks/proj4_standard_fuzzer/benchmark.yaml b/benchmarks/proj4_standard_fuzzer/benchmark.yaml index fbcd21a81..e090cb27b 100644 --- a/benchmarks/proj4_standard_fuzzer/benchmark.yaml +++ b/benchmarks/proj4_standard_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: c288debe029667515656213b0ba81956c3ede26d -commit_date: 2019-03-15 01:51:00+00:00 fuzz_target: standard_fuzzer project: proj4 -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/proj4_standard_fuzzer/build.sh b/benchmarks/proj4_standard_fuzzer/build.sh deleted file mode 100755 index 4dc9c6741..000000000 --- a/benchmarks/proj4_standard_fuzzer/build.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/bash -eu -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -################################################################################ - -./autogen.sh -./configure --disable-shared -make clean -s -(cd src && make -j$(nproc) -s) - -./test/fuzzers/build_google_oss_fuzzers.sh -./test/fuzzers/build_seed_corpus.sh diff --git a/benchmarks/proj4_standard_fuzzer/testcases/11697 b/benchmarks/proj4_standard_fuzzer/testcases/11697 deleted file mode 100644 index 8dac58f55..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/11697 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/11893 b/benchmarks/proj4_standard_fuzzer/testcases/11893 deleted file mode 100644 index 5794d954b..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/11893 +++ /dev/null @@ -1,3 +0,0 @@ -+o_lat_p +proj=ob_tran +o_proj=helmert -+proj=cc -BINARY_3D: ÿÿÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/12799 b/benchmarks/proj4_standard_fuzzer/testcases/12799 deleted file mode 100644 index a014a73a8..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/12799 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=cc -+proj=hammer +W=9 -BINARY_3D: ÿ ÿÿÿÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/12909 b/benchmarks/proj4_standard_fuzzer/testcases/12909 deleted file mode 100644 index d10b1aa2d..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/12909 +++ /dev/null @@ -1,2 +0,0 @@ -+proj=wag3 +R_a +a=2 +f=2 - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13028 b/benchmarks/proj4_standard_fuzzer/testcases/13028 deleted file mode 100644 index 9ce2f4f95..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13028 +++ /dev/null @@ -1,2 +0,0 @@ -+proj=aea +lat_1=900 - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13069 b/benchmarks/proj4_standard_fuzzer/testcases/13069 deleted file mode 
100644 index fc8cbd2a6..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13069 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=poly -+proj=pipeline +R=2 da=2 dy dz proj=molodensky dx df +step +step -BINARY_3D:ÿ ÿÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13790 b/benchmarks/proj4_standard_fuzzer/testcases/13790 deleted file mode 100644 index f792737c7..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13790 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=sterea -+proj=sterea -BINARY_3D: ÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13828 b/benchmarks/proj4_standard_fuzzer/testcases/13828 deleted file mode 100644 index 5047496d9..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13828 +++ /dev/null @@ -1,4 +0,0 @@ -+o_lat_p=0000000000000001@ups000 +proj=ob_tran +o_proj +o_p00+o_+stp +s +lat_1=1J +lat_2 +proj=imw_p +9GGGGGCGw -+proj=ups +nadgrids -3 -00+proj>”8”””””” \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13829 b/benchmarks/proj4_standard_fuzzer/testcases/13829 deleted file mode 100644 index 298349fac..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13829 +++ /dev/null @@ -1,2 +0,0 @@ -+proj=pipeline +lat_0=9090 proj=laea +step - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13830 b/benchmarks/proj4_standard_fuzzer/testcases/13830 deleted file mode 100644 index 907d3ef7a..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13830 +++ /dev/null @@ -1,2 +0,0 @@ -+proj=pipeline +f=1 +step - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13893 b/benchmarks/proj4_standard_fuzzer/testcases/13893 deleted file mode 100644 index 2a49549f2..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13893 +++ /dev/null @@ -1,3 +0,0 @@ -+a=6 +proj=tmerc -+proj=aeqd -BINARY_3D: ÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13894 b/benchmarks/proj4_standard_fuzzer/testcases/13894 deleted file mode 100644 index 1a55fdf7d..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/13894 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13895 b/benchmarks/proj4_standard_fuzzer/testcases/13895 deleted file mode 100644 index ea4b4984e..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/13895 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13947 b/benchmarks/proj4_standard_fuzzer/testcases/13947 deleted file mode 100644 index ffd0ba979..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/13947 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/13948 b/benchmarks/proj4_standard_fuzzer/testcases/13948 deleted file mode 100644 index a42feb284..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/13948 +++ /dev/null @@ -1,3 +0,0 @@ - +proj=pipeline +ÿÿF +proj=tpeqd +lon_2=1=j +lat_1=90 +sa +step +q +lat_2=90 +sts+š -==aaaa -aaa== \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14010 b/benchmarks/proj4_standard_fuzzer/testcases/14010 deleted file mode 100644 index 91f35b6da..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14010 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=bonne +lat_1=90 -+proj=bonne +lat_1=90 -BINARY_3D: \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14015 b/benchmarks/proj4_standard_fuzzer/testcases/14015 deleted file mode 100644 index 02e481571..000000000 Binary files 
a/benchmarks/proj4_standard_fuzzer/testcases/14015 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14061 b/benchmarks/proj4_standard_fuzzer/testcases/14061 deleted file mode 100644 index 693015383..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14061 +++ /dev/null @@ -1,2 +0,0 @@ -+proj=pipeline +lat_0=-90 proj=krovak +step - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14135 b/benchmarks/proj4_standard_fuzzer/testcases/14135 deleted file mode 100644 index e05313198..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14135 +++ /dev/null @@ -1,3 +0,0 @@ -+path=1 +lsat=5 +proj=lsat +init=IGNF:="ÿ proj=webmerc type=crs "ÿ -+proj=stere -BINARY_3D: ÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14136 b/benchmarks/proj4_standard_fuzzer/testcases/14136 deleted file mode 100644 index 4e5601c98..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14136 +++ /dev/null @@ -1,2 +0,0 @@ -+++proj=omerc +alpha +proj=ob_tran +o_proj= +lat_0=90s. - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14211 b/benchmarks/proj4_standard_fuzzer/testcases/14211 deleted file mode 100644 index 4c2d0d547..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14211 +++ /dev/null @@ -1,2 +0,0 @@ - +proj=eqc -+proj=pipeline +lat_2=90e3 +proj=eqdc +step diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14248 b/benchmarks/proj4_standard_fuzzer/testcases/14248 deleted file mode 100644 index f83adb2c3..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14248 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=pipeline +lat_1=5 lat_2 proj=imw_p +step -+proj=aeqd -BINARY_3D: @ + \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14286 b/benchmarks/proj4_standard_fuzzer/testcases/14286 deleted file mode 100644 index c0cf8fac6..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14286 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=ups -+mode=hex +proj=isea +resolution=31 -2 2 \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14342 b/benchmarks/proj4_standard_fuzzer/testcases/14342 deleted file mode 100644 index ec6eadc57..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/14342 and /dev/null differ diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14410 b/benchmarks/proj4_standard_fuzzer/testcases/14410 deleted file mode 100644 index ce3b2f3f2..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14410 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=geocent -+proj=pipeline +no_cut proj=airy +step -BINARY_3D: Ô \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14421 b/benchmarks/proj4_standard_fuzzer/testcases/14421 deleted file mode 100644 index 733815414..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14421 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=pipeline +proj=gs50 towgs84 +inv +step +step +inv -+proj=aeqd -BINARY_3D: ÿ ÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14477 b/benchmarks/proj4_standard_fuzzer/testcases/14477 deleted file mode 100644 index c599ee420..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14477 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=ups -+proj=pipeline +proj=pipeline proj=lagrng step +inv +step lat_1=89 +step +step +step +step +step -BINARY_3D: \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14602 b/benchmarks/proj4_standard_fuzzer/testcases/14602 deleted file mode 100644 index 
d1c8a9985..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14602 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=ups -+proj=pipeline +inv +proj=geos h=5e71 +step -BINARY_3D: \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14666 b/benchmarks/proj4_standard_fuzzer/testcases/14666 deleted file mode 100644 index ed1f9535e..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14666 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=pipeline +proj=geos h=5 a=66666666666666666 +step -+proj=aeqd -BINARY_3D: ÿÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/14766 b/benchmarks/proj4_standard_fuzzer/testcases/14766 deleted file mode 100644 index 39cbf2d07..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/14766 +++ /dev/null @@ -1,3 +0,0 @@ -+b=2 +init=epsg:4376 -+proj=vandg -BINARY_3D: Ô \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/15009 b/benchmarks/proj4_standard_fuzzer/testcases/15009 deleted file mode 100644 index 4214d562f..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/15009 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=ups -+mode=hex +proj=isea +resolution=2684354706 +aperture -BINARY_3D: ÿNA \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/15148 b/benchmarks/proj4_standard_fuzzer/testcases/15148 deleted file mode 100644 index 49ad20f8e..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/15148 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=cc -+proj=pipeline +proj=pipeline proj=sch plon_0=90 plat_0 phdg_0 step step inv plon_0=222 +step -BINARY_3D: a \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/15336 b/benchmarks/proj4_standard_fuzzer/testcases/15336 deleted file mode 100644 index 70bad4ce0..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/15336 +++ /dev/null @@ -1,3 +0,0 @@ -+proj=ups -+proj=pipeline +a=1 proj=leac rf=2e98 +step +inv +step +step +inv +step +proj=aeqd -BINARY_3D: ÿ ÿÿÿÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/16130 b/benchmarks/proj4_standard_fuzzer/testcases/16130 deleted file mode 100644 index 0cb249bee..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/16130 +++ /dev/null @@ -1,2 +0,0 @@ -+a=1E77 +R_lat_a=90 +proj=aeqd +b=1 - diff --git a/benchmarks/proj4_standard_fuzzer/testcases/17575 b/benchmarks/proj4_standard_fuzzer/testcases/17575 deleted file mode 100644 index 6ca8e90cd..000000000 --- a/benchmarks/proj4_standard_fuzzer/testcases/17575 +++ /dev/null @@ -1,3 +0,0 @@ - +proj=aeqd - +proj=pipeline +proj=pipeline ÿ +proj=murd3 +lat_2 lat_1=3e249ÿÿÿÿ step ÿÿ +ÿ step ++ step inv step ++ step inv step step inv step ++ step inv ÿ ÿ ÿ ÿ ÿ ÿ ÿ ÿ ÿ +step + -BINARY_3D: ÿ \ No newline at end of file diff --git a/benchmarks/proj4_standard_fuzzer/testcases/2241 b/benchmarks/proj4_standard_fuzzer/testcases/2241 deleted file mode 100644 index 032e090cf..000000000 Binary files a/benchmarks/proj4_standard_fuzzer/testcases/2241 and /dev/null differ diff --git a/benchmarks/quickjs_eval-2020-01-05/Dockerfile b/benchmarks/quickjs_eval-2020-01-05/Dockerfile index 270633b67..ab84d1fbb 100644 --- a/benchmarks/quickjs_eval-2020-01-05/Dockerfile +++ b/benchmarks/quickjs_eval-2020-01-05/Dockerfile @@ -14,10 +14,17 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM 
gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y autoconf automake libtool bison pkg-config -RUN git clone --depth 1 --branch master https://github.com/bellard/quickjs quickjs + +# This project does not have any release/tag. +RUN git clone \ + --depth 1 \ + --branch master \ + https://github.com/bellard/quickjs \ + quickjs + WORKDIR quickjs COPY build.sh $SRC/ COPY *.c *.h *.dict $SRC/ diff --git a/benchmarks/quickjs_eval-2020-01-05/benchmark.yaml b/benchmarks/quickjs_eval-2020-01-05/benchmark.yaml index f6d82ca1c..f7d721d23 100644 --- a/benchmarks/quickjs_eval-2020-01-05/benchmark.yaml +++ b/benchmarks/quickjs_eval-2020-01-05/benchmark.yaml @@ -1,6 +1,3 @@ -commit: 91459fb6723e29e923380cec0023af93819ae69d -commit_date: 2020-09-06 18:47:30+00:00 fuzz_target: fuzz_eval project: quickjs -type: bug unsupported_fuzzers: diff --git a/benchmarks/re2-2014-12-09/Dockerfile b/benchmarks/re2-2014-12-09/Dockerfile index ba4d7cf8b..355704ca3 100644 --- a/benchmarks/re2-2014-12-09/Dockerfile +++ b/benchmarks/re2-2014-12-09/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/sqlite3_ossfuzz/Dockerfile b/benchmarks/sqlite3_ossfuzz/Dockerfile index dd1e33a5b..550814da6 100644 --- a/benchmarks/sqlite3_ossfuzz/Dockerfile +++ b/benchmarks/sqlite3_ossfuzz/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get upgrade -y && apt-get install -y make autoconf automake libtool curl tcl zlib1g-dev RUN mkdir $SRC/sqlite3 && \ diff --git a/benchmarks/stb_stbi_read_fuzzer/Dockerfile b/benchmarks/stb_stbi_read_fuzzer/Dockerfile index 250736ee1..dd6d94caa 100644 --- a/benchmarks/stb_stbi_read_fuzzer/Dockerfile +++ b/benchmarks/stb_stbi_read_fuzzer/Dockerfile @@ -14,15 +14,39 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ - apt-get install -y wget tar + apt-get install -y \ + wget tar -RUN git clone --depth 1 https://github.com/nothings/stb.git +# This project does not have any release/tag. 
+RUN git clone \ + --depth 1 \ + --branch master \ + https://github.com/nothings/stb.git -RUN wget -O $SRC/stb/gif.tar.gz https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/imagetestsuite/imagetestsuite-gif-1.00.tar.gz -RUN wget -O $SRC/stb/jpg.tar.gz https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/imagetestsuite/imagetestsuite-jpg-1.00.tar.gz -RUN wget -O $SRC/stb/tests/gif.dict https://raw.githubusercontent.com/mirrorer/afl/master/dictionaries/gif.dict &> /dev/null +RUN mkdir $SRC/stbi # CIFuzz workaround +RUN wget -O \ + $SRC/stbi/gif.tar.gz https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/imagetestsuite/imagetestsuite-gif-1.00.tar.gz +RUN wget -O \ + $SRC/stbi/jpg.tar.gz https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/imagetestsuite/imagetestsuite-jpg-1.00.tar.gz +RUN wget -O \ + $SRC/stbi/bmp.zip http://entropymine.com/jason/bmpsuite/releases/bmpsuite-2.6.zip +RUN wget -O \ + $SRC/stbi/tga.zip https://github.com/richgel999/tga_test_files/archive/master.zip + +RUN wget -O \ + $SRC/stbi/gif.dict https://raw.githubusercontent.com/mirrorer/afl/master/dictionaries/gif.dict + +# Maintain compatibility with master branch until a new release +RUN cp \ + $SRC/stbi/gif.tar.gz \ + $SRC/stbi/jpg.tar.gz \ + $SRC/stbi/bmp.zip \ + $SRC/stbi/gif.dict \ + $SRC/stb + +WORKDIR stb COPY build.sh $SRC/ diff --git a/benchmarks/stb_stbi_read_fuzzer/benchmark.yaml b/benchmarks/stb_stbi_read_fuzzer/benchmark.yaml index ad1947df8..9239229d9 100644 --- a/benchmarks/stb_stbi_read_fuzzer/benchmark.yaml +++ b/benchmarks/stb_stbi_read_fuzzer/benchmark.yaml @@ -1,8 +1,5 @@ -commit: f54acd4e13430c5122cab4ca657705c84aa61b08 -commit_date: 2020-05-27 02:31:00+00:00 fuzz_target: stbi_read_fuzzer project: stb -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/stb_stbi_read_fuzzer/build.sh b/benchmarks/stb_stbi_read_fuzzer/build.sh index 6cad52718..e32e0e64b 100644 --- a/benchmarks/stb_stbi_read_fuzzer/build.sh +++ b/benchmarks/stb_stbi_read_fuzzer/build.sh @@ -14,22 +14,5 @@ # limitations under the License. # ################################################################################ -sed '2d' $SRC/stb/tests/stb_png_read_fuzzer.cpp > $SRC/stb/tests/stbi_read_fuzzer.c - -$CXX $CXXFLAGS -std=c++11 -I. 
\ - $SRC/stb/tests/stbi_read_fuzzer.c \ - -o $OUT/stbi_read_fuzzer $LIB_FUZZING_ENGINE - -find $SRC/stb/tests/pngsuite -name "*.png" | \ - xargs zip $OUT/stb_png_read_fuzzer_seed_corpus.zip - -cp $SRC/stb/tests/stb_png.dict $OUT/stb_png_read_fuzzer.dict - -tar xvzf $SRC/stb/jpg.tar.gz --directory $SRC/stb/tests -tar xvzf $SRC/stb/gif.tar.gz --directory $SRC/stb/tests - -find $SRC/stb/tests -name "*.png" -o -name "*.jpg" -o -name ".gif" | \ - xargs zip $OUT/stbi_read_fuzzer_seed_corpus.zip - -echo "" >> $SRC/stb/tests/gif.dict -cat $SRC/stb/tests/gif.dict $SRC/stb/tests/stb_png.dict > $OUT/stbi_read_fuzzer.dict +# Run the OSS-Fuzz script in the project +$SRC/stb/tests/ossfuzz.sh diff --git a/benchmarks/systemd_fuzz-link-parser/Dockerfile b/benchmarks/systemd_fuzz-link-parser/Dockerfile index 26c882881..c2c21ed38 100644 --- a/benchmarks/systemd_fuzz-link-parser/Dockerfile +++ b/benchmarks/systemd_fuzz-link-parser/Dockerfile @@ -14,12 +14,17 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a -RUN apt-get update &&\ - apt-get install -y gperf m4 gettext python3-pip \ - libcap-dev libmount-dev libkmod-dev \ - pkg-config wget &&\ - pip3 install meson==0.55.3 ninja==1.10.0 -RUN git clone --depth 1 https://github.com/systemd/systemd systemd +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c + +RUN apt-get update && \ + apt-get install -y libcap-dev + +RUN git clone \ + --depth 1 \ + --branch v252 \ + https://github.com/systemd/systemd && \ + cp $SRC/systemd/tools/oss-fuzz.sh $SRC/build.sh && \ + # Move shared libraries and tweak rpath for all $ARCHITECTURE. + sed -i '119d;126d' $SRC/build.sh + WORKDIR systemd -COPY build.sh $SRC/ diff --git a/benchmarks/systemd_fuzz-link-parser/benchmark.yaml b/benchmarks/systemd_fuzz-link-parser/benchmark.yaml index a40435c78..2d04855fe 100644 --- a/benchmarks/systemd_fuzz-link-parser/benchmark.yaml +++ b/benchmarks/systemd_fuzz-link-parser/benchmark.yaml @@ -1,31 +1,7 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -commit: 99fdffaa194cbfed659b0c1bfd0ace4bfcd2a245 -commit_date: 2020-02-10 16:19:52+00:00 +commit: 07faa4990fcc1e80c9ef63c09eb91bb73dab19cb +commit_date: 2022-09-28 12:03:03+00:00 fuzz_target: fuzz-link-parser project: systemd unsupported_fuzzers: - - aflplusplus_gcc - - aflcc - - klee - - aflplusplus_cmplog_double - - symcc_aflplusplus_single - - eclipser_aflplusplus - - aflplusplus_qemu_double - - aflplusplus_frida - - fuzzolic_aflplusplus_z3 - - symqemu_aflplusplus - - fuzzolic_aflplusplus_fuzzy - - fuzzolic_aflplusplus_z3dict + - centipede + - wingfuzz diff --git a/benchmarks/systemd_fuzz-link-parser/build.sh b/benchmarks/systemd_fuzz-link-parser/build.sh deleted file mode 100755 index 379920c9f..000000000 --- a/benchmarks/systemd_fuzz-link-parser/build.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -eu -# Copyright 2018 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -################################################################################ - -tools/oss-fuzz.sh diff --git a/benchmarks/systemd_fuzz-varlink/Dockerfile b/benchmarks/systemd_fuzz-varlink/Dockerfile index 02da09273..6bf7a0c35 100644 --- a/benchmarks/systemd_fuzz-varlink/Dockerfile +++ b/benchmarks/systemd_fuzz-varlink/Dockerfile @@ -1,3 +1,4 @@ +# syntax=docker/dockerfile:1.3-labs # Copyright 2018 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,14 +15,32 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ - apt-get install -y \ - gperf m4 gettext libcap-dev libmount-dev libkmod-dev \ - pkg-config wget && \ - pip3 install meson==0.55.3 ninja==1.10.0 -RUN git clone https://github.com/systemd/systemd systemd -WORKDIR systemd -COPY build.sh $SRC/ + apt-get install -y libcap-dev \ + gperf m4 gettext libmount-dev libkmod-dev pkg-config wget \ + ninja-build=1.10.0-1build1 && \ + pip3 install meson==0.55.3 + +RUN git clone https://github.com/systemd/systemd && \ + git -C "$SRC/systemd" checkout cb367b17853d215ebcf2816118c1f53d003e5088 && \ + cp $SRC/systemd/tools/oss-fuzz.sh $SRC/build.sh +# Some fixes to make the old version build & run. +# Fix ninja version compatibility error. +ENV PATH='/usr/bin':$PATH +# Remove the buggy lines that zip corpus files that do not exist. +RUN sed -i '37,42d' $SRC/build.sh && \ + # Copy shared libs and patch rpath so that they can be found in the runner.
+ cat >> "$SRC/build.sh" <<'EOF' +for lib_path in $(ldd "$OUT"/src/shared/libsystemd-shared-*.so \ + | perl -lne 'print $1 if m{=>\s+(/lib\S+)}'); do + lib_name=$(basename "$lib_path") + cp "$lib_path" "$OUT/src/shared" + patchelf --set-rpath \$ORIGIN "$OUT/src/shared/$lib_name" +done +patchelf --set-rpath \$ORIGIN "$OUT"/src/shared/libsystemd-shared-*.so +EOF + +WORKDIR systemd diff --git a/benchmarks/systemd_fuzz-varlink/benchmark.yaml b/benchmarks/systemd_fuzz-varlink/benchmark.yaml index 3fcf482f4..b92123a7b 100644 --- a/benchmarks/systemd_fuzz-varlink/benchmark.yaml +++ b/benchmarks/systemd_fuzz-varlink/benchmark.yaml @@ -1,8 +1,5 @@ -commit: cb367b17853d215ebcf2816118c1f53d003e5088 -commit_date: 2019-05-10 19:14:59+00:00 fuzz_target: fuzz-varlink project: systemd -type: bug unsupported_fuzzers: - aflcc - afl_qemu @@ -21,3 +18,4 @@ unsupported_fuzzers: - symqemu_aflplusplus - fuzzolic_aflplusplus_fuzzy - fuzzolic_aflplusplus_z3dict + - wingfuzz diff --git a/benchmarks/systemd_fuzz-varlink/build.sh b/benchmarks/systemd_fuzz-varlink/build.sh deleted file mode 100755 index 379920c9f..000000000 --- a/benchmarks/systemd_fuzz-varlink/build.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash -eu -# Copyright 2018 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -################################################################################ - -tools/oss-fuzz.sh diff --git a/benchmarks/usrsctp_fuzzer_connect/Dockerfile b/benchmarks/usrsctp_fuzzer_connect/Dockerfile index 5374e9404..200eb972b 100644 --- a/benchmarks/usrsctp_fuzzer_connect/Dockerfile +++ b/benchmarks/usrsctp_fuzzer_connect/Dockerfile @@ -14,9 +14,13 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c MAINTAINER weinrank@fh-muenster.de RUN apt-get update && apt-get install -y make cmake pkg-config -RUN git clone --branch oss-fuzz https://github.com/weinrank/usrsctp usrsctp +RUN git clone \ + --depth 1 \ + --branch 0.9.5.0 \ + https://github.com/sctplab/usrsctp.git \ + usrsctp WORKDIR usrsctp COPY build.sh $SRC/ diff --git a/benchmarks/usrsctp_fuzzer_connect/benchmark.yaml b/benchmarks/usrsctp_fuzzer_connect/benchmark.yaml index 8c90ef270..944ed0d0d 100644 --- a/benchmarks/usrsctp_fuzzer_connect/benchmark.yaml +++ b/benchmarks/usrsctp_fuzzer_connect/benchmark.yaml @@ -1,8 +1,5 @@ -commit: e08eacffd438cb0760c926fbe60ccda011f6ce70 -commit_date: 2019-10-06 10:51:51+00:00 fuzz_target: fuzzer_connect project: usrsctp -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/vorbis-2017-12-11/Dockerfile b/benchmarks/vorbis-2017-12-11/Dockerfile index 8732dcfb7..1f91edebc 100644 --- a/benchmarks/vorbis-2017-12-11/Dockerfile +++ b/benchmarks/vorbis-2017-12-11/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/wireshark_fuzzshark_ip/Dockerfile b/benchmarks/wireshark_fuzzshark_ip/Dockerfile index ce7468c91..dc03fabe6 100644 --- a/benchmarks/wireshark_fuzzshark_ip/Dockerfile +++ b/benchmarks/wireshark_fuzzshark_ip/Dockerfile @@ -14,14 +14,27 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c -RUN apt-get update && apt-get install -y make cmake \ - flex bison \ - libglib2.0-dev libgcrypt20-dev +RUN apt-get update && \ + apt-get install -y \ + ninja-build \ + cmake \ + flex \ + bison \ + libc-ares-dev \ + libglib2.0-dev \ + libgcrypt20-dev -RUN git clone --depth=1 https://gitlab.com/wireshark/wireshark.git -RUN git clone --depth=1 https://bitbucket.org/jwzawadzki/wireshark-fuzzdb.git +RUN git clone \ + --depth 1 \ + --branch wireshark-3.6.9 \ + https://gitlab.com/wireshark/wireshark.git + +RUN git clone \ + --depth=1 \ + --branch master \ + https://bitbucket.org/jwzawadzki/wireshark-fuzzdb.git WORKDIR wireshark COPY build.sh $SRC/ diff --git a/benchmarks/wireshark_fuzzshark_ip/benchmark.yaml b/benchmarks/wireshark_fuzzshark_ip/benchmark.yaml index f45fc6288..5901a71d8 100644 --- a/benchmarks/wireshark_fuzzshark_ip/benchmark.yaml +++ b/benchmarks/wireshark_fuzzshark_ip/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 
bc4d7c46f391971f0e2c875c9b2d63c1b9152f6d -commit_date: 2018-10-07 01:19:00+00:00 fuzz_target: fuzzshark_ip project: wireshark -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/benchmarks/wireshark_fuzzshark_ip/build.sh b/benchmarks/wireshark_fuzzshark_ip/build.sh index 0a56a3af7..82a0279ab 100755 --- a/benchmarks/wireshark_fuzzshark_ip/build.sh +++ b/benchmarks/wireshark_fuzzshark_ip/build.sh @@ -18,47 +18,34 @@ WIRESHARK_BUILD_PATH="$WORK/build" mkdir -p "$WIRESHARK_BUILD_PATH" -export WIRESHARK_INSTALL_PATH="$WORK/install" -mkdir -p "$WIRESHARK_INSTALL_PATH" - # Prepare Samples directory export SAMPLES_DIR="$WORK/samples" mkdir -p "$SAMPLES_DIR" cp -a $SRC/wireshark-fuzzdb/samples/* "$SAMPLES_DIR" +# Make sure we build fuzzshark. +CMAKE_DEFINES="-DBUILD_fuzzshark=ON" + # compile static version of libs -# XXX, with static wireshark linking each fuzzer binary is ~338 MB (just libwireshark.a is 623 MBs). +# XXX, with static wireshark linking each fuzzer binary is ~346 MB (just libwireshark.a is 761 MB). # XXX, wireshark is not ready for including static plugins into binaries. -CMAKE_DEFINES="-DENABLE_STATIC=ON -DENABLE_PLUGINS=OFF" +CMAKE_DEFINES="$CMAKE_DEFINES -DENABLE_STATIC=ON -DENABLE_PLUGINS=OFF" # disable optional dependencies CMAKE_DEFINES="$CMAKE_DEFINES -DENABLE_PCAP=OFF -DENABLE_GNUTLS=OFF" -# need only libs, disable programs -# TODO, add something like --without-extcap, which would disable all extcap binaries -CMAKE_DEFINES="$CMAKE_DEFINES -DBUILD_wireshark=OFF -DBUILD_tshark=OFF -DBUILD_sharkd=OFF \ -DBUILD_dumpcap=OFF -DBUILD_capinfos=OFF -DBUILD_captype=OFF -DBUILD_randpkt=OFF -DBUILD_dftest=OFF \ -DBUILD_editcap=OFF -DBUILD_mergecap=OFF -DBUILD_reordercap=OFF -DBUILD_text2pcap=OFF \ -DBUILD_fuzzshark=OFF \ -DBUILD_androiddump=OFF -DBUILD_randpktdump=OFF -DBUILD_udpdump=OFF \ - " - -# Fortify and asan don't like each other ... :( -# TODO, right now -D_FORTIFY_SOURCE=2 is not added in cmake builds. -# sed -i '/AC_WIRESHARK_GCC_FORTIFY_SOURCE_CHECK/d' configure.ac +# There is no need to manually disable programs via BUILD_xxx=OFF since the +# all-fuzzers target builds the minimum required binaries. However, we do have +# to disable the Qt GUI or else the cmake step will fail. +CMAKE_DEFINES="$CMAKE_DEFINES -DBUILD_wireshark=OFF" cd "$WIRESHARK_BUILD_PATH" -cmake -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX \ +cmake -GNinja \ + -DCMAKE_C_COMPILER=$CC -DCMAKE_CXX_COMPILER=$CXX \ -DCMAKE_C_FLAGS="$CFLAGS" -DCMAKE_CXX_FLAGS="$CXXFLAGS" \ - -DCMAKE_INSTALL_PREFIX="$WIRESHARK_INSTALL_PATH" $CMAKE_DEFINES -DDISABLE_WERROR=ON $SRC/wireshark/ - -# disable leak checks, lemon is build with ASAN, and it leaks memory during building.
-export ASAN_OPTIONS="detect_leaks=0" -make "-j$(nproc)" -make install + -DDISABLE_WERROR=ON -DOSS_FUZZ=ON $CMAKE_DEFINES $SRC/wireshark/ -# make install didn't install config.h, install it manually -cp "$WIRESHARK_BUILD_PATH/config.h" "$WIRESHARK_INSTALL_PATH/include/wireshark/" +ninja all-fuzzers $SRC/wireshark/tools/oss-fuzzshark/build.sh all diff --git a/benchmarks/woff2-2016-05-06/Dockerfile b/benchmarks/woff2-2016-05-06/Dockerfile index 2d59303af..e96ef5885 100644 --- a/benchmarks/woff2-2016-05-06/Dockerfile +++ b/benchmarks/woff2-2016-05-06/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && \ apt-get install -y \ diff --git a/benchmarks/zlib_zlib_uncompress_fuzzer/Dockerfile b/benchmarks/zlib_zlib_uncompress_fuzzer/Dockerfile index c1ff52339..e84e45c15 100644 --- a/benchmarks/zlib_zlib_uncompress_fuzzer/Dockerfile +++ b/benchmarks/zlib_zlib_uncompress_fuzzer/Dockerfile @@ -14,7 +14,7 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get install -y make autoconf automake libtool RUN git clone --depth 1 -b develop https://github.com/madler/zlib.git WORKDIR zlib diff --git a/benchmarks/zstd_stream_decompress/Dockerfile b/benchmarks/zstd_stream_decompress/Dockerfile index f7a687af6..fc6f27442 100644 --- a/benchmarks/zstd_stream_decompress/Dockerfile +++ b/benchmarks/zstd_stream_decompress/Dockerfile @@ -14,11 +14,14 @@ # ################################################################################ -FROM gcr.io/oss-fuzz-base/base-builder@sha256:1b6a6993690fa947df74ceabbf6a1f89a46d7e4277492addcd45a8525e34be5a +FROM gcr.io/oss-fuzz-base/base-builder@sha256:fb1a9a49752c9e504687448d1f1a048ec1e062e2e40f7e8a23e86b63ff3dad7c RUN apt-get update && apt-get upgrade -y && apt-get install -y make python wget # Clone source -RUN git clone https://github.com/facebook/zstd +RUN git clone \ + --depth 1 \ + --branch v1.5.2 \ + https://github.com/facebook/zstd WORKDIR zstd COPY build.sh $SRC/ diff --git a/benchmarks/zstd_stream_decompress/benchmark.yaml b/benchmarks/zstd_stream_decompress/benchmark.yaml index 9cb111812..4d291f02a 100644 --- a/benchmarks/zstd_stream_decompress/benchmark.yaml +++ b/benchmarks/zstd_stream_decompress/benchmark.yaml @@ -1,8 +1,5 @@ -commit: 9ad7ea44ec9644c618c2e82be5960d868e48745d -commit_date: 2019-04-19 00:29:32+00:00 fuzz_target: stream_decompress project: zstd -type: bug unsupported_fuzzers: - aflcc - afl_qemu diff --git a/common/benchmark_utils.py b/common/benchmark_utils.py index 7eb3b1ee5..70b7a6ab0 100644 --- a/common/benchmark_utils.py +++ b/common/benchmark_utils.py @@ -64,18 +64,13 @@ def get_runner_image_url(experiment, benchmark, fuzzer, docker_registry): """Get the URL of the docker runner image for fuzzing the benchmark with fuzzer.""" tag = 'latest' if environment.get('LOCAL_EXPERIMENT') else experiment - return '{docker_registry}/runners/{fuzzer}/{benchmark}:{tag}'.format( - docker_registry=docker_registry, - fuzzer=fuzzer, - benchmark=benchmark, - tag=tag) + return 
f'{docker_registry}/runners/{fuzzer}/{benchmark}:{tag}' def get_builder_image_url(benchmark, fuzzer, docker_registry): """Get the URL of the docker builder image for fuzzing the benchmark with fuzzer.""" - return '{docker_registry}/builders/{fuzzer}/{benchmark}'.format( - docker_registry=docker_registry, fuzzer=fuzzer, benchmark=benchmark) + return f'{docker_registry}/builders/{fuzzer}/{benchmark}' def validate_name(benchmark): diff --git a/common/experiment_utils.py b/common/experiment_utils.py index a0a5d2b8e..604d0218f 100644 --- a/common/experiment_utils.py +++ b/common/experiment_utils.py @@ -99,19 +99,18 @@ def get_custom_seed_corpora_filestore_path(): def get_dispatcher_instance_name(experiment: str) -> str: """Returns a dispatcher instance name for an experiment.""" - return 'd-%s' % experiment + return f'd-{experiment}' def get_trial_instance_name(experiment: str, trial_id: int) -> str: """Returns a unique instance name for each trial of an experiment.""" - return 'r-%s-%d' % (experiment, trial_id) + return f'r-{experiment}-{trial_id}' def get_cycle_filename(basename: str, cycle: int) -> str: """Returns a filename for a file that is relevant to a particular snapshot cycle.""" - filename = basename + '-' + ('%04d' % cycle) - return filename + return f'{basename}-{cycle:04d}' def get_corpus_archive_name(cycle: int) -> str: @@ -143,13 +142,13 @@ def get_trial_dir(fuzzer, benchmark, trial_id): """Returns the unique directory for |fuzzer|, |benchmark|, and |trial_id|.""" benchmark_fuzzer_directory = get_benchmark_fuzzer_dir(benchmark, fuzzer) - trial_subdir = 'trial-%d' % trial_id + trial_subdir = f'trial-{trial_id}' return posixpath.join(benchmark_fuzzer_directory, trial_subdir) def get_benchmark_fuzzer_dir(benchmark, fuzzer): """Returns the directory for |benchmark| and |fuzzer|.""" - return '%s-%s' % (benchmark, fuzzer) + return f'{benchmark}-{fuzzer}' def get_trial_bucket_dir(fuzzer, benchmark, trial_id): diff --git a/common/filesystem.py b/common/filesystem.py index c3b0cdd3a..427e18583 100644 --- a/common/filesystem.py +++ b/common/filesystem.py @@ -40,8 +40,10 @@ def recreate_directory(directory, create_parents=True): os.mkdir(directory) +# TODO: Fix this function. def write(path, contents, open_flags='w'): """Opens file at |path| with |open_flags| and writes |contents| to it.""" + # pylint: disable=unspecified-encoding with open(path, open_flags) as file_handle: return file_handle.write(contents) @@ -54,7 +56,7 @@ def append(path, line): def read(path, open_flags='r'): """Opens file at |path| with |open_flags| reads it and then returns the result.""" - with open(path, open_flags) as file_handle: + with open(path, open_flags, encoding='utf-8') as file_handle: return file_handle.read() @@ -96,7 +98,7 @@ def replace_dir(src_dir, dst_dir, move=True): it.""" if not os.path.isdir(src_dir): raise NotADirectoryError( - 'src_dir must be a directory. %s is not a directory.' % src_dir) + f'src_dir must be a directory. {src_dir} is not a directory.') shutil.rmtree(dst_dir, ignore_errors=True) if move: shutil.move(src_dir, dst_dir) diff --git a/common/fuzzer_utils.py b/common/fuzzer_utils.py index 3d5cd84f1..049ba8ce3 100644 --- a/common/fuzzer_utils.py +++ b/common/fuzzer_utils.py @@ -118,7 +118,7 @@ def validate(fuzzer): return False # Try importing the fuzzer module. 
- module_name = 'fuzzers.{}.fuzzer'.format(fuzzer) + module_name = f'fuzzers.{fuzzer}.fuzzer' try: importlib.import_module(module_name) return True diff --git a/common/gcloud.py b/common/gcloud.py index 3ccc0a5fb..f213e7773 100644 --- a/common/gcloud.py +++ b/common/gcloud.py @@ -16,7 +16,7 @@ import enum import posixpath import subprocess -from typing import List +from typing import List, Optional from common import experiment_utils from common import logs @@ -47,7 +47,7 @@ class InstanceType(enum.Enum): def create_instance(instance_name: str, instance_type: InstanceType, config: dict, - startup_script: str = None, + startup_script: Optional[str] = None, preemptible: bool = False, **kwargs) -> bool: """Creates a GCE instance with name, |instance_name|, type, |instance_type| @@ -64,36 +64,36 @@ def create_instance(instance_name: str, instance_name, '--image-family=cos-stable', '--image-project=cos-cloud', - '--zone=%s' % config['cloud_compute_zone'], + f'--zone={config["cloud_compute_zone"]}', '--scopes=cloud-platform', ] if instance_type == InstanceType.DISPATCHER: command.extend([ - '--machine-type=%s' % DISPATCHER_MACHINE_TYPE, - '--boot-disk-size=%s' % DISPATCHER_BOOT_DISK_SIZE, - '--boot-disk-type=%s' % DISPATCHER_BOOT_DISK_TYPE, + f'--machine-type={DISPATCHER_MACHINE_TYPE}', + f'--boot-disk-size={DISPATCHER_BOOT_DISK_SIZE}', + f'--boot-disk-type={DISPATCHER_BOOT_DISK_TYPE}', ]) else: machine_type = config['runner_machine_type'] if machine_type is not None: - command.append('--machine-type=%s' % machine_type) + command.append(f'--machine-type={machine_type}') else: # Do this to support KLEE experiments. command.append([ - '--custom-memory=%s' % config['runner_memory'], - '--custom-cpu=%s' % config['runner_num_cpu_cores'] + f'--custom-memory={config["runner_memory"]}', + f'--custom-cpu={config["runner_num_cpu_cores"]}', ]) command.extend([ '--no-address', - '--boot-disk-size=%s' % RUNNER_BOOT_DISK_SIZE, + f'--boot-disk-size={RUNNER_BOOT_DISK_SIZE}', ]) if preemptible: command.append('--preemptible') if startup_script: command.extend( - ['--metadata-from-file', 'startup-script=' + startup_script]) + ['--metadata-from-file', f'startup-script={startup_script}']) result = new_process.execute(command, expect_zero=False, **kwargs) if result.retcode == 0: @@ -126,12 +126,14 @@ def set_default_project(cloud_project: str): ['gcloud', 'config', 'set', 'project', cloud_project]) -def run_local_instance(startup_script: str = None) -> bool: +def run_local_instance(startup_script: Optional[str] = None) -> bool: """Does the equivalent of "create_instance" for local experiments, runs |startup_script| in the background.""" command = ['/bin/bash', startup_script] - subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - return new_process.ProcessResult(0, '', False) + with subprocess.Popen(command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT): + return new_process.ProcessResult(0, '', False) def create_instance_template(template_name, docker_image, env, project, zone): @@ -144,13 +146,13 @@ def create_instance_template(template_name, docker_image, env, project, zone): 'gcloud', 'compute', '--project', project, 'instance-templates', 'create-with-container', template_name, '--no-address', '--image-family=cos-stable', '--image-project=cos-cloud', - '--region=%s' % zone, '--scopes=cloud-platform', - '--machine-type=%s' % MEASURER_WORKER_MACHINE_TYPE, - '--boot-disk-size=%s' % MEASURER_WORKER_BOOT_DISK_SIZE, '--preemptible', + f'--region={zone}', '--scopes=cloud-platform', + 
f'--machine-type={MEASURER_WORKER_MACHINE_TYPE}', + f'--boot-disk-size={MEASURER_WORKER_BOOT_DISK_SIZE}', '--preemptible', '--container-image', docker_image ] for item in env.items(): - command.extend(['--container-env', '%s=%s' % item]) + command.extend(['--container-env', f'{item[0]}={item[1]}']) new_process.execute(command) return posixpath.join('https://www.googleapis.com/compute/v1/projects/', project, 'global', 'instanceTemplates', template_name) diff --git a/common/logs.py b/common/logs.py index 6dfc0fdb0..d55efbf98 100644 --- a/common/logs.py +++ b/common/logs.py @@ -19,7 +19,7 @@ import traceback import google.cloud.logging -from google.cloud.logging.handlers.handlers import CloudLoggingHandler +from google.cloud.logging_v2.handlers.handlers import CloudLoggingHandler from google.cloud import error_reporting # Disable this check since we have a bunch of non-constant globals in this file. @@ -72,6 +72,7 @@ def initialize(name='fuzzbench', default_extras=None, log_level=logging.INFO): _set_instance_name(default_extras) _set_experiment(default_extras) + # pylint: disable=global-variable-not-assigned global _default_extras _default_extras.update(default_extras) diff --git a/common/new_process.py b/common/new_process.py index dca5df613..fd54c5cab 100644 --- a/common/new_process.py +++ b/common/new_process.py @@ -17,7 +17,7 @@ import signal import subprocess import threading -from typing import List +from typing import List, Optional from common import logs @@ -72,10 +72,10 @@ def execute( # pylint: disable=too-many-locals,too-many-branches command: List[str], *args, expect_zero: bool = True, - timeout: int = None, + timeout: Optional[int] = None, write_to_stdout=False, # If not set, will default to PIPE. - output_file=None, + output_file: Optional[int] = None, # Not True by default because we can't always set group on processes. kill_children: bool = False, **kwargs) -> ProcessResult: @@ -92,6 +92,7 @@ def execute( # pylint: disable=too-many-locals,too-many-branches if kill_children: kwargs['preexec_fn'] = os.setsid + # pylint: disable=consider-using-with process = subprocess.Popen(command, *args, **kwargs) process_group_id = os.getpgid(process.pid) diff --git a/common/sanitizer.py b/common/sanitizer.py index a921e0674..be4964bc9 100644 --- a/common/sanitizer.py +++ b/common/sanitizer.py @@ -13,8 +13,8 @@ # limitations under the License. """Sanitizer helpers.""" -# Matches ClusterFuzz configuration. -# See https://github.com/google/clusterfuzz/blob/master/src/python/system/environment.py. +# Matches ClusterFuzz configuration. See +# https://github.com/google/clusterfuzz/blob/master/src/python/system/environment.py. 
SANITIZER_OPTIONS = { 'handle_abort': 2, 'handle_sigbus': 2, @@ -48,7 +48,7 @@ def _join_memory_tool_options(options): """Joins a dict holding memory tool options into a string that can be set in the environment.""" return ':'.join( - '%s=%s' % (key, str(value)) for key, value in sorted(options.items())) + f'{key}={str(value)}' for key, value in sorted(options.items())) def set_sanitizer_options(env, is_fuzz_run=False): diff --git a/common/test_filesystem.py b/common/test_filesystem.py index d044e746c..ad05b224e 100644 --- a/common/test_filesystem.py +++ b/common/test_filesystem.py @@ -31,7 +31,7 @@ def test_recreate_directory_existing(fs): new_directory = 'new-directory' os.mkdir(new_directory) new_file = os.path.join(new_directory, 'file') - with open(new_file, 'w') as file_handle: + with open(new_file, 'w', encoding='utf-8') as file_handle: file_handle.write('hi') filesystem.recreate_directory(new_directory) @@ -65,7 +65,7 @@ def test_copy(fs): dst = 'destination_file' filesystem.copy(src, dst, ignore_errors=True) assert os.path.exists(dst) - with open(dst) as file_handle: + with open(dst, encoding='utf-8') as file_handle: assert file_handle.read() == contents @@ -148,7 +148,7 @@ def _assert_has_source_dir_contents(directory): for idx in range(3): file_path = os.path.join(directory, str(idx)) assert os.path.exists(file_path) - with open(file_path) as file_handle: + with open(file_path, encoding='utf-8') as file_handle: assert file_handle.read() == 'srcfile' @@ -180,7 +180,8 @@ def test_make_dir_copy(fs): new_filename = 'new-file' copied_new_file_path = os.path.join(copy_dir, new_filename) assert not os.path.exists(copied_new_file_path) - with open(os.path.join(SOURCE_DIR, new_filename), 'w') as file_handle: + with open(os.path.join(SOURCE_DIR, new_filename), 'w', + encoding='utf-8') as file_handle: file_handle.write('') copy_dir = filesystem.make_dir_copy(SOURCE_DIR) _assert_has_source_dir_contents(copy_dir) diff --git a/common/test_gcloud.py b/common/test_gcloud.py index eb5e8e21c..f1e36217a 100644 --- a/common/test_gcloud.py +++ b/common/test_gcloud.py @@ -110,7 +110,7 @@ def test_create_instance_failed_create(mocked_execute): def test_delete_instances_less_than_batch_size(mocked_execute): """Test that delete_instances works as intended when instance count is less than batch size.""" - instances = ['instance-%d' % i for i in range(5)] + instances = [f'instance-{i}' for i in range(5)] mocked_execute.return_value = new_process.ProcessResult(0, '', False) zone = 'us-central1-a' expected_command = (['gcloud', 'compute', 'instances', 'delete', '-q'] + @@ -124,16 +124,16 @@ def test_delete_instances_less_than_batch_size(mocked_execute): def test_delete_instances_greater_than_batch_size(mocked_execute): """Test that delete_instances works as intended when instance count is more than batch size.""" - instances = ['instance-%d' % i for i in range(103)] + instances = [f'instance-{i}' for i in range(103)] mocked_execute.return_value = new_process.ProcessResult(0, '', False) zone = 'us-central1-a' result = gcloud.delete_instances(instances, zone) assert result expected_command_1 = (['gcloud', 'compute', 'instances', 'delete', '-q'] + - ['instance-%d' % i for i in range(100)] + + [f'instance-{i}' for i in range(100)] + ['--zone', zone]) expected_command_2 = (['gcloud', 'compute', 'instances', 'delete', '-q'] + - ['instance-%d' % i for i in range(100, 103)] + + [f'instance-{i}' for i in range(100, 103)] + ['--zone', zone]) mocked_execute.assert_has_calls([ mock.call(expected_command_1, 
expect_zero=False), @@ -144,7 +144,7 @@ def test_delete_instances_greater_than_batch_size(mocked_execute): @mock.patch('common.new_process.execute') def test_delete_instances_fail(mocked_execute): """Test that delete_instances returns False when instance deletion fails.""" - instances = ['instance-%d' % i for i in range(5)] + instances = [f'instance-{i}' for i in range(5)] mocked_execute.return_value = new_process.ProcessResult(1, 'Error', False) zone = 'us-central1-a' expected_command = (['gcloud', 'compute', 'instances', 'delete', '-q'] + @@ -175,9 +175,8 @@ def test_create_instance_template(mocked_execute): ] mocked_execute.assert_called_with(expected_command) expected_result = ( - 'https://www.googleapis.com/compute/v1/projects/{project}' - '/global/instanceTemplates/{name}').format(project=project, - name=template_name) + f'https://www.googleapis.com/compute/v1/projects/{project}' + f'/global/instanceTemplates/{template_name}') assert result == expected_result diff --git a/common/test_local_filestore.py b/common/test_local_filestore.py index 6781adfd5..c08869180 100644 --- a/common/test_local_filestore.py +++ b/common/test_local_filestore.py @@ -26,7 +26,7 @@ def test_rm(tmp_path): """Tests rm works as expected.""" file_path = tmp_path / 'file' data = 'hello' - with open(file_path, 'w') as file_handle: + with open(file_path, 'w', encoding='utf-8') as file_handle: file_handle.write(data) local_filestore.rm(str(file_path)) assert not os.path.exists(file_path) @@ -52,9 +52,9 @@ def test_ls_one_file_per_line(tmp_path): dir_path = tmp_path file1 = dir_path / 'file1' file2 = dir_path / 'file2' - with open(file1, 'w+'): + with open(file1, 'w+', encoding='utf-8'): pass - with open(file2, 'w+'): + with open(file2, 'w+', encoding='utf-8'): pass assert local_filestore.ls(str(dir_path)).output == 'file1\nfile2\n' @@ -63,11 +63,11 @@ def test_cp(tmp_path): """Tests cp works as expected.""" source = tmp_path / 'source' data = 'hello' - with open(source, 'w') as file_handle: + with open(source, 'w', encoding='utf-8') as file_handle: file_handle.write(data) destination = tmp_path / 'destination' local_filestore.cp(str(source), str(destination)) - with open(destination) as file_handle: + with open(destination, encoding='utf-8') as file_handle: assert file_handle.read() == data @@ -77,7 +77,7 @@ def test_cp_nonexistent_dest(tmp_path): source_dir.mkdir() source_file = source_dir / 'file1' cp_dest_dir = tmp_path / 'cp_test' / 'intermediate' / 'cp_dest' - with open(source_file, 'w'): + with open(source_file, 'w', encoding='utf-8'): pass # Should run without exceptions. @@ -90,7 +90,7 @@ def test_rsync_nonexistent_dest(tmp_path): source_dir.mkdir() source_file = source_dir / 'file1' rsync_dest_dir = tmp_path / 'rsync_test' / 'intermediate' / 'rsync_dest' - with open(source_file, 'w'): + with open(source_file, 'w', encoding='utf-8'): pass # Should run without exceptions. 
diff --git a/common/test_new_process.py b/common/test_new_process.py index c1f596d7f..e527dcd16 100644 --- a/common/test_new_process.py +++ b/common/test_new_process.py @@ -53,11 +53,11 @@ def test_timeout(self): def test_output_file(self, mocked_info, tmp_path): """Test that execute handles the output_file argument as intended.""" output_file_path = tmp_path / 'output' - with open(output_file_path, 'w') as output_file: + with open(output_file_path, 'w', encoding='utf-8') as output_file: new_process.execute(self.COMMAND, timeout=1, output_file=output_file, expect_zero=False) - with open(output_file_path, 'r') as output_file: + with open(output_file_path, 'r', encoding='utf-8') as output_file: assert output_file.read() == 'Hello, World!\n' diff --git a/common/utils.py b/common/utils.py index 3b508f23f..066aeac70 100644 --- a/common/utils.py +++ b/common/utils.py @@ -47,7 +47,8 @@ def is_local(): return _is_local try: # TODO(github.com/google/fuzzbench/issues/82): Get rid of this. - urllib.request.urlopen('http://metadata.google.internal') + with urllib.request.urlopen('http://metadata.google.internal'): + pass _is_local = False except urllib.error.URLError: _is_local = True diff --git a/common/yaml_utils.py b/common/yaml_utils.py index 54191ec8b..6b003d0f0 100644 --- a/common/yaml_utils.py +++ b/common/yaml_utils.py @@ -17,11 +17,11 @@ def read(yaml_filename): """Reads and loads yaml file specified by |yaml_filename|.""" - with open(yaml_filename) as file_handle: + with open(yaml_filename, encoding='utf-8') as file_handle: return yaml.load(file_handle, yaml.SafeLoader) def write(yaml_filename, data): """Writes |data| to a new yaml file at |yaml_filename|.""" - with open(yaml_filename, 'w') as file_handle: + with open(yaml_filename, 'w', encoding='utf-8') as file_handle: return yaml.dump(data, file_handle) diff --git a/config/experiment.yaml b/config/experiment.yaml new file mode 100644 index 000000000..4f48d8196 --- /dev/null +++ b/config/experiment.yaml @@ -0,0 +1,52 @@ +benchmarks: +- curl_curl_fuzzer_http +- freetype2-2017 +- harfbuzz-1.3.2 +- jsoncpp_jsoncpp_fuzzer +- lcms-2017-03-21 +- libjpeg-turbo-07-2017 +- libpcap_fuzz_both +- libpng-1.2.56 +- libxml2-v2.9.2 +- libxslt_xpath +- mbedtls_fuzz_dtlsclient +- openssl_x509 +- openthread-2019-12-23 +- php_php-fuzz-parser +- proj4-2017-08-14 +- re2-2014-12-09 +- sqlite3_ossfuzz +- systemd_fuzz-link-parser +- vorbis-2017-12-11 +- woff2-2016-05-06 +- zlib_zlib_uncompress_fuzzer +cloud_compute_zone: us-central1-b +cloud_project: fuzzbench +cloud_sql_instance_connection_name: fuzzbench:us-central1:postgres-experiment-db=tcp:5432 +concurrent_builds: 30 +custom_seed_corpus_dir: null +description: null +docker_registry: gcr.io/fuzzbench +experiment: 2022-11-22-02-33-15m-dongge +experiment_filestore: gs://fuzzbench-data +fuzzers: +- libfuzzer +git_hash: c0608813379f2dce012fcb59d2dd12f581f8bc30 +local_experiment: false +max_total_time: 910 +measurers_cpus: null +merge_with_nonprivate: true +no_dictionaries: false +no_seeds: false +oss_fuzz_corpus: false +preemptible_runners: true +private: false +region_coverage: false +report_filestore: gs://www.fuzzbench.com/reports +runner_machine_type: n1-standard-1 +runner_memory: 12GB +runner_num_cpu_cores: 1 +runners_cpus: null +snapshot_period: 900 +trials: 2 +worker_pool_name: projects/fuzzbench/locations/us-central1/workerPools/buildpool diff --git a/database/utils.py b/database/utils.py index cde16f1f2..f1f6ec37c 100644 --- a/database/utils.py +++ b/database/utils.py @@ -34,8 +34,8 @@ def initialize(): 
postgres_password = os.getenv('POSTGRES_PASSWORD') assert postgres_password, 'POSTGRES_PASSWORD needs to be set.' database_url = ( - 'postgresql+psycopg2://postgres:{password}@127.0.0.1:5432'.format( - password=postgres_password)) + f'postgresql+psycopg2://postgres:{postgres_password}@127.0.0.1:5432' + ) global engine engine = sqlalchemy.create_engine(database_url) @@ -67,6 +67,7 @@ def cleanup(): @contextmanager def session_scope(): """Provide a transactional scope around a series of operations.""" + # pylint: disable=global-variable-not-assigned global session global engine global lock diff --git a/docker/base-image/Dockerfile b/docker/base-image/Dockerfile index 38fe27a6a..ef7de62cf 100644 --- a/docker/base-image/Dockerfile +++ b/docker/base-image/Dockerfile @@ -12,47 +12,49 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM ubuntu:xenial +FROM ubuntu:focal -# Build Python 3.8.6 from source because pandas doesn't support xenial's -# Python3 version (3.5.2). -ENV PYTHON_VERSION 3.8.6 -RUN apt-get update -y && apt-get install -y \ - build-essential \ - rsync \ +# Python 3.10.8 is not the default version in Ubuntu 20.04 (Focal Fossa). +ENV PYTHON_VERSION 3.10.8 +# Install dependencies required by Python3 or Pip3. +RUN apt-get update && \ + apt-get upgrade -y && \ + apt-get install -y \ curl \ + xz-utils \ + build-essential \ zlib1g-dev \ - libncurses5-dev \ - libgdbm-dev \ - libnss3-dev \ libssl-dev \ - libreadline-dev \ - libffi-dev \ - virtualenv \ - libbz2-dev \ - liblzma-dev \ - libsqlite3-dev + libffi-dev RUN cd /tmp/ && \ curl -O https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz && \ - tar -xvf Python-$PYTHON_VERSION.tar.xz && \ + tar -xvf Python-$PYTHON_VERSION.tar.xz > /dev/null && \ cd Python-$PYTHON_VERSION && \ - ./configure --enable-loadable-sqlite-extensions --enable-optimizations && \ - make -j install && \ - rm -r /tmp/Python-$PYTHON_VERSION.tar.xz /tmp/Python-$PYTHON_VERSION + ./configure \ + --enable-loadable-sqlite-extensions \ + --enable-optimizations \ + > /dev/null && \ + make -j install > /dev/null && \ + rm -r /tmp/Python-$PYTHON_VERSION.tar.xz /tmp/Python-$PYTHON_VERSION && \ + ln -s /usr/local/bin/python3 /usr/local/bin/python && \ + ln -s /usr/local/bin/pip3 /usr/local/bin/pip # Install common python dependencies. -COPY ./requirements.txt / -RUN pip3 install -r /requirements.txt +COPY ./requirements.txt /tmp +RUN pip install -r /tmp/requirements.txt -# Install google-cloud-sdk. -RUN apt-get update -y && apt-get install -y \ +# Install dependencies required by google-cloud-cli. +RUN apt-get update && \ + apt-get install -y \ apt-transport-https \ - lsb-release -RUN CLOUD_SDK_REPO="cloud-sdk-$(lsb_release -c -s)" && \ - echo "deb https://packages.cloud.google.com/apt $CLOUD_SDK_REPO main" \ + ca-certificates \ + gnupg + +# Install google-cloud-cli (previously named google-cloud-sdk). 
+RUN echo "deb https://packages.cloud.google.com/apt cloud-sdk main" \ | tee -a /etc/apt/sources.list.d/google-cloud-sdk.list && \ curl https://packages.cloud.google.com/apt/doc/apt-key.gpg \ | apt-key add - && \ - apt-get update -y && \ - apt-get install -y google-cloud-sdk + apt-get update && \ + apt-get install -y google-cloud-cli diff --git a/docker/benchmark-builder/Dockerfile b/docker/benchmark-builder/Dockerfile index 18d7710d8..f28f965c9 100755 --- a/docker/benchmark-builder/Dockerfile +++ b/docker/benchmark-builder/Dockerfile @@ -14,9 +14,6 @@ ARG parent_image -# Using multi-stage build to copy latest Python 3. -FROM gcr.io/fuzzbench/base-image AS base-image - FROM $parent_image ARG fuzzer @@ -27,11 +24,36 @@ ENV FUZZER $fuzzer ENV BENCHMARK $benchmark ENV DEBUG_BUILDER $debug_builder -# Copy latest python3 from base-image into local. -COPY --from=base-image /usr/local/bin/python3* /usr/local/bin/ -COPY --from=base-image /usr/local/lib/python3.8 /usr/local/lib/python3.8 -COPY --from=base-image /usr/local/include/python3.8 /usr/local/include/python3.8 -COPY --from=base-image /usr/local/lib/python3.8/site-packages /usr/local/lib/python3.8/site-packages +# Python 3.10.8 is not the default version in Ubuntu 20.04 (Focal Fossa). +ENV PYTHON_VERSION 3.10.8 + +# Install dependencies required by Python3 or Pip3. +RUN apt-get update && \ + apt-get upgrade -y && \ + apt-get install -y \ + curl \ + xz-utils \ + build-essential \ + zlib1g-dev \ + libssl-dev \ + libffi-dev + +RUN cd /tmp/ && \ + curl -O \ + https://www.python.org/ftp/python/$PYTHON_VERSION/Python-$PYTHON_VERSION.tar.xz && \ + tar -xvf Python-$PYTHON_VERSION.tar.xz > /dev/null && \ + cd Python-$PYTHON_VERSION && \ + ./configure \ + --enable-loadable-sqlite-extensions \ + --enable-optimizations \ + > /dev/null && \ + make -j install > /dev/null && \ + rm -r /tmp/Python-$PYTHON_VERSION.tar.xz /tmp/Python-$PYTHON_VERSION + +# Install common python dependencies. +COPY ./requirements.txt /tmp +RUN pip3 install -r /tmp/requirements.txt + # Copy the entire fuzzers directory tree to allow for module dependencies. COPY fuzzers $SRC/fuzzers diff --git a/docker/benchmark-runner/Dockerfile b/docker/benchmark-runner/Dockerfile index 7f4f195d3..68809836e 100644 --- a/docker/benchmark-runner/Dockerfile +++ b/docker/benchmark-runner/Dockerfile @@ -42,7 +42,8 @@ FROM gcr.io/fuzzbench/runners/$fuzzer/$benchmark-intermediate # Install runtime dependencies for benchmarks. -RUN apt-get update -y && apt-get install -y \ +RUN apt-get update -y && \ + DEBIAN_FRONTEND="noninteractive" apt-get install -y \ libglib2.0-0 \ libxml2 \ libarchive13 \ diff --git a/docker/dispatcher-image/Dockerfile b/docker/dispatcher-image/Dockerfile index 6f75163bb..44e23a96d 100644 --- a/docker/dispatcher-image/Dockerfile +++ b/docker/dispatcher-image/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -FROM gcr.io/oss-fuzz-base/base-clang@sha256:30706816922bf9c141b15ff4a5a44af8c0ec5700d4b46e0572029c15e495d45b AS base-clang +FROM gcr.io/oss-fuzz-base/base-clang@sha256:05774d8e388cb4975662c8eb25217e38a799d720b622e5c5d8abb79a76b49e53 AS base-clang FROM gcr.io/fuzzbench/base-image diff --git a/docker/fuzzbench/Dockerfile b/docker/fuzzbench/Dockerfile index 4137f9a7b..4e9d1fdb4 100644 --- a/docker/fuzzbench/Dockerfile +++ b/docker/fuzzbench/Dockerfile @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-FROM python:3.7 +FROM python:3.10.8 # Install the docker CLI. -ENV DOCKER_VERSION=19.03.12 +ENV DOCKER_VERSION=20.10.19 RUN wget https://download.docker.com/linux/static/stable/x86_64/docker-${DOCKER_VERSION}.tgz \ && tar xzvf docker-${DOCKER_VERSION}.tgz --strip 1 -C /usr/local/bin docker/docker \ && rm -rf docker-${DOCKER_VERSION}.tgz diff --git a/docker/gcb/base-images.yaml b/docker/gcb/base-images.yaml index a4f194bc6..93b2aa8ac 100644 --- a/docker/gcb/base-images.yaml +++ b/docker/gcb/base-images.yaml @@ -20,10 +20,10 @@ images: steps: - args: - pull - - ubuntu:xenial + - ubuntu:focal env: - DOCKER_BUILDKIT=1 - id: pull-ubuntu-xenial + id: pull-ubuntu-focal name: docker:19.03.12 - args: - build diff --git a/docker/generate_makefile.py b/docker/generate_makefile.py index 154c3aeda..fff26cd92 100755 --- a/docker/generate_makefile.py +++ b/docker/generate_makefile.py @@ -21,7 +21,7 @@ from common import fuzzer_utils from experiment.build import docker_images -BASE_TAG = "gcr.io/fuzzbench" +BASE_TAG = 'gcr.io/fuzzbench' BENCHMARK_DIR = benchmark_utils.BENCHMARKS_DIR @@ -116,7 +116,7 @@ def get_rules_for_image(name, image): section += ' .' + dep section += '\n' if 'base-' in name: - section += '\tdocker pull ubuntu:xenial\n' + section += '\tdocker pull ubuntu:focal\n' section += '\tdocker build \\\n' section += '\t--tag ' + os.path.join(BASE_TAG, image['tag']) + ' \\\n' section += '\t--build-arg BUILDKIT_INLINE_CACHE=1 \\\n' @@ -144,7 +144,7 @@ def main(): return 1 makefile_path = sys.argv[1] makefile_contents = generate_makefile() - with open(makefile_path, 'w') as file_handle: + with open(makefile_path, 'w', encoding='utf-8') as file_handle: file_handle.write(makefile_contents) return 0 diff --git a/docs/developing-fuzzbench/adding_a_new_benchmark.md b/docs/developing-fuzzbench/adding_a_new_benchmark.md index bf10478a3..78a89825c 100755 --- a/docs/developing-fuzzbench/adding_a_new_benchmark.md +++ b/docs/developing-fuzzbench/adding_a_new_benchmark.md @@ -258,7 +258,7 @@ Building benchmarks and fuzzers entails the following process: image. This is the first image in this build process that is defined by the main FuzzBench code (e.g. not fuzzers, benchmarks, or OSS-Fuzz). Its first function is to copy the FuzzBench code and install packages needed to run - FuzzBench like Python3.7 For benchmarks that define a `commit` in their + FuzzBench like Python3.10. For benchmarks that define a `commit` in their `benchmark.yaml` (i.e. OSS-Fuzz benchmarks) the build process for this image checks out the source code of that project at the specified commit. Then the process defines the environment variables `CC`, `CXX`, `CXXFLAGS`, `CFLAGS` diff --git a/docs/getting-started/adding_a_new_fuzzer.md b/docs/getting-started/adding_a_new_fuzzer.md index ba8b8c218..74aed671a 100644 --- a/docs/getting-started/adding_a_new_fuzzer.md +++ b/docs/getting-started/adding_a_new_fuzzer.md @@ -41,7 +41,7 @@ with your fuzzer. For most projects, this will look like: ```dockerfile ARG parent_image -FROM $parent_image # Base builder image (Ubuntu 16.04, with latest Clang). +FROM $parent_image # Base builder image (Ubuntu 20.04, with latest Clang). RUN apt-get update && \ # Install any system dependencies to build your fuzzer. apt-get install -y pkg1 pkg2 @@ -65,7 +65,7 @@ This file defines the image that will be used to run benchmarks with your fuzzer. Making this lightweight allows trial instances to be spun up fast. ```dockerfile -FROM gcr.io/fuzzbench/base-image # Base image (Ubuntu 16.04). 
+FROM gcr.io/fuzzbench/base-image # Base image (Ubuntu 20.04). RUN apt-get update && \ # Install any runtime dependencies for your fuzzer. apt-get install pkg1 pkg2 diff --git a/docs/getting-started/prerequisites.md b/docs/getting-started/prerequisites.md index 19e1d94c1..67a18707d 100644 --- a/docs/getting-started/prerequisites.md +++ b/docs/getting-started/prerequisites.md @@ -57,18 +57,18 @@ sudo apt-get install build-essential ### Python programming language -[Download Python 3.8](https://www.python.org/downloads/release/python-386/), +[Download Python 3.10](https://www.python.org/downloads/release/python-3108/), then install it. If you already have Python installed, you can verify its version by running -`python3 --version`. The minimum required version is 3.8. +`python3 --version`. The minimum required version is 3.10.8. ### Python package dependencies Install the python dependencies by running the following command: ```bash -sudo apt-get install python3.8-dev python3.8-venv +sudo apt-get install python3.10-dev python3.10-venv make install-dependencies ``` diff --git a/docs/reference/benchmarks.py b/docs/reference/benchmarks.py index e2e269a01..fdb61006f 100644 --- a/docs/reference/benchmarks.py +++ b/docs/reference/benchmarks.py @@ -25,8 +25,8 @@ from common import benchmark_utils from common import filesystem +from common import fuzzer_config from common import fuzzer_utils as common_fuzzer_utils -from common import oss_fuzz from common import utils from fuzzers import utils as fuzzer_utils @@ -68,10 +68,10 @@ def get_real_benchmark_name(benchmark): if not os.path.isdir(os.path.join(benchmarks_dir, real_benchmark)): continue - if not benchmark_utils.is_oss_fuzz(real_benchmark): + if not benchmark_utils.is_oss_fuzz_benchmark(real_benchmark): continue - config = oss_fuzz.get_config(real_benchmark) + config = fuzzer_config.get_config(real_benchmark) if config['project'] == benchmark: return real_benchmark @@ -126,8 +126,8 @@ def get_binary_size_mb(fuzz_target_path): def get_fuzz_target(benchmark, benchmark_path): """Returns the fuzz target and its path for |benchmark|.""" - if benchmark_utils.is_oss_fuzz(benchmark): - fuzz_target = oss_fuzz.get_config(benchmark)['fuzz_target'] + if benchmark_utils.is_oss_fuzz_benchmark(benchmark): + fuzz_target = fuzzer_config.get_config(benchmark)['fuzz_target'] else: fuzz_target = common_fuzzer_utils.DEFAULT_FUZZ_TARGET_NAME diff --git a/experiment/build/build_utils.py b/experiment/build/build_utils.py index 47fcbc92f..94f1c1b7e 100644 --- a/experiment/build/build_utils.py +++ b/experiment/build/build_utils.py @@ -21,8 +21,8 @@ def store_build_logs(build_config, build_result): """Save build results in the build logs bucket.""" - build_output = ('Command returned {retcode}.\nOutput: {output}'.format( - retcode=build_result.retcode, output=build_result.output)) + build_output = (f'Command returned {build_result.retcode}.\n' + f'Output: {build_result.output}') with tempfile.NamedTemporaryFile(mode='w') as tmp: tmp.write(build_output) tmp.flush() diff --git a/experiment/build/gcb_build.py b/experiment/build/gcb_build.py index 5d0c7c008..1401876c7 100644 --- a/experiment/build/gcb_build.py +++ b/experiment/build/gcb_build.py @@ -64,7 +64,7 @@ def build_coverage(benchmark): config = generate_cloudbuild.create_cloudbuild_spec(image_templates, benchmark=benchmark, fuzzer='coverage') - config_name = 'benchmark-{benchmark}-coverage'.format(benchmark=benchmark) + config_name = f'benchmark-{benchmark}-coverage' _build(config, config_name) @@ -75,11 +75,12 @@ 
def _build( """Submit build to GCB.""" with tempfile.NamedTemporaryFile() as config_file: yaml_utils.write(config_file.name, config) - logger.debug('Using build configuration: %s' % config) + logger.debug('Using build configuration: %s', config) + + config_arg = f'--config={config_file.name}' - config_arg = '--config=%s' % config_file.name # Use "s" suffix to denote seconds. - timeout_arg = '--timeout=%ds' % timeout_seconds + timeout_arg = f'--timeout={timeout_seconds}s' command = [ 'gcloud', @@ -118,8 +119,7 @@ def build_fuzzer_benchmark(fuzzer: str, benchmark: str): if image_specs['type'] in ('base', 'coverage', 'dispatcher'): continue image_templates[image_name] = image_specs - config_name = 'benchmark-{benchmark}-fuzzer-{fuzzer}'.format( - benchmark=benchmark, fuzzer=fuzzer) + config_name = f'benchmark-{benchmark}-fuzzer-{fuzzer}' config = generate_cloudbuild.create_cloudbuild_spec(image_templates, benchmark=benchmark, fuzzer=fuzzer) diff --git a/experiment/build/generate_cloudbuild.py b/experiment/build/generate_cloudbuild.py index 8063e38a0..1c3197623 100644 --- a/experiment/build/generate_cloudbuild.py +++ b/experiment/build/generate_cloudbuild.py @@ -136,16 +136,6 @@ def create_cloudbuild_spec(image_templates, if cloudbuild_tag is not None: cloudbuild_spec['tags'] = [f'fuzzer-{fuzzer}', f'benchmark-{benchmark}'] - # Workaround for bug https://github.com/moby/moby/issues/40262. - # This is only needed for base-image as it inherits from ubuntu:xenial. - if build_base_images: - cloudbuild_spec['steps'].append({ - 'id': 'pull-ubuntu-xenial', - 'env': ['DOCKER_BUILDKIT=1'], - 'name': DOCKER_IMAGE, - 'args': ['pull', 'ubuntu:xenial'], - }) - # TODO(metzman): Figure out how to do this to solve log length issue. # cloudbuild_spec['steps'].append({ # 'id': 'buildx-create', diff --git a/experiment/build/local_build.py b/experiment/build/local_build.py index 1f6fd386c..532f1e9f3 100644 --- a/experiment/build/local_build.py +++ b/experiment/build/local_build.py @@ -54,7 +54,7 @@ def make_shared_coverage_binaries_dir(): def build_coverage(benchmark): """Build (locally) coverage image for benchmark.""" - image_name = 'build-coverage-{}'.format(benchmark) + image_name = f'build-coverage-{benchmark}' result = make([image_name]) if result.retcode: return result @@ -66,14 +66,15 @@ def build_coverage(benchmark): def copy_coverage_binaries(benchmark): """Copy coverage binaries in a local experiment.""" shared_coverage_binaries_dir = get_shared_coverage_binaries_dir() - mount_arg = '{0}:{0}'.format(shared_coverage_binaries_dir) + mount_arg = f'{shared_coverage_binaries_dir}:{shared_coverage_binaries_dir}' builder_image_url = benchmark_utils.get_builder_image_url( benchmark, 'coverage', environment.get('DOCKER_REGISTRY')) - coverage_build_archive = 'coverage-build-{}.tar.gz'.format(benchmark) + coverage_build_archive = f'coverage-build-{benchmark}.tar.gz' coverage_build_archive_shared_dir_path = os.path.join( shared_coverage_binaries_dir, coverage_build_archive) - command = 'cd /out; tar -czvf {} * /src /work'.format( - coverage_build_archive_shared_dir_path) + command = ( + '(cd /out; ' + f'tar -czvf {coverage_build_archive_shared_dir_path} * /src /work)') return new_process.execute([ 'docker', 'run', '-v', mount_arg, builder_image_url, '/bin/bash', '-c', command @@ -82,5 +83,5 @@ def copy_coverage_binaries(benchmark): def build_fuzzer_benchmark(fuzzer: str, benchmark: str) -> bool: """Builds |benchmark| for |fuzzer|.""" - image_name = 'build-{}-{}'.format(fuzzer, benchmark) + image_name = 
f'build-{fuzzer}-{benchmark}' make([image_name]) diff --git a/experiment/build/test_builder.py b/experiment/build/test_builder.py index 3cffa1b26..5f9e79153 100644 --- a/experiment/build/test_builder.py +++ b/experiment/build/test_builder.py @@ -19,8 +19,8 @@ import pytest -from common import utils from experiment.build import builder +from experiment.run_experiment import DEFAULT_CONCURRENT_BUILDS SRC_ROOT = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) @@ -62,17 +62,21 @@ def get_benchmarks_or_fuzzers(benchmarks_or_fuzzers_directory, filename, ] -@pytest.mark.skipif(sys.version_info.minor > 7, +@pytest.mark.skipif(sys.version_info.minor > 10, reason='Test can stop responding on versions greater than ' - '3.7') -@mock.patch('experiment.build.builder.build_measurer') -@mock.patch('time.sleep') + '3.10') @pytest.mark.parametrize('build_measurer_return_value', [True, False]) -def test_build_all_measurers(_, mocked_build_measurer, - build_measurer_return_value, experiment, fs): +@mock.patch('experiment.build.builder.build_measurer') +@mock.patch('experiment.build.builder.time') +@mock.patch('experiment.build.builder.filesystem') +@mock.patch('experiment.build.builder.build_utils') +@mock.patch.dict(os.environ, + {'CONCURRENT_BUILDS': str(DEFAULT_CONCURRENT_BUILDS)}) +def test_build_all_measurers(mocked_build_utils, mocked_fs, mocked_time, + mocked_build_measurer, + build_measurer_return_value): """Tests that build_all_measurers works as intendend when build_measurer calls fail.""" - fs.add_real_directory(utils.ROOT_DIR) mocked_build_measurer.return_value = build_measurer_return_value benchmarks = get_regular_benchmarks() result = builder.build_all_measurers(benchmarks) @@ -91,7 +95,6 @@ def builder_integration(experiment): yield -# pylint: disable=no-self-use @pytest.mark.skipif( not os.getenv('TEST_INTEGRATION_ALL'), reason='Tests take too long and can interfere with real ' diff --git a/experiment/build/test_generate_cloudbuild.py b/experiment/build/test_generate_cloudbuild.py index ce48f1e91..444614b54 100644 --- a/experiment/build/test_generate_cloudbuild.py +++ b/experiment/build/test_generate_cloudbuild.py @@ -37,11 +37,6 @@ def test_generate_cloudbuild_spec_build_base_image(experiment): expected_spec = { 'steps': [{ - 'id': 'pull-ubuntu-xenial', - 'env': ['DOCKER_BUILDKIT=1'], - 'name': 'gcr.io/cloud-builders/docker', - 'args': ['pull', 'ubuntu:xenial'] - }, { 'id': 'base-image', 'env': ['DOCKER_BUILDKIT=1'], 'name': 'gcr.io/cloud-builders/docker', @@ -85,11 +80,6 @@ def test_generate_cloudbuild_spec_other_registry(experiment): expected_spec = { 'steps': [{ - 'id': 'pull-ubuntu-xenial', - 'env': ['DOCKER_BUILDKIT=1'], - 'name': 'gcr.io/cloud-builders/docker', - 'args': ['pull', 'ubuntu:xenial'] - }, { 'id': 'base-image', 'env': ['DOCKER_BUILDKIT=1'], 'name': 'gcr.io/cloud-builders/docker', diff --git a/experiment/conftest.py b/experiment/conftest.py index 8e517ca89..e7ef5a402 100644 --- a/experiment/conftest.py +++ b/experiment/conftest.py @@ -25,7 +25,7 @@ def experiment_config(): config_filepath = os.path.join(os.path.dirname(__file__), 'test_data', 'experiment-config.yaml') - with open(config_filepath) as file_handle: + with open(config_filepath, encoding='utf-8') as file_handle: return yaml.load(file_handle, yaml.SafeLoader) @@ -36,5 +36,5 @@ def local_experiment_config(): config_filepath = os.path.join(os.path.dirname(__file__), 'test_data', 'local-experiment-config.yaml') - with open(config_filepath) as file_handle: + with open(config_filepath, encoding='utf-8') as 
file_handle: return yaml.load(file_handle, yaml.SafeLoader) diff --git a/experiment/measurer/coverage_utils.py b/experiment/measurer/coverage_utils.py index 7123ed4e2..20333b2d3 100644 --- a/experiment/measurer/coverage_utils.py +++ b/experiment/measurer/coverage_utils.py @@ -60,10 +60,8 @@ def generate_coverage_reports(experiment_config: dict): def generate_coverage_report(experiment, benchmark, fuzzer, region_coverage): """Generates the coverage report for one pair of benchmark and fuzzer.""" - logger.info( - ('Generating coverage report for ' - 'benchmark: {benchmark} fuzzer: {fuzzer}.').format(benchmark=benchmark, - fuzzer=fuzzer)) + logger.info('Generating coverage report for benchmark: %s fuzzer: %s.', + benchmark, fuzzer) try: coverage_reporter = CoverageReporter(experiment, fuzzer, benchmark, @@ -122,9 +120,8 @@ def __init__(self, experiment, fuzzer, benchmark, region_coverage): def merge_profdata_files(self): """Merge profdata files from |src_files| to |dst_files|.""" - logger.info('Merging profdata for fuzzer: ' - '{fuzzer},benchmark: {benchmark}.'.format( - fuzzer=self.fuzzer, benchmark=self.benchmark)) + logger.info('Merging profdata for fuzzer: %s, benchmark: %s.', + self.fuzzer, self.benchmark) files_to_merge = [] for trial_id in self.trial_ids: @@ -148,24 +145,27 @@ def generate_coverage_summary_json(self): if result.retcode != 0: logger.error( 'Merged coverage summary json file generation failed for ' - 'fuzzer: {fuzzer},benchmark: {benchmark}.'.format( - fuzzer=self.fuzzer, benchmark=self.benchmark)) + f'fuzzer: {self.fuzzer},benchmark: {self.benchmark}.') def generate_coverage_report(self): """Generates the coverage report and stores in bucket.""" command = [ - 'llvm-cov', 'show', '-format=html', - '-path-equivalence=/,{prefix}'.format(prefix=self.source_files_dir), - '-output-dir={dst_dir}'.format(dst_dir=self.report_dir), - '-Xdemangler', 'c++filt', '-Xdemangler', '-n', self.binary_file, - '-instr-profile={profdata}'.format( - profdata=self.merged_profdata_file) + 'llvm-cov', + 'show', + '-format=html', + f'-path-equivalence=/,{self.source_files_dir}', + f'-output-dir={self.report_dir}', + '-Xdemangler', + 'c++filt', + '-Xdemangler', + '-n', + self.binary_file, + f'-instr-profile={self.merged_profdata_file}', ] result = new_process.execute(command, expect_zero=False) if result.retcode != 0: logger.error('Coverage report generation failed for ' - 'fuzzer: {fuzzer},benchmark: {benchmark}.'.format( - fuzzer=self.fuzzer, benchmark=self.benchmark)) + f'fuzzer: {self.fuzzer},benchmark: {self.benchmark}.') return src_dir = self.report_dir @@ -183,7 +183,7 @@ def generate_coverage_branches_json(self): coverage_json_src = os.path.join(self.data_dir, 'covered_branches.json') coverage_json_dst = exp_path.filestore(coverage_json_src) filesystem.create_directory(self.data_dir) - with open(coverage_json_src, 'w') as file_handle: + with open(coverage_json_src, 'w', encoding='utf-8') as file_handle: json.dump(edges_covered, file_handle) filestore_utils.cp(coverage_json_src, coverage_json_dst, @@ -192,12 +192,12 @@ def generate_coverage_branches_json(self): def get_coverage_archive_name(benchmark): """Gets the archive name for |benchmark|.""" - return 'coverage-build-%s.tar.gz' % benchmark + return f'coverage-build-{benchmark}.tar.gz' def get_profdata_file_name(trial_id): """Returns the profdata file name for |trial_id|.""" - return 'data-{id}.profdata'.format(id=trial_id) + return f'data-{trial_id}.profdata' def get_coverage_binary(benchmark: str) -> str: @@ -234,7 +234,7 @@ def 
merge_profdata_files(src_files, dst_file): def get_coverage_infomation(coverage_summary_file): """Reads the coverage information from |coverage_summary_file| and skip possible warnings in the file.""" - with open(coverage_summary_file) as summary: + with open(coverage_summary_file, encoding='utf-8') as summary: return json.loads(summary.readlines()[-1]) @@ -264,15 +264,20 @@ def generate_json_summary(coverage_binary, """Generates the json summary file from |coverage_binary| and |profdata_file|.""" command = [ - 'llvm-cov', 'export', '-format=text', '-num-threads=1', - '-region-coverage-gt=0', '-skip-expansions', coverage_binary, - '-instr-profile=%s' % profdata_file + 'llvm-cov', + 'export', + '-format=text', + '-num-threads=1', + '-region-coverage-gt=0', + '-skip-expansions', + coverage_binary, + f'-instr-profile={profdata_file}', ] if summary_only: command.append('-summary-only') - with open(output_file, 'w') as dst_file: + with open(output_file, 'w', encoding='utf-8') as dst_file: result = new_process.execute(command, output_file=dst_file, expect_zero=False) diff --git a/experiment/measurer/measure_manager.py b/experiment/measurer/measure_manager.py index b926c2515..bfca81a6e 100644 --- a/experiment/measurer/measure_manager.py +++ b/experiment/measurer/measure_manager.py @@ -48,7 +48,7 @@ from experiment.measurer import run_crashes from experiment import scheduler -logger = logs.Logger('measurer') # pylint: disable=invalid-name +logger = logs.Logger('measurer') SnapshotMeasureRequest = collections.namedtuple( 'SnapshotMeasureRequest', ['fuzzer', 'benchmark', 'trial_id', 'cycle']) @@ -109,8 +109,8 @@ def measure_loop(experiment: str, local_experiment = experiment_utils.is_local_experiment() if local_experiment: cores_queue = multiprocessing.Queue() - logger.info('Scheduling measurers from core %d to %d.' % - (runners_cpus, runners_cpus + measurers_cpus - 1)) + logger.info('Scheduling measurers from core %d to %d.', + runners_cpus, runners_cpus + measurers_cpus - 1) for cpu in range(runners_cpus, runners_cpus + measurers_cpus): cores_queue.put(cpu) pool_args = (measurers_cpus, _process_init, (cores_queue,)) @@ -122,14 +122,16 @@ def measure_loop(experiment: str, set_up_coverage_binaries(pool, experiment) # Using Multiprocessing.Queue will fail with a complaint about # inheriting queue. - q = manager.Queue() # pytype: disable=attribute-error + # pytype: disable=attribute-error + multiprocessing_queue = manager.Queue() while True: try: # Get whether all trials have ended before we measure to prevent # races. all_trials_ended = scheduler.all_trials_ended(experiment) - if not measure_all_trials(experiment, max_total_time, pool, q, + if not measure_all_trials(experiment, max_total_time, pool, + multiprocessing_queue, region_coverage): # We didn't measure any trials. if all_trials_ended: @@ -145,8 +147,8 @@ def measure_loop(experiment: str, logger.info('Finished measure loop.') -def measure_all_trials(experiment: str, max_total_time: int, pool, q, - region_coverage) -> bool: # pylint: disable=invalid-name +def measure_all_trials(experiment: str, max_total_time: int, pool, + multiprocessing_queue, region_coverage) -> bool: """Get coverage data (with coverage runs) for all active trials. Note that this should not be called unless multiprocessing.set_start_method('spawn') was called first. 
Otherwise it will use fork which breaks logging.""" @@ -163,7 +165,7 @@ def measure_all_trials(experiment: str, max_total_time: int, pool, q, return False measure_trial_coverage_args = [ - (unmeasured_snapshot, max_cycle, q, region_coverage) + (unmeasured_snapshot, max_cycle, multiprocessing_queue, region_coverage) for unmeasured_snapshot in unmeasured_snapshots ] @@ -189,7 +191,8 @@ def save_snapshots(): while True: try: - snapshot = q.get(timeout=SNAPSHOT_QUEUE_GET_TIMEOUT) + snapshot = multiprocessing_queue.get( + timeout=SNAPSHOT_QUEUE_GET_TIMEOUT) snapshots.append(snapshot) except queue.Empty: if result.ready(): @@ -329,30 +332,31 @@ def extract_corpus(corpus_archive: str, sha_blacklist: Set[str], output_directory: str): """Extract a corpus from |corpus_archive| to |output_directory|.""" pathlib.Path(output_directory).mkdir(exist_ok=True) - tar = tarfile.open(corpus_archive, 'r:gz') - for member in tar.getmembers(): + with tarfile.open(corpus_archive, 'r:gz') as tar: + for member in tar.getmembers(): - if not member.isfile(): - # We don't care about directory structure. So skip if not a file. - continue + if not member.isfile(): + # We don't care about directory structure. + # So skip if not a file. + continue - member_file_handle = tar.extractfile(member) - if not member_file_handle: - logger.info('Failed to get handle to %s', member) - continue + member_file_handle = tar.extractfile(member) + if not member_file_handle: + logger.info('Failed to get handle to %s.', member) + continue - member_contents = member_file_handle.read() - filename = utils.string_hash(member_contents) - if filename in sha_blacklist: - continue + member_contents = member_file_handle.read() + filename = utils.string_hash(member_contents) + if filename in sha_blacklist: + continue - file_path = os.path.join(output_directory, filename) + file_path = os.path.join(output_directory, filename) - if os.path.exists(file_path): - # Don't write out duplicates in the archive. - continue + if os.path.exists(file_path): + # Don't write out duplicates in the archive. 
+ continue - filesystem.write(file_path, member_contents, 'wb') + filesystem.write(file_path, member_contents, 'wb') class SnapshotMeasurer(coverage_utils.TrialCoverage): # pylint: disable=too-many-instance-attributes @@ -572,8 +576,7 @@ def process_crashes(self, cycle): crash_metadata = run_crashes.do_crashes_run(app_binary, self.crashes_dir) crashes = [] - for crash_key in crash_metadata: - crash = crash_metadata[crash_key] + for crash_key, crash in crash_metadata.items(): crashes.append( models.Crash(crash_key=crash_key, crash_testcase=crash.crash_testcase, @@ -623,9 +626,9 @@ def get_fuzzer_stats(stats_filestore_path): return json.loads(stats_str) -def measure_trial_coverage( # pylint: disable=invalid-name - measure_req, max_cycle: int, q: multiprocessing.Queue, - region_coverage) -> models.Snapshot: +def measure_trial_coverage(measure_req, max_cycle: int, + multiprocessing_queue: multiprocessing.Queue, + region_coverage) -> models.Snapshot: """Measure the coverage obtained by |trial_num| on |benchmark| using |fuzzer|.""" initialize_logs() @@ -640,7 +643,7 @@ def measure_trial_coverage( # pylint: disable=invalid-name region_coverage) if not snapshot: break - q.put(snapshot) + multiprocessing_queue.put(snapshot) except Exception: # pylint: disable=broad-except logger.error('Error measuring cycle.', extras={ @@ -748,15 +751,15 @@ def set_up_coverage_binary(benchmark): coverage_binaries_dir = build_utils.get_coverage_binaries_dir() benchmark_coverage_binary_dir = coverage_binaries_dir / benchmark filesystem.create_directory(benchmark_coverage_binary_dir) - archive_name = 'coverage-build-%s.tar.gz' % benchmark + archive_name = f'coverage-build-{benchmark}.tar.gz' archive_filestore_path = exp_path.filestore(coverage_binaries_dir / archive_name) filestore_utils.cp(archive_filestore_path, str(benchmark_coverage_binary_dir)) archive_path = benchmark_coverage_binary_dir / archive_name - tar = tarfile.open(archive_path, 'r:gz') - tar.extractall(benchmark_coverage_binary_dir) - os.remove(archive_path) + with tarfile.open(archive_path, 'r:gz') as tar: + tar.extractall(benchmark_coverage_binary_dir) + os.remove(archive_path) def initialize_logs(): diff --git a/experiment/measurer/run_coverage.py b/experiment/measurer/run_coverage.py index 4f83d8a94..d031efe96 100644 --- a/experiment/measurer/run_coverage.py +++ b/experiment/measurer/run_coverage.py @@ -57,10 +57,9 @@ def do_coverage_run( # pylint: disable=too-many-locals with tempfile.TemporaryDirectory() as merge_dir: command = [ coverage_binary, '-merge=1', '-dump_coverage=1', - '-artifact_prefix=%s/' % crashes_dir, - '-timeout=%d' % UNIT_TIMEOUT, - '-rss_limit_mb=%d' % RSS_LIMIT_MB, - '-max_total_time=%d' % (MAX_TOTAL_TIME - EXIT_BUFFER), merge_dir, + f'-artifact_prefix={crashes_dir}/', f'-timeout={UNIT_TIMEOUT}', + f'-rss_limit_mb={RSS_LIMIT_MB}', + f'-max_total_time={MAX_TOTAL_TIME - EXIT_BUFFER}', merge_dir, new_units_dir ] coverage_binary_dir = os.path.dirname(coverage_binary) diff --git a/experiment/measurer/run_crashes.py b/experiment/measurer/run_crashes.py index f63daa8a1..4816efd7e 100644 --- a/experiment/measurer/run_crashes.py +++ b/experiment/measurer/run_crashes.py @@ -59,9 +59,8 @@ def process_crash(app_binary, crash_testcase_path, crashes_dir): env = os.environ.copy() sanitizer.set_sanitizer_options(env) command = [ - app_binary, - '-timeout=%d' % run_coverage.UNIT_TIMEOUT, - '-rss_limit_mb=%d' % run_coverage.RSS_LIMIT_MB, crash_testcase_path + app_binary, f'-timeout={run_coverage.UNIT_TIMEOUT}', + 
f'-rss_limit_mb={run_coverage.RSS_LIMIT_MB}', crash_testcase_path ] app_binary_dir = os.path.dirname(app_binary) result = new_process.execute(command, diff --git a/experiment/measurer/test_measure_manager.py b/experiment/measurer/test_measure_manager.py index 29be769b5..26b0ecc2e 100644 --- a/experiment/measurer/test_measure_manager.py +++ b/experiment/measurer/test_measure_manager.py @@ -146,8 +146,8 @@ def test_generate_summary(mocked_get_coverage_binary, mocked_execute, snapshot_measurer = measure_manager.SnapshotMeasurer( FUZZER, BENCHMARK, TRIAL_NUM, SNAPSHOT_LOGGER, REGION_COVERAGE) - snapshot_measurer.cov_summary_file = "/reports/cov_summary.txt" - snapshot_measurer.profdata_file = "/reports/data.profdata" + snapshot_measurer.cov_summary_file = '/reports/cov_summary.txt' + snapshot_measurer.profdata_file = '/reports/data.profdata' fs.create_dir('/reports') fs.create_file(snapshot_measurer.profdata_file, contents='fake_contents') snapshot_measurer.generate_summary(CYCLE) @@ -162,7 +162,7 @@ def test_generate_summary(mocked_get_coverage_binary, mocked_execute, assert (len(mocked_execute.call_args_list)) == 1 args = mocked_execute.call_args_list[0] assert args[0][0] == expected - assert args[1]['output_file'].name == "/reports/cov_summary.txt" + assert args[1]['output_file'].name == '/reports/cov_summary.txt' @mock.patch('common.logs.error') @@ -199,7 +199,7 @@ def test_measure_all_trials_not_ready(mocked_rsync, mocked_ls, experiment): @mock.patch('multiprocessing.pool.ThreadPool', test_utils.MockPool) @mock.patch('common.new_process.execute') @mock.patch('common.filesystem.directories_have_same_files') -@pytest.mark.skip(reason="See crbug.com/1012329") +@pytest.mark.skip(reason='See crbug.com/1012329') def test_measure_all_trials_no_more(mocked_directories_have_same_files, mocked_execute): """Test measure_all_trials does what is intended when the experiment is @@ -359,9 +359,6 @@ def get_test_data_path(*subpaths): return os.path.join(TEST_DATA_PATH, *subpaths) -# pylint: disable=no-self-use - - class TestIntegrationMeasurement: """Integration tests for measurement.""" @@ -409,7 +406,7 @@ def test_measure_snapshot_coverage( # pylint: disable=too-many-locals # Set up the snapshot archive. cycle = 1 archive = get_test_data_path('test_measure_snapshot_coverage', - 'corpus-archive-%04d.tar.gz' % cycle) + f'corpus-archive-{cycle:04d}.tar.gz') corpus_dir = os.path.join(snapshot_measurer.trial_dir, 'corpus') os.makedirs(corpus_dir) shutil.copy(archive, corpus_dir) diff --git a/experiment/measurer/test_run_coverage.py b/experiment/measurer/test_run_coverage.py index 8477caca6..9a5b000b9 100644 --- a/experiment/measurer/test_run_coverage.py +++ b/experiment/measurer/test_run_coverage.py @@ -13,8 +13,6 @@ # limitations under the License. """Tests for run_crashes.py.""" -# pylint: disable=no-self-use - import os from unittest import mock import glob diff --git a/experiment/measurer/test_run_crashes.py b/experiment/measurer/test_run_crashes.py index 3c26a2776..f967a864a 100644 --- a/experiment/measurer/test_run_crashes.py +++ b/experiment/measurer/test_run_crashes.py @@ -13,8 +13,6 @@ # limitations under the License. 
"""Tests for run_coverage.py.""" -# pylint: disable=no-self-use - import os import pytest @@ -36,7 +34,7 @@ def test_integration_do_coverage_run_crash(self): """Test that do_coverage_run returns crashing inputs.""" llvm_tools_path = os.path.abspath( os.path.join(TEST_DATA_PATH, '..', 'llvm_tools')) - os.environ["PATH"] = llvm_tools_path + os.pathsep + os.environ["PATH"] + os.environ['PATH'] = llvm_tools_path + os.pathsep + os.environ['PATH'] crashes_dir = os.path.join(TEST_DATA_PATH, 'crash-corpus') crashes = run_crashes.do_crashes_run(self.APP_BINARY_PATH, crashes_dir) diff --git a/experiment/run_experiment.py b/experiment/run_experiment.py index 08c3365d7..e807f8f23 100644 --- a/experiment/run_experiment.py +++ b/experiment/run_experiment.py @@ -23,7 +23,7 @@ import tarfile import tempfile from collections import namedtuple -from typing import Dict, List, Union, NamedTuple +from typing import Dict, List, Optional, Union import jinja2 import yaml @@ -62,6 +62,9 @@ 'libFuzzer/{fuzz_target}/public.zip') DEFAULT_CONCURRENT_BUILDS = 30 +Requirement = namedtuple('Requirement', + ['mandatory', 'type', 'lowercase', 'startswith']) + def _set_default_config_values(config: Dict[str, Union[int, str, bool]], local_experiment: bool): @@ -74,7 +77,7 @@ def _set_default_config_values(config: Dict[str, Union[int, str, bool]], def _validate_config_parameters( config: Dict[str, Union[int, str, bool]], - config_requirements: Dict[str, NamedTuple]) -> bool: + config_requirements: Dict[str, Requirement]) -> bool: """Validates if the required |params| exist in |config|.""" if 'cloud_experiment_bucket' in config or 'cloud_web_bucket' in config: logs.error('"cloud_experiment_bucket" and "cloud_web_bucket" are now ' @@ -100,8 +103,9 @@ def _validate_config_parameters( # pylint: disable=too-many-arguments -def _validate_config_values(config: Dict[str, Union[str, int, bool]], - config_requirements: Dict[str, NamedTuple]) -> bool: +def _validate_config_values( + config: Dict[str, Union[str, int, bool]], + config_requirements: Dict[str, Requirement]) -> bool: """Validates if |params| types and formats in |config| are correct.""" valid = True @@ -151,8 +155,6 @@ def read_and_validate_experiment_config(config_filename: str) -> Dict: local_experiment = config.get('local_experiment', False) # Requirement of each config field. - Requirement = namedtuple('Requirement', - ['mandatory', 'type', 'lowercase', 'startswith']) config_requirements = { 'experiment_filestore': Requirement(True, str, True, '/' if local_experiment else 'gs://'), @@ -189,7 +191,7 @@ def read_and_validate_experiment_config(config_filename: str) -> Dict: all_params_valid = _validate_config_parameters(config, config_requirements) all_values_valid = _validate_config_values(config, config_requirements) if not all_params_valid or not all_values_valid: - raise ValidationError('Config: %s is invalid.' % config_filename) + raise ValidationError(f'Config: {config_filename} is invalid.') _set_default_config_values(config, local_experiment) return config @@ -211,20 +213,20 @@ def get_directories(parent_dir): def validate_custom_seed_corpus(custom_seed_corpus_dir, benchmarks): """Validate seed corpus provided by user""" if not os.path.isdir(custom_seed_corpus_dir): - raise ValidationError('Corpus location "%s" is invalid.' 
% - custom_seed_corpus_dir) + raise ValidationError( + f'Corpus location "{custom_seed_corpus_dir}" is invalid.') for benchmark in benchmarks: benchmark_corpus_dir = os.path.join(custom_seed_corpus_dir, benchmark) if not os.path.exists(benchmark_corpus_dir): raise ValidationError('Custom seed corpus directory for ' - 'benchmark "%s" does not exist.' % benchmark) + f'benchmark "{benchmark}" does not exist.') if not os.path.isdir(benchmark_corpus_dir): - raise ValidationError('Seed corpus of benchmark "%s" must be ' - 'a directory.' % benchmark) + raise ValidationError( + f'Seed corpus of benchmark "{benchmark}" must be a directory.') if not os.listdir(benchmark_corpus_dir): - raise ValidationError('Seed corpus of benchmark "%s" is empty.' % - benchmark) + raise ValidationError( + f'Seed corpus of benchmark "{benchmark}" is empty.') def validate_benchmarks(benchmarks: List[str]): @@ -232,13 +234,13 @@ def validate_benchmarks(benchmarks: List[str]): benchmark_types = set() for benchmark in set(benchmarks): if benchmarks.count(benchmark) > 1: - raise ValidationError('Benchmark "%s" is included more than once.' % - benchmark) + raise ValidationError( + f'Benchmark "{benchmark}" is included more than once.') # Validate benchmarks here. It's possible someone might run an # experiment without going through presubmit. Better to catch an invalid # benchmark than see it in production. if not benchmark_utils.validate(benchmark): - raise ValidationError('Benchmark "%s" is invalid.' % benchmark) + raise ValidationError(f'Benchmark "{benchmark}" is invalid.') benchmark_types.add(benchmark_utils.get_type(benchmark)) @@ -251,7 +253,7 @@ def validate_benchmarks(benchmarks: List[str]): def validate_fuzzer(fuzzer: str): """Parses and validates a fuzzer name.""" if not fuzzer_utils.validate(fuzzer): - raise ValidationError('Fuzzer: %s is invalid.' % fuzzer) + raise ValidationError(f'Fuzzer: {fuzzer} is invalid.') def validate_experiment_name(experiment_name: str): @@ -259,8 +261,8 @@ def validate_experiment_name(experiment_name: str): instances.""" if not re.match(EXPERIMENT_CONFIG_REGEX, experiment_name): raise ValidationError( - 'Experiment name "%s" is invalid. Must match: "%s"' % - (experiment_name, EXPERIMENT_CONFIG_REGEX.pattern)) + f'Experiment name "{experiment_name}" is invalid. 
' + f'Must match: "{EXPERIMENT_CONFIG_REGEX.pattern}"') def set_up_experiment_config_file(config): @@ -269,7 +271,8 @@ def set_up_experiment_config_file(config): filesystem.recreate_directory(experiment_utils.CONFIG_DIR) experiment_config_filename = ( experiment_utils.get_internal_experiment_config_relative_path()) - with open(experiment_config_filename, 'w') as experiment_config_file: + with open(experiment_config_filename, 'w', + encoding='utf-8') as experiment_config_file: yaml.dump(config, experiment_config_file, default_flow_style=False) @@ -291,16 +294,16 @@ def start_experiment( # pylint: disable=too-many-arguments config_filename: str, benchmarks: List[str], fuzzers: List[str], - description: str = None, - no_seeds=False, - no_dictionaries=False, - oss_fuzz_corpus=False, - allow_uncommitted_changes=False, - concurrent_builds=DEFAULT_CONCURRENT_BUILDS, - measurers_cpus=None, - runners_cpus=None, - region_coverage=False, - custom_seed_corpus_dir=None): + description: Optional[str] = None, + no_seeds: bool = False, + no_dictionaries: bool = False, + oss_fuzz_corpus: bool = False, + allow_uncommitted_changes: bool = False, + concurrent_builds: Optional[int] = None, + measurers_cpus: Optional[int] = None, + runners_cpus: Optional[int] = None, + region_coverage: bool = False, + custom_seed_corpus_dir: Optional[str] = None): """Start a fuzzer benchmarking experiment.""" if not allow_uncommitted_changes: check_no_uncommitted_changes() @@ -371,7 +374,7 @@ def add_oss_fuzz_corpus(benchmark, oss_fuzz_corpora_dir): fuzz_target = benchmark_utils.get_fuzz_target(benchmark) if not fuzz_target.startswith(project): - full_fuzz_target = '%s_%s' % (project, fuzz_target) + full_fuzz_target = f'{project}_{fuzz_target}' else: full_fuzz_target = fuzz_target @@ -456,35 +459,31 @@ def start(self): self.config['experiment_filestore']) filesystem.create_directory(experiment_filestore_path) sql_database_arg = ( - 'SQL_DATABASE_URL=sqlite:///{}?check_same_thread=False'.format( - os.path.join(experiment_filestore_path, 'local.db'))) + 'SQL_DATABASE_URL=sqlite:///' + f'{os.path.join(experiment_filestore_path, "local.db")}' + '?check_same_thread=False') docker_registry = self.config['docker_registry'] - set_instance_name_arg = 'INSTANCE_NAME={instance_name}'.format( - instance_name=self.instance_name) - set_experiment_arg = 'EXPERIMENT={experiment}'.format( - experiment=self.config['experiment']) - shared_experiment_filestore_arg = '{0}:{0}'.format( - self.config['experiment_filestore']) + set_instance_name_arg = f'INSTANCE_NAME={self.instance_name}' + set_experiment_arg = f'EXPERIMENT={self.config["experiment"]}' + filestore = self.config['experiment_filestore'] + shared_experiment_filestore_arg = f'{filestore}:{filestore}' # TODO: (#484) Use config in function args or set as environment # variables. 
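The run_experiment.py hunks above hoist the Requirement namedtuple to module scope so that both _validate_config_parameters and _validate_config_values can annotate their config_requirements argument with it instead of the looser NamedTuple. As a minimal sketch of how such a requirements table can drive validation; the validate() helper and the trimmed-down requirements dict below are illustrative assumptions, not the module's actual logic:

from collections import namedtuple

Requirement = namedtuple('Requirement',
                         ['mandatory', 'type', 'lowercase', 'startswith'])

# Illustrative subset only; the real table covers many more parameters.
CONFIG_REQUIREMENTS = {
    'experiment_filestore': Requirement(True, str, True, 'gs://'),
    'trials': Requirement(True, int, False, ''),
}


def validate(config):
    """Return True if |config| satisfies every entry in CONFIG_REQUIREMENTS."""
    for param, requirement in CONFIG_REQUIREMENTS.items():
        if param not in config:
            # Missing parameters are only fatal when marked mandatory.
            if requirement.mandatory:
                return False
            continue
        value = config[param]
        if not isinstance(value, requirement.type):
            return False
        if requirement.lowercase and str(value) != str(value).lower():
            return False
        if requirement.startswith and not str(value).startswith(
                requirement.startswith):
            return False
    return True


assert validate({'experiment_filestore': 'gs://my-bucket', 'trials': 5})
assert not validate({'experiment_filestore': 'GS://my-bucket', 'trials': 5})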
- set_docker_registry_arg = 'DOCKER_REGISTRY={}'.format(docker_registry) + set_docker_registry_arg = f'DOCKER_REGISTRY={docker_registry}' set_experiment_filestore_arg = ( - 'EXPERIMENT_FILESTORE={experiment_filestore}'.format( - experiment_filestore=self.config['experiment_filestore'])) - shared_report_filestore_arg = '{0}:{0}'.format( - self.config['report_filestore']) - set_report_filestore_arg = ( - 'REPORT_FILESTORE={report_filestore}'.format( - report_filestore=self.config['report_filestore'])) - set_snapshot_period_arg = 'SNAPSHOT_PERIOD={snapshot_period}'.format( - snapshot_period=self.config['snapshot_period']) + f'EXPERIMENT_FILESTORE={self.config["experiment_filestore"]}') + + filestore = self.config['report_filestore'] + shared_report_filestore_arg = f'{filestore}:{filestore}' + set_report_filestore_arg = f'REPORT_FILESTORE={filestore}' + set_snapshot_period_arg = ( + f'SNAPSHOT_PERIOD={self.config["snapshot_period"]}') + docker_image_url = f'{docker_registry}/dispatcher-image' set_concurrent_builds_arg = ( f'CONCURRENT_BUILDS={self.config["concurrent_builds"]}') set_worker_pool_name_arg = ( f'WORKER_POOL_NAME={self.config["worker_pool_name"]}') - docker_image_url = '{docker_registry}/dispatcher-image'.format( - docker_registry=docker_registry) environment_args = [ '-e', 'LOCAL_EXPERIMENT=True', @@ -522,7 +521,7 @@ def start(self): '--shm-size=2g', '--cap-add=SYS_PTRACE', '--cap-add=SYS_NICE', - '--name=%s' % container_name, + f'--name={container_name}', docker_image_url, '/bin/bash', '-c', diff --git a/experiment/runner.py b/experiment/runner.py index 292fbb256..899adad14 100644 --- a/experiment/runner.py +++ b/experiment/runner.py @@ -116,7 +116,7 @@ def get_clusterfuzz_seed_corpus_path(fuzz_target_path): def _copy_custom_seed_corpus(corpus_directory): - "Copy custom seed corpus provided by user" + """Copy custom seed corpus provided by user""" shutil.rmtree(corpus_directory) benchmark = environment.get('BENCHMARK') benchmark_custom_corpus_dir = posixpath.join( @@ -162,7 +162,7 @@ def _unpack_clusterfuzz_seed_corpus(fuzz_target_path, corpus_directory): if seed_corpus_file.file_size > CORPUS_ELEMENT_BYTES_LIMIT: continue - output_filename = '%016d' % idx + output_filename = f'{idx:016d}' output_file_path = os.path.join(corpus_directory, output_filename) zip_file.extract(seed_corpus_file, output_file_path) idx += 1 @@ -208,13 +208,10 @@ def run_fuzzer(max_total_time, log_filename): command = [ 'nice', '-n', str(0 - runner_niceness), 'python3', '-u', '-c', - ('from fuzzers.{fuzzer} import fuzzer; ' + (f'from fuzzers.{environment.get("FUZZER")} import fuzzer; ' 'fuzzer.fuzz(' - "'{input_corpus}', '{output_corpus}', '{target_binary}')").format( - fuzzer=environment.get('FUZZER'), - input_corpus=shlex.quote(input_corpus), - output_corpus=shlex.quote(output_corpus), - target_binary=shlex.quote(target_binary)) + f'"{shlex.quote(input_corpus)}", "{shlex.quote(output_corpus)}", ' + f'"{shlex.quote(target_binary)}")') ] # Write output to stdout if user is fuzzing from command line. @@ -403,7 +400,7 @@ def record_stats(self): stats_filename = experiment_utils.get_stats_filename(self.cycle) stats_path = os.path.join(self.results_dir, stats_filename) - with open(stats_path, 'w') as stats_file_handle: + with open(stats_path, 'w', encoding='utf-8') as stats_file_handle: stats_file_handle.write(stats_json_str) def archive_corpus(self): @@ -463,7 +460,7 @@ def get_fuzzer_module(fuzzer): """Returns the fuzzer.py module for |fuzzer|. 
We made this function so that we can mock the module because importing modules makes hard to undo changes to the python process.""" - fuzzer_module_name = 'fuzzers.{fuzzer}.fuzzer'.format(fuzzer=fuzzer) + fuzzer_module_name = f'fuzzers.{fuzzer}.fuzzer' fuzzer_module = importlib.import_module(fuzzer_module_name) return fuzzer_module diff --git a/experiment/schedule_measure_workers.py b/experiment/schedule_measure_workers.py index d4cfc4e90..14e9a9e94 100644 --- a/experiment/schedule_measure_workers.py +++ b/experiment/schedule_measure_workers.py @@ -60,7 +60,7 @@ def initialize(experiment_config: dict): instance_template_name = get_measure_worker_instance_template_name( experiment) docker_image = posixpath.join(experiment_config['docker_registry'], - 'measure-worker:{}'.format(experiment)) + f'measure-worker:{experiment}') redis_host = experiment_config['redis_host'] experiment_filestore = experiment_config['experiment_filestore'] diff --git a/experiment/scheduler.py b/experiment/scheduler.py index 5213d844b..c503c1fc8 100644 --- a/experiment/scheduler.py +++ b/experiment/scheduler.py @@ -586,13 +586,13 @@ def schedule_loop(experiment_config: dict): if local_experiment: runner_num_cpu_cores = experiment_config['runner_num_cpu_cores'] processes = runners_cpus // runner_num_cpu_cores - logger.info('Scheduling runners from core 0 to %d.' % - (runner_num_cpu_cores * processes - 1)) + logger.info('Scheduling runners from core 0 to %d.', + runner_num_cpu_cores * processes - 1) core_allocation = {} for cpu in range(0, runner_num_cpu_cores * processes, runner_num_cpu_cores): - core_allocation['%d-%d' % - (cpu, cpu + runner_num_cpu_cores - 1)] = None + core_allocation[ + f'{cpu}-{cpu + runner_num_cpu_cores - 1}'] = None pool_args = (processes,) else: pool_args = (runners_cpus,) @@ -798,8 +798,8 @@ def create_trial_instance( # pylint: disable=too-many-arguments startup_script = render_startup_script_template(instance_name, fuzzer, benchmark, trial_id, experiment_config, cpuset) - startup_script_path = '/tmp/%s-start-docker.sh' % instance_name - with open(startup_script_path, 'w') as file_handle: + startup_script_path = f'/tmp/{instance_name}-start-docker.sh' + with open(startup_script_path, 'w', encoding='utf-8') as file_handle: file_handle.write(startup_script) return gcloud.create_instance(instance_name, @@ -817,7 +817,7 @@ def main(): }) if len(sys.argv) != 2: - print('Usage: {} '.format(sys.argv[0])) + print(f'Usage: {sys.argv[0]} ') return 1 experiment_config = yaml_utils.read(sys.argv[1]) diff --git a/experiment/stop_experiment.py b/experiment/stop_experiment.py index 44aed9935..01cd94de0 100644 --- a/experiment/stop_experiment.py +++ b/experiment/stop_experiment.py @@ -67,7 +67,7 @@ def stop_experiment(experiment_name, experiment_config_filename): def main(): """Stop the experiment.""" if len(sys.argv) != 3: - print("Usage {0} ") + print('Usage {0} ') return 1 logs.initialize() return 0 if stop_experiment(sys.argv[1], sys.argv[2]) else 1 diff --git a/experiment/test_runner.py b/experiment/test_runner.py index ed7082954..31482abbf 100644 --- a/experiment/test_runner.py +++ b/experiment/test_runner.py @@ -51,7 +51,7 @@ def test_run_fuzzer_log_file(mocked_communicate, fs, environ): assert mocked_popen.commands == [[ 'nice', '-n', '5', 'python3', '-u', '-c', 'from fuzzers.afl import fuzzer; ' - "fuzzer.fuzz('/out/seeds', '/out/corpus', '/out/fuzz-target')" + 'fuzzer.fuzz("/out/seeds", "/out/corpus", "/out/fuzz-target")' ]] assert os.path.exists(log_filename) @@ -105,9 +105,9 @@ def 
test_record_stats(trial_runner, fuzzer_module): cycle = 1337 trial_runner.cycle = cycle - stats_file = os.path.join(trial_runner.results_dir, 'stats-%d.json' % cycle) + stats_file = os.path.join(trial_runner.results_dir, f'stats-{cycle}.json') trial_runner.record_stats() - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_contents = file_handle.read() assert stats_file_contents == FuzzerAModule.DEFAULT_STATS @@ -126,7 +126,7 @@ class FuzzerAModuleNoGetStats: return_value=FuzzerAModuleNoGetStats): trial_runner.record_stats() - stats_file = os.path.join(trial_runner.results_dir, 'stats-%d.json' % cycle) + stats_file = os.path.join(trial_runner.results_dir, f'stats-{cycle}.json') assert not os.path.exists(stats_file) @@ -151,7 +151,7 @@ def get_stats(output_directory, log_filename): with mock.patch('common.logs.error') as mocked_log_error: trial_runner.record_stats() - stats_file = os.path.join(trial_runner.results_dir, 'stats-%d.json' % cycle) + stats_file = os.path.join(trial_runner.results_dir, f'stats-{cycle}.json') assert not os.path.exists(stats_file) mocked_log_error.assert_called_with('Stats are invalid.') @@ -175,7 +175,7 @@ def get_stats(output_directory, log_filename): return_value=FuzzerAModuleGetStatsException): trial_runner.record_stats() - stats_file = os.path.join(trial_runner.results_dir, 'stats-%d.json' % cycle) + stats_file = os.path.join(trial_runner.results_dir, f'stats-{cycle}.json') assert not os.path.exists(stats_file) mocked_log_error.assert_called_with( 'Call to %s failed.', FuzzerAModuleGetStatsException.get_stats) @@ -234,7 +234,7 @@ def test_do_sync_unchanged(mocked_is_corpus_dir_same, mocked_debug, mocked_debug.assert_any_call('Cycle: %d unchanged.', trial_runner.cycle) unchanged_cycles_path = os.path.join(trial_runner.results_dir, 'unchanged-cycles') - with open(unchanged_cycles_path) as file_handle: + with open(unchanged_cycles_path, encoding='utf-8') as file_handle: assert str(trial_runner.cycle) == file_handle.read().strip() assert not os.listdir(trial_runner.corpus_archives_dir) @@ -312,7 +312,7 @@ def test_is_corpus_dir_same_modified(trial_runner, fs): file_path = os.path.join(trial_runner.corpus_dir, 'f') fs.create_file(file_path) trial_runner._set_corpus_dir_contents() # pylint: disable=protected-access - with open(file_path, 'w') as file_handle: + with open(file_path, 'w', encoding='utf-8') as file_handle: file_handle.write('hi') assert not trial_runner.is_corpus_dir_same() @@ -323,8 +323,9 @@ class TestIntegrationRunner: @pytest.mark.skipif(not os.environ.get('TEST_EXPERIMENT_FILESTORE'), reason='TEST_EXPERIMENT_FILESTORE is not set, ' 'skipping integration test.') - @mock.patch('common.logs.error') # pylint: disable=no-self-use,too-many-locals + @mock.patch('common.logs.error') def test_integration_runner(self, mocked_error, tmp_path, environ): + # pylint: disable=too-many-locals """Test that runner can run libFuzzer and saves snapshots to GCS.""" # Switch cwd so that fuzzers don't create tons of files in the repo. os.chdir(tmp_path) @@ -351,7 +352,7 @@ def test_integration_runner(self, mocked_error, tmp_path, environ): experiment = 'integration-test-experiment' gcs_directory = posixpath.join(test_experiment_bucket, experiment, 'experiment-folders', - '%s-%s' % (benchmark, fuzzer), 'trial-1') + f'{benchmark}-{fuzzer}', 'trial-1') filestore_utils.rm(gcs_directory, force=True) # Add fuzzer directory to make it easy to run fuzzer.py in local # configuration. 
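The test changes above, like the runner.py and scheduler.py hunks a little earlier, pass encoding='utf-8' to every open() call and rewrite '%'-interpolation as f-strings. Recent pylint releases warn about both idioms (unspecified-encoding and consider-using-f-string), and the explicit encoding also keeps reads and writes independent of the container's locale; that motivation is an inference, the diff itself does not state it. A small self-contained sketch of the resulting pattern:

import json
import os
import tempfile

cycle = 1337
stats_json_str = json.dumps({'execs_per_sec': 1234.5})

with tempfile.TemporaryDirectory() as results_dir:
    # Filename built with an f-string; file opened with an explicit encoding
    # so the bytes written do not depend on the locale's default codec.
    stats_path = os.path.join(results_dir, f'stats-{cycle}.json')
    with open(stats_path, 'w', encoding='utf-8') as stats_file_handle:
        stats_file_handle.write(stats_json_str)

    with open(stats_path, encoding='utf-8') as stats_file_handle:
        assert json.loads(stats_file_handle.read()) == {'execs_per_sec': 1234.5}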
@@ -430,15 +431,13 @@ def test_clean_seed_corpus(fs): assert not os.path.exists(os.path.join(seed_corpus_dir, 'efg')) assert len(os.listdir(seed_corpus_dir)) == 3 # Directory 'a' and two files. - with open( - os.path.join( - seed_corpus_dir, - 'a9993e364706816aba3e25717850c26c9cd0d89d')) as file_handle: + with open(os.path.join(seed_corpus_dir, + 'a9993e364706816aba3e25717850c26c9cd0d89d'), + encoding='utf-8') as file_handle: assert file_handle.read() == 'abc' - with open( - os.path.join( - seed_corpus_dir, - '589c22335a381f122d129225f5c0ba3056ed5811')) as file_handle: + with open(os.path.join(seed_corpus_dir, + '589c22335a381f122d129225f5c0ba3056ed5811'), + encoding='utf-8') as file_handle: assert file_handle.read() == 'def' diff --git a/experiment/test_scheduler.py b/experiment/test_scheduler.py index cc7490b0b..b94be0e9c 100644 --- a/experiment/test_scheduler.py +++ b/experiment/test_scheduler.py @@ -193,7 +193,7 @@ def _test_create_trial_instance( # pylint: disable=too-many-locals scheduler.create_trial_instance(fuzzer_param, benchmark, trial, experiment_config, preemptible) instance_name = 'r-test-experiment-9' - expected_startup_script_path = '/tmp/%s-start-docker.sh' % instance_name + expected_startup_script_path = f'/tmp/{instance_name}-start-docker.sh' mocked_create_instance.assert_called_with( instance_name, @@ -202,7 +202,7 @@ def _test_create_trial_instance( # pylint: disable=too-many-locals startup_script=expected_startup_script_path, preemptible=preemptible) - with open(expected_startup_script_path) as file_handle: + with open(expected_startup_script_path, encoding='utf-8') as file_handle: content = file_handle.read() check_from = '# Start docker.' assert check_from in content @@ -223,7 +223,7 @@ def test_start_trials_not_started(mocked_create_instance, pending_trials, mocked_create_instance.return_value = False with ThreadPool() as pool: result = scheduler.start_trials(pending_trials, experiment_config, pool) - assert result == [] + assert not result @mock.patch('common.new_process.execute') @@ -373,7 +373,7 @@ def test_get_preempted_trials_nonpreemptible(experiment_config, db): """Tests that TrialInstanceManager.get_preempted_trials returns no trials for a nonpreemptible experiment.""" trial_instance_manager = get_trial_instance_manager(experiment_config) - assert trial_instance_manager.get_preempted_trials() == [] + assert not trial_instance_manager.get_preempted_trials() @mock.patch('common.gce._get_instance_items', return_value=[]) @@ -392,7 +392,7 @@ def test_get_preempted_trials_stale_preempted(_, preempt_exp_conf): 'experiment.scheduler.TrialInstanceManager.' '_get_started_unfinished_instances', return_value=[instance_name]): - assert trial_instance_manager.get_preempted_trials() == [] + assert not trial_instance_manager.get_preempted_trials() def _get_preempted_instance_item(trial_id, exp_conf): diff --git a/fuzzbench/run_experiment.py b/fuzzbench/run_experiment.py index 6a2c60787..fc4b2c76c 100644 --- a/fuzzbench/run_experiment.py +++ b/fuzzbench/run_experiment.py @@ -36,8 +36,9 @@ def run_experiment(config): for name, image in images_to_build.items(): depends = image.get('depends_on', None) if depends is not None: - assert len(depends) == 1, 'image %s has %d dependencies. Multiple '\ - 'dependencies are currently not supported.' % (name, len(depends)) + assert len(depends) == 1, ( + f'image {name} has {len(depends)} dependencies. 
' + 'Multiple dependencies are currently not supported.') jobs_list.append( queue.enqueue( jobs.build_image, @@ -49,13 +50,13 @@ def run_experiment(config): while True: print('Current status of jobs:') - print('\tqueued:\t%d' % queue.count) - print('\tstarted:\t%d' % queue.started_job_registry.count) - print('\tdeferred:\t%d' % queue.deferred_job_registry.count) - print('\tfinished:\t%d' % queue.finished_job_registry.count) - print('\tfailed:\t%d' % queue.failed_job_registry.count) + print(f'\tqueued:\t{queue.count}') + print(f'\tstarted:\t{queue.started_job_registry.count}') + print(f'\tdeferred:\t{queue.deferred_job_registry.count}') + print(f'\tfinished:\t{queue.finished_job_registry.count}') + print(f'\tfailed:\t{queue.failed_job_registry.count}') for job in jobs_list: - print(' %s : %s\t(%s)' % (job.func_name, job.get_status(), job.id)) + print(f' {job.func_name} : {job.get_status()}\t({job.id})') if all([job.result is not None for job in jobs_list]): # pylint: disable=use-a-generator break @@ -65,7 +66,7 @@ def run_experiment(config): def main(): """Set up Redis connection and start the experiment.""" - redis_connection = redis.Redis(host="queue-server") + redis_connection = redis.Redis(host='queue-server') config_path = environment.get('EXPERIMENT_CONFIG', 'fuzzbench/local-experiment-config.yaml') diff --git a/fuzzbench/test_e2e/test_e2e_run.py b/fuzzbench/test_e2e/test_e2e_run.py index def9af807..2c77d77cb 100644 --- a/fuzzbench/test_e2e/test_e2e_run.py +++ b/fuzzbench/test_e2e/test_e2e_run.py @@ -38,7 +38,7 @@ def redis_connection(): return redis.Redis(host='queue-server') -# pylint: disable=no-self-use,redefined-outer-name +# pylint: disable=redefined-outer-name @pytest.mark.skipif('E2E_INTEGRATION_TEST' not in os.environ, reason='Not running end-to-end test.') @pytest.mark.usefixtures('redis_connection', 'experiment_config') diff --git a/fuzzbench/worker.py b/fuzzbench/worker.py index 531720d5d..6f5daf23f 100644 --- a/fuzzbench/worker.py +++ b/fuzzbench/worker.py @@ -20,7 +20,7 @@ def main(): """Sets up Redis connection and starts the worker.""" - redis_connection = redis.Redis(host="queue-server") + redis_connection = redis.Redis(host='queue-server') with rq.Connection(redis_connection): queue = rq.Queue('build_n_run_queue') worker = rq.Worker([queue]) diff --git a/fuzzers/afl/builder.Dockerfile b/fuzzers/afl/builder.Dockerfile index d4beed782..94d7f5076 100644 --- a/fuzzers/afl/builder.Dockerfile +++ b/fuzzers/afl/builder.Dockerfile @@ -17,10 +17,12 @@ FROM $parent_image # Download and compile AFL v2.57b. # Set AFL_NO_X86 to skip flaky tests. -RUN git clone https://github.com/google/AFL.git /afl && \ +RUN git clone \ + --depth 1 \ + --branch v2.57b \ + https://github.com/google/AFL.git /afl && \ cd /afl && \ - git checkout 61037103ae3722c8060ff7082994836a794f978e && \ - AFL_NO_X86=1 make + CFLAGS= CXXFLAGS= AFL_NO_X86=1 make # Use afl_driver.cpp from LLVM as our fuzzing library. RUN apt-get update && \ diff --git a/fuzzers/afl/fuzzer.py b/fuzzers/afl/fuzzer.py index 853c0890c..18cb71229 100755 --- a/fuzzers/afl/fuzzer.py +++ b/fuzzers/afl/fuzzer.py @@ -48,7 +48,10 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. 
stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + if not os.path.exists(stats_file): + print('Can\'t find fuzzer_stats') + return '{}' + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/afl_2_52_b/fuzzer.py b/fuzzers/afl_2_52_b/fuzzer.py index 853c0890c..7c4c44180 100755 --- a/fuzzers/afl_2_52_b/fuzzer.py +++ b/fuzzers/afl_2_52_b/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/afl_collision_free/fuzzer.py b/fuzzers/afl_collision_free/fuzzer.py new file mode 100755 index 000000000..8c4a5be4e --- /dev/null +++ b/fuzzers/afl_collision_free/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. 
+ os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets AFL to avoid collisions + os.environ['AFL_COLLISION_FREE'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_double_timeout/fuzzer.py b/fuzzers/afl_double_timeout/fuzzer.py new file mode 100755 index 000000000..9876a0e47 --- /dev/null +++ b/fuzzers/afl_double_timeout/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
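Both the guarded get_stats in fuzzers/afl/fuzzer.py above (which now returns '{}' when fuzzer_stats is missing) and the copy in the new variant parse AFL's fuzzer_stats file as colon-separated key/value lines and forward only execs_per_sec. A self-contained check of that parsing against a fabricated two-line stats excerpt (the numbers are made up):

import json

# Fabricated excerpt in the "key : value" layout AFL writes to fuzzer_stats.
fake_fuzzer_stats = 'execs_done        : 123456\nexecs_per_sec     : 512.30'

stats_file_dict = {}
for stats_line in fake_fuzzer_stats.splitlines():
    key, value = stats_line.split(': ')
    stats_file_dict[key.strip()] = value.strip()

# FuzzBench only consumes execs_per_sec from this dictionary.
stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])}
assert json.dumps(stats) == '{"execs_per_sec": 512.3}'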
+"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets AFL timeout to be the double compared to the default heuristics + os.environ['AFL_DOUBLE_TIMEOUT'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_maxmap/fuzzer.py b/fuzzers/afl_maxmap/fuzzer.py new file mode 100755 index 000000000..7c4c44180 --- /dev/null +++ b/fuzzers/afl_maxmap/fuzzer.py @@ -0,0 +1,138 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. 
+ os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_no_favfactor/fuzzer.py b/fuzzers/afl_no_favfactor/fuzzer.py new file mode 100755 index 000000000..ec2e3d5ac --- /dev/null +++ b/fuzzers/afl_no_favfactor/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
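Each variant ships the same check_skip_det_compatible helper: '-d' (skip deterministic mutations) is only appended when the caller has not asked for AFL's parallel mode, which rejects that combination. A tiny standalone restatement of the check with the two interesting cases (the example flag values are arbitrary):

def check_skip_det_compatible(additional_flags):
    """Mirrors the helper above: '-d' cannot be combined with '-M'/'-S'."""
    if '-M' in additional_flags or '-S' in additional_flags:
        return False
    return True


# A dictionary flag is fine alongside '-d'.
assert check_skip_det_compatible(['-x', '/out/fuzz-target.dict'])
# '-M'/'-S' select parallel mode, so '-d' must be dropped.
assert not check_skip_det_compatible(['-S', 'secondary'])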
+"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Disables AFL corpus culling prioritization + os.environ['AFL_NO_FAV_FACTOR'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_no_favored/fuzzer.py b/fuzzers/afl_no_favored/fuzzer.py new file mode 100755 index 000000000..90c7a08ab --- /dev/null +++ b/fuzzers/afl_no_favored/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. 
+ os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Disables AFL corpus culling + os.environ['AFL_NO_FAVORED'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_no_trim/fuzzer.py b/fuzzers/afl_no_trim/fuzzer.py new file mode 100755 index 000000000..7430d9316 --- /dev/null +++ b/fuzzers/afl_no_trim/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
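The variant modules introduced so far (afl_collision_free, afl_double_timeout, afl_maxmap, afl_no_favfactor, afl_no_favored) are near-identical copies of the stock AFL integration, differing essentially in one extra environment variable set in prepare_fuzz_environment. If that duplication becomes a maintenance burden, one possible follow-up, sketched here only and not part of this change (the module and function names are invented), is a shared helper that each variant parameterises with its toggle:

# Hypothetical shared module (e.g. fuzzers/afl_common.py); not existing code.
import os

# Options common to every AFL-based variant in this change.
BASE_AFL_ENV = {
    'AFL_NO_UI': '1',
    'AFL_SKIP_CPUFREQ': '1',
    'AFL_NO_AFFINITY': '1',
    'AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES': '1',
    'AFL_SKIP_CRASHES': '1',
    'AFL_SHUFFLE_QUEUE': '1',
}


def apply_afl_environment(extra_env=None):
    """Set the shared AFL options plus any variant-specific toggles."""
    os.environ.update(BASE_AFL_ENV)
    if extra_env:
        os.environ.update(extra_env)


# A variant's prepare_fuzz_environment() would then shrink to something like
# apply_afl_environment({'AFL_NO_FAVORED': '1'}) followed by the usual
# utils.create_seed_file_for_empty_corpus(input_corpus) call.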
+"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Disables AFL testcase trimming + os.environ['AFL_DISABLE_TRIM'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_random_favored/fuzzer.py b/fuzzers/afl_random_favored/fuzzer.py index 853c0890c..7c4c44180 100755 --- a/fuzzers/afl_random_favored/fuzzer.py +++ b/fuzzers/afl_random_favored/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/afl_scheduling_lifo/fuzzer.py b/fuzzers/afl_scheduling_lifo/fuzzer.py new file mode 100755 index 000000000..328b3b718 --- /dev/null +++ b/fuzzers/afl_scheduling_lifo/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. 
+ stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets corpus scheduling with LIFO policy + os.environ['AFL_LIFO_CORPUS'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_scheduling_random/fuzzer.py b/fuzzers/afl_scheduling_random/fuzzer.py new file mode 100755 index 000000000..af9ebc313 --- /dev/null +++ b/fuzzers/afl_scheduling_random/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets AFL to use a random selection for the corpus scheduling + os.environ['AFL_RANDOMIC_CORPUS'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_score_max/fuzzer.py b/fuzzers/afl_score_max/fuzzer.py new file mode 100755 index 000000000..58b8860f4 --- /dev/null +++ b/fuzzers/afl_score_max/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. 
+ os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Maximize the score + os.environ['AFL_MAX_ENERGY'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_score_min/fuzzer.py b/fuzzers/afl_score_min/fuzzer.py new file mode 100755 index 000000000..e0ec33354 --- /dev/null +++ b/fuzzers/afl_score_min/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets AFL minimum score + os.environ['AFL_MIN_ENERGY'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_score_no_novel_prioritization/fuzzer.py b/fuzzers/afl_score_no_novel_prioritization/fuzzer.py new file mode 100755 index 000000000..ab296b70a --- /dev/null +++ b/fuzzers/afl_score_no_novel_prioritization/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. 
We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Disable handicap + os.environ['AFL_DISABLE_HANDICAP'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_score_random/fuzzer.py b/fuzzers/afl_score_random/fuzzer.py new file mode 100755 index 000000000..d951d749f --- /dev/null +++ b/fuzzers/afl_score_random/fuzzer.py @@ -0,0 +1,140 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. + os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + # Sets AFL score to random constant + os.environ['AFL_RANDOM_ENERGY'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. 
+ if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_splicing_mutation/fuzzer.py b/fuzzers/afl_splicing_mutation/fuzzer.py new file mode 100755 index 000000000..7c4c44180 --- /dev/null +++ b/fuzzers/afl_splicing_mutation/fuzzer.py @@ -0,0 +1,138 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Integration code for AFL fuzzer.""" + +import json +import os +import shutil +import subprocess + +from fuzzers import utils + + +def prepare_build_environment(): + """Set environment variables used to build targets for AFL-based + fuzzers.""" + cflags = ['-fsanitize-coverage=trace-pc-guard'] + utils.append_flags('CFLAGS', cflags) + utils.append_flags('CXXFLAGS', cflags) + + os.environ['CC'] = 'clang' + os.environ['CXX'] = 'clang++' + os.environ['FUZZER_LIB'] = '/libAFL.a' + + +def build(): + """Build benchmark.""" + prepare_build_environment() + + utils.build_benchmark() + + print('[post_build] Copying afl-fuzz to $OUT directory') + # Copy out the afl-fuzz binary as a build artifact. + shutil.copy('/afl/afl-fuzz', os.environ['OUT']) + + +def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument + """Gets fuzzer stats for AFL.""" + # Get a dictionary containing the stats AFL reports. + stats_file = os.path.join(output_corpus, 'fuzzer_stats') + with open(stats_file, encoding='utf-8') as file_handle: + stats_file_lines = file_handle.read().splitlines() + stats_file_dict = {} + for stats_line in stats_file_lines: + key, value = stats_line.split(': ') + stats_file_dict[key.strip()] = value.strip() + + # Report to FuzzBench the stats it accepts. + stats = {'execs_per_sec': float(stats_file_dict['execs_per_sec'])} + return json.dumps(stats) + + +def prepare_fuzz_environment(input_corpus): + """Prepare to fuzz with AFL or another AFL-based fuzzer.""" + # Tell AFL to not use its terminal UI so we get usable logs. + os.environ['AFL_NO_UI'] = '1' + # Skip AFL's CPU frequency check (fails on Docker). + os.environ['AFL_SKIP_CPUFREQ'] = '1' + # No need to bind affinity to one core, Docker enforces 1 core usage. + os.environ['AFL_NO_AFFINITY'] = '1' + # AFL will abort on startup if the core pattern sends notifications to + # external programs. We don't care about this. 
+ os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1' + # Don't exit when crashes are found. This can happen when corpus from + # OSS-Fuzz is used. + os.environ['AFL_SKIP_CRASHES'] = '1' + # Shuffle the queue + os.environ['AFL_SHUFFLE_QUEUE'] = '1' + + # AFL needs at least one non-empty seed to start. + utils.create_seed_file_for_empty_corpus(input_corpus) + + +def check_skip_det_compatible(additional_flags): + """ Checks if additional flags are compatible with '-d' option""" + # AFL refuses to take in '-d' with '-M' or '-S' options for parallel mode. + # (cf. https://github.com/google/AFL/blob/8da80951/afl-fuzz.c#L7477) + if '-M' in additional_flags or '-S' in additional_flags: + return False + return True + + +def run_afl_fuzz(input_corpus, + output_corpus, + target_binary, + additional_flags=None, + hide_output=False): + """Run afl-fuzz.""" + # Spawn the afl fuzzing process. + print('[run_afl_fuzz] Running target with afl-fuzz') + command = [ + './afl-fuzz', + '-i', + input_corpus, + '-o', + output_corpus, + # Use no memory limit as ASAN doesn't play nicely with one. + '-m', + 'none', + '-t', + '1000+', # Use same default 1 sec timeout, but add '+' to skip hangs. + ] + # Use '-d' to skip deterministic mode, as long as it it compatible with + # additional flags. + if not additional_flags or check_skip_det_compatible(additional_flags): + command.append('-d') + if additional_flags: + command.extend(additional_flags) + dictionary_path = utils.get_dictionary_path(target_binary) + if dictionary_path: + command.extend(['-x', dictionary_path]) + command += [ + '--', + target_binary, + # Pass INT_MAX to afl the maximize the number of persistent loops it + # performs. + '2147483647' + ] + print('[run_afl_fuzz] Running command: ' + ' '.join(command)) + output_stream = subprocess.DEVNULL if hide_output else None + subprocess.check_call(command, stdout=output_stream, stderr=output_stream) + + +def fuzz(input_corpus, output_corpus, target_binary): + """Run afl-fuzz on target.""" + prepare_fuzz_environment(input_corpus) + + run_afl_fuzz(input_corpus, output_corpus, target_binary) diff --git a/fuzzers/afl_virginmap/fuzzer.py b/fuzzers/afl_virginmap/fuzzer.py index 853c0890c..7c4c44180 100755 --- a/fuzzers/afl_virginmap/fuzzer.py +++ b/fuzzers/afl_virginmap/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/aflcc/fuzzer.py b/fuzzers/aflcc/fuzzer.py index 16852f096..b402d5ff1 100644 --- a/fuzzers/aflcc/fuzzer.py +++ b/fuzzers/aflcc/fuzzer.py @@ -154,11 +154,11 @@ def prepare_build_environment(): def post_build(fuzz_target): """Perform the post-processing for a target""" - print('Fuzz-target: {target}'.format(target=fuzz_target)) + print(f'Fuzz-target: {fuzz_target}') - getbc_cmd = "/afl/aflc-get-bc {target}".format(target=fuzz_target) + getbc_cmd = f'/afl/aflc-get-bc {fuzz_target}' if os.system(getbc_cmd) != 0: - raise ValueError("get-bc failed") + raise ValueError('get-bc failed') # Set the flags. ldflags is here temporarily until the benchmarks # are cleaned up and standalone. 
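The aflcc post_build() hunks above and just below shell out with os.system() and raise ValueError whenever a step exits non-zero. A minimal sketch of that run-and-check pattern; the helper name and the commented-out usage are illustrative, not part of the diff:

import os


def run_or_die(command):
    """Run a shell command the way post_build() does and fail loudly."""
    if os.system(command) != 0:
        raise ValueError(f'command "{command}" failed')


# Hypothetical usage mirroring the get-bc step above:
# run_or_die(f'/afl/aflc-get-bc {fuzz_target}')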
@@ -192,7 +192,7 @@ def post_build(fuzz_target): target=fuzz_target, ldflags=ldflags) if os.system(bin1_cmd) != 0: - raise ValueError('command "{command}" failed'.format(command=bin1_cmd)) + raise ValueError(f'command "{bin1_cmd}" failed') # The normalized build with non-optimized dictionary. print('[post_build] Generating normalized-none-nopt') @@ -206,7 +206,7 @@ def post_build(fuzz_target): target=fuzz_target, ldflags=ldflags) if os.system(bin2_cmd) != 0: - raise ValueError('command "{command}" failed'.format(command=bin2_cmd)) + raise ValueError(f'command "{bin2_cmd}" failed') # The no-collision split-condition optimized dictionary. print('[post_build] Generating no-collision-all-opt build') @@ -220,7 +220,7 @@ def post_build(fuzz_target): target=fuzz_target, ldflags=ldflags) if os.system(bin3_cmd) != 0: - raise ValueError('command "{command}" failed'.format(command=bin3_cmd)) + raise ValueError(f'command "{bin3_cmd}" failed') print('[post_build] Copying afl-fuzz to $OUT directory') # Copy out the afl-fuzz binary as a build artifact. @@ -285,30 +285,29 @@ def fuzz(input_corpus, output_corpus, target_binary): # Use a dictionary for original afl as well. print('[fuzz] Running AFL for original binary') - src_file = '{target}-normalized-none-nopt.dict'.format(target=target_binary) - dst_file = '{target}-original.dict'.format(target=target_binary) + src_file = f'{target_binary}-normalized-none-nopt.dict' + dst_file = f'{target_binary}-original.dict' shutil.copy(src_file, dst_file) # Instead of generating a new dict, just hack this one # to be non-optimized to prevent AFL from aborting. - os.system('sed -i \'s/OPTIMIZED/NORMAL/g\' {dict}'.format(dict=dst_file)) - afl_fuzz_thread1 = threading.Thread( - target=run_fuzzer, - args=(input_corpus, output_corpus, - '{target}-original'.format(target=target_binary), - ['-S', 'secondary-original'])) + os.system(f'sed -i \'s/OPTIMIZED/NORMAL/g\' {dst_file}') + afl_fuzz_thread1 = threading.Thread(target=run_fuzzer, + args=(input_corpus, output_corpus, + f'{target_binary}-original', + ['-S', 'secondary-original'])) afl_fuzz_thread1.start() print('[run_fuzzer] Running AFL for normalized and optimized dictionary') afl_fuzz_thread2 = threading.Thread( target=run_fuzzer, args=(input_corpus, output_corpus, - '{target}-normalized-none-nopt'.format(target=target_binary), + f'{target_binary}-normalized-none-nopt', ['-S', 'secondary-normalized-nopt'])) afl_fuzz_thread2.start() print('[run_fuzzer] Running AFL for FBSP and optimized dictionary') run_fuzzer(input_corpus, output_corpus, - '{target}-no-collision-all-opt'.format(target=target_binary), + f'{target_binary}-no-collision-all-opt', ['-S', 'secondary-no-collision-all-opt'], hide_output=False) diff --git a/fuzzers/aflfast/builder.Dockerfile b/fuzzers/aflfast/builder.Dockerfile index f4761372e..b38039810 100644 --- a/fuzzers/aflfast/builder.Dockerfile +++ b/fuzzers/aflfast/builder.Dockerfile @@ -19,7 +19,7 @@ FROM $parent_image # Set AFL_NO_X86 to skip flaky tests. RUN git clone https://github.com/mboehme/aflfast.git /afl && \ cd /afl && \ - git checkout 11ec1828448d27bdcc54fdeb91bf3215d4d8c583 && \ + git checkout d1d54caf9850ca4afe2ac634a2a212aa6bb40032 && \ AFL_NO_X86=1 make # Use afl_driver.cpp from LLVM as our fuzzing library. 
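The aflcc fuzz() hunk above launches several secondary AFL instances, one per instrumented binary, each in its own thread with a distinct '-S' name, and keeps the last one in the foreground. A minimal sketch of that pattern with a stubbed run_fuzzer; only the '-S' names and binary suffixes are taken from the diff:

import threading


def run_fuzzer(input_corpus, output_corpus, target, flags):
    """Stub standing in for aflcc's run_fuzzer(); it only prints here."""
    print(f'[sketch] fuzzing {target} with {flags}')


def fuzz_in_parallel(input_corpus, output_corpus, target_binary):
    """Start the secondary instances in threads, then run the last one."""
    background = [
        threading.Thread(target=run_fuzzer,
                         args=(input_corpus, output_corpus,
                               f'{target_binary}-original',
                               ['-S', 'secondary-original'])),
        threading.Thread(target=run_fuzzer,
                         args=(input_corpus, output_corpus,
                               f'{target_binary}-normalized-none-nopt',
                               ['-S', 'secondary-normalized-nopt'])),
    ]
    for thread in background:
        thread.start()
    run_fuzzer(input_corpus, output_corpus,
               f'{target_binary}-no-collision-all-opt',
               ['-S', 'secondary-no-collision-all-opt'])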
diff --git a/fuzzers/aflplusplus/builder.Dockerfile b/fuzzers/aflplusplus/builder.Dockerfile index 70c1d9427..ee5c4389e 100644 --- a/fuzzers/aflplusplus/builder.Dockerfile +++ b/fuzzers/aflplusplus/builder.Dockerfile @@ -15,21 +15,36 @@ ARG parent_image FROM $parent_image -# Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ - libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ - apt-utils apt-transport-https ca-certificates + apt-get install -y \ + build-essential \ + python3-dev \ + python3-setuptools \ + automake \ + cmake \ + git \ + flex \ + bison \ + libglib2.0-dev \ + libpixman-1-dev \ + cargo \ + libgtk-3-dev \ + # for QEMU mode + ninja-build \ + gcc-$(gcc --version|head -n1|sed 's/\..*//'|sed 's/.* //')-plugin-dev \ + libstdc++-$(gcc --version|head -n1|sed 's/\..*//'|sed 's/.* //')-dev -# Download and compile afl++. -RUN git clone https://github.com/AFLplusplus/AFLplusplus.git /afl && \ - cd /afl && \ - git checkout 4124a272d821629adce648fb37ca1e7f0ce0e84f +# Download afl++. +RUN git clone \ + --depth 1 \ + --branch 4.04c \ + https://github.com/AFLplusplus/AFLplusplus /afl # Build without Python support as we don't need it. # Set AFL_NO_X86 to skip flaky tests. -RUN cd /afl && unset CFLAGS && unset CXXFLAGS && \ - export CC=clang && export AFL_NO_X86=1 && \ - PYTHON_INCLUDE=/ make && make install && \ - make -C utils/aflpp_driver && \ +RUN cd /afl && \ + unset CFLAGS CXXFLAGS && \ + export CC=clang AFL_NO_X86=1 && \ + PYTHON_INCLUDE=/ make distrib && \ + make install && \ cp utils/aflpp_driver/libAFLDriver.a / diff --git a/fuzzers/aflplusplus/fuzzer.py b/fuzzers/aflplusplus/fuzzer.py index b77b86ad8..55bc4b4c0 100755 --- a/fuzzers/aflplusplus/fuzzer.py +++ b/fuzzers/aflplusplus/fuzzer.py @@ -36,6 +36,10 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements # BUILD_MODES is not already supported by fuzzbench, meanwhile we provide # a default configuration. + # Add required libs for libpcap_fuzz_both. + os.environ['EXTRA_LIBS'] = ('/usr/lib/x86_64-linux-gnu/libdbus-1.a ' + '/lib/x86_64-linux-gnu/libsystemd.so.0') + build_modes = list(args) if 'BUILD_MODES' in os.environ: build_modes = os.environ['BUILD_MODES'].split(',') @@ -159,8 +163,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. 
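The aflplusplus builder.Dockerfile above computes the gcc major version inline so that the matching gcc-<N>-plugin-dev and libstdc++-<N>-dev packages are installed. A Python rendering of what that sed pipeline evaluates to, for illustration only:

import re
import subprocess


def gcc_major_version():
    """Equivalent of: gcc --version | head -n1 | sed 's/\\..*//' | sed 's/.* //'."""
    first_line = subprocess.run(['gcc', '--version'],
                                capture_output=True,
                                text=True,
                                check=True).stdout.splitlines()[0]
    before_first_dot = re.sub(r'\..*', '', first_line)  # drop the first '.' and the rest
    return re.sub(r'.* ', '', before_first_dot)  # keep the last space-separated token


# e.g. 'gcc (Ubuntu 9.4.0-1ubuntu1) 9.4.0' -> '9', selecting gcc-9-plugin-dev
# and libstdc++-9-dev.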
@@ -206,12 +210,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For symcc build, set the OUT and FUZZ_TARGET environment # variable to point to the new symcc build directory. @@ -265,7 +269,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflplusplus_cmplog/fuzzer.py b/fuzzers/aflplusplus_cmplog/fuzzer.py index 7f4c55f35..f5d88c0f1 100755 --- a/fuzzers/aflplusplus_cmplog/fuzzer.py +++ b/fuzzers/aflplusplus_cmplog/fuzzer.py @@ -25,7 +25,7 @@ def build(): # pylint: disable=too-many-branches,too-many-statements """Build benchmark.""" - aflplusplus_fuzzer.build("tracepc", "cmplog") + aflplusplus_fuzzer.build('tracepc', 'cmplog') def fuzz(input_corpus, output_corpus, target_binary): diff --git a/fuzzers/aflplusplus_dict2file/fuzzer.py b/fuzzers/aflplusplus_dict2file/fuzzer.py index ccbd4f5e1..f1c6887e6 100755 --- a/fuzzers/aflplusplus_dict2file/fuzzer.py +++ b/fuzzers/aflplusplus_dict2file/fuzzer.py @@ -25,7 +25,7 @@ def build(): # pylint: disable=too-many-branches,too-many-statements """Build benchmark.""" - aflplusplus_fuzzer.build("tracepc", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'dict2file') def fuzz(input_corpus, output_corpus, target_binary): diff --git a/fuzzers/aflplusplus_frida/fuzzer.py b/fuzzers/aflplusplus_frida/fuzzer.py index eaa5a8018..520bbdbf2 100755 --- a/fuzzers/aflplusplus_frida/fuzzer.py +++ b/fuzzers/aflplusplus_frida/fuzzer.py @@ -36,7 +36,7 @@ def fuzz(input_corpus, output_corpus, target_binary): ], stdout=subprocess.PIPE, check=True) - target_func = nm_proc.stdout.split()[0].decode("utf-8") + target_func = nm_proc.stdout.split()[0].decode('utf-8') print('[fuzz] LLVMFuzzerTestOneInput() address =', target_func) # Fuzzer options for qemu_mode. 
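The aflplusplus_frida hunk above resolves the address of LLVMFuzzerTestOneInput from the target's symbol table and exports it as AFL_FRIDA_PERSISTENT_ADDR and AFL_ENTRYPOINT. A rough sketch of that lookup; filtering in Python rather than via grep, and the helper name, are illustrative choices, not the project's exact command:

import subprocess


def symbol_address(target_binary, symbol='LLVMFuzzerTestOneInput'):
    """Return the address column of the first nm line mentioning `symbol`."""
    nm_output = subprocess.run(['nm', target_binary],
                               stdout=subprocess.PIPE,
                               check=True).stdout.decode('utf-8')
    for line in nm_output.splitlines():
        if symbol in line:
            return line.split()[0]  # nm prints the address in the first column
    raise ValueError(f'{symbol} not found in {target_binary}')


# The frida variant uses the value as-is; the qemu variants prepend '0x'
# before exporting it, as the hunks below show.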
@@ -44,9 +44,9 @@ def fuzz(input_corpus, output_corpus, target_binary): os.environ['AFL_FRIDA_PERSISTENT_ADDR'] = target_func os.environ['AFL_ENTRYPOINT'] = target_func - os.environ['AFL_FRIDA_PERSISTENT_CNT'] = "1000000" - os.environ['AFL_FRIDA_PERSISTENT_HOOK'] = "/out/frida_hook.so" - os.environ['AFL_PATH'] = "/out" + os.environ['AFL_FRIDA_PERSISTENT_CNT'] = '1000000' + os.environ['AFL_FRIDA_PERSISTENT_HOOK'] = '/out/frida_hook.so' + os.environ['AFL_PATH'] = '/out' # resource.setrlimit(resource.RLIMIT_CORE, # (resource.RLIM_INFINITY, resource.RLIM_INFINITY)) @@ -54,13 +54,13 @@ def fuzz(input_corpus, output_corpus, target_binary): # The systemd benchmark fails without full library instrumentation :( benchmark_name = os.environ['BENCHMARK'] if benchmark_name == 'systemd_fuzz-link-parser': - os.environ['AFL_INST_LIBS'] = "1" + os.environ['AFL_INST_LIBS'] = '1' aflplusplus_fuzzer.fuzz(input_corpus, output_corpus, target_binary, flags=flags) - # sts = os.system("cp -v *core* corpus") + # sts = os.system('cp -v *core* corpus') # if sts == 0: # print('Copied cores') diff --git a/fuzzers/aflplusplus_optimal/builder.Dockerfile b/fuzzers/aflplusplus_optimal/builder.Dockerfile index dd79e60cc..bbcaa4090 100644 --- a/fuzzers/aflplusplus_optimal/builder.Dockerfile +++ b/fuzzers/aflplusplus_optimal/builder.Dockerfile @@ -16,7 +16,7 @@ ARG parent_image FROM $parent_image RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libexpat1-dev \ + apt-get install -y wget libstdc++-10-dev libexpat1-dev \ apt-utils apt-transport-https ca-certificates # Download afl++ diff --git a/fuzzers/aflplusplus_optimal/fuzzer.py b/fuzzers/aflplusplus_optimal/fuzzer.py index c298b2060..425b50944 100755 --- a/fuzzers/aflplusplus_optimal/fuzzer.py +++ b/fuzzers/aflplusplus_optimal/fuzzer.py @@ -32,49 +32,49 @@ def build(): # pylint: disable=too-many-branches,too-many-statements benchmark_name = os.environ['BENCHMARK'] if benchmark_name == 'bloaty_fuzz_target': - aflplusplus_fuzzer.build("lto") + aflplusplus_fuzzer.build('lto') elif benchmark_name == 'curl_curl_fuzzer_http': - aflplusplus_fuzzer.build("lto") + aflplusplus_fuzzer.build('lto') elif benchmark_name == 'freetype2-2017': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'harfbuzz-1.3.2': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'jsoncpp_jsoncpp_fuzzer': - aflplusplus_fuzzer.build("lto", "laf") + aflplusplus_fuzzer.build('lto', 'laf') elif benchmark_name == 'lcms-2017-03-21': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'libjpeg-turbo-07-2017': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'libxslt_xpath': - aflplusplus_fuzzer.build("lto", "cmplog") + aflplusplus_fuzzer.build('lto', 'cmplog') elif benchmark_name == 'openh264_decoder_fuzzer': - aflplusplus_fuzzer.build("lto", "cmplog") + aflplusplus_fuzzer.build('lto', 'cmplog') elif benchmark_name == 'openssl_x509': - aflplusplus_fuzzer.build("tracepc", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'dict2file') elif benchmark_name == 'php_php-fuzz-parser': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 
'proj4-2017-08-14': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'sqlite3_ossfuzz': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'stb_stbi_read_fuzzer': - aflplusplus_fuzzer.build("lto", "cmplog") + aflplusplus_fuzzer.build('lto', 'cmplog') elif benchmark_name == 'systemd_fuzz-link-parser': - aflplusplus_fuzzer.build("tracepc", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'dict2file') elif benchmark_name == 'vorbis-2017-12-11': - aflplusplus_fuzzer.build("lto", "laf") + aflplusplus_fuzzer.build('lto', 'laf') elif benchmark_name == 'woff2-2016-05-06': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') elif benchmark_name == 'zlib_zlib_uncompress_fuzzer': - aflplusplus_fuzzer.build("tracepc", "cmplog", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'cmplog', 'dict2file') else: build_flags = os.environ['CFLAGS'] if build_flags.find('array-bounds') != -1: - aflplusplus_fuzzer.build("tracepc", "dict2file") + aflplusplus_fuzzer.build('tracepc', 'dict2file') else: - aflplusplus_fuzzer.build("lto", "cmplog") + aflplusplus_fuzzer.build('lto', 'cmplog') - for copy_file in glob.glob("/afl/libc*"): + for copy_file in glob.glob('/afl/libc*'): shutil.copy(copy_file, os.environ['OUT']) diff --git a/fuzzers/aflplusplus_qemu/fuzzer.py b/fuzzers/aflplusplus_qemu/fuzzer.py index ca1dd7902..f2f6c2945 100755 --- a/fuzzers/aflplusplus_qemu/fuzzer.py +++ b/fuzzers/aflplusplus_qemu/fuzzer.py @@ -33,7 +33,7 @@ def fuzz(input_corpus, output_corpus, target_binary): ], stdout=subprocess.PIPE, check=True) - target_func = "0x" + nm_proc.stdout.split()[0].decode("utf-8") + target_func = '0x' + nm_proc.stdout.split()[0].decode('utf-8') print('[fuzz] afl_qemu_driver_stdin_input() address =', target_func) # Fuzzer options for qemu_mode. @@ -41,8 +41,8 @@ def fuzz(input_corpus, output_corpus, target_binary): os.environ['AFL_QEMU_PERSISTENT_ADDR'] = target_func os.environ['AFL_ENTRYPOINT'] = target_func - os.environ['AFL_QEMU_PERSISTENT_CNT'] = "1000000" - os.environ['AFL_QEMU_DRIVER_NO_HOOK'] = "1" + os.environ['AFL_QEMU_PERSISTENT_CNT'] = '1000000' + os.environ['AFL_QEMU_DRIVER_NO_HOOK'] = '1' aflplusplus_fuzzer.fuzz(input_corpus, output_corpus, target_binary, diff --git a/fuzzers/aflplusplus_qemu_tracepc/fuzzer.py b/fuzzers/aflplusplus_qemu_tracepc/fuzzer.py index b9bfec40c..e2f5908c5 100755 --- a/fuzzers/aflplusplus_qemu_tracepc/fuzzer.py +++ b/fuzzers/aflplusplus_qemu_tracepc/fuzzer.py @@ -33,7 +33,7 @@ def fuzz(input_corpus, output_corpus, target_binary): ], stdout=subprocess.PIPE, check=True) - target_func = "0x" + nm_proc.stdout.split()[0].decode("utf-8") + target_func = '0x' + nm_proc.stdout.split()[0].decode('utf-8') print('[fuzz] afl_qemu_driver_stdin_input() address =', target_func) # Fuzzer options for qemu_mode. 
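The aflplusplus_optimal build() above selects AFL++ build modes per benchmark through a long elif chain. Purely as a readability illustration (this is not the code in the diff), the same pairings can be read as a lookup table with the diff's CFLAGS-based fallback; only a subset of benchmarks is shown:

import os

# Subset of the benchmark -> build-mode pairings from the elif chain above.
OPTIMAL_BUILD_MODES = {
    'bloaty_fuzz_target': ('lto',),
    'freetype2-2017': ('tracepc', 'cmplog', 'dict2file'),
    'jsoncpp_jsoncpp_fuzzer': ('lto', 'laf'),
    'libxslt_xpath': ('lto', 'cmplog'),
    'openssl_x509': ('tracepc', 'dict2file'),
}


def chosen_build_modes(benchmark_name):
    """Fall back the way the diff does: benchmarks whose CFLAGS contain
    'array-bounds' avoid the lto/cmplog default."""
    if benchmark_name in OPTIMAL_BUILD_MODES:
        return OPTIMAL_BUILD_MODES[benchmark_name]
    if 'array-bounds' in os.environ.get('CFLAGS', ''):
        return ('tracepc', 'dict2file')
    return ('lto', 'cmplog')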
@@ -41,8 +41,8 @@ def fuzz(input_corpus, output_corpus, target_binary): os.environ['AFL_QEMU_PERSISTENT_ADDR'] = target_func os.environ['AFL_ENTRYPOINT'] = target_func - os.environ['AFL_QEMU_PERSISTENT_CNT'] = "1000000" - os.environ['AFL_QEMU_DRIVER_NO_HOOK'] = "1" + os.environ['AFL_QEMU_PERSISTENT_CNT'] = '1000000' + os.environ['AFL_QEMU_DRIVER_NO_HOOK'] = '1' aflplusplus_fuzzer.fuzz(input_corpus, output_corpus, target_binary, diff --git a/fuzzers/aflplusplus_tracepc/builder.Dockerfile b/fuzzers/aflplusplus_tracepc/builder.Dockerfile index 70c1d9427..1a694609e 100644 --- a/fuzzers/aflplusplus_tracepc/builder.Dockerfile +++ b/fuzzers/aflplusplus_tracepc/builder.Dockerfile @@ -17,7 +17,7 @@ FROM $parent_image # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_parallel/builder.Dockerfile b/fuzzers/aflplusplus_um_parallel/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_parallel/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_parallel/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_prioritize/builder.Dockerfile b/fuzzers/aflplusplus_um_prioritize/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_prioritize/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_prioritize/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_prioritize_75/builder.Dockerfile b/fuzzers/aflplusplus_um_prioritize_75/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_prioritize_75/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_prioritize_75/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_random_3/builder.Dockerfile b/fuzzers/aflplusplus_um_random_3/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_random_3/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_random_3/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. 
RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_random_6/builder.Dockerfile b/fuzzers/aflplusplus_um_random_6/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_random_6/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_random_6/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_um_random_75/builder.Dockerfile b/fuzzers/aflplusplus_um_random_75/builder.Dockerfile index abd77021b..33c94647b 100644 --- a/fuzzers/aflplusplus_um_random_75/builder.Dockerfile +++ b/fuzzers/aflplusplus_um_random_75/builder.Dockerfile @@ -21,7 +21,7 @@ RUN pip install universalmutator # Install libstdc++ to use llvm_mode. RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ + apt-get install -y wget libstdc++-10-dev libtool-bin automake flex bison \ libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ apt-utils apt-transport-https ca-certificates diff --git a/fuzzers/aflplusplus_zafl/fuzzer.py b/fuzzers/aflplusplus_zafl/fuzzer.py index 2217e9c0c..113e8eb70 100755 --- a/fuzzers/aflplusplus_zafl/fuzzer.py +++ b/fuzzers/aflplusplus_zafl/fuzzer.py @@ -31,9 +31,9 @@ def build(): os.environ['CC'] = '/cc.sh' os.environ['CXX'] = '/cxx.sh' if 'LD_LIBRARY_PATH' in os.environ: - os.environ['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH'] + ":/out" + os.environ['LD_LIBRARY_PATH'] = os.environ['LD_LIBRARY_PATH'] + ':/out' else: - os.environ['LD_LIBRARY_PATH'] = "/out" + os.environ['LD_LIBRARY_PATH'] = '/out' utils.append_flags('CFLAGS', ['-fPIC', '-lpthread']) utils.append_flags('CXXFLAGS', ['-fPIC', '-lpthread']) diff --git a/fuzzers/aflpp_random_default/fuzzer.py b/fuzzers/aflpp_random_default/fuzzer.py index 354c5457e..f51c59195 100755 --- a/fuzzers/aflpp_random_default/fuzzer.py +++ b/fuzzers/aflpp_random_default/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. 
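The aflplusplus_zafl hunk above appends ':/out' to LD_LIBRARY_PATH when the variable is already set and creates it otherwise. A tiny sketch of that append-or-create pattern; the helper name is made up:

import os


def append_env_path(variable, path, separator=':'):
    """Append `path` to an environment variable, creating it if unset."""
    if variable in os.environ:
        os.environ[variable] = os.environ[variable] + separator + path
    else:
        os.environ[variable] = path


append_env_path('LD_LIBRARY_PATH', '/out')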
@@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. @@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflpp_random_no_favs/fuzzer.py b/fuzzers/aflpp_random_no_favs/fuzzer.py index d2411e822..d8a93b36c 100755 --- a/fuzzers/aflpp_random_no_favs/fuzzer.py +++ b/fuzzers/aflpp_random_no_favs/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. @@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. 
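The hunks above and their copies below prepare a side-build environment for SymCC: the compiler is swapped for symcc/sym++ and OUT is pointed at a separate directory (the surrounding comments call it the CmpLog build in these variants). A condensed sketch of that setup; treating new_env as a copy of os.environ and the directory name are assumptions, since those lines fall outside the hunks shown:

import os

symcc_build_directory = '/out/symcc-build'  # hypothetical path
new_env = os.environ.copy()  # assumed; the copy itself is not in the hunks
new_env['CC'] = '/symcc/build/symcc'
new_env['CXX'] = '/symcc/build/sym++'
new_env['SYMCC_OUTPUT_DIR'] = '/tmp'
new_env['CXXFLAGS'] = new_env.get('CXXFLAGS', '').replace('-stlib=libc++', '')
new_env['FUZZER_LIB'] = '/libfuzzer-harness.o'
new_env['OUT'] = symcc_build_directory
new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build'
new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1'  # plain build, no symbolic tracing yet
new_env['SYMCC_SILENT'] = '1'
# The modified mapping is presumably handed to the build step (env=new_env)
# so the SymCC binaries land in their own directory.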
@@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflpp_random_wrs/fuzzer.py b/fuzzers/aflpp_random_wrs/fuzzer.py index c551c1af4..f561625fa 100755 --- a/fuzzers/aflpp_random_wrs/fuzzer.py +++ b/fuzzers/aflpp_random_wrs/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. @@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. @@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflpp_random_wrs_rf/fuzzer.py b/fuzzers/aflpp_random_wrs_rf/fuzzer.py index b350390e2..50a073a99 100755 --- a/fuzzers/aflpp_random_wrs_rf/fuzzer.py +++ b/fuzzers/aflpp_random_wrs_rf/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. 
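Each of these aflpp_random_* copies also carries the same SymCC side build (the `@@ -197` hunks). For readability, a compact sketch of that environment setup; the paths and values are copied from the hunks, `utils.build_benchmark` is the FuzzBench helper used elsewhere in the patch, and the wrapper function is illustrative only.

import os

from fuzzers import utils  # FuzzBench build helper (assumed, as in the other fuzzer.py modules).


def build_symcc_copy(symcc_build_directory):
    # Build a second, SymCC-instrumented copy of the benchmark into its own
    # OUT directory, with symbolic input handling and logging disabled so
    # any tests run during the build do not trip the instrumentation.
    new_env = os.environ.copy()
    new_env['CC'] = '/symcc/build/symcc'
    new_env['CXX'] = '/symcc/build/sym++'
    new_env['CXXFLAGS'] = new_env.get('CXXFLAGS', '').replace('-stlib=libc++', '')
    new_env['FUZZER_LIB'] = '/libfuzzer-harness.o'
    new_env['OUT'] = symcc_build_directory
    new_env['SYMCC_OUTPUT_DIR'] = '/tmp'
    new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build'
    new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1'
    new_env['SYMCC_SILENT'] = '1'
    utils.build_benchmark(env=new_env)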
@@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. @@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflpp_random_wrs_rf_rp/fuzzer.py b/fuzzers/aflpp_random_wrs_rf_rp/fuzzer.py index 354c5457e..f51c59195 100755 --- a/fuzzers/aflpp_random_wrs_rf_rp/fuzzer.py +++ b/fuzzers/aflpp_random_wrs_rf_rp/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. @@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. 
@@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflpp_random_wrs_rp/fuzzer.py b/fuzzers/aflpp_random_wrs_rp/fuzzer.py index 2d0e41b19..e6fe85980 100755 --- a/fuzzers/aflpp_random_wrs_rp/fuzzer.py +++ b/fuzzers/aflpp_random_wrs_rp/fuzzer.py @@ -150,8 +150,8 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements else: os.environ['FUZZER_LIB'] = '/libAFLDriver.a' - # Some benchmarks like lcms - # (see: https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) + # Some benchmarks like lcms. (see: + # https://github.com/mm2/Little-CMS/commit/ab1093539b4287c233aca6a3cf53b234faceb792#diff-f0e6d05e72548974e852e8e55dffc4ccR212) # fail to compile if the compiler outputs things to stderr in unexpected # cases. Prevent these failures by using AFL_QUIET to stop afl-clang-fast # from writing AFL specific messages to stderr. @@ -197,12 +197,12 @@ def build(*args): # pylint: disable=too-many-branches,too-many-statements new_env['CC'] = '/symcc/build/symcc' new_env['CXX'] = '/symcc/build/sym++' new_env['SYMCC_OUTPUT_DIR'] = '/tmp' - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_directory - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" - new_env['SYMCC_SILENT'] = "1" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' + new_env['SYMCC_SILENT'] = '1' # For CmpLog build, set the OUT and FUZZ_TARGET environment # variable to point to the new CmpLog build directory. @@ -256,7 +256,7 @@ def fuzz(input_corpus, flags += ['-c', cmplog_target_binary] if not skip: - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # os.environ['AFL_FAST_CAL'] = '1' os.environ['AFL_CMPLOG_ONLY_NEW'] = '1' if 'ADDITIONAL_ARGS' in os.environ: diff --git a/fuzzers/aflsmart/builder.Dockerfile b/fuzzers/aflsmart/builder.Dockerfile index 5b9f84ff1..dcb8eb7a9 100644 --- a/fuzzers/aflsmart/builder.Dockerfile +++ b/fuzzers/aflsmart/builder.Dockerfile @@ -15,32 +15,39 @@ ARG parent_image FROM $parent_image -# install AFLSmart dependencies +# Install gcc-4.4 & g++-4.4 required by Peach while running on Ubuntu 16.04. +# Install Python2 and Pip2 required by AFLSmart on Ubuntu:20.04. +RUN echo 'deb http://dk.archive.ubuntu.com/ubuntu/ trusty main' >> \ + /etc/apt/sources.list && \ + echo 'deb http://dk.archive.ubuntu.com/ubuntu/ trusty universe' >> \ + /etc/apt/sources.list && \ + apt-get update && \ + apt-get install -y \ + gcc-4.4 \ + g++-4.4 \ + unzip \ + wget \ + tzdata \ + python2 && \ + curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py && \ + python2 get-pip.py && \ + rm /usr/bin/python && \ + ln -s /usr/bin/python2.7 /usr/bin/python + +# Install AFLSmart dependencies. 
RUN dpkg --add-architecture i386 && \ - apt-get update -y && apt-get install -y \ + apt-get update && \ + apt-get install -y \ apt-utils \ libc6-dev-i386 \ - python-pip \ g++-multilib \ mono-complete \ - gnupg-curl \ software-properties-common -# install gcc-4.4 & g++-4.4 required by Peach while running on Ubuntu 16.04 -RUN apt-get update -y && \ - apt-get upgrade -y && \ - add-apt-repository --keyserver hkps://keyserver.ubuntu.com:443 ppa:ubuntu-toolchain-r/test -y && \ - apt-get update -y && apt-get install -y \ - gcc-4.4 \ - g++-4.4 \ - unzip \ - wget \ - tzdata - -# Download and compile AFLSmart +# Download and compile AFLSmart. RUN git clone https://github.com/aflsmart/aflsmart /afl && \ cd /afl && \ - git checkout a9d60257a6b5a7df2e177bddc6982376723bfd90 && \ + git checkout 4286ae47e0e5d8c412f91aae94ef9d11fb97dfd8 && \ AFL_NO_X86=1 make # Setup Peach. diff --git a/fuzzers/aflsmart/runner.Dockerfile b/fuzzers/aflsmart/runner.Dockerfile index 1809c815c..1e9046888 100644 --- a/fuzzers/aflsmart/runner.Dockerfile +++ b/fuzzers/aflsmart/runner.Dockerfile @@ -14,6 +14,8 @@ FROM gcr.io/fuzzbench/base-image -RUN apt-get update -y && apt-get install -y \ +RUN apt-get update -y && \ + DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt-get install -y \ mono-complete \ tzdata diff --git a/fuzzers/centipede/builder.Dockerfile b/fuzzers/centipede/builder.Dockerfile index a56736220..a1cd4e3d2 100644 --- a/fuzzers/centipede/builder.Dockerfile +++ b/fuzzers/centipede/builder.Dockerfile @@ -17,8 +17,9 @@ FROM $parent_image ENV CENTIPEDE_SRC=/src/centipede -# Build centipede. -RUN git clone -n \ +# Remove the Centipede from OSS-Fuzz base-builder and rebuild centipede. +RUN rm -rf "$CENTIPEDE_SRC" && \ + git clone -n \ https://github.com/google/centipede.git "$CENTIPEDE_SRC" && \ echo 'build --client_env=CC=clang --cxxopt=-std=c++17 ' \ '--cxxopt=-stdlib=libc++ --linkopt=-lc++' >> ~/.bazelrc && \ diff --git a/fuzzers/centipede/fuzzer.py b/fuzzers/centipede/fuzzer.py index 2a862b501..63b9cd7f6 100755 --- a/fuzzers/centipede/fuzzer.py +++ b/fuzzers/centipede/fuzzer.py @@ -24,6 +24,7 @@ def build(): san_cflags = ['-fsanitize-coverage=trace-loads'] link_cflags = [ + '-Wno-error=unused-command-line-argument', '-ldl', '-lrt', '-lpthread', @@ -31,7 +32,8 @@ def build(): ] # TODO(Dongge): Build targets with sanitizers. - with open('/src/centipede/clang-flags.txt', 'r') as clang_flags_handle: + with open('/src/centipede/clang-flags.txt', 'r', + encoding='utf-8') as clang_flags_handle: centipede_cflags = [ line.strip() for line in clang_flags_handle.readlines() ] diff --git a/fuzzers/centipede_function_filter/builder.Dockerfile b/fuzzers/centipede_function_filter/builder.Dockerfile index a56736220..a1cd4e3d2 100644 --- a/fuzzers/centipede_function_filter/builder.Dockerfile +++ b/fuzzers/centipede_function_filter/builder.Dockerfile @@ -17,8 +17,9 @@ FROM $parent_image ENV CENTIPEDE_SRC=/src/centipede -# Build centipede. -RUN git clone -n \ +# Remove the Centipede from OSS-Fuzz base-builder and rebuild centipede. 
+RUN rm -rf "$CENTIPEDE_SRC" && \ + git clone -n \ https://github.com/google/centipede.git "$CENTIPEDE_SRC" && \ echo 'build --client_env=CC=clang --cxxopt=-std=c++17 ' \ '--cxxopt=-stdlib=libc++ --linkopt=-lc++' >> ~/.bazelrc && \ diff --git a/fuzzers/centipede_function_filter/fuzzer.py b/fuzzers/centipede_function_filter/fuzzer.py index e72cd24a2..7aa904996 100755 --- a/fuzzers/centipede_function_filter/fuzzer.py +++ b/fuzzers/centipede_function_filter/fuzzer.py @@ -26,7 +26,7 @@ def build(): def fuzz(input_corpus, output_corpus, target_binary): """Run fuzzer. Wrapper that uses the defaults when calling run_fuzzer.""" - with open('/focus_map.yaml', 'r') as focus_file: + with open('/focus_map.yaml', 'r', encoding='utf-8') as focus_file: focus_map = yaml.safe_load(focus_file) benchmark = os.getenv('BENCHMARK', None) if benchmark not in focus_map: diff --git a/fuzzers/eclipser/fuzzer.py b/fuzzers/eclipser/fuzzer.py index c36d898b1..19e69f6fa 100644 --- a/fuzzers/eclipser/fuzzer.py +++ b/fuzzers/eclipser/fuzzer.py @@ -72,7 +72,7 @@ def eclipser(input_corpus, output_corpus, target_binary): # We will use output_corpus as a directory where AFL and Eclipser sync their # test cases with each other. For Eclipser, we should explicitly specify an # output directory under this sync directory. - eclipser_out = os.path.join(output_corpus, "eclipser_output") + eclipser_out = os.path.join(output_corpus, 'eclipser_output') command = [ 'dotnet', '/Eclipser/build/Eclipser.dll', @@ -94,7 +94,8 @@ def eclipser(input_corpus, output_corpus, target_binary): if os.listdir(input_corpus): # Specify inputs only if any seed exists. command += ['-i', input_corpus] print('[eclipser] Run Eclipser with command: ' + ' '.join(command)) - subprocess.Popen(command) + with subprocess.Popen(command): + pass def afl_worker(input_corpus, output_corpus, target_binary): diff --git a/fuzzers/eclipser_aflplusplus/fuzzer.py b/fuzzers/eclipser_aflplusplus/fuzzer.py index 6ea6ba318..87235b012 100644 --- a/fuzzers/eclipser_aflplusplus/fuzzer.py +++ b/fuzzers/eclipser_aflplusplus/fuzzer.py @@ -38,7 +38,7 @@ def build(): fuzz_target = os.getenv('FUZZ_TARGET') # First, build an uninstrumented binary for Eclipser. - aflplusplus_fuzzer.build("qemu", "eclipser") + aflplusplus_fuzzer.build('qemu', 'eclipser') eclipser_dir = get_uninstrumented_outdir(build_directory) os.mkdir(eclipser_dir) fuzz_binary = build_directory + '/' + fuzz_target @@ -48,7 +48,7 @@ def build(): # Second, build an instrumented binary for AFL++. os.environ = orig_env - aflplusplus_fuzzer.build("tracepc") + aflplusplus_fuzzer.build('tracepc') print('[build] Copying afl-fuzz to $OUT directory') # Copy afl-fuzz @@ -60,7 +60,7 @@ def eclipser(input_corpus, output_corpus, target_binary): # We will use output_corpus as a directory where AFL and Eclipser sync their # test cases with each other. For Eclipser, we should explicitly specify an # output directory under this sync directory. - eclipser_out = os.path.join(output_corpus, "eclipser_output") + eclipser_out = os.path.join(output_corpus, 'eclipser_output') command = [ 'dotnet', '/Eclipser/build/Eclipser.dll', @@ -82,7 +82,8 @@ def eclipser(input_corpus, output_corpus, target_binary): if os.listdir(input_corpus): # Specify inputs only if any seed exists. 
command += ['-i', input_corpus] print('[eclipser] Run Eclipser with command: ' + ' '.join(command)) - subprocess.Popen(command) + with subprocess.Popen(command): + pass def afl_worker(input_corpus, output_corpus, target_binary): @@ -105,13 +106,13 @@ def fuzz(input_corpus, output_corpus, target_binary): uninstrumented_target_binary = os.path.join( uninstrumented_target_binary_directory, target_binary_name) if not os.path.isdir(input_corpus): - raise Exception("invalid input directory") + raise Exception('invalid input directory') afl_args = (input_corpus, output_corpus, target_binary) eclipser_args = (input_corpus, output_corpus, uninstrumented_target_binary) # Do not launch AFL master instance for now, to reduce memory usage and # align with the vanilla AFL. - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' print('[fuzz] Running AFL worker') afl_worker_thread = threading.Thread(target=afl_worker, args=afl_args) afl_worker_thread.start() diff --git a/fuzzers/fafuzz/fuzzer.py b/fuzzers/fafuzz/fuzzer.py index a1116a55e..8713d76d2 100644 --- a/fuzzers/fafuzz/fuzzer.py +++ b/fuzzers/fafuzz/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/fuzzolic_aflplusplus_fuzzy/fuzzer.py b/fuzzers/fuzzolic_aflplusplus_fuzzy/fuzzer.py index 838fbf053..e3e785e2c 100644 --- a/fuzzers/fuzzolic_aflplusplus_fuzzy/fuzzer.py +++ b/fuzzers/fuzzolic_aflplusplus_fuzzy/fuzzer.py @@ -78,9 +78,9 @@ def fuzzolic(input_corpus, output_corpus, target_binary): # test cases with each other. For Fuzzolic, we should explicitly specify an # output directory under this sync directory. if input_corpus: - fuzzolic_out = os.path.join(output_corpus, "fuzzolic_output") - afl_out = os.path.join(output_corpus, "afl-worker") - afl_queue = os.path.join(afl_out, "queue") + fuzzolic_out = os.path.join(output_corpus, 'fuzzolic_output') + afl_out = os.path.join(output_corpus, 'afl-worker') + afl_queue = os.path.join(afl_out, 'queue') command = [ '/out/fuzzolic/fuzzolic/fuzzolic.py', '-f', # fuzzy-sat solver @@ -99,7 +99,8 @@ def fuzzolic(input_corpus, output_corpus, target_binary): target_binary, ] print('[fuzzolic] Running Fuzzolic with command: ' + ' '.join(command)) - subprocess.Popen(command) + with subprocess.Popen(command): + pass def afl_worker(input_corpus, output_corpus, target_binary): @@ -118,7 +119,7 @@ def fuzz(input_corpus, output_corpus, target_binary): afl_fuzzer.prepare_fuzz_environment(input_corpus) print('[fuzz] Running AFL worker') - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' afl_args = (input_corpus, output_corpus, target_binary) afl_worker_thread = threading.Thread(target=afl_worker, args=afl_args) afl_worker_thread.start() diff --git a/fuzzers/fuzzolic_aflplusplus_z3/fuzzer.py b/fuzzers/fuzzolic_aflplusplus_z3/fuzzer.py index 2fec3bb33..5fe9ff931 100644 --- a/fuzzers/fuzzolic_aflplusplus_z3/fuzzer.py +++ b/fuzzers/fuzzolic_aflplusplus_z3/fuzzer.py @@ -78,9 +78,9 @@ def fuzzolic(input_corpus, output_corpus, target_binary): # test cases with each other. For Fuzzolic, we should explicitly specify an # output directory under this sync directory. 
if input_corpus: - fuzzolic_out = os.path.join(output_corpus, "fuzzolic_output") - afl_out = os.path.join(output_corpus, "afl-worker") - afl_queue = os.path.join(afl_out, "queue") + fuzzolic_out = os.path.join(output_corpus, 'fuzzolic_output') + afl_out = os.path.join(output_corpus, 'afl-worker') + afl_queue = os.path.join(afl_out, 'queue') command = [ '/out/fuzzolic/fuzzolic/fuzzolic.py', '-p', # optimistic solving @@ -98,7 +98,8 @@ def fuzzolic(input_corpus, output_corpus, target_binary): target_binary, ] print('[fuzzolic] Running Fuzzolic with command: ' + ' '.join(command)) - subprocess.Popen(command) + with subprocess.Popen(command): + pass def afl_worker(input_corpus, output_corpus, target_binary): @@ -117,7 +118,7 @@ def fuzz(input_corpus, output_corpus, target_binary): afl_fuzzer.prepare_fuzz_environment(input_corpus) print('[fuzz] Running AFL worker') - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' afl_args = (input_corpus, output_corpus, target_binary) afl_worker_thread = threading.Thread(target=afl_worker, args=afl_args) afl_worker_thread.start() diff --git a/fuzzers/gramatron/fuzzer.py b/fuzzers/gramatron/fuzzer.py index e4cf668d7..3174f8894 100755 --- a/fuzzers/gramatron/fuzzer.py +++ b/fuzzers/gramatron/fuzzer.py @@ -23,18 +23,18 @@ def prepare_fuzz_environment(input_corpus): """Prepare to fuzz with a LibAFL-based fuzzer.""" - os.environ['ASAN_OPTIONS'] = "abort_on_error=1:detect_leaks=0:"\ - "malloc_context_size=0:symbolize=0:"\ - "allocator_may_return_null=1:"\ - "detect_odr_violation=0:handle_segv=0:"\ - "handle_sigbus=0:handle_abort=0:"\ - "handle_sigfpe=0:handle_sigill=0" - os.environ['UBSAN_OPTIONS'] = "abort_on_error=1:"\ - "allocator_release_to_os_interval_ms=500:"\ - "handle_abort=0:handle_segv=0:"\ - "handle_sigbus=0:handle_sigfpe=0:"\ - "handle_sigill=0:print_stacktrace=0:"\ - "symbolize=0:symbolize_inline_frames=0" + os.environ['ASAN_OPTIONS'] = 'abort_on_error=1:detect_leaks=0:'\ + 'malloc_context_size=0:symbolize=0:'\ + 'allocator_may_return_null=1:'\ + 'detect_odr_violation=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_abort=0:'\ + 'handle_sigfpe=0:handle_sigill=0' + os.environ['UBSAN_OPTIONS'] = 'abort_on_error=1:'\ + 'allocator_release_to_os_interval_ms=500:'\ + 'handle_abort=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_sigfpe=0:'\ + 'handle_sigill=0:print_stacktrace=0:'\ + 'symbolize=0:symbolize_inline_frames=0' # Create at least one non-empty seed to start. 
utils.create_seed_file_for_empty_corpus(input_corpus) @@ -52,7 +52,7 @@ def build(): # pylint: disable=too-many-branches,too-many-statements raise RuntimeError('Unsupported benchmark, unavailable grammar') dest = os.path.join(os.environ['OUT'], 'grammar.json.gz') shutil.copy(copy_file, dest) - os.system("gzip -d '%s'" % dest) + os.system(f'gzip -d "{dest}"') os.environ['CC'] = '/libafl_fuzzbench/target/release/gramatron_cc' os.environ['CXX'] = '/libafl_fuzzbench/target/release/gramatron_cxx' diff --git a/fuzzers/grimoire/fuzzer.py b/fuzzers/grimoire/fuzzer.py index 87f9d7f54..1c156412a 100755 --- a/fuzzers/grimoire/fuzzer.py +++ b/fuzzers/grimoire/fuzzer.py @@ -23,18 +23,18 @@ def prepare_fuzz_environment(input_corpus): """Prepare to fuzz with a LibAFL-based fuzzer.""" - os.environ['ASAN_OPTIONS'] = "abort_on_error=1:detect_leaks=0:"\ - "malloc_context_size=0:symbolize=0:"\ - "allocator_may_return_null=1:"\ - "detect_odr_violation=0:handle_segv=0:"\ - "handle_sigbus=0:handle_abort=0:"\ - "handle_sigfpe=0:handle_sigill=0" - os.environ['UBSAN_OPTIONS'] = "abort_on_error=1:"\ - "allocator_release_to_os_interval_ms=500:"\ - "handle_abort=0:handle_segv=0:"\ - "handle_sigbus=0:handle_sigfpe=0:"\ - "handle_sigill=0:print_stacktrace=0:"\ - "symbolize=0:symbolize_inline_frames=0" + os.environ['ASAN_OPTIONS'] = 'abort_on_error=1:detect_leaks=0:'\ + 'malloc_context_size=0:symbolize=0:'\ + 'allocator_may_return_null=1:'\ + 'detect_odr_violation=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_abort=0:'\ + 'handle_sigfpe=0:handle_sigill=0' + os.environ['UBSAN_OPTIONS'] = 'abort_on_error=1:'\ + 'allocator_release_to_os_interval_ms=500:'\ + 'handle_abort=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_sigfpe=0:'\ + 'handle_sigill=0:print_stacktrace=0:'\ + 'symbolize=0:symbolize_inline_frames=0' # Create at least one non-empty seed to start. utils.create_seed_file_for_empty_corpus(input_corpus) diff --git a/fuzzers/honggfuzz/builder.Dockerfile b/fuzzers/honggfuzz/builder.Dockerfile index 392f430a0..61b3f41c6 100644 --- a/fuzzers/honggfuzz/builder.Dockerfile +++ b/fuzzers/honggfuzz/builder.Dockerfile @@ -30,7 +30,7 @@ RUN apt-get update -y && \ # honggfuzz doesn't need this when hfuzz-clang(++) is used). RUN git clone https://github.com/google/honggfuzz.git /honggfuzz && \ cd /honggfuzz && \ - git checkout 0b4cd5b1c4cf26b7e022dc1deb931d9318c054cb && \ + git checkout oss-fuzz && \ CFLAGS="-O3 -funroll-loops" make && \ touch empty_lib.c && \ cc -c -o empty_lib.o empty_lib.c diff --git a/fuzzers/introspector_driven_focus/fuzzer.py b/fuzzers/introspector_driven_focus/fuzzer.py index d4cb1c7f9..cde79ba4b 100755 --- a/fuzzers/introspector_driven_focus/fuzzer.py +++ b/fuzzers/introspector_driven_focus/fuzzer.py @@ -28,7 +28,7 @@ def fuzz(input_corpus, output_corpus, target_binary): """Run fuzzer. Wrapper that uses the defaults when calling run_fuzzer.""" - with open('/focus_map.yaml', 'r') as focus_file: + with open('/focus_map.yaml', 'r', encoding='utf-8') as focus_file: focus_map = yaml.safe_load(focus_file) # This fuzzer just uses the first function from the list to focus benchmark = os.getenv('BENCHMARK', None) diff --git a/fuzzers/klee/builder.Dockerfile b/fuzzers/klee/builder.Dockerfile index 0b7ea1ad9..30036824e 100644 --- a/fuzzers/klee/builder.Dockerfile +++ b/fuzzers/klee/builder.Dockerfile @@ -15,89 +15,249 @@ ARG parent_image FROM $parent_image -# Install Clang/LLVM 6.0. 
-RUN apt-get update -y && \ - apt-get -y install llvm-6.0 \ - clang-6.0 llvm-6.0-dev llvm-6.0-tools \ - wget +# The following installation Steps 1-8 are from KLEE's recommended build guide: +# https://klee.github.io/build-llvm11/ +# We should merge some of them to minimise Dockerfile / docker image. + +# Step 1: Install dependencies. +# Install dependencies for KLEE. +RUN apt-get update && \ + apt-get install -y \ + build-essential \ + cmake \ + curl \ + file \ + g++-multilib \ + gcc-multilib \ + git \ + libcap-dev \ + libgoogle-perftools-dev \ + libncurses5-dev \ + libsqlite3-dev \ + libtcmalloc-minimal4 \ + python3-pip \ + unzip \ + graphviz \ + doxygen + +# Install dependencies for testing and additional features. +RUN pip3 install lit wllvm && \ + apt-get install -y python3-tabulate +ENV PATH=$PATH:'~/.local/bin' + +# Step 2: Install LLVM 11. +RUN apt-get install -y clang-11 llvm-11 llvm-11-dev llvm-11-tools +ENV PATH='/usr/lib/llvm-11/bin':$PATH +ENV LD_LIBRARY_PATH='/usr/lib/llvm-11/lib':$LD_LIBRARY_PATH +# ENV LD_LIBRARY_PATH='/usr/lib/clang/11.0.0/lib/linux':$LD_LIBRARY_PATH +# ENV LDFLAGS="$LDFLAGS -pthread" -# Install KLEE dependencies. +# Step 3: Install constraint solver (STP). +# Install STP dependencies. RUN apt-get install -y \ - cmake-data build-essential curl libcap-dev \ - git cmake libncurses5-dev unzip libtcmalloc-minimal4 \ - libgoogle-perftools-dev bison flex libboost-all-dev \ - perl zlib1g-dev libsqlite3-dev doxygen + cmake \ + bison \ + flex \ + libboost-all-dev \ + python \ + perl \ + zlib1g-dev \ + minisat \ + libboost-all-dev \ + perl \ + zlib1g-dev ENV INSTALL_DIR=/out # Install minisat. -RUN git clone https://github.com/stp/minisat.git /minisat && \ - cd /minisat && mkdir build && cd build && \ +RUN git clone https://github.com/stp/minisat.git /src/minisat && \ + mkdir /src/minisat/build && \ + (cd /src/minisat/build && \ CXXFLAGS= cmake -DSTATIC_BINARIES=ON \ -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR -DCMAKE_BUILD_TYPE=Release ../ && \ - make -j`nproc` && make install + make -j`nproc` && make install) # Install STP solver. -RUN git clone https://github.com/stp/stp.git /stp && \ - cd /stp && git checkout tags/2.1.2 && \ - mkdir build && cd build && \ - CXXFLAGS= cmake -DBUILD_SHARED_LIBS:BOOL=OFF \ - -DENABLE_PYTHON_INTERFACE:BOOL=OFF \ - -DMINISAT_LIBRARY=$INSTALL_DIR/lib/libminisat.so \ - -DMINISAT_INCLUDE_DIR=$INSTALL_DIR/include \ - -DCMAKE_INSTALL_PREFIX=/user/local/ -DCMAKE_BUILD_TYPE=Release .. && \ - make -j`nproc` && make install - -RUN git clone https://github.com/klee/klee-uclibc.git /klee-uclibc && \ - cd /klee-uclibc && \ - CC=`which clang-6.0` CXX=`which clang++-6.0` \ - ./configure --make-llvm-lib --with-llvm-config=`which llvm-config-6.0` && \ - make -j`nproc` && make install - -# Install KLEE. Use my personal repo containing seed conversion scripts for now. -# TODO: Include seed conversion scripts in fuzzbench repo. -# Note: don't use the 'debug' branch because it has checks for non-initialized values -# that need to be fixed for certain syscalls. -# When we use it, be sure to also use klee-uclibc from https://github.com/lmrs2/klee-uclibc.git. -RUN git clone https://github.com/lmrs2/klee.git /klee && \ - cd /klee && \ - git checkout 3810917841c1cb58587719c1d3d47181a2401324 && \ - wget -O tools/ktest-tool/ktest-tool https://raw.githubusercontent.com/lmrs2/klee/debug/tools/ktest-tool/ktest-tool - -# The libcxx build script in the KLEE repo depends on wllvm: -RUN pip3 install wllvm - -# Before building KLEE, build libcxx. 
-RUN cd /klee && \ - LLVM_VERSION=6.0 SANITIZER_BUILD= ENABLE_OPTIMIZED=0 ENABLE_DEBUG=1 \ - DISABLE_ASSERTIONS=1 REQUIRES_RTTI=1 \ - BASE=/out \ - ./scripts/build/build.sh libcxx - -RUN cd /klee && \ - mkdir build && cd build && \ - CXXFLAGS= cmake -DENABLE_SOLVER_STP=ON -DENABLE_POSIX_RUNTIME=ON \ - -DENABLE_KLEE_LIBCXX=ON -DKLEE_LIBCXX_DIR=/out/libc++-install-60/ \ - -DKLEE_LIBCXX_INCLUDE_DIR=/out/libc++-install-60/include/c++/v1/ \ - -DENABLE_KLEE_UCLIBC=ON -DKLEE_UCLIBC_PATH=/klee-uclibc/ \ - -DENABLE_SYSTEM_TESTS=OFF -DENABLE_UNIT_TESTS=OFF \ - -DLLVM_CONFIG_BINARY=`which llvm-config-6.0` -DLLVMCC=`which clang-6.0` \ - -DLLVMCXX=`which clang++-6.0` -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR ../ \ - -DCMAKE_BUILD_TYPE=Release && \ - make -j`nproc` && make install - -ENV LLVM_CC_NAME=clang-6.0 -ENV LLVM_CXX_NAME=clang++-6.0 -ENV LLVM_AR_NAME=llvm-ar-6.0 -ENV LLVM_LINK_NAME=llvm-link-6.0 -ENV LLVM_COMPILER=clang -ENV CC=wllvm -ENV CXX=wllvm++ - -# Compile the harness klee_driver.cpp. -COPY klee_driver.cpp /klee_driver.cpp -COPY klee_mock.c /klee_mock.c -RUN $CXX -stdlib=libc++ -std=c++11 -O2 -c /klee_driver.cpp -o /klee_driver.o && \ - ar r /libAFL.a /klee_driver.o && \ - $LLVM_CC_NAME -O2 -c -fPIC /klee_mock.c -o /klee_mock.o && \ - $LLVM_CC_NAME -shared -o /libKleeMock.so /klee_mock.o +RUN git clone \ + --depth 1 \ + --branch 2.3.3\ + https://github.com/stp/stp.git /src/stp && \ + mkdir /src/stp/build && \ + (cd /src/stp/build && \ + CXXFLAGS= cmake -DBUILD_SHARED_LIBS:BOOL=ON \ + -DENABLE_PYTHON_INTERFACE:BOOL=OFF \ + -DMINISAT_LIBRARY=$INSTALL_DIR/lib/libminisat.so.2.1.0 \ + -DMINISAT_INCLUDE_DIR=$INSTALL_DIR/include \ + -DCMAKE_INSTALL_PREFIX=/user/local/ -DCMAKE_BUILD_TYPE=Release .. && \ + make -j`nproc` && make install) + +# Step 4 (Optional): Get Google test sources. +RUN curl \ + -o /src/release-1.11.0.zip \ + -L https://github.com/google/googletest/archive/release-1.11.0.zip && \ + unzip /src/release-1.11.0.zip -d /src && \ + rm /src/release-1.11.0.zip + +# Step 5(Optional): Build uClibc and the POSIX environment model. +# Enable the KLEE POSIX runtime to run on real programs. +ENV KLEE_UCLIBC='/src/klee-uclibc' +RUN git clone https://github.com/klee/klee-uclibc.git $KLEE_UCLIBC && \ + (cd $KLEE_UCLIBC && \ + ./configure --make-llvm-lib && \ +# --make-llvm-lib \ +# --with-cc clang-11 \ +# --with-llvm-config llvm-config-11 && \ + make -j`nproc`) + +# Step 6: Get KLEE source. +ENV KLEE_DIR=/src/klee +RUN git clone https://github.com/klee/klee.git $KLEE_DIR + +# Step 7 (Optional): Build libc++. +ENV LIBCXX_DIR=/src/libcxx +RUN mkdir $LIBCXX_DIR && \ + (cd $KLEE_DIR && \ + LLVM_VERSION=11 BASE=$LIBCXX_DIR ./scripts/build/build.sh libcxx) + +# Step 8: Configure KLEE. +RUN mkdir $KLEE_DIR/build && \ + (cd $KLEE_DIR/build && \ + cmake \ + -DENABLE_SOLVER_STP=ON \ + -DENABLE_POSIX_RUNTIME=ON \ + -DKLEE_UCLIBC_PATH=/src/klee-uclibc \ + -DENABLE_UNIT_TESTS=ON \ + -DLLVM_CONFIG_BINARY=/usr/bin/llvm-config-11 \ + -DGTEST_SRC_DIR=/src/googletest-release-1.11.0/ \ + -DENABLE_KLEE_LIBCXX=ON \ + -DKLEE_LIBCXX_DIR=/src/libcxx/libc++-install-110/ \ + -DKLEE_LIBCXX_INCLUDE_DIR=/src/libcxx/libc++-install-110/include/c++/v1/ \ + -DENABLE_KLEE_EH_CXX=ON \ + -DKLEE_LIBCXXABI_SRC_DIR=/src/libcxx/llvm-110/libcxxabi/ \ + ..) + +# Step 9: Build KLEE. +RUN (cd $KLEE_DIR/build && \ + make) + + +# Install Clang/LLVM 6.0. +# RUN apt-get update -y && \ +# apt-get -y install llvm-11.0 \ +# clang-6.0 llvm-6.0-dev llvm-6.0-tools \ +# wget + +# # Install KLEE. 
+# ENV LIBCXX_DIR=/src/libcxx +# RUN mkdir $LIBCXX_DIR && \ +# git clone https://github.com/klee/klee.git && \ +# cd klee && \ +# LLVM_VERSION=11 BASE=$LIBCXX_DIR \ +# ./scripts/build/build.sh libcxx \ +# mkdir build && \ +# cd build && \ +# cmake \ +# -DENABLE_SOLVER_STP=ON \ +# -DENABLE_POSIX_RUNTIME=ON \ +# -DKLEE_UCLIBC_PATH=/src/klee-uclibc \ +# -DENABLE_UNIT_TESTS=ON \ +# -DLLVM_CONFIG_BINARY=/usr/bin/llvm-config-11 \ +# -DGTEST_SRC_DIR=/src/googletest-release-1.11.0/ \ +# -DENABLE_KLEE_LIBCXX=ON \ +# -DKLEE_LIBCXX_DIR=/src/libcxx/libc++-install-110/ \ +# -DKLEE_LIBCXX_INCLUDE_DIR=/src/libcxx/libc++-install-110/include/c++/v1/ \ +# -DENABLE_KLEE_EH_CXX=ON \ +# -DKLEE_LIBCXXABI_SRC_DIR=/src/libcxx/llvm-110/libcxxabi/ \ +# .. && \ +# make && \ +# make systemtests && \ +# lit test/ && \ +# make unittests + + +# # Install libstdc++-4.8. +# RUN echo 'deb http://dk.archive.ubuntu.com/ubuntu/ trusty main' >> /etc/apt/sources.list && \ +# echo 'deb http://dk.archive.ubuntu.com/ubuntu/ trusty universe' >> /etc/apt/sources && \ +# apt-get update && \ +# apt-get install -y libstdc++-4.8-dev +# +# # Install KLEE dependencies. +# RUN apt-get install -y \ +# cmake-data build-essential curl libcap-dev \ +# git cmake libncurses5-dev unzip libtcmalloc-minimal4 \ +# libgoogle-perftools-dev bison flex libboost-all-dev \ +# perl zlib1g-dev libsqlite3-dev doxygen +# +# ENV INSTALL_DIR=/out +# +# # Install minisat. +# RUN git clone https://github.com/stp/minisat.git /minisat && \ +# cd /minisat && mkdir build && cd build && \ +# CXXFLAGS= cmake -DSTATIC_BINARIES=ON \ +# -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR -DCMAKE_BUILD_TYPE=Release ../ && \ +# make -j`nproc` && make install +# +# # Install STP solver. +# RUN git clone https://github.com/stp/stp.git /stp && \ +# cd /stp && git checkout tags/2.1.2 && \ +# mkdir build && cd build && \ +# CXXFLAGS= cmake -DBUILD_SHARED_LIBS:BOOL=OFF \ +# -DENABLE_PYTHON_INTERFACE:BOOL=OFF \ +# -DMINISAT_LIBRARY=$INSTALL_DIR/lib/libminisat.so \ +# -DMINISAT_INCLUDE_DIR=$INSTALL_DIR/include \ +# -DCMAKE_INSTALL_PREFIX=/user/local/ -DCMAKE_BUILD_TYPE=Release .. && \ +# make -j`nproc` && make install +# +# RUN git clone https://github.com/klee/klee-uclibc.git /klee-uclibc && \ +# cd /klee-uclibc && \ +# CC=`which clang-6.0` CXX=`which clang++-6.0` \ +# ./configure --make-llvm-lib --with-llvm-config=`which llvm-config-6.0` && \ +# make -j`nproc` && make install +# +# # Install KLEE. Use my personal repo containing seed conversion scripts for now. +# # TODO: Include seed conversion scripts in fuzzbench repo. +# # Note: don't use the 'debug' branch because it has checks for non-initialized values +# # that need to be fixed for certain syscalls. +# # When we use it, be sure to also use klee-uclibc from https://github.com/lmrs2/klee-uclibc.git. +# RUN git clone https://github.com/lmrs2/klee.git /klee && \ +# cd /klee && \ +# git checkout 3810917841c1cb58587719c1d3d47181a2401324 && \ +# wget -O tools/ktest-tool/ktest-tool https://raw.githubusercontent.com/lmrs2/klee/debug/tools/ktest-tool/ktest-tool +# +# # The libcxx build script in the KLEE repo depends on wllvm: +# RUN pip3 install wllvm + +# # Before building KLEE, build libcxx. 
+# RUN cd /klee && \ +# LLVM_VERSION=6.0 SANITIZER_BUILD= ENABLE_OPTIMIZED=0 ENABLE_DEBUG=1 \ +# DISABLE_ASSERTIONS=1 REQUIRES_RTTI=1 \ +# BASE=/out \ +# ./scripts/build/build.sh libcxx +# +# RUN cd /klee && \ +# mkdir build && cd build && \ +# CXXFLAGS= cmake -DENABLE_SOLVER_STP=ON -DENABLE_POSIX_RUNTIME=ON \ +# -DENABLE_KLEE_LIBCXX=ON -DKLEE_LIBCXX_DIR=/out/libc++-install-60/ \ +# -DKLEE_LIBCXX_INCLUDE_DIR=/out/libc++-install-60/include/c++/v1/ \ +# -DENABLE_KLEE_UCLIBC=ON -DKLEE_UCLIBC_PATH=/klee-uclibc/ \ +# -DENABLE_SYSTEM_TESTS=OFF -DENABLE_UNIT_TESTS=OFF \ +# -DLLVM_CONFIG_BINARY=`which llvm-config-6.0` -DLLVMCC=`which clang-6.0` \ +# -DLLVMCXX=`which clang++-6.0` -DCMAKE_INSTALL_PREFIX=$INSTALL_DIR ../ \ +# -DCMAKE_BUILD_TYPE=Release && \ +# make -j`nproc` && make install +# +# ENV LLVM_CC_NAME=clang-6.0 +# ENV LLVM_CXX_NAME=clang++-6.0 +# ENV LLVM_AR_NAME=llvm-ar-6.0 +# ENV LLVM_LINK_NAME=llvm-link-6.0 +# ENV LLVM_COMPILER=clang +# ENV CC=wllvm +# ENV CXX=wllvm++ +# +# # Compile the harness klee_driver.cpp. +# COPY klee_driver.cpp /klee_driver.cpp +# COPY klee_mock.c /klee_mock.c +# RUN $CXX -stdlib=libc++ -std=c++11 -O2 -c /klee_driver.cpp -o /klee_driver.o && \ +# ar r /libAFL.a /klee_driver.o && \ +# $LLVM_CC_NAME -O2 -c -fPIC /klee_mock.c -o /klee_mock.o && \ +# $LLVM_CC_NAME -shared -o /libKleeMock.so /klee_mock.o diff --git a/fuzzers/klee/fuzzer.py b/fuzzers/klee/fuzzer.py index 06c843dc7..2fc7b4d7d 100644 --- a/fuzzers/klee/fuzzer.py +++ b/fuzzers/klee/fuzzer.py @@ -104,7 +104,7 @@ def get_size_for_benchmark(): def get_bcs_for_shared_libs(fuzz_target): """Get shared libs paths for the fuzz_target""" - ldd_cmd = ['/usr/bin/ldd', '{target}'.format(target=fuzz_target)] + ldd_cmd = ['/usr/bin/ldd', f'{fuzz_target}'] output = '' try: output = subprocess.check_output(ldd_cmd, universal_newlines=True) @@ -115,23 +115,20 @@ def get_bcs_for_shared_libs(fuzz_target): if '=>' not in line: continue - out_dir = '{out}/{lib_bc_dir}'.format(out=os.environ['OUT'], - lib_bc_dir=LIB_BC_DIR) + out_dir = f'{os.environ["OUT"]}/{LIB_BC_DIR}' path = pathlib.Path(out_dir) path.mkdir(exist_ok=True) so_path = line.split('=>')[1].split(' ')[1] so_name = so_path.split('/')[-1].split('.')[0] if so_name: - getbc_cmd = 'extract-bc -o {out_dir}/{so_name}.bc {target}'.format( - target=so_path, out_dir=out_dir, so_name=so_name) - print('[extract-bc command] | {getbc_cmd}'.format( - getbc_cmd=getbc_cmd)) + getbc_cmd = f'extract-bc -o {out_dir}/{so_name}.bc {so_path}' + print(f'[extract-bc command] | {getbc_cmd}') # This will fail for most of the dependencies, which is fine. We # want to grab the .bc files for dependencies built in any given # benchmark's build.sh file. 
success = os.system(getbc_cmd) if success == 1: - print('Got a bc file for {target}'.format(target=so_path)) + print(f'Got a bc file for {so_path}') def get_bc_files(): @@ -222,13 +219,13 @@ def emptydir(path): def run(command, hide_output=False, ulimit_cmd=None): """Run the command |command|, optionally, run |ulimit_cmd| first.""" cmd = ' '.join(command) - print('[run_cmd] {}'.format(cmd)) + print(f'[run_cmd] {cmd}') output_stream = subprocess.DEVNULL if hide_output else None if ulimit_cmd: ulimit_command = [ulimit_cmd + ';'] ulimit_command.extend(command) - print('[ulimit_command] {}'.format(' '.join(ulimit_command))) + print(f'[ulimit_command] {" ".join(ulimit_command)}') ret = subprocess.call(' '.join(ulimit_command), stdout=output_stream, stderr=output_stream, @@ -238,8 +235,7 @@ def run(command, hide_output=False, ulimit_cmd=None): stdout=output_stream, stderr=output_stream) if ret != 0: - raise ValueError('command failed: {ret} - {cmd}'.format(ret=ret, - cmd=cmd)) + raise ValueError(f'command failed: {ret} - {cmd}') def convert_seed_inputs(ktest_tool, input_klee, input_corpus): @@ -268,25 +264,21 @@ def convert_seed_inputs(ktest_tool, input_klee, input_corpus): file_size = os.path.getsize(seedfile) benchmark_size = get_size_for_benchmark() if file_size > benchmark_size: - print('[run_fuzzer] Truncating {path} ({file_size}) to \ - {benchmark_size}'.format(path=seedfile, - file_size=file_size, - benchmark_size=benchmark_size)) + print(f'[run_fuzzer] Truncating {seedfile} ({file_size}) to ' + f'{benchmark_size}') os.truncate(seedfile, benchmark_size) - seed_in = '{seed}.ktest'.format(seed=seedfile) + seed_in = f'{seedfile}.ktest' seed_out = os.path.join(input_klee, os.path.basename(seed_in)) # Create file for symblic buffer - input_file = '{seed}.ktest.{symbolic}'.format(seed=seedfile, - symbolic=SYMBOLIC_BUFFER) - output_kfile = '{seed}.ktest'.format(seed=seedfile) + input_file = f'{seedfile}.ktest.{SYMBOLIC_BUFFER}' + output_kfile = f'{seedfile}.ktest' shutil.copyfile(seedfile, input_file) os.rename(seedfile, input_file) # Create file for mode version - model_input_file = '{seed}.ktest.{symbolic}'.format( - seed=seedfile, symbolic=MODEL_VERSION) + model_input_file = f'{seedfile}.ktest.{MODEL_VERSION}' with open(model_input_file, 'wb') as mfile: mfile.write(model) @@ -303,8 +295,7 @@ def convert_seed_inputs(ktest_tool, input_klee, input_corpus): n_converted += 1 - print('[run_fuzzer] Converted {converted} seed files'.format( - converted=n_converted)) + print(f'[run_fuzzer] Converted {n_converted} seed files') return n_converted @@ -322,12 +313,12 @@ def convert_individual_ktest(ktest_tool, kfile, queue_dir, output_klee, # And copy the resulting file in output_corpus ktest_fn = os.path.splitext(kfile)[0] - file_in = '{file}.{symbuf}'.format(file=kfile, symbuf=SYMBOLIC_BUFFER) + file_in = f'{kfile}.{SYMBOLIC_BUFFER}' file_out = os.path.join(queue_dir, os.path.basename(ktest_fn)) os.rename(file_in, file_out) # Check if this is a crash - crash_regex = os.path.join(output_klee, '{fn}.*.err'.format(fn=ktest_fn)) + crash_regex = os.path.join(output_klee, f'{ktest_fn}.*.err') crashes = glob.glob(crash_regex) n_crashes = 0 if len(crashes) == 1: @@ -350,14 +341,12 @@ def monitor_resource_usage(): start = datetime.now() while True: time.sleep(60 * 5) - message = '{cputimes}\n{virtmem}\n{swap}'.format( - cputimes=psutil.cpu_times_percent(percpu=False), - virtmem=psutil.virtual_memory(), - swap=psutil.swap_memory()) + message = (f'{psutil.cpu_times_percent(percpu=False)}\n' + 
f'{psutil.virtual_memory()}\n' + f'{psutil.swap_memory()}') now = datetime.now() print( - '[resource_thread] Resource usage after {time}:\n{message}'.format( - time=now - start, message=message)) + f'[resource_thread] Resource usage after {now - start}:\n{message}') # pylint: disable=import-error @@ -396,7 +385,7 @@ def fuzz(input_corpus, output_corpus, target_binary): print('[run_fuzzer] Running target with klee') klee_bin = os.path.join(out_dir, 'bin/klee') - target_binary_bc = '{}.bc'.format(target_binary) + target_binary_bc = f'{target_binary}.bc' max_time_seconds = ( int(os.getenv('MAX_TOTAL_TIME', str(MAX_TOTAL_TIME_DEFAULT))) * 4) // 5 @@ -405,8 +394,7 @@ def fuzz(input_corpus, output_corpus, target_binary): llvm_link_libs = [] for filename in get_bc_files(): - llvm_link_libs.append('-link-llvm-lib=./{lib_bc}/{filename}'.format( - lib_bc=LIB_BC_DIR, filename=filename)) + llvm_link_libs.append(f'-link-llvm-lib=./{LIB_BC_DIR}/{filename}') max_memory_mb = str(int(psutil.virtual_memory().available // 10**6 * 0.9)) diff --git a/fuzzers/lafintel/fuzzer.py b/fuzzers/lafintel/fuzzer.py index 29048d587..3cfc082c9 100644 --- a/fuzzers/lafintel/fuzzer.py +++ b/fuzzers/lafintel/fuzzer.py @@ -34,10 +34,10 @@ def remove_builtin(flag): return split[0] + '=' + ','.join(options) return flag - cflags = map(remove_builtin, os.environ["CFLAGS"].split()) - cxxflags = map(remove_builtin, os.environ["CXXFLAGS"].split()) - os.environ["CFLAGS"] = ' '.join(cflags) - os.environ["CXXFLAGS"] = ' '.join(cxxflags) + cflags = map(remove_builtin, os.environ['CFLAGS'].split()) + cxxflags = map(remove_builtin, os.environ['CXXFLAGS'].split()) + os.environ['CFLAGS'] = ' '.join(cflags) + os.environ['CXXFLAGS'] = ' '.join(cxxflags) # In php benchmark, there is a call to __builtin_cpu_supports("ssse3") # (see https://github.com/php/php-src/blob/master/Zend/zend_cpuinfo.h). # It is not supported by clang-3.8, so we define the MACRO below diff --git a/fuzzers/libafl/builder.Dockerfile b/fuzzers/libafl/builder.Dockerfile index 322c73378..ba8945840 100644 --- a/fuzzers/libafl/builder.Dockerfile +++ b/fuzzers/libafl/builder.Dockerfile @@ -15,34 +15,39 @@ ARG parent_image FROM $parent_image -# Install libstdc++ to use llvm_mode. -RUN apt-get update && \ - apt-get install -y wget libstdc++-5-dev libtool-bin automake flex bison \ - libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ - apt-utils apt-transport-https ca-certificates joe curl - -# Uninstall old Rust -RUN if which rustup; then rustup self uninstall -y; fi - -# Install latest Rust -RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > /rustup.sh && \ - sh /rustup.sh -y +# Uninstall old Rust & Install the latest one. +RUN if which rustup; then rustup self uninstall -y; fi && \ + curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs > /rustup.sh && \ + sh /rustup.sh -y && \ + rm /rustup.sh -# Switch to nightly -RUN PATH="$PATH:/root/.cargo/bin/" rustup default nightly - -# Download libafl -RUN git clone https://github.com/AFLplusplus/libafl /libafl && \ - cd /libafl && \ - git checkout ebdab32b36fd2e22025a3d47dc996b5bc8121c95 - -# Compile libafl -RUN cd /libafl && unset CFLAGS && unset CXXFLAGS && \ - export CC=clang && export CXX=clang++ && \ +# Install dependencies. 
+RUN apt-get update && \ + apt-get remove -y llvm-10 && \ + apt-get install -y \ + build-essential \ + llvm-11 \ + clang-12 \ + cargo && \ + apt-get install -y wget libstdc++5 libtool-bin automake flex bison \ + libglib2.0-dev libpixman-1-dev python3-setuptools unzip \ + apt-utils apt-transport-https ca-certificates joe curl && \ + PATH="/root/.cargo/bin/:$PATH" cargo install cargo-make + +# Download libafl. +RUN git clone \ + --depth 1 \ + --branch 0.8.2 \ + https://github.com/AFLplusplus/libafl /libafl + +# Compile libafl. +RUN cd /libafl && \ + unset CFLAGS CXXFLAGS && \ export LIBAFL_EDGES_MAP_SIZE=2621440 && \ cd ./fuzzers/fuzzbench && \ - PATH="$PATH:/root/.cargo/bin/" cargo build --release + PATH="/root/.cargo/bin/:$PATH" cargo build --release +# Auxiliary weak references. RUN wget https://gist.githubusercontent.com/andreafioraldi/e5f60d68c98b31665a274207cfd05541/raw/4da351a321f1408df566a9cf2ce7cde6eeab3904/empty_fuzzer_lib.c -O /empty_fuzzer_lib.c && \ clang -c /empty_fuzzer_lib.c && \ ar r /emptylib.a *.o diff --git a/fuzzers/libafl/fuzzer.py b/fuzzers/libafl/fuzzer.py index f706d4aa0..63a4607ff 100755 --- a/fuzzers/libafl/fuzzer.py +++ b/fuzzers/libafl/fuzzer.py @@ -22,18 +22,18 @@ def prepare_fuzz_environment(input_corpus): """Prepare to fuzz with a LibAFL-based fuzzer.""" - os.environ['ASAN_OPTIONS'] = "abort_on_error=1:detect_leaks=0:"\ - "malloc_context_size=0:symbolize=0:"\ - "allocator_may_return_null=1:"\ - "detect_odr_violation=0:handle_segv=0:"\ - "handle_sigbus=0:handle_abort=0:"\ - "handle_sigfpe=0:handle_sigill=0" - os.environ['UBSAN_OPTIONS'] = "abort_on_error=1:"\ - "allocator_release_to_os_interval_ms=500:"\ - "handle_abort=0:handle_segv=0:"\ - "handle_sigbus=0:handle_sigfpe=0:"\ - "handle_sigill=0:print_stacktrace=0:"\ - "symbolize=0:symbolize_inline_frames=0" + os.environ['ASAN_OPTIONS'] = 'abort_on_error=1:detect_leaks=0:'\ + 'malloc_context_size=0:symbolize=0:'\ + 'allocator_may_return_null=1:'\ + 'detect_odr_violation=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_abort=0:'\ + 'handle_sigfpe=0:handle_sigill=0' + os.environ['UBSAN_OPTIONS'] = 'abort_on_error=1:'\ + 'allocator_release_to_os_interval_ms=500:'\ + 'handle_abort=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_sigfpe=0:'\ + 'handle_sigill=0:print_stacktrace=0:'\ + 'symbolize=0:symbolize_inline_frames=0' # Create at least one non-empty seed to start. utils.create_seed_file_for_empty_corpus(input_corpus) diff --git a/fuzzers/libfuzzer_dataflow_load/fuzzer.py b/fuzzers/libfuzzer_dataflow_load/fuzzer.py index 6acfbc905..f981e1cfc 100755 --- a/fuzzers/libfuzzer_dataflow_load/fuzzer.py +++ b/fuzzers/libfuzzer_dataflow_load/fuzzer.py @@ -21,7 +21,7 @@ def build(): """Build benchmark.""" - os.system("/src/fuzzers/libfuzzer_dataflow/variant-build.sh") + os.system('/src/fuzzers/libfuzzer_dataflow/variant-build.sh') # With LibFuzzer we use -fsanitize=fuzzer-no-link for build CFLAGS and then # /usr/lib/libFuzzer.a as the FUZZER_LIB for the main fuzzing binary. This diff --git a/fuzzers/libfuzzer_dataflow_store/fuzzer.py b/fuzzers/libfuzzer_dataflow_store/fuzzer.py index 48fbbf243..657c25912 100755 --- a/fuzzers/libfuzzer_dataflow_store/fuzzer.py +++ b/fuzzers/libfuzzer_dataflow_store/fuzzer.py @@ -21,7 +21,7 @@ def build(): """Build benchmark.""" - os.system("/src/fuzzers/libfuzzer_dataflow/variant-build.sh") + os.system('/src/fuzzers/libfuzzer_dataflow/variant-build.sh') # With LibFuzzer we use -fsanitize=fuzzer-no-link for build CFLAGS and then # /usr/lib/libFuzzer.a as the FUZZER_LIB for the main fuzzing binary. 
This diff --git a/fuzzers/nautilus/fuzzer.py b/fuzzers/nautilus/fuzzer.py index 34c0411a0..8cf7b4a0f 100755 --- a/fuzzers/nautilus/fuzzer.py +++ b/fuzzers/nautilus/fuzzer.py @@ -23,18 +23,18 @@ def prepare_fuzz_environment(input_corpus): """Prepare to fuzz with a LibAFL-based fuzzer.""" - os.environ['ASAN_OPTIONS'] = "abort_on_error=1:detect_leaks=0:"\ - "malloc_context_size=0:symbolize=0:"\ - "allocator_may_return_null=1:"\ - "detect_odr_violation=0:handle_segv=0:"\ - "handle_sigbus=0:handle_abort=0:"\ - "handle_sigfpe=0:handle_sigill=0" - os.environ['UBSAN_OPTIONS'] = "abort_on_error=1:"\ - "allocator_release_to_os_interval_ms=500:"\ - "handle_abort=0:handle_segv=0:"\ - "handle_sigbus=0:handle_sigfpe=0:"\ - "handle_sigill=0:print_stacktrace=0:"\ - "symbolize=0:symbolize_inline_frames=0" + os.environ['ASAN_OPTIONS'] = 'abort_on_error=1:detect_leaks=0:'\ + 'malloc_context_size=0:symbolize=0:'\ + 'allocator_may_return_null=1:'\ + 'detect_odr_violation=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_abort=0:'\ + 'handle_sigfpe=0:handle_sigill=0' + os.environ['UBSAN_OPTIONS'] = 'abort_on_error=1:'\ + 'allocator_release_to_os_interval_ms=500:'\ + 'handle_abort=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_sigfpe=0:'\ + 'handle_sigill=0:print_stacktrace=0:'\ + 'symbolize=0:symbolize_inline_frames=0' # Create at least one non-empty seed to start. utils.create_seed_file_for_empty_corpus(input_corpus) diff --git a/fuzzers/neuzz/builder.Dockerfile b/fuzzers/neuzz/builder.Dockerfile index 7ee9ad0df..4fa94d123 100644 --- a/fuzzers/neuzz/builder.Dockerfile +++ b/fuzzers/neuzz/builder.Dockerfile @@ -16,6 +16,13 @@ ARG parent_image FROM $parent_image +# Install and setup clang-11 for AFL/NEUZZ. +RUN apt install -y clang-11 && \ + ln -s /usr/bin/clang-11 /usr/bin/clang && \ + ln -s /usr/bin/clang++-11 /usr/bin/clang++ +ENV PATH="/usr/bin:${PATH}" +ENV LD_LIBRARY_PATH="/usr/lib/clang/11.0.0/lib/linux:${LD_LIBRARY_PATH}" + # Download and compile AFL v2.56b. # Set AFL_NO_X86 to skip flaky tests. 
RUN git clone https://github.com/google/AFL.git /afl && \ diff --git a/fuzzers/neuzz/fuzzer.py b/fuzzers/neuzz/fuzzer.py index 2cb5275d0..cd4eef3ae 100644 --- a/fuzzers/neuzz/fuzzer.py +++ b/fuzzers/neuzz/fuzzer.py @@ -53,10 +53,10 @@ def build(): def kill_afl(output_stream=subprocess.DEVNULL): """kill afl-fuzz process.""" - print("Warmed up!") + print('Warmed up!') # Can't avoid this because 'run_afl_fuzz' doesn't return a handle to # 'afl-fuzz' process so that we can kill it with subprocess.terminate() - subprocess.call(["pkill", "-f", "afl-fuzz"], + subprocess.call(['pkill', '-f', 'afl-fuzz'], stdout=output_stream, stderr=output_stream) @@ -73,9 +73,9 @@ def run_neuzz(input_corpus, afl.run_afl_fuzz(input_corpus, output_corpus, target_binary, additional_flags, hide_output) # After warming up, copy the 'queue' to use for neuzz input - print("[run_neuzz] Warmed up!") + print('[run_neuzz] Warmed up!') command = [ - "cp", "-RT", f"{output_corpus}/queue/", f"{input_corpus}_neuzzin/" + 'cp', '-RT', f'{output_corpus}/queue/', f'{input_corpus}_neuzzin/' ] print('[run_neuzz] Running command: ' + ' '.join(command)) @@ -88,22 +88,22 @@ def run_neuzz(input_corpus, # Spinning up the neural network command = [ - "python2", "./nn.py", '--output-folder', afl_output_dir, target_binary + 'python2', './nn.py', '--output-folder', afl_output_dir, target_binary ] print('[run_neuzz] Running command: ' + ' '.join(command)) - subprocess.Popen(command, stdout=output_stream, stderr=output_stream) + with subprocess.Popen(command, stdout=output_stream, stderr=output_stream): + pass time.sleep(40) target_rel_path = os.path.relpath(target_binary, os.getcwd()) # Spinning up neuzz command = [ - "./neuzz", "-m", "none", "-i", neuzz_input_dir, "-o", afl_output_dir, - target_rel_path, "@@" + './neuzz', '-m', 'none', '-i', neuzz_input_dir, '-o', afl_output_dir, + target_rel_path, '@@' ] print('[run_neuzz] Running command: ' + ' '.join(command)) - neuzz_proc = subprocess.Popen(command, - stdout=output_stream, - stderr=output_stream) - neuzz_proc.wait() + with subprocess.Popen(command, stdout=output_stream, + stderr=output_stream) as neuzz_proc: + neuzz_proc.wait() def fuzz(input_corpus, output_corpus, target_binary): diff --git a/fuzzers/neuzz/runner.Dockerfile b/fuzzers/neuzz/runner.Dockerfile index 66a371f43..40a42c8f3 100644 --- a/fuzzers/neuzz/runner.Dockerfile +++ b/fuzzers/neuzz/runner.Dockerfile @@ -14,10 +14,30 @@ FROM gcr.io/fuzzbench/base-image +# Install and setup clang-11 for AFL/NEUZZ. +RUN apt install -y clang-11 && \ + ln -s /usr/bin/clang-11 /usr/bin/clang && \ + ln -s /usr/bin/clang++-11 /usr/bin/clang++ +ENV PATH="/usr/bin:${PATH}" +ENV LD_LIBRARY_PATH="/usr/lib/clang/11.0.0/lib/linux:${LD_LIBRARY_PATH}" + +# Install Python2 and Pip2 on Ubuntu:20.04. +RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC \ + apt-get install -y software-properties-common && \ + apt-get update && \ + add-apt-repository universe && \ + apt-get install -y python-dev && \ + curl https://bootstrap.pypa.io/pip/2.7/get-pip.py --output get-pip.py && \ + python2 get-pip.py && \ + rm /usr/bin/python && \ + ln -s /usr/bin/python2.7 /usr/bin/python + RUN apt-get update && \ apt-get install wget -y && \ - apt-get install python-pip -y && \ - python --version && \ - python -m pip install --upgrade pip==20.3 && \ + python -m pip install --upgrade pip==20.3.4 && \ python -m pip install tensorflow==1.8.0 && \ python -m pip install keras==2.2.3 + +# Use Python3.10 by default. 
+RUN rm /usr/bin/python3 && \ + ln -s /usr/local/bin/python3 /usr/bin/python3 diff --git a/fuzzers/pythia_bb/fuzzer.py b/fuzzers/pythia_bb/fuzzer.py index 853c0890c..7c4c44180 100755 --- a/fuzzers/pythia_bb/fuzzer.py +++ b/fuzzers/pythia_bb/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/pythia_effect_bb/fuzzer.py b/fuzzers/pythia_effect_bb/fuzzer.py index 853c0890c..7c4c44180 100755 --- a/fuzzers/pythia_effect_bb/fuzzer.py +++ b/fuzzers/pythia_effect_bb/fuzzer.py @@ -48,7 +48,7 @@ def get_stats(output_corpus, fuzzer_log): # pylint: disable=unused-argument """Gets fuzzer stats for AFL.""" # Get a dictionary containing the stats AFL reports. stats_file = os.path.join(output_corpus, 'fuzzer_stats') - with open(stats_file) as file_handle: + with open(stats_file, encoding='utf-8') as file_handle: stats_file_lines = file_handle.read().splitlines() stats_file_dict = {} for stats_line in stats_file_lines: diff --git a/fuzzers/symcc_afl/fuzzer.py b/fuzzers/symcc_afl/fuzzer.py index ff5f49e27..0c92eaa2c 100644 --- a/fuzzers/symcc_afl/fuzzer.py +++ b/fuzzers/symcc_afl/fuzzer.py @@ -30,7 +30,7 @@ def get_symcc_build_dir(target_directory): def build(): """Build an AFL version and SymCC version of the benchmark""" - print("Step 1: Building with AFL") + print('Step 1: Building with AFL') build_directory = os.environ['OUT'] # First build with AFL. @@ -42,46 +42,46 @@ def build(): # twice in the same directory without this. afl_fuzzer.build() - print("Step 2: Completed AFL build") + print('Step 2: Completed AFL build') # Copy over AFL artifacts needed by SymCC. - shutil.copy("/afl/afl-fuzz", build_directory) - shutil.copy("/afl/afl-showmap", build_directory) + shutil.copy('/afl/afl-fuzz', build_directory) + shutil.copy('/afl/afl-showmap', build_directory) # Build the SymCC-instrumented target. - print("Step 3: Building the benchmark with SymCC") + print('Step 3: Building the benchmark with SymCC') symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) os.mkdir(symcc_build_dir) # Set flags to ensure compilation with SymCC. new_env = os.environ.copy() - new_env['CC'] = "/symcc/build/symcc" - new_env['CXX'] = "/symcc/build/sym++" - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CC'] = '/symcc/build/symcc' + new_env['CXX'] = '/symcc/build/sym++' + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_dir - new_env['CXXFLAGS'] += " -fno-sanitize=all " - new_env['CFLAGS'] += " -fno-sanitize=all " + new_env['CXXFLAGS'] += ' -fno-sanitize=all ' + new_env['CFLAGS'] += ' -fno-sanitize=all ' # Setting this environment variable instructs SymCC to use the # libcxx library compiled with SymCC instrumentation. - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' # Instructs SymCC to consider no symbolic inputs at runtime. This is needed # if, for example, some tests are run during compilation of the benchmark. - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' # Build benchmark. 
utils.build_benchmark(env=new_env) # Copy over symcc artifacts and symbolic libc++. shutil.copy( - "/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so", + '/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) - shutil.copy("/usr/lib/libz3.so", os.path.join(symcc_build_dir, "libz3.so")) - shutil.copy("/libcxx_native_build/lib/libc++.so.1", symcc_build_dir) - shutil.copy("/libcxx_native_build/lib/libc++abi.so.1", symcc_build_dir) - shutil.copy("/rust/bin/symcc_fuzzing_helper", symcc_build_dir) + shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) + shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir) + shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir) + shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir) def launch_afl_thread(input_corpus, output_corpus, target_binary, @@ -109,25 +109,26 @@ def fuzz(input_corpus, output_corpus, target_binary, master_only=False): print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, - ["-M", "afl-master"]) + ['-M', 'afl-master']) time.sleep(5) if master_only: - sharing_dir = "afl-master" + sharing_dir = 'afl-master' else: launch_afl_thread(input_corpus, output_corpus, target_binary, - ["-S", "afl-secondary"]) + ['-S', 'afl-secondary']) time.sleep(5) - sharing_dir = "afl-secondary" + sharing_dir = 'afl-secondary' # Start an instance of SymCC. # We need to ensure it uses the symbolic version of libc++. - print("Starting the SymCC helper") + print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [ os.path.join(symcc_workdir, - "symcc_fuzzing_helper"), "-o", output_corpus, "-a", - sharing_dir, "-n", "symcc", "--", symcc_target_binary, "@@" + 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', + sharing_dir, '-n', 'symcc', '--', symcc_target_binary, '@@' ] - subprocess.Popen(cmd, env=new_environ) + with subprocess.Popen(cmd, env=new_environ): + pass diff --git a/fuzzers/symcc_aflplusplus/fuzzer.py b/fuzzers/symcc_aflplusplus/fuzzer.py index 9a1b38da2..1737d0567 100644 --- a/fuzzers/symcc_aflplusplus/fuzzer.py +++ b/fuzzers/symcc_aflplusplus/fuzzer.py @@ -31,7 +31,7 @@ def get_symcc_build_dir(target_directory): def build(): """Build an AFL version and SymCC version of the benchmark""" - print("Step 1: Building with AFL") + print('Step 1: Building with AFL') build_directory = os.environ['OUT'] # Save the environment for use in SymCC @@ -44,48 +44,48 @@ def build(): # Restore SRC to its initial state so we can build again without any # trouble. For some OSS-Fuzz projects, build_benchmark cannot be run # twice in the same directory without this. - aflplusplus_fuzzer.build("tracepc") + aflplusplus_fuzzer.build('tracepc') - print("Step 2: Completed AFL build") + print('Step 2: Completed AFL build') # Copy over AFL artifacts needed by SymCC. - shutil.copy("/afl/afl-fuzz", build_directory) - shutil.copy("/afl/afl-showmap", build_directory) + shutil.copy('/afl/afl-fuzz', build_directory) + shutil.copy('/afl/afl-showmap', build_directory) # Build the SymCC-instrumented target. - print("Step 3: Building the benchmark with SymCC") + print('Step 3: Building the benchmark with SymCC') symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) os.mkdir(symcc_build_dir) # Set flags to ensure compilation with SymCC. 
- new_env['CC'] = "/symcc/build/symcc" - new_env['CXX'] = "/symcc/build/sym++" - new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace("-stlib=libc++", "") + new_env['CC'] = '/symcc/build/symcc' + new_env['CXX'] = '/symcc/build/sym++' + new_env['CXXFLAGS'] = new_env['CXXFLAGS'].replace('-stlib=libc++', '') new_env['CXXFLAGS'] += ' -ldl' new_env['FUZZER_LIB'] = '/libfuzzer-harness.o' new_env['OUT'] = symcc_build_dir - new_env['CXXFLAGS'] += " -fno-sanitize=all " - new_env['CFLAGS'] += " -fno-sanitize=all " + new_env['CXXFLAGS'] += ' -fno-sanitize=all ' + new_env['CFLAGS'] += ' -fno-sanitize=all ' # Setting this environment variable instructs SymCC to use the # libcxx library compiled with SymCC instrumentation. - new_env['SYMCC_LIBCXX_PATH'] = "/libcxx_native_build" + new_env['SYMCC_LIBCXX_PATH'] = '/libcxx_native_build' # Instructs SymCC to consider no symbolic inputs at runtime. This is needed # if, for example, some tests are run during compilation of the benchmark. - new_env['SYMCC_NO_SYMBOLIC_INPUT'] = "1" + new_env['SYMCC_NO_SYMBOLIC_INPUT'] = '1' # Build benchmark. utils.build_benchmark(env=new_env) # Copy over symcc artifacts and symbolic libc++. shutil.copy( - "/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so", + '/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) - shutil.copy("/usr/lib/libz3.so", os.path.join(symcc_build_dir, "libz3.so")) - shutil.copy("/libcxx_native_build/lib/libc++.so.1", symcc_build_dir) - shutil.copy("/libcxx_native_build/lib/libc++abi.so.1", symcc_build_dir) - shutil.copy("/rust/bin/symcc_fuzzing_helper", symcc_build_dir) + shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) + shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir) + shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir) + shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir) def launch_afl_thread(input_corpus, output_corpus, target_binary, @@ -108,26 +108,27 @@ def fuzz(input_corpus, output_corpus, target_binary): target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # Start a master and secondary instance of AFL. # We need both because of the way SymCC works. print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) - launch_afl_thread(input_corpus, output_corpus, target_binary, ["-S", "afl"]) + launch_afl_thread(input_corpus, output_corpus, target_binary, ['-S', 'afl']) time.sleep(5) launch_afl_thread(input_corpus, output_corpus, target_binary, - ["-S", "afl-secondary"]) + ['-S', 'afl-secondary']) time.sleep(5) # Start an instance of SymCC. # We need to ensure it uses the symbolic version of libc++. 
- print("Starting the SymCC helper") + print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [ os.path.join(symcc_workdir, - "symcc_fuzzing_helper"), "-o", output_corpus, "-a", - "afl-secondary", "-n", "symcc", "-m", "--", symcc_target_binary, "@@" + 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', + 'afl-secondary', '-n', 'symcc', '-m', '--', symcc_target_binary, '@@' ] - subprocess.Popen(cmd, env=new_environ) + with subprocess.Popen(cmd, env=new_environ): + pass diff --git a/fuzzers/symcc_aflplusplus_single/fuzzer.py b/fuzzers/symcc_aflplusplus_single/fuzzer.py index 1bea1a42d..15b4cfd02 100644 --- a/fuzzers/symcc_aflplusplus_single/fuzzer.py +++ b/fuzzers/symcc_aflplusplus_single/fuzzer.py @@ -31,7 +31,7 @@ def get_symcc_build_dir(target_directory): def build(): """Build an AFL version and SymCC version of the benchmark""" - print("Step 1: Building with AFL and SymCC") + print('Step 1: Building with AFL and SymCC') build_directory = os.environ['OUT'] # First build with AFL. @@ -41,23 +41,23 @@ def build(): # Restore SRC to its initial state so we can build again without any # trouble. For some OSS-Fuzz projects, build_benchmark cannot be run # twice in the same directory without this. - aflplusplus_fuzzer.build("tracepc", "symcc") + aflplusplus_fuzzer.build('tracepc', 'symcc') - print("Step 2: Completed AFL build") + print('Step 2: Completed AFL build') # Copy over AFL artifacts needed by SymCC. - shutil.copy("/afl/afl-fuzz", build_directory) - shutil.copy("/afl/afl-showmap", build_directory) + shutil.copy('/afl/afl-fuzz', build_directory) + shutil.copy('/afl/afl-showmap', build_directory) # Copy over symcc artifacts and symbolic libc++. - print("Step 3: Copying SymCC files") + print('Step 3: Copying SymCC files') symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) shutil.copy( - "/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so", + '/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) - shutil.copy("/usr/lib/libz3.so", os.path.join(symcc_build_dir, "libz3.so")) - shutil.copy("/libcxx_native_build/lib/libc++.so.1", symcc_build_dir) - shutil.copy("/libcxx_native_build/lib/libc++abi.so.1", symcc_build_dir) - shutil.copy("/rust/bin/symcc_fuzzing_helper", symcc_build_dir) + shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) + shutil.copy('/libcxx_native_build/lib/libc++.so.1', symcc_build_dir) + shutil.copy('/libcxx_native_build/lib/libc++abi.so.1', symcc_build_dir) + shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir) def launch_afl_thread(input_corpus, output_corpus, target_binary, @@ -80,24 +80,25 @@ def fuzz(input_corpus, output_corpus, target_binary): target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # Start a master and secondary instance of AFL. # We need both because of the way SymCC works. print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, - ["-S", "afl-secondary"]) + ['-S', 'afl-secondary']) time.sleep(5) # Start an instance of SymCC. # We need to ensure it uses the symbolic version of libc++. 
- print("Starting the SymCC helper") + print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [ os.path.join(symcc_workdir, - "symcc_fuzzing_helper"), "-o", output_corpus, "-a", - "afl-secondary", "-n", "symcc", "-m", "--", symcc_target_binary, "@@" + 'symcc_fuzzing_helper'), '-o', output_corpus, '-a', + 'afl-secondary', '-n', 'symcc', '-m', '--', symcc_target_binary, '@@' ] - subprocess.Popen(cmd, env=new_environ) + with subprocess.Popen(cmd, env=new_environ): + pass diff --git a/fuzzers/symqemu_aflplusplus/fuzzer.py b/fuzzers/symqemu_aflplusplus/fuzzer.py index 47348dcf6..bb8e1c0ec 100644 --- a/fuzzers/symqemu_aflplusplus/fuzzer.py +++ b/fuzzers/symqemu_aflplusplus/fuzzer.py @@ -39,7 +39,7 @@ def build(): fuzz_target = os.getenv('FUZZ_TARGET') # First, build an uninstrumented binary for Eclipser. - aflplusplus_fuzzer.build("qemu", "eclipser") + aflplusplus_fuzzer.build('qemu', 'eclipser') eclipser_dir = get_symcc_build_dir(build_directory) os.mkdir(eclipser_dir) fuzz_binary = build_directory + '/' + fuzz_target @@ -49,23 +49,23 @@ def build(): # Second, build an instrumented binary for AFL++. os.environ = orig_env - aflplusplus_fuzzer.build("tracepc") + aflplusplus_fuzzer.build('tracepc') print('[build] Copying afl-fuzz to $OUT directory') # Copy afl-fuzz shutil.copy('/afl/afl-fuzz', build_directory) - shutil.copy("/afl/afl-showmap", build_directory) - shutil.copy("/rust/bin/symcc_fuzzing_helper", eclipser_dir) + shutil.copy('/afl/afl-showmap', build_directory) + shutil.copy('/rust/bin/symcc_fuzzing_helper', eclipser_dir) symcc_build_dir = get_symcc_build_dir(os.environ['OUT']) # Copy over symcc artifacts and symbolic libc++. shutil.copy( - "/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so", + '/symcc/build//SymRuntime-prefix/src/SymRuntime-build/libSymRuntime.so', symcc_build_dir) - shutil.copy("/usr/lib/libz3.so", os.path.join(symcc_build_dir, "libz3.so")) - shutil.copy("/rust/bin/symcc_fuzzing_helper", symcc_build_dir) - shutil.copy("/symqemu/build/x86_64-linux-user/symqemu-x86_64", + shutil.copy('/usr/lib/libz3.so', os.path.join(symcc_build_dir, 'libz3.so')) + shutil.copy('/rust/bin/symcc_fuzzing_helper', symcc_build_dir) + shutil.copy('/symqemu/build/x86_64-linux-user/symqemu-x86_64', symcc_build_dir) @@ -89,31 +89,32 @@ def fuzz(input_corpus, output_corpus, target_binary): target_binary_name = os.path.basename(target_binary) symcc_target_binary = os.path.join(symcc_workdir, target_binary_name) - os.environ['AFL_DISABLE_TRIM'] = "1" + os.environ['AFL_DISABLE_TRIM'] = '1' # Start a master and secondary instance of AFL. # We need both because of the way SymCC works. print('[run_fuzzer] Running AFL for SymCC') afl_fuzzer.prepare_fuzz_environment(input_corpus) launch_afl_thread(input_corpus, output_corpus, target_binary, - ["-S", "afl-secondary"]) + ['-S', 'afl-secondary']) time.sleep(5) # Start an instance of SymCC. # We need to ensure it uses the symbolic version of libc++. 
- symqemu_target = os.path.join(symcc_workdir, "symqemu-x86_64") + symqemu_target = os.path.join(symcc_workdir, 'symqemu-x86_64') if os.path.isfile(symqemu_target): - print("Found symqemu target") + print('Found symqemu target') else: - print("Did not find symqemu target") + print('Did not find symqemu target') - print("Starting the SymCC helper") + print('Starting the SymCC helper') new_environ = os.environ.copy() new_environ['LD_LIBRARY_PATH'] = symcc_workdir cmd = [ - os.path.join(symcc_workdir, "symcc_fuzzing_helper"), "-o", - output_corpus, "-a", "afl-secondary", "-n", "symqemu", "-m", "--", - symqemu_target, symcc_target_binary, "@@" + os.path.join(symcc_workdir, 'symcc_fuzzing_helper'), '-o', + output_corpus, '-a', 'afl-secondary', '-n', 'symqemu', '-m', '--', + symqemu_target, symcc_target_binary, '@@' ] - print("Running command: %s" % (" ".join(cmd))) - subprocess.Popen(cmd, env=new_environ) + print(f'Running command: {" ".join(cmd)}') + with subprocess.Popen(cmd, env=new_environ): + pass diff --git a/fuzzers/test_fuzzers.py b/fuzzers/test_fuzzers.py index c67a337c8..7e927b367 100644 --- a/fuzzers/test_fuzzers.py +++ b/fuzzers/test_fuzzers.py @@ -39,7 +39,7 @@ def get_all_fuzzer_dirs(): def _get_fuzzer_module(fuzzer): """Get the module for |fuzzer|'s fuzzer.py.""" - return 'fuzzers.{}.fuzzer'.format(fuzzer) + return f'fuzzers.{fuzzer}.fuzzer' def _get_all_fuzzer_modules(): diff --git a/fuzzers/token_level/fuzzer.py b/fuzzers/token_level/fuzzer.py index d85de4da0..8a9023aa4 100755 --- a/fuzzers/token_level/fuzzer.py +++ b/fuzzers/token_level/fuzzer.py @@ -23,18 +23,18 @@ def prepare_fuzz_environment(input_corpus): """Prepare to fuzz with a LibAFL-based fuzzer.""" - os.environ['ASAN_OPTIONS'] = "abort_on_error=1:detect_leaks=0:"\ - "malloc_context_size=0:symbolize=0:"\ - "allocator_may_return_null=1:"\ - "detect_odr_violation=0:handle_segv=0:"\ - "handle_sigbus=0:handle_abort=0:"\ - "handle_sigfpe=0:handle_sigill=0" - os.environ['UBSAN_OPTIONS'] = "abort_on_error=1:"\ - "allocator_release_to_os_interval_ms=500:"\ - "handle_abort=0:handle_segv=0:"\ - "handle_sigbus=0:handle_sigfpe=0:"\ - "handle_sigill=0:print_stacktrace=0:"\ - "symbolize=0:symbolize_inline_frames=0" + os.environ['ASAN_OPTIONS'] = 'abort_on_error=1:detect_leaks=0:'\ + 'malloc_context_size=0:symbolize=0:'\ + 'allocator_may_return_null=1:'\ + 'detect_odr_violation=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_abort=0:'\ + 'handle_sigfpe=0:handle_sigill=0' + os.environ['UBSAN_OPTIONS'] = 'abort_on_error=1:'\ + 'allocator_release_to_os_interval_ms=500:'\ + 'handle_abort=0:handle_segv=0:'\ + 'handle_sigbus=0:handle_sigfpe=0:'\ + 'handle_sigill=0:print_stacktrace=0:'\ + 'symbolize=0:symbolize_inline_frames=0' # Create at least one non-empty seed to start. utils.create_seed_file_for_empty_corpus(input_corpus) diff --git a/fuzzers/utils.py b/fuzzers/utils.py index 0a2bca0e6..c44169c37 100644 --- a/fuzzers/utils.py +++ b/fuzzers/utils.py @@ -33,8 +33,8 @@ # Flags to use when using sanitizer for bug based benchmarking. SANITIZER_FLAGS = [ '-fsanitize=address', - # Matches UBSan features enabled in OSS-Fuzz. - # See https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/Dockerfile#L94 + # Matches UBSan features enabled in OSS-Fuzz. 
See + # https://github.com/google/oss-fuzz/blob/master/infra/base-images/base-builder/Dockerfile#L94 '-fsanitize=array-bounds,bool,builtin,enum,float-divide-by-zero,function,' 'integer-divide-by-zero,null,object-size,return,returns-nonnull-attribute,' 'shift,signed-integer-overflow,unreachable,vla-bound,vptr', @@ -77,8 +77,7 @@ def build_benchmark(env=None): benchmark = os.getenv('BENCHMARK') fuzzer = os.getenv('FUZZER') - print('Building benchmark {benchmark} with fuzzer {fuzzer}'.format( - benchmark=benchmark, fuzzer=fuzzer)) + print(f'Building benchmark {benchmark} with fuzzer {fuzzer}') subprocess.check_call(['/bin/bash', '-ex', build_script], env=env) @@ -96,7 +95,7 @@ def append_flags(env_var, additional_flags, env=None): def get_config_value(attribute): """Gets config attribute value from benchmark config yaml file.""" - with open(BENCHMARK_CONFIG_YAML_PATH) as file_handle: + with open(BENCHMARK_CONFIG_YAML_PATH, encoding='utf-8') as file_handle: config = yaml.load(file_handle, yaml.SafeLoader) return config.get(attribute) @@ -150,7 +149,7 @@ def get_dictionary_path(target_binary): return None config = configparser.ConfigParser() - with open(options_file_path, 'r') as file_handle: + with open(options_file_path, 'r', encoding='utf-8') as file_handle: try: config.read_file(file_handle) except configparser.Error as error: @@ -211,8 +210,7 @@ def initialize_env(env=None): set_compilation_flags(env) for env_var in ['FUZZ_TARGET', 'CFLAGS', 'CXXFLAGS']: - print('{env_var} = {env_value}'.format(env_var=env_var, - env_value=os.getenv(env_var))) + print(f'{env_var} = {os.getenv(env_var)}') def get_env(env_var, default_value=None): @@ -240,5 +238,5 @@ def create_seed_file_for_empty_corpus(input_corpus): print('Creating a fake seed file in empty corpus directory.') default_seed_file = os.path.join(input_corpus, 'default_seed') - with open(default_seed_file, 'w') as file_handle: + with open(default_seed_file, 'w', encoding='utf-8') as file_handle: file_handle.write('hi') diff --git a/fuzzers/weizz_qemu/fuzzer.py b/fuzzers/weizz_qemu/fuzzer.py index 215c611fe..739ec3e1b 100644 --- a/fuzzers/weizz_qemu/fuzzer.py +++ b/fuzzers/weizz_qemu/fuzzer.py @@ -27,11 +27,11 @@ def build(): os.environ['FUZZER_LIB'] = '/libQEMU.a' # QEMU doesn't like ASan cflags = filter(lambda flag: not flag.startswith('-fsanitize=address'), - os.environ["CFLAGS"].split()) + os.environ['CFLAGS'].split()) cxxflags = filter(lambda flag: not flag.startswith('-fsanitize=address'), - os.environ["CXXFLAGS"].split()) - os.environ["CFLAGS"] = ' '.join(cflags) - os.environ["CXXFLAGS"] = ' '.join(cxxflags) + os.environ['CXXFLAGS'].split()) + os.environ['CFLAGS'] = ' '.join(cflags) + os.environ['CXXFLAGS'] = ' '.join(cxxflags) utils.build_benchmark() diff --git a/presubmit.py b/presubmit.py index 1be78153e..7f99b0096 100644 --- a/presubmit.py +++ b/presubmit.py @@ -241,9 +241,6 @@ def pytype(paths: List[Path]) -> bool: """Run pytype on |path| if it is a python file. Return False if it fails type checking.""" paths = [path for path in paths if is_python(path)] - if not paths: - return True - base_command = ['python3', '-m', 'pytype'] success = True @@ -286,8 +283,8 @@ def validate_experiment_requests(paths: List[Path]): experiment_requests = yaml_utils.read( automatic_run_experiment.REQUESTED_EXPERIMENTS_PATH) except yaml.parser.ParserError: - print('Error parsing %s.' 
% - automatic_run_experiment.REQUESTED_EXPERIMENTS_PATH) + print('Error parsing ' + f'{automatic_run_experiment.REQUESTED_EXPERIMENTS_PATH}.') return False # Only validate the latest request. @@ -295,8 +292,8 @@ def validate_experiment_requests(paths: List[Path]): experiment_requests[:1]) if not result: - print('%s is not valid.' % - automatic_run_experiment.REQUESTED_EXPERIMENTS_PATH) + print(f'{automatic_run_experiment.REQUESTED_EXPERIMENTS_PATH}' + ' is not valid.') return result @@ -332,9 +329,9 @@ def license_check(paths: List[Path]) -> bool: if is_path_ignored(path): continue - with open(path) as file_handle: + with open(path, encoding='utf-8') as file_handle: if _LICENSE_CHECK_STRING not in file_handle.read(): - print('Missing license header in file %s.' % str(path)) + print(f'Missing license header in file {str(path)}.') success = False return success @@ -378,11 +375,11 @@ def do_default_checks(file_paths: List[Path], checks) -> bool: continue if not check(file_paths): - print('ERROR: %s failed, see errors above.' % check_name) + print(f'ERROR: {check_name} failed, see errors above.') failed_checks.append(check_name) if failed_checks: - print('Failed checks: %s' % ' '.join(failed_checks)) + print(f'Failed checks: {" ".join(failed_checks)}') return False return True @@ -443,7 +440,7 @@ def do_single_check(command: str, relevant_files: List[Path], else: success = check(relevant_files) if not success: - print('ERROR: %s failed, see errors above.' % check.__name__) + print(f'ERROR: {check.__name__} failed, see errors above.') return success diff --git a/requirements.txt b/requirements.txt index 8be91dc29..ccb8d5b33 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,31 +1,31 @@ -alembic==1.4.0 -google-api-python-client==2.5.0 -google-auth==1.30.1 -google-cloud-error-reporting==1.1.2 -google-cloud-logging==1.15.1 -google-cloud-secret-manager==2.4.0 -clusterfuzz==0.0.1a0 -Jinja2==2.11.3 -numpy==1.22.0 -MarkupSafe==2.0.1 -Orange3==3.28.0 -pandas==1.2.4 -psutil==5.9.0 -psycopg2-binary==2.8.4 -pyfakefs==3.7.1 -pytest==6.1.2 -python-dateutil==2.8.1 -pytz==2019.3 -PyYAML==5.4 -redis==3.5.3 -rq==1.4.3 -scikit-posthocs==0.6.2 -scipy==1.6.2 -seaborn==0.11.1 -sqlalchemy==1.3.19 -protobuf==3.20.2 +alembic==1.8.1 +google-api-python-client==2.64.0 +google-auth==2.12.0 +google-cloud-error-reporting==1.6.3 +google-cloud-logging==3.1.2 +google-cloud-secret-manager==2.12.6 +clusterfuzz==2.5.6 +Jinja2==3.1.2 +numpy==1.23.4 +MarkupSafe==2.1.1 +Orange3==3.33.0 +pandas==1.4.4 +psutil==5.9.2 +psycopg2-binary==2.9.4 +pyfakefs==5.0.0 +pytest==7.1.3 +python-dateutil==2.8.2 +pytz==2020.1 +PyYAML==6.0 +redis==4.3.4 +rq==1.11.1 +scikit-posthocs==0.7.0 +scipy==1.9.2 +seaborn==0.12.0 +sqlalchemy==1.4.41 +protobuf==3.20.3 # Needed for development. -pylint==2.7.4 -pytype==2021.4.15 -yapf==0.30.0 +pylint==2.15.4 +pytype==2022.10.13 +yapf==0.32.0 diff --git a/service/core-fuzzers.yaml b/service/core-fuzzers.yaml index dc86b833c..e5a0d98a4 100644 --- a/service/core-fuzzers.yaml +++ b/service/core-fuzzers.yaml @@ -5,11 +5,12 @@ fuzzers: - aflfast - aflplusplus - aflsmart - - entropic + - centipede - eclipser - fairfuzz - honggfuzz - - lafintel + - libafl - libfuzzer - mopt - - libafl + # - klee # To be supported later. + # - symcc_aflplusplus # To be supported later. diff --git a/service/setup.bash b/service/setup.bash index d6f8784a7..2b9c26a16 100755 --- a/service/setup.bash +++ b/service/setup.bash @@ -16,7 +16,7 @@ # Use this script once to setup a machine for running the fuzzbench service. 
# Install a supported python version. -export PYTHON_VERSION=3.7.6 +export PYTHON_VERSION=3.10.8 sudo apt-get update -y && sudo apt-get install -y \ build-essential \ diff --git a/service/test_automatic_run_experiment.py b/service/test_automatic_run_experiment.py index 60e1a796e..f5f822ac1 100644 --- a/service/test_automatic_run_experiment.py +++ b/service/test_automatic_run_experiment.py @@ -83,17 +83,42 @@ def test_run_requested_experiment(mocked_get_requested_experiments, 'sqlite3_ossfuzz', 'systemd_fuzz-link-parser', 'zlib_zlib_uncompress_fuzzer', + 'arrow_parquet-arrow-fuzz', + 'aspell_aspell_fuzzer', + 'ffmpeg_ffmpeg_demuxer_fuzzer', + 'file_magic_fuzzer', 'freetype2-2017', + 'grok_grk_decompress_fuzzer', 'harfbuzz-1.3.2', 'lcms-2017-03-21', + 'libarchive_libarchive_fuzzer', + 'libgit2_objects_fuzzer', + 'libhevc_hevc_dec_fuzzer', + 'libhtp_fuzz_htp', 'libjpeg-turbo-07-2017', - 'libpng-1.2.56', + 'libpng-1.6.38', 'libxml2-v2.9.2', + 'libxml2_libxml2_xml_reader_for_file_fuzzer', + 'matio_matio_fuzzer', + 'mruby-2018-05-23', + 'muparser_set_eval_fuzzer', + 'njs_njs_process_script_fuzzer', + 'openh264_decoder_fuzzer', 'openthread-2019-12-23', + 'php_php-fuzz-execute', + 'php_php-fuzz-parser-2020-07-25', + 'poppler_pdf_fuzzer', 'proj4-2017-08-14', + 'proj4_standard_fuzzer', + 'quickjs_eval-2020-01-05', 're2-2014-12-09', + 'stb_stbi_read_fuzzer', + 'systemd_fuzz-varlink', + 'usrsctp_fuzzer_connect', 'vorbis-2017-12-11', + 'wireshark_fuzzshark_ip', 'woff2-2016-05-06', + 'zstd_stream_decompress', ] expected_call = mock.call(expected_experiment_name, expected_config_file, diff --git a/src_analysis/diff_utils.py b/src_analysis/diff_utils.py index a05bc051f..476e0baf2 100644 --- a/src_analysis/diff_utils.py +++ b/src_analysis/diff_utils.py @@ -50,10 +50,9 @@ def get_changed_files(commit_name: str = 'origin...') -> List[str]: # This probably won't happen to anyone. It can happen if your copy # of the repo wasn't cloned so give instructions on how to handle. pass - raise DiffError(( - '"%s" failed.\n' + raise DiffError( + f'"{" ".join(committed_diff_command)}" failed.\n' 'Please run "git fetch origin master --unshallow && ' 'git symbolic-ref refs/remotes/origin/HEAD refs/remotes/origin/master" ' 'and try again.\n' - 'Please file an issue if this doesn\'t fix things.') % - ' '.join(committed_diff_command)) + 'Please file an issue if this doesn\'t fix things.') diff --git a/src_analysis/fuzzer_dependencies.py b/src_analysis/fuzzer_dependencies.py index 7cf6fb9f9..e8dcf0f36 100644 --- a/src_analysis/fuzzer_dependencies.py +++ b/src_analysis/fuzzer_dependencies.py @@ -48,7 +48,7 @@ def _get_fuzzer_module_name(fuzzer: str) -> str: """Returns the name of the fuzzer.py module of |fuzzer|. 
Assumes |fuzzer| is an underlying fuzzer.""" - return 'fuzzers.{}.fuzzer'.format(fuzzer) + return f'fuzzers.{fuzzer}.fuzzer' def is_builtin_module(module: types.ModuleType) -> bool: diff --git a/src_analysis/test_benchmark_dependencies.py b/src_analysis/test_benchmark_dependencies.py index 21e08a861..ea310ebb2 100644 --- a/src_analysis/test_benchmark_dependencies.py +++ b/src_analysis/test_benchmark_dependencies.py @@ -18,7 +18,7 @@ from src_analysis import benchmark_dependencies OSS_FUZZ_BENCHMARK = 'curl_curl_fuzzer_http' -STANDARD_BENCHMARK = 'libpng-1.2.56' +STANDARD_BENCHMARK = 'libpng-1.6.38' BENCHMARK_YAML_PATH = os.path.join(benchmark_utils.BENCHMARKS_DIR, OSS_FUZZ_BENCHMARK, 'benchmark.yaml') STANDARD_BUILD_SH_PATH = os.path.join(benchmark_utils.BENCHMARKS_DIR, @@ -52,4 +52,4 @@ def test_get_files_dependent_benchmarks(): benchmark_dependencies.get_files_dependent_benchmarks( [fake_build_sh_path])) - assert dependent_benchmarks == [] + assert not dependent_benchmarks
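Note on the recurring subprocess.Popen change above (neuzz, symcc_afl, symcc_aflplusplus, symcc_aflplusplus_single and symqemu_aflplusplus): Popen's context manager closes the child's pipes and calls wait() on exit, so 'with subprocess.Popen(cmd): pass' does more than silence pylint's consider-using-with warning (the likely motivation for the change); it also blocks until the child exits, whereas the previous bare subprocess.Popen(cmd) returned immediately and left the helper running in the background. The sketch below only illustrates that behaviour; the ['sleep', '2'] command and the variable names are made up for the example and are not part of this patch.

import subprocess

# Hypothetical stand-in for a long-running helper such as symcc_fuzzing_helper.
cmd = ['sleep', '2']

# Entering the context starts the child; leaving the block closes its pipes
# (if any were requested) and calls wait(), so execution resumes only after
# the child has exited.
with subprocess.Popen(cmd) as proc:
    pass

# returncode is set by the implicit wait() performed on exit from the block.
print(f'helper exited with returncode {proc.returncode}')

For the fuzz() entry points changed above, this keeps the trial alive for as long as the SymCC helper runs; for run_neuzz(), it means nn.py is now waited on before the 40-second sleep, which is worth verifying against the intended warm-up behaviour.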