From de7eeba84d2416b5e532b7537e2bb3b57039885d Mon Sep 17 00:00:00 2001
From: ragusaa <54862477+ragusaa@users.noreply.github.com>
Date: Thu, 13 Jun 2024 13:27:40 -0400
Subject: [PATCH] Apply suggestions from code review

Co-authored-by: Micah Snyder
Signed-off-by: Andy Ragusa
---
 libclamav/ole2_extract.c                    |  9 ++---
 unit_tests/clamscan/ole2_encryption_test.py | 41 ++++++++-------------
 2 files changed, 20 insertions(+), 30 deletions(-)

diff --git a/libclamav/ole2_extract.c b/libclamav/ole2_extract.c
index ac77dbcb24..201f62dd60 100644
--- a/libclamav/ole2_extract.c
+++ b/libclamav/ole2_extract.c
@@ -708,11 +708,11 @@ const uint16_t XLS_XOR_OBFUSCATION = 0;
 const uint16_t XLS_RC4_ENCRYPTION = 1;
 const uint32_t MINISTREAM_CUTOFF_SIZE = 0x1000;
 
-static uint32_t get_stream_data_offset(ole2_header_t *hdr, const property_t *word_block, uint16_t sector)
+static size_t get_stream_data_offset(ole2_header_t *hdr, const property_t *word_block, uint16_t sector)
 {
-    uint32_t offset = (1 << hdr->log2_big_block_size);
-    uint32_t sector_size = offset;
-    uint32_t fib_offset = 0;
+    size_t offset = (1 << hdr->log2_big_block_size);
+    size_t sector_size = offset;
+    size_t fib_offset = 0;
 
     if (word_block->size < MINISTREAM_CUTOFF_SIZE) {
         fib_offset = offset + sector_size * hdr->sbat_root_start;
@@ -839,7 +839,6 @@ static void test_for_xls_encryption(const property_t *word_block, ole2_header_t
     }
 
     /*Skip past this size.*/
-    memcpy(&tmp16, &(ptr[idx]), 2);
     if (sizeof(uint16_t) != read_uint16(ptr, block_size, &idx, &tmp16)) {
         return;
     }
diff --git a/unit_tests/clamscan/ole2_encryption_test.py b/unit_tests/clamscan/ole2_encryption_test.py
index 06e91ecf5c..23146a4d74 100644
--- a/unit_tests/clamscan/ole2_encryption_test.py
+++ b/unit_tests/clamscan/ole2_encryption_test.py
@@ -21,38 +21,29 @@ def setUpClass(cls):
     def tearDownClass(cls):
         super(TC, cls).tearDownClass()
 
-    @classmethod
-    def assertStrings(self, tempdir, strings):
-        foundList = []
-        for s in strings:
-            foundList.append(False)
-
+    # Find the metadata.json file and verify its contents.
+    def verify_metadata_json(self, tempdir, expected=[], unexpected=[]):
         for parent, dirs, files in os.walk(tempdir):
             for f in files:
                 if "metadata.json" == f:
-                    mdf = os.path.join(parent, f)
-                    handle = open(mdf)
-                    lines = handle.readlines()
-                    handle.close()
-                    for l in lines:
-                        for i in range(0, len(strings)):
-                            m = re.search(strings[i], l)
-                            if m:
-                                foundList[i] = True
-                                break
-
-                    if not (False in foundList):
-                        break
-
-                    #Only one metadata.json
+                    with open(os.path.join(parent, f)) as handle:
+                        metadata_json = handle.read()
+                    self.verify_output(metadata_json, expected=expected, unexpected=unexpected)
+
+                    # There is only one metadata.json per scan.
+                    # We found it, so we can break out of the loop.
                     break
-        assert not (False in foundList)
 
     def setUp(self):
         super(TC, self).setUp()
 
     def tearDown(self):
         super(TC, self).tearDown()
+
+        # Remove scan temps directory between tests
+        if (self.path_tmp / "TD").exists():
+            shutil.rmtree(self.path_tmp / "TD")
+
         self.verify_valgrind_log()
 
     def test_FAT_doc(self):
@@ -91,9 +82,9 @@ def test_FAT_doc_metadata(self):
 
         assert output.ec == 0 # clean
 
-        neededStrings = [ '"Encrypted":1'
-            , '"EncryptedWithVelvetSweatshop":0'
-            ]
+        expected_strings = [
+            '"Encrypted":1',
+        ]
         self.assertStrings(tempdir, neededStrings)
 
     def test_ministream_doc(self):