From 3cc89f2eb1fad4fac8194dfb94d0b372239b9d2e Mon Sep 17 00:00:00 2001 From: jeFF0Falltrades <8444166+jeFF0Falltrades@users.noreply.github.com> Date: Sun, 1 Sep 2024 10:20:31 -0400 Subject: [PATCH 01/33] Updates RAT King Parser to commit b85abe5 --- lib/parsers_aux/ratking/__init__.py | 50 ++++--- lib/parsers_aux/ratking/utils/config_item.py | 7 +- .../decryptors/config_decryptor_aes_ecb.py | 1 + modules/processing/parsers/CAPE/AsyncRat.py | 88 +---------- modules/processing/parsers/CAPE/XWorm.py | 137 +----------------- 5 files changed, 44 insertions(+), 239 deletions(-) diff --git a/lib/parsers_aux/ratking/__init__.py b/lib/parsers_aux/ratking/__init__.py index 821b0b402d6..93359cf676f 100644 --- a/lib/parsers_aux/ratking/__init__.py +++ b/lib/parsers_aux/ratking/__init__.py @@ -5,7 +5,7 @@ # Author: jeFF0Falltrades # # Provides the primary functionality for parsing configurations from the -# AsyncRAT, DcRAT, QuasarRAT, VenomRAT, etc. RAT families +# AsyncRAT, DcRAT, QuasarRAT, VenomRAT, XWorm, XenoRAT, etc. 
RAT families # # MIT License # @@ -49,7 +49,9 @@ class RATConfigParser: config_item.SpecialFolderConfigItem(), config_item.EncryptedStringConfigItem(), ] - MIN_CONFIG_LEN = 7 + # Min and max number of items in a potential config section + MIN_CONFIG_LEN_FLOOR = 5 + MIN_CONFIG_LEN_CEILING = 7 PATTERN_VERIFY_HASH = rb"(?:\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01.+?\x2a.+?\x00{6,})" def __init__(self, file_data=False): @@ -73,7 +75,7 @@ def __init__(self, file_data=False): self.report["config"] = f"Exception encountered: {e}" # Decrypts/decodes values from an encrypted config - def decrypt_and_decode_config(self, encrypted_config): + def decrypt_and_decode_config(self, encrypted_config, min_config_len): decoded_config = {} selected_decryptor = 0 for item in self.CONFIG_ITEM_TYPES: @@ -101,8 +103,8 @@ def decrypt_and_decode_config(self, encrypted_config): arr_size, arr_rva = item_data[k] item_data[k] = self.dnpp.byte_array_from_size_and_rva(arr_size, arr_rva).hex() decoded_config.update(item_data) - if len(decoded_config) < self.MIN_CONFIG_LEN: - raise ConfigParserException("Minimum threshold of config items not met") + if len(decoded_config) < min_config_len: + raise ConfigParserException(f"Minimum threshold of config items not met for threshold: {len(decoded_config)}/{min_config_len}") return decoded_config # Searches for the RAT configuration in the Settings module @@ -130,17 +132,20 @@ def get_config_cctor_brute_force(self): # Get each .cctor method RVA and bytes content up to a RET op candidate_data = {rva: self.dnpp.string_from_offset(self.dnpp.offset_from_rva(rva), OPCODE_RET) for rva in candidates} config_start, decrypted_config = None, None - for method_rva, method_ins in candidate_data.items(): - logger.debug(f"Attempting brute force at .cctor method at {hex(method_rva)}") - try: - config_start, decrypted_config = ( - method_rva, - self.decrypt_and_decode_config(method_ins), - ) - break - except Exception as e: - logger.debug(e) - continue + min_config_len 
= self.MIN_CONFIG_LEN_CEILING + while decrypted_config is None and min_config_len >= self.MIN_CONFIG_LEN_FLOOR: + for method_rva, method_ins in candidate_data.items(): + logger.debug(f"Attempting brute force at .cctor method at {hex(method_rva)}") + try: + config_start, decrypted_config = ( + method_rva, + self.decrypt_and_decode_config(method_ins, min_config_len), + ) + break + except Exception as e: + logger.debug(e) + continue + min_config_len -= 1 if decrypted_config is None: raise ConfigParserException("No valid configuration could be parsed from any .cctor methods") return config_start, decrypted_config @@ -159,8 +164,17 @@ def get_config_verify_hash_method(self): config_start = self.dnpp.next_method_from_instruction_offset(hit.start()) # Configuration ends with ret operation, so use that as our terminator encrypted_config = self.dnpp.string_from_offset(config_start, OPCODE_RET) - decrypted_config = self.decrypt_and_decode_config(encrypted_config) - return config_start, decrypted_config + min_config_len = self.MIN_CONFIG_LEN_CEILING + while True: + try: + decrypted_config = self.decrypt_and_decode_config( + encrypted_config, min_config_len + ) + return config_start, decrypted_config + except Exception as e: + if min_config_len < self.MIN_CONFIG_LEN_FLOOR: + raise e + min_config_len -= 1 # Sorts the config by field name RVA prior to replacing RVAs with field # name strings (this is done last to preserve config ordering) diff --git a/lib/parsers_aux/ratking/utils/config_item.py b/lib/parsers_aux/ratking/utils/config_item.py index 2192f30917e..5bfbf78d73c 100644 --- a/lib/parsers_aux/ratking/utils/config_item.py +++ b/lib/parsers_aux/ratking/utils/config_item.py @@ -61,8 +61,11 @@ def parse_from(self, data): except Exception: logger.debug(f"Could not parse value from {obj} at {string_rva}") continue - fields[field_rva] = field_value - found_items += 1 + if field_rva not in fields: + fields[field_rva] = field_value + found_items += 1 + else: + 
logger.warning(f"Overlapping Field RVAs detected in config at {field_rva}") logger.debug(f"Parsed {found_items} {self.label} values") return fields diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py index cb0578fce86..943fb58ddfa 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py @@ -100,6 +100,7 @@ def decrypt_encrypted_strings(self): last_exc = e if result is None: logger.debug(f"Decryption failed for item {v}: {last_exc}") + result = v logger.debug(f"Key: {k}, Value: {result}") decrypted_config_strings[k] = result logger.debug("Successfully decrypted strings") diff --git a/modules/processing/parsers/CAPE/AsyncRat.py b/modules/processing/parsers/CAPE/AsyncRat.py index 040c41c084f..29c59a04fa2 100644 --- a/modules/processing/parsers/CAPE/AsyncRat.py +++ b/modules/processing/parsers/CAPE/AsyncRat.py @@ -1,87 +1,5 @@ -# based on https://github.com/c3rb3ru5d3d53c/mwcfg-modules/blob/master/asyncrat/asyncrat.py +from lib.parsers_aux.ratking import RATConfigParser -import base64 -import binascii -import re -import string -import struct -from contextlib import suppress -from Cryptodome.Cipher import AES -from Cryptodome.Protocol.KDF import PBKDF2 - - -def get_string(data, index, offset): - return data[index][offset:].decode("utf-8", "ignore") - - -def get_wide_string(data, index, offset): - return (data[index][offset:] + b"\x00").decode("utf-16") - - -def get_salt(): - return bytes.fromhex("BFEB1E56FBCD973BB219022430A57843003D5644D21E62B9D4F180E7E6C33941") - - -def decrypt(key, ciphertext): - aes_key = PBKDF2(key, get_salt(), 32, 50000) - cipher = AES.new(aes_key, AES.MODE_CBC, ciphertext[32 : 32 + 16]) - plaintext = cipher.decrypt(ciphertext[48:]).decode("ascii", "ignore").strip() - return plaintext - - -def decrypt_config_string(key, data, index): - return 
"".join(filter(lambda x: x in string.printable, decrypt(key, base64.b64decode(data[index][2:])))) - - -def decrypt_config_list(key, data, index): - result = decrypt_config_string(key, data, index) - if result == "null": - return [] - return result.split(",") - - -def extract_config(filebuf): - config = {} - addr = re.search(b"BSJB", filebuf).start() - if not addr: - return - - strings_offset = struct.unpack(" 1: - return filtered_bytes - return "".join(filtered_bytes) - - -def extract_config(data): - config_dict = {} - with suppress(Exception): - if data[:2] == b"MZ": - dn = dnfile.dnPE(data=data) - extracted = [] - conf = [] - - ## Mutex is used to derive AES key, so if it's not found, the extractor is useless - ## The main problem is Mutex is not found in fixed location, so this trick is used to find the Mutex - for pattern in mutexPatterns: - mutexMatched = pattern.findall(data) - if mutexMatched: - mutex = dn.net.user_strings.get(int.from_bytes(mutexMatched[0], "little")).value - AESKey = deriveAESKey(mutex) - break - else: - return - - for match in confPattern.findall(data): - er_string = dn.net.user_strings.get(int.from_bytes(match, "little")).value - extracted.append(er_string) - - for i in range(5): - with suppress(Exception): - conf.append(decryptAES(AESKey, extracted[i], AES.MODE_ECB)) - - config_dict["C2"] = conf[0] - - ## Sometimes the port is not found in configs and 'AES Key (connections)' is shifted with SPL' - if 1 <= int(conf[1]) <= 65535: - config_dict["Port"] = conf[1] - config_dict["AES Key (connections)"] = conf[2] - config_dict["SPL"] = conf[3] - else: - config_dict["Port"] = "" - config_dict["AES Key (connections)"] = conf[1] - config_dict["SPL"] = conf[2] - config_dict["AES Key (configs)"] = AESKey - config_dict["Mutex"] = mutex - - installBinMatch = installBinNamePattern.findall(data) - installDirMatch = installDirPattern.findall(data) - - if installDirMatch: - installDir = dn.net.user_strings.get(int.from_bytes(installDirMatch[0], 
"little")).value - config_dict["InstallDir"] = decryptAES(AESKey, installDir, AES.MODE_ECB) - if installBinMatch: - installBinName = dn.net.user_strings.get(int.from_bytes(installBinMatch[0], "little")).value - config_dict["InstallBinName"] = decryptAES(AESKey, installBinName, AES.MODE_ECB) - else: - lines = data.decode().split("\n") - if "," in lines[0]: - c2_list = lines[0].split(",") - config_dict["C2s"] = c2_list - else: - config_dict["C2"] = lines[0] - config_dict["Port"] = lines[1] - config_dict["AES Key (connections)"] = lines[2] - config_dict["SPL"] = lines[3] - config_dict["USBNM"] = lines[4] - - return config_dict +def extract_config(data: bytes): + return RATConfigParser(data).report.get("config", {}) From bdac3e0acbf524af38c57cb79f5595af8d7906f3 Mon Sep 17 00:00:00 2001 From: jeFF0Falltrades <8444166+jeFF0Falltrades@users.noreply.github.com> Date: Thu, 19 Sep 2024 23:44:34 -0400 Subject: [PATCH 02/33] Brings RAT King Parser to parity with v3.0.0 --- lib/parsers_aux/ratking/__init__.py | 243 +++++++++++------- .../{utils => }/config_parser_exception.py | 3 +- lib/parsers_aux/ratking/utils/__init__.py | 25 ++ lib/parsers_aux/ratking/utils/config_item.py | 75 +++--- lib/parsers_aux/ratking/utils/data_utils.py | 23 +- .../ratking/utils/decryptors/__init__.py | 18 +- .../utils/decryptors/config_decryptor.py | 29 ++- .../decryptors/config_decryptor_aes_cbc.py | 209 +++++++++------ .../decryptors/config_decryptor_aes_ecb.py | 83 ++++-- .../config_decryptor_decrypt_xor.py | 128 +++++++++ .../decryptors/config_decryptor_plaintext.py | 97 ++++++- .../config_decryptor_random_hardcoded.py | 108 ++++++++ .../ratking/utils/dotnet_constants.py | 9 +- .../ratking/utils/dotnetpe_payload.py | 202 +++++++++------ 14 files changed, 917 insertions(+), 335 deletions(-) rename lib/parsers_aux/ratking/{utils => }/config_parser_exception.py (93%) create mode 100644 lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py create mode 100644 
lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py diff --git a/lib/parsers_aux/ratking/__init__.py b/lib/parsers_aux/ratking/__init__.py index 93359cf676f..6b724e46bbf 100644 --- a/lib/parsers_aux/ratking/__init__.py +++ b/lib/parsers_aux/ratking/__init__.py @@ -29,159 +29,220 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from logging import getLogger -from re import DOTALL, search +# from os.path import isfile +from re import DOTALL, compile, search +from typing import Any, Tuple + +# from yara import Rules + +from .config_parser_exception import ConfigParserException from .utils import config_item -from .utils.config_parser_exception import ConfigParserException -from .utils.decryptors import SUPPORTED_DECRYPTORS -from .utils.dotnet_constants import OPCODE_RET +from .utils.decryptors import ( + SUPPORTED_DECRYPTORS, + ConfigDecryptor, + IncompatibleDecryptorException, +) from .utils.dotnetpe_payload import DotNetPEPayload logger = getLogger(__name__) class RATConfigParser: - CONFIG_ITEM_TYPES = [ - config_item.BoolConfigItem(), - config_item.ByteArrayConfigItem(), - config_item.IntConfigItem(), - config_item.NullConfigItem(), - config_item.SpecialFolderConfigItem(), - config_item.EncryptedStringConfigItem(), - ] # Min and max number of items in a potential config section - MIN_CONFIG_LEN_FLOOR = 5 - MIN_CONFIG_LEN_CEILING = 7 - PATTERN_VERIFY_HASH = rb"(?:\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01.+?\x2a.+?\x00{6,})" + _MIN_CONFIG_LEN_FLOOR = 5 + _MIN_CONFIG_LEN_CEILING = 9 + + # Pattern to find the VerifyHash() method + _PATTERN_VERIFY_HASH = compile( + rb"\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01", DOTALL + ) - def __init__(self, file_data=False): - self.report = {"config": {}} + # def __init__(self, file_path: str, yara_rule: Rules = None) -> None: + def __init__(self, file_data: bytes = None) -> None: + self.report = { + "config": {}, + } try: + # Filled in 
_decrypt_and_decode_config() + self._incompatible_decryptors: list[int] = [] + try: + self._dnpp = DotNetPEPayload(file_data) + except Exception as e: + raise e + # self.report["sha256"] = self._dnpp.sha256 + # self.report["yara_possible_family"] = self._dnpp.yara_match - self.dnpp = DotNetPEPayload(file_data) - # self.report["sha256"] = self.dnpp.sha256 - # self.report["possible_yara_family"] = self.dnpp.yara_match - if self.dnpp.dotnetpe is None: - raise ConfigParserException("Failed to load file as .NET executable") - self.decryptor = None # Created in decrypt_and_decode_config() - self.report["config"] = self.get_config() - self.report["config"]["aes_key"] = ( - self.decryptor.key.hex() if self.decryptor is not None and self.decryptor.key is not None else "None" + # Assigned in _decrypt_and_decode_config() + self._decryptor: ConfigDecryptor = None + self.report["config"] = self._get_config() + self.report["key"] = ( + self._decryptor.key.hex() + if self._decryptor is not None and self._decryptor.key is not None + else "None" ) - self.report["config"]["aes_salt"] = ( - self.decryptor.salt.hex() if self.decryptor is not None and self.decryptor.salt is not None else "None" + self.report["salt"] = ( + self._decryptor.salt.hex() + if self._decryptor is not None and self._decryptor.salt is not None + else "None" ) except Exception as e: + # self.report["config"] = f"Exception encountered for {file_path}: {e}" self.report["config"] = f"Exception encountered: {e}" - # Decrypts/decodes values from an encrypted config - def decrypt_and_decode_config(self, encrypted_config, min_config_len): + # Decrypts/decodes values from an encrypted config and returns the + # decrypted/decoded config + def _decrypt_and_decode_config( + self, encrypted_config: bytes, min_config_len: int + ) -> dict[str, Any]: decoded_config = {} - selected_decryptor = 0 - for item in self.CONFIG_ITEM_TYPES: - item_data = item.parse_from(encrypted_config) + + for item_class in 
config_item.SUPPORTED_CONFIG_ITEMS: + item = item_class() + # Translate config Field RVAs to Field names + item_data = { + self._dnpp.field_name_from_rva(k): v + for k, v in item.parse_from(encrypted_config).items() + } + if len(item_data) > 0: if type(item) is config_item.EncryptedStringConfigItem: - # Translate encrypted string RVAs to encrypted values + # Translate config value RVAs to string values for k in item_data: - item_data[k] = self.dnpp.user_string_from_rva(item_data[k]) - # Decrypt the values - while selected_decryptor < len(SUPPORTED_DECRYPTORS): + item_data[k] = self._dnpp.user_string_from_rva(item_data[k]) + + # Attempt to decrypt encrypted values + for decryptor in SUPPORTED_DECRYPTORS: + if decryptor in self._incompatible_decryptors: + continue + + if self._decryptor is None: + # Try to instantiate the selected decryptor + # Add to incompatible list and move on upon failure + try: + self._decryptor = decryptor(self._dnpp) + except IncompatibleDecryptorException as ide: + logger.debug( + f"Decryptor incompatible {decryptor} : {ide}" + ) + self._incompatible_decryptors.append(decryptor) + continue try: - if self.decryptor is None: - self.decryptor = SUPPORTED_DECRYPTORS[selected_decryptor](self.dnpp, item_data) - item_data = self.decryptor.decrypt_encrypted_strings() + # Try to decrypt the encrypted strings + # Continue to next compatible decryptor on failure + item_data = self._decryptor.decrypt_encrypted_strings( + item_data + ) break except Exception as e: logger.debug( - f"Decryption failed with decryptor {SUPPORTED_DECRYPTORS[selected_decryptor]} : {e}, trying next decryptor..." 
+ f"Decryption failed with decryptor {decryptor} : {e}" ) - self.decryptor = None - selected_decryptor += 1 + self._decryptor = None + + if self._decryptor is None: + raise ConfigParserException("All decryptors failed") + elif type(item) is config_item.ByteArrayConfigItem: for k in item_data: arr_size, arr_rva = item_data[k] - item_data[k] = self.dnpp.byte_array_from_size_and_rva(arr_size, arr_rva).hex() + item_data[k] = self._dnpp.byte_array_from_size_and_rva( + arr_size, arr_rva + ).hex() + decoded_config.update(item_data) + if len(decoded_config) < min_config_len: - raise ConfigParserException(f"Minimum threshold of config items not met for threshold: {len(decoded_config)}/{min_config_len}") + raise ConfigParserException( + f"Minimum threshold of config items not met: {len(decoded_config)}/{min_config_len}" + ) return decoded_config - # Searches for the RAT configuration in the Settings module - def get_config(self): + # Searches for the RAT configuration section, using the VerifyHash() marker + # or brute-force, returning the decrypted config on success + def _get_config(self) -> dict[str, Any]: logger.debug("Extracting config...") try: - config_start, decrypted_config = self.get_config_verify_hash_method() + config_start, decrypted_config = self._get_config_verify_hash_method() except Exception: logger.debug("VerifyHash() method failed; Attempting .cctor brute force...") - # If the typical patterns are not found, start brute-forcing + # If the VerifyHash() method does not work, move to brute-forcing + # static constructors try: - config_start, decrypted_config = self.get_config_cctor_brute_force() + config_start, decrypted_config = self._get_config_cctor_brute_force() except Exception as e: - raise ConfigParserException("Could not identify config") from e - logger.debug(f"Config found at offset {hex(config_start)}...") - return self.translate_config_field_names(decrypted_config) + raise ConfigParserException(f"Could not identify config: {e}") + 
logger.debug(f"Config found at RVA {hex(config_start)}...") + return decrypted_config # Attempts to retrieve the config via brute-force, looking through every # static constructor (.cctor) and attempting to decode/decrypt a valid - # config from that constructor - def get_config_cctor_brute_force(self): - candidates = self.dnpp.method_rvas_from_name(".cctor") + # config from that constructor, returning the config RVA and decrypted + # config on success + def _get_config_cctor_brute_force(self) -> Tuple[int, dict[str, Any]]: + candidates = self._dnpp.methods_from_name(".cctor") if len(candidates) == 0: raise ConfigParserException("No .cctor method could be found") - # Get each .cctor method RVA and bytes content up to a RET op - candidate_data = {rva: self.dnpp.string_from_offset(self.dnpp.offset_from_rva(rva), OPCODE_RET) for rva in candidates} + + # For each .cctor method, map its RVA and body (in raw bytes) + candidate_cctor_data = { + method.rva: self._dnpp.method_body_from_method(method) + for method in candidates + } + config_start, decrypted_config = None, None - min_config_len = self.MIN_CONFIG_LEN_CEILING - while decrypted_config is None and min_config_len >= self.MIN_CONFIG_LEN_FLOOR: - for method_rva, method_ins in candidate_data.items(): - logger.debug(f"Attempting brute force at .cctor method at {hex(method_rva)}") + # Start at our ceiling value for number of config items + min_config_len = self._MIN_CONFIG_LEN_CEILING + + while decrypted_config is None and min_config_len >= self._MIN_CONFIG_LEN_FLOOR: + for method_rva, method_body in candidate_cctor_data.items(): + logger.debug( + f"Attempting brute force at .cctor method at {hex(method_rva)}" + ) try: config_start, decrypted_config = ( method_rva, - self.decrypt_and_decode_config(method_ins, min_config_len), + self._decrypt_and_decode_config(method_body, min_config_len), ) break except Exception as e: - logger.debug(e) + logger.debug( + f"Brute force failed for method at {hex(method_rva)}: {e}" + ) 
continue + # Reduce the minimum config length until we reach our floor min_config_len -= 1 + if decrypted_config is None: - raise ConfigParserException("No valid configuration could be parsed from any .cctor methods") + raise ConfigParserException( + "No valid configuration could be parsed from any .cctor methods" + ) return config_start, decrypted_config # Attempts to retrieve the config via looking for a config section preceded - # by the "VerifyHash()" function that is typically found in the Settings - # module - def get_config_verify_hash_method(self): + # by the VerifyHash() method typically found in a Settings module, + # returning the config RVA and decrypted config on success + def _get_config_verify_hash_method(self) -> Tuple[int, dict[str, Any]]: # Identify the VerifyHash() Method code - hit = search(self.PATTERN_VERIFY_HASH, self.dnpp.data, DOTALL) - if hit is None: - raise ConfigParserException("Could not identify VerifyHash() marker method") - # Reverse the VerifyHash() instruction offset, look up VerifyHash() in - # the MethodDef metadata table, and then get the offset to the - # subsequent function, which should be our config constructor - config_start = self.dnpp.next_method_from_instruction_offset(hit.start()) - # Configuration ends with ret operation, so use that as our terminator - encrypted_config = self.dnpp.string_from_offset(config_start, OPCODE_RET) - min_config_len = self.MIN_CONFIG_LEN_CEILING + verify_hash_hit = search(self._PATTERN_VERIFY_HASH, self._dnpp.data) + if verify_hash_hit is None: + raise ConfigParserException("Could not identify VerifyHash() marker") + + # Reverse the hit to find the VerifyHash() method, then grab the + # subsequent function + config_method = self._dnpp.method_from_instruction_offset( + verify_hash_hit.start(), 1 + ) + encrypted_config = self._dnpp.method_body_from_method(config_method) + min_config_len = self._MIN_CONFIG_LEN_CEILING while True: try: - decrypted_config = self.decrypt_and_decode_config( + 
decrypted_config = self._decrypt_and_decode_config( encrypted_config, min_config_len ) - return config_start, decrypted_config + return config_method.rva, decrypted_config except Exception as e: - if min_config_len < self.MIN_CONFIG_LEN_FLOOR: + # Reduce the minimum config length until we reach our floor + if min_config_len < self._MIN_CONFIG_LEN_FLOOR: raise e min_config_len -= 1 - - # Sorts the config by field name RVA prior to replacing RVAs with field - # name strings (this is done last to preserve config ordering) - def translate_config_field_names(self, decrypted_config): - translated_config = {} - for field_rva, field_value in sorted(decrypted_config.items()): - key = self.dnpp.field_name_from_rva(field_rva) - translated_config[key] = field_value - logger.debug(f"Config item parsed {key}: {field_value}") - return translated_config diff --git a/lib/parsers_aux/ratking/utils/config_parser_exception.py b/lib/parsers_aux/ratking/config_parser_exception.py similarity index 93% rename from lib/parsers_aux/ratking/utils/config_parser_exception.py rename to lib/parsers_aux/ratking/config_parser_exception.py index c1d84e341b1..2b8c1b06282 100644 --- a/lib/parsers_aux/ratking/utils/config_parser_exception.py +++ b/lib/parsers_aux/ratking/config_parser_exception.py @@ -4,8 +4,7 @@ # # Author: jeFF0Falltrades # -# Provides a simple custom Exception class for use with configuration parsing -# actions +# A simple custom Exception class for use with configuration parsing actions # # MIT License # diff --git a/lib/parsers_aux/ratking/utils/__init__.py b/lib/parsers_aux/ratking/utils/__init__.py index e69de29bb2d..716cb99880a 100644 --- a/lib/parsers_aux/ratking/utils/__init__.py +++ b/lib/parsers_aux/ratking/utils/__init__.py @@ -0,0 +1,25 @@ +#!/usr/bin/env python3 +# +# __init__.py +# +# Author: jeFF0Falltrades +# +# Copyright (c) 2024 Jeff Archer +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated 
documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. diff --git a/lib/parsers_aux/ratking/utils/config_item.py b/lib/parsers_aux/ratking/utils/config_item.py index 5bfbf78d73c..cba36ed272c 100644 --- a/lib/parsers_aux/ratking/utils/config_item.py +++ b/lib/parsers_aux/ratking/utils/config_item.py @@ -28,8 +28,10 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+from abc import ABC, abstractmethod from logging import getLogger -from re import DOTALL, findall +from re import DOTALL, compile, findall +from typing import Any, Tuple from .data_utils import bytes_to_int from .dotnet_constants import OPCODE_LDC_I4_0, SpecialFolder @@ -38,68 +40,71 @@ # Provides an abstract class for config items -class ConfigItem: - def __init__(self, label, pattern): - self.label = label - self.pattern = pattern +class ConfigItem(ABC): + def __init__(self, label: str, pattern: bytes) -> None: + self._label = label + self._pattern = compile(pattern, flags=DOTALL) # Should be overridden by children to provide a meaningful value - def derive_item_value(self): - return None + @abstractmethod + def _derive_item_value(self) -> Any: + pass - # Derives config field RVAs and values from data using the specified + # Derives config Field RVAs and values from data using the specified # ConfigItem's pattern - def parse_from(self, data): - logger.debug(f"Parsing {self.label} values from data...") + def parse_from(self, data: bytes) -> dict[int, Any]: + logger.debug(f"Parsing {self._label} values from data...") fields = {} - raw_data = findall(self.pattern, data, DOTALL) + raw_data = findall(self._pattern, data) found_items = 0 - for obj, string_rva in raw_data: + for obj, bytes_rva in raw_data: try: - field_value = self.derive_item_value(obj) - field_rva = bytes_to_int(string_rva) + field_value = self._derive_item_value(obj) + field_rva = bytes_to_int(bytes_rva) except Exception: - logger.debug(f"Could not parse value from {obj} at {string_rva}") + logger.debug(f"Could not parse value from {obj} at {hex(bytes_rva)}") continue if field_rva not in fields: fields[field_rva] = field_value found_items += 1 else: - logger.warning(f"Overlapping Field RVAs detected in config at {field_rva}") - logger.debug(f"Parsed {found_items} {self.label} values") + logger.debug( + f"Overlapping Field RVAs detected in config at {hex(field_rva)}" + ) + logger.debug(f"Parsed 
{found_items} {self._label} values") return fields class BoolConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__("boolean", b"(\x16|\x17)\x80(.{3}\x04)") # Boolean values are derived by examing if the opcode is "ldc.i4.0" (False) # or "ldc.i4.1" (True) - def derive_item_value(self, opcode): + def _derive_item_value(self, opcode: bytes) -> bool: return bool(bytes_to_int(opcode) - bytes_to_int(OPCODE_LDC_I4_0)) class ByteArrayConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__( "byte array", rb"\x1f(.\x8d.{3}\x01\x25\xd0.{3}\x04)\x28.{3}\x0a\x80(.{3}\x04)", ) - # Byte array size and RVA is returned, as these are needed to + # Byte array size and RVA are returned, as these are needed to # extract the value of the bytes from the payload - def derive_item_value(self, byte_data): + def _derive_item_value(self, byte_data: bytes) -> Tuple[int, int]: arr_size = byte_data[0] arr_rva = bytes_to_int(byte_data[-4:]) return (arr_size, arr_rva) class IntConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__("int", b"(\x20.{4}|[\x18-\x1e])\x80(.{3}\x04)") - def derive_item_value(self, int_bytes): + def _derive_item_value(self, int_bytes: bytes) -> int: # If single byte, must be value 2-8, represented by opcodes 0x18-0x1e # Subtract 0x16 to get the int value, e.g.: # ldc.i4.8 == 0x1e - 0x16 == 8 @@ -110,27 +115,37 @@ def derive_item_value(self, int_bytes): class NullConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__("null", b"(\x14\x80)(.{3}\x04)") # If "ldnull" is being used, simply return "null" - def derive_item_value(self, _): + def _derive_item_value(self, _: bytes) -> str: return "null" class SpecialFolderConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__("special folder", b"\x1f(.)\x80(.{3}\x04)") # Translates SpecialFolder ID to name - def derive_item_value(self, 
folder_id): + def _derive_item_value(self, folder_id: bytes) -> str: return SpecialFolder(bytes_to_int(folder_id)).name class EncryptedStringConfigItem(ConfigItem): - def __init__(self): + def __init__(self) -> None: super().__init__("encrypted string", b"\x72(.{3}\x70)\x80(.{3}\x04)") # Returns the encrypted string's RVA - def derive_item_value(self, enc_str_rva): + def _derive_item_value(self, enc_str_rva: bytes) -> int: return bytes_to_int(enc_str_rva) + + +SUPPORTED_CONFIG_ITEMS = [ + BoolConfigItem, + ByteArrayConfigItem, + IntConfigItem, + NullConfigItem, + SpecialFolderConfigItem, + EncryptedStringConfigItem, +] diff --git a/lib/parsers_aux/ratking/utils/data_utils.py b/lib/parsers_aux/ratking/utils/data_utils.py index 6e0ea6c8723..34d96ce3964 100644 --- a/lib/parsers_aux/ratking/utils/data_utils.py +++ b/lib/parsers_aux/ratking/utils/data_utils.py @@ -27,20 +27,20 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
-from .config_parser_exception import ConfigParserException +from ..config_parser_exception import ConfigParserException # Converts a bytes object to an int object using the specified byte order -def bytes_to_int(bytes, order="little"): +def bytes_to_int(bytes: bytes, order: str = "little") -> int: try: return int.from_bytes(bytes, byteorder=order) - except Exception as e: - raise ConfigParserException(f"Error parsing int from value: {bytes}") from e + except Exception: + raise ConfigParserException(f"Error parsing int from value: {bytes}") # Decodes a bytes object to a Unicode string, using UTF-16LE for byte values # with null bytes still embedded in them, and UTF-8 for all other values -def decode_bytes(byte_str): +def decode_bytes(byte_str: bytes | str) -> str: if isinstance(byte_str, str): return byte_str.strip() result = None @@ -49,13 +49,16 @@ def decode_bytes(byte_str): result = byte_str.decode("utf-16le") else: result = byte_str.decode("utf-8") - except Exception as e: - raise ConfigParserException(f"Error decoding bytes object to Unicode: {byte_str}") from e + except Exception: + raise ConfigParserException( + f"Error decoding bytes object to Unicode: {byte_str}" + ) return result -def int_to_bytes(int, length=4, order="little"): +# Converts an int to a bytes object, with the specified length and order +def int_to_bytes(int: int, length: int = 4, order: str = "little") -> bytes: try: return int.to_bytes(length, order) - except Exception as e: - raise ConfigParserException(f"Error parsing bytes from value: {int}") from e + except Exception: + raise ConfigParserException(f"Error parsing bytes from value: {int}") diff --git a/lib/parsers_aux/ratking/utils/decryptors/__init__.py b/lib/parsers_aux/ratking/utils/decryptors/__init__.py index 9a9176ae343..a340a598f31 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/__init__.py +++ b/lib/parsers_aux/ratking/utils/decryptors/__init__.py @@ -4,8 +4,6 @@ # # Author: jeFF0Falltrades # -# MIT License -# # 
Copyright (c) 2024 Jeff Archer # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -25,12 +23,28 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +from .config_decryptor import ConfigDecryptor, IncompatibleDecryptorException from .config_decryptor_aes_cbc import ConfigDecryptorAESCBC from .config_decryptor_aes_ecb import ConfigDecryptorAESECB +from .config_decryptor_decrypt_xor import ConfigDecryptorDecryptXOR from .config_decryptor_plaintext import ConfigDecryptorPlaintext +from .config_decryptor_random_hardcoded import ConfigDecryptorRandomHardcoded + +__all__ = [ + ConfigDecryptor, + IncompatibleDecryptorException, + ConfigDecryptorAESCBC, + ConfigDecryptorAESECB, + ConfigDecryptorDecryptXOR, + ConfigDecryptorRandomHardcoded, + ConfigDecryptorPlaintext, +] +# ConfigDecryptorPlaintext should always be the last fallthrough case SUPPORTED_DECRYPTORS = [ ConfigDecryptorAESCBC, ConfigDecryptorAESECB, + ConfigDecryptorDecryptXOR, + ConfigDecryptorRandomHardcoded, ConfigDecryptorPlaintext, ] diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py index 9df3620f373..16084742d56 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py @@ -28,19 +28,28 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from abc import ABC, abstractmethod +from logging import getLogger +from ..dotnetpe_payload import DotNetPEPayload -class ConfigDecryptor(ABC): - def __init__(self, payload, config_strings): - self.payload = payload - self.config_strings = config_strings - self.key = None - self.salt = None +logger = getLogger(__name__) - @abstractmethod - def decrypt(self, ciphertext): - pass +# Custom Exception to denote that a decryptor is incompatible with a payload +class IncompatibleDecryptorException(Exception): + pass + + +class ConfigDecryptor(ABC): + def __init__(self, payload: DotNetPEPayload) -> None: + self.key: bytes | str = None + self._payload = payload + self.salt: bytes = None + + # Abstract method to take in a map representing a configuration of config + # Field names and values and return a decoded/decrypted configuration @abstractmethod - def decrypt_encrypted_strings(self): + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, list[str] | str]: pass diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py index fdc2a1bf5a7..8389886bcfc 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py @@ -1,11 +1,12 @@ #!/usr/bin/env python3 # -# config_aes_decryptor.py +# config_decryptor_aes_cbc.py # # Author: jeFF0Falltrades # -# Provides a custom AES decryptor for RAT payloads utilizing the known -# encryption patterns of AsyncRAT, DcRAT, QuasarRAT, VenomRAT, etc. +# Provides a custom AES decryptor for RAT payloads utilizing CBC mode +# +# Example Hash: 6b99acfa5961591c39b3f889cf29970c1dd48ddb0e274f14317940cf279a4412 # # MIT License # @@ -30,7 +31,8 @@ # SOFTWARE. 
from base64 import b64decode from logging import getLogger -from re import DOTALL, search +from re import DOTALL, compile, search +from typing import Tuple from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import Cipher @@ -40,51 +42,68 @@ from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC from cryptography.hazmat.primitives.padding import PKCS7 -from ..config_parser_exception import ConfigParserException +from ...config_parser_exception import ConfigParserException from ..data_utils import bytes_to_int, decode_bytes, int_to_bytes from ..dotnet_constants import OPCODE_LDSTR, OPCODE_LDTOKEN -from .config_decryptor import ConfigDecryptor +from ..dotnetpe_payload import DotNetPEPayload +from .config_decryptor import ConfigDecryptor, IncompatibleDecryptorException logger = getLogger(__name__) -MIN_CIPHERTEXT_LEN = 48 - class ConfigDecryptorAESCBC(ConfigDecryptor): - PATTERN_AES_KEY_AND_BLOCK_SIZE = b"[\x06-\x09]\x20(.{4})\x6f.{4}[\x06-\x09]\x20(.{4})" - PATTERN_AES_KEY_BASE = b"(.{3}\x04).%b" - PATTERN_AES_SALT_ITER = b"[\x02-\x05]\x7e(.{4})\x20(.{4})\x73" - PATTERN_AES_SALT_INIT = b"\x80%b\x2a" + # Minimum length of valid ciphertext + _MIN_CIPHERTEXT_LEN = 48 + + # Patterns for identifying AES metadata + _PATTERN_AES_KEY_AND_BLOCK_SIZE = compile( + b"[\x06-\x09]\x20(.{4})\x6f.{4}[\x06-\x09]\x20(.{4})", DOTALL + ) + # Do not compile in-line replacement patterns + _PATTERN_AES_KEY_BASE = b"(.{3}\x04).%b" + _PATTERN_AES_SALT_INIT = b"\x80%b\x2a" + _PATTERN_AES_SALT_ITER = compile(b"[\x02-\x05]\x7e(.{4})\x20(.{4})\x73", DOTALL) - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) - self.key_size = self.block_size = self.iterations = self.key_candidates = None - self.aes_metadata = self.get_aes_metadata() + def __init__(self, payload: DotNetPEPayload) -> None: + super().__init__(payload) + self._block_size: int = None + self._iterations: int = None + self._key_candidates: 
list[bytes] = None + self._key_size: int = None + self._key_rva: int = None + try: + self._get_aes_metadata() + except Exception as e: + raise IncompatibleDecryptorException(e) # Given an initialization vector and ciphertext, creates a Cipher # object with the AES key and specified IV and decrypts the ciphertext - def decrypt(self, iv, ciphertext): - logger.debug(f"Decrypting {ciphertext} with key {self.key.hex()} and IV {iv.hex()}...") + def _decrypt(self, iv: bytes, ciphertext: bytes) -> bytes: + logger.debug( + f"Decrypting {ciphertext} with key {self.key.hex()} and IV {iv.hex()}..." + ) aes_cipher = Cipher(AES(self.key), CBC(iv), backend=default_backend()) decryptor = aes_cipher.decryptor() # Use a PKCS7 unpadder to remove padding from decrypted value # https://cryptography.io/en/latest/hazmat/primitives/padding/ - unpadder = PKCS7(self.block_size).unpadder() + unpadder = PKCS7(self._block_size).unpadder() + try: padded_text = decryptor.update(ciphertext) + decryptor.finalize() unpadded_text = unpadder.update(padded_text) + unpadder.finalize() except Exception as e: raise ConfigParserException( - f"Error decrypting ciphertext {ciphertext} with IV {iv.hex()} and key {self.key.hex()}" - ) from e + f"Error decrypting ciphertext {ciphertext} with IV {iv.hex()} and key {self.key.hex()} : {e}" + ) + logger.debug(f"Decryption result: {unpadded_text}") return unpadded_text # Derives AES passphrase candidates from a config + # # If a passphrase is base64-encoded, both its raw value and decoded value # will be added as candidates - def derive_aes_passphrase_candidates(self, aes_key_rva): - key_val = self.config_strings[aes_key_rva] + def _derive_aes_passphrase_candidates(self, key_val: str) -> list[bytes]: passphrase_candidates = [key_val.encode()] try: passphrase_candidates.append(b64decode(key_val)) @@ -94,15 +113,21 @@ def derive_aes_passphrase_candidates(self, aes_key_rva): return passphrase_candidates # Decrypts encrypted config values with the provided cipher data 
- def decrypt_encrypted_strings(self): + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, str]: logger.debug("Decrypting encrypted strings...") + if self._key_candidates is None: + self._key_candidates = self._get_aes_key_candidates(encrypted_strings) + decrypted_config_strings = {} - for k, v in self.config_strings.items(): + for k, v in encrypted_strings.items(): # Leave empty strings as they are if len(v) == 0: logger.debug(f"Key: {k}, Value: {v}") decrypted_config_strings[k] = v continue + # Check if base64-encoded string b64_exception = False try: @@ -111,10 +136,11 @@ def decrypt_encrypted_strings(self): b64_exception = True # If it was not base64-encoded, or if it is less than our min length # for ciphertext, leave the value as it is - if b64_exception or len(decoded_val) < MIN_CIPHERTEXT_LEN: + if b64_exception or len(decoded_val) < self._MIN_CIPHERTEXT_LEN: logger.debug(f"Key: {k}, Value: {v}") decrypted_config_strings[k] = v continue + # Otherwise, extract the IV from the 16 bytes after the HMAC # (first 32 bytes) and the ciphertext from the rest of the data # after the IV, and run the decryption @@ -122,44 +148,36 @@ def decrypt_encrypted_strings(self): result, last_exc = None, None key_idx = 0 # Run through key candidates until suitable one found or failure - while result is None and key_idx < len(self.key_candidates): + while result is None and key_idx < len(self._key_candidates): try: - self.key = self.key_candidates[key_idx] + self.key = self._key_candidates[key_idx] key_idx += 1 - result = decode_bytes(self.decrypt(iv, ciphertext)) + result = decode_bytes(self._decrypt(iv, ciphertext)) except ConfigParserException as e: last_exc = e + if result is None: - logger.debug(f"Decryption failed for item {v}: {last_exc}; Leaving as original value...") + logger.debug( + f"Decryption failed for item {v}: {last_exc}; Leaving as original value..." 
+ ) result = v + logger.debug(f"Key: {k}, Value: {result}") decrypted_config_strings[k] = result + logger.debug("Successfully decrypted strings") return decrypted_config_strings # Extracts AES key candidates from the payload - def get_aes_key_candidates(self, metadata_ins_offset): - logger.debug("Extracting possible AES key values...") + def _get_aes_key_candidates(self, encrypted_strings: dict[str, str]) -> list[bytes]: + logger.debug("Extracting AES key candidates...") keys = [] - # Get the RVA of the method that sets up AES256 metadata - metadata_method_rva = self.payload.next_method_from_instruction_offset(metadata_ins_offset, step_back=1, by_token=True) - - # Insert this RVA into the KEY_BASE pattern to find where the AES key - # is initialized - key_hit = search( - self.PATTERN_AES_KEY_BASE % int_to_bytes(metadata_method_rva), - self.payload.data, - DOTALL, - ) - if key_hit is None: - raise ConfigParserException("Could not find AES key pattern") - key_rva = bytes_to_int(key_hit.groups()[0]) - logger.debug(f"AES key RVA: {hex(key_rva)}") - - # Since we already have a map of all field names, use the key field - # name to index into our existing config dict - passphrase_candidates = self.derive_aes_passphrase_candidates(key_rva) + # Use the key Field name to index into our existing config + key_raw_value = encrypted_strings[ + self._payload.field_name_from_rva(self._key_rva) + ] + passphrase_candidates = self._derive_aes_passphrase_candidates(key_raw_value) for candidate in passphrase_candidates: try: @@ -167,83 +185,113 @@ def get_aes_key_candidates(self, metadata_ins_offset): # cryptography library, but we keep it here for compatibility kdf = PBKDF2HMAC( SHA1(), - length=self.key_size, + length=self._key_size, salt=self.salt, - iterations=self.iterations, + iterations=self._iterations, backend=default_backend(), ) keys.append(kdf.derive(candidate)) logger.debug(f"AES key derived: {keys[-1]}") except Exception: continue + if len(keys) == 0: - raise 
ConfigParserException(f"Could not derive key from passphrase candidates: {passphrase_candidates}") + raise ConfigParserException( + f"Could not derive key from passphrase candidates: {passphrase_candidates}" + ) return keys # Extracts the AES key and block size from the payload - def get_aes_key_and_block_size(self): + def _get_aes_key_and_block_size(self) -> Tuple[int, int]: logger.debug("Extracting AES key and block size...") - hit = search(self.PATTERN_AES_KEY_AND_BLOCK_SIZE, self.payload.data, DOTALL) + hit = search(self._PATTERN_AES_KEY_AND_BLOCK_SIZE, self._payload.data) if hit is None: raise ConfigParserException("Could not extract AES key or block size") + # Convert key size from bits to bytes by dividing by 8 # Note use of // instead of / to ensure integer output, not float key_size = bytes_to_int(hit.groups()[0]) // 8 block_size = bytes_to_int(hit.groups()[1]) + logger.debug(f"Found key size {key_size} and block size {block_size}") return key_size, block_size - # Identifies the initialization of the AES256 object in the payload - def get_aes_metadata(self): + # Given an offset to an instruction within the Method that sets up the + # Cipher, extracts the AES key RVA from the payload + def _get_aes_key_rva(self, metadata_ins_offset: int) -> int: + logger.debug("Extracting AES key RVA...") + + # Get the RVA of the method that sets up AES256 metadata + metadata_method_token = self._payload.method_from_instruction_offset( + metadata_ins_offset, by_token=True + ).token + + # Insert this RVA into the KEY_BASE pattern to find where the AES key + # is initialized + key_hit = search( + self._PATTERN_AES_KEY_BASE % int_to_bytes(metadata_method_token), + self._payload.data, + DOTALL, + ) + if key_hit is None: + raise ConfigParserException("Could not find AES key pattern") + + key_rva = bytes_to_int(key_hit.groups()[0]) + logger.debug(f"AES key RVA: {hex(key_rva)}") + return key_rva + + # Identifies the initialization of the AES256 object in the payload and + # sets 
the necessary values needed for decryption + def _get_aes_metadata(self) -> None: logger.debug("Extracting AES metadata...") - # Important to use DOTALL here (and with all regex ops to be safe) - # as we are working with bytes, and if we do not set this, and the - # byte sequence contains a byte that equates to a newline (\n or 0x0A), - # the search will fail - metadata = search(self.PATTERN_AES_SALT_ITER, self.payload.data, DOTALL) + metadata = search(self._PATTERN_AES_SALT_ITER, self._payload.data) if metadata is None: raise ConfigParserException("Could not identify AES metadata") logger.debug(f"AES metadata found at offset {hex(metadata.start())}") - self.key_size, self.block_size = self.get_aes_key_and_block_size() + self._key_size, self._block_size = self._get_aes_key_and_block_size() logger.debug("Extracting AES iterations...") - self.iterations = bytes_to_int(metadata.groups()[1]) - logger.debug(f"Found AES iteration number of {self.iterations}") + self._iterations = bytes_to_int(metadata.groups()[1]) + logger.debug(f"Found AES iteration number of {self._iterations}") - self.salt = self.get_aes_salt(metadata.groups()[0]) - self.key_candidates = self.get_aes_key_candidates(metadata.start()) - return metadata + self.salt = self._get_aes_salt(metadata.groups()[0]) + self._key_rva = self._get_aes_key_rva(metadata.start()) # Extracts the AES salt from the payload, accounting for both hardcoded # salt byte arrays, and salts derived from hardcoded strings - def get_aes_salt(self, salt_rva): + def _get_aes_salt(self, salt_rva: int) -> bytes: logger.debug("Extracting AES salt value...") + # Use % to insert our salt RVA into our match pattern # This pattern will then find the salt initialization ops, # specifically: # # stsfld uint8[] Client.Algorithm.Aes256::Salt # ret - aes_salt_initialization = self.payload.data.find(self.PATTERN_AES_SALT_INIT % salt_rva) + aes_salt_initialization = self._payload.data.find( + self._PATTERN_AES_SALT_INIT % salt_rva + ) if 
aes_salt_initialization == -1: raise ConfigParserException("Could not identify AES salt initialization") - # Look at opcode used to initialize the salt to decide how to - # proceed on extracting the salt value (start of pattern - 10 bytes) + # Look at the opcode used to initialize the salt to decide how to + # proceed with extracting the salt value (start of pattern - 10 bytes) salt_op_offset = aes_salt_initialization - 10 # Need to use bytes([int]) here to properly convert from int to byte # string for our comparison below - salt_op = bytes([self.payload.data[salt_op_offset]]) + salt_op = bytes([self._payload.data[salt_op_offset]]) # Get the salt RVA from the 4 bytes following the initialization op - salt_strings_rva_packed = self.payload.data[salt_op_offset + 1 : salt_op_offset + 5] + salt_strings_rva_packed = self._payload.data[ + salt_op_offset + 1 : salt_op_offset + 5 + ] salt_strings_rva = bytes_to_int(salt_strings_rva_packed) - # If the op is a ldstr op (0x72), just get the bytes value of the - # string being used to initialize the salt + # If the op is a ldstr op, just get the bytes value of the string being + # used to initialize the salt if salt_op == OPCODE_LDSTR: - salt_encoded = self.payload.user_string_from_rva(salt_strings_rva) + salt_encoded = self._payload.user_string_from_rva(salt_strings_rva) # We use decode_bytes() here to get the salt string without any # null bytes (because it's stored as UTF-16LE), then convert it # back to bytes @@ -251,9 +299,12 @@ def get_aes_salt(self, salt_rva): # If the op is a ldtoken (0xd0) operation, we need to get the salt # byte array value from the FieldRVA table elif salt_op == OPCODE_LDTOKEN: - salt_size = self.payload.data[salt_op_offset - 7] - salt = self.payload.byte_array_from_size_and_rva(salt_size, salt_strings_rva) + salt_size = self._payload.data[salt_op_offset - 7] + salt = self._payload.byte_array_from_size_and_rva( + salt_size, salt_strings_rva + ) else: raise ConfigParserException(f"Unknown salt 
opcode found: {salt_op.hex()}") + logger.debug(f"Found salt value: {salt.hex()}") return salt diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py index 943fb58ddfa..75e54f3f4c6 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py @@ -6,6 +6,8 @@ # # Provides a custom AES decryptor for RAT payloads utilizing ECB mode # +# Example Hash: d5028e10a756f2df677f32ebde105d7de8df37e253c431837c8f810260f4428e +# # MIT License # # Copyright (c) 2024 Jeff Archer @@ -30,7 +32,7 @@ from base64 import b64decode from hashlib import md5 from logging import getLogger -from re import DOTALL, search +from re import DOTALL, compile, search from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import Cipher @@ -38,24 +40,28 @@ from cryptography.hazmat.primitives.ciphers.modes import ECB from cryptography.hazmat.primitives.padding import PKCS7 -from ..config_parser_exception import ConfigParserException +from ...config_parser_exception import ConfigParserException from ..data_utils import bytes_to_int, decode_bytes -from .config_decryptor import ConfigDecryptor +from ..dotnetpe_payload import DotNetPEPayload +from .config_decryptor import ConfigDecryptor, IncompatibleDecryptorException logger = getLogger(__name__) class ConfigDecryptorAESECB(ConfigDecryptor): - PATTERN_MD5_HASH = rb"\x7e(.{3}\x04)\x28.{3}\x06\x6f" + # MD5 hash pattern used to detect AES key + _PATTERN_MD5_HASH = compile(rb"\x7e(.{3}\x04)\x28.{3}\x06\x6f", DOTALL) - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) + def __init__(self, payload: DotNetPEPayload) -> None: + super().__init__(payload) + try: + self._aes_key_rva = self._get_aes_key_rva() + except Exception as e: + raise IncompatibleDecryptorException(e) # Given ciphertext, 
creates a Cipher object with the AES key and decrypts # the ciphertext - def decrypt(self, ciphertext): - if self.key is None: - self.get_aes_key() + def _decrypt(self, ciphertext: bytes) -> bytes: logger.debug(f"Decrypting {ciphertext} with key {self.key.hex()}...") aes_cipher = Cipher(AES(self.key), ECB(), backend=default_backend()) decryptor = aes_cipher.decryptor() @@ -63,24 +69,39 @@ def decrypt(self, ciphertext): # Use a PKCS7 unpadder to remove padding from decrypted value # https://cryptography.io/en/latest/hazmat/primitives/padding/ unpadder = PKCS7(AES.block_size).unpadder() + try: padded_text = decryptor.update(ciphertext) + decryptor.finalize() unpadded_text = unpadder.update(padded_text) + unpadder.finalize() except Exception as e: - raise ConfigParserException(f"Error decrypting ciphertext {ciphertext} with key {self.key.hex()}") from e + raise ConfigParserException( + f"Error decrypting ciphertext {ciphertext} with key {self.key.hex()}: {e}" + ) + logger.debug(f"Decryption result: {unpadded_text}") return unpadded_text # Decrypts encrypted config values with the provided cipher data - def decrypt_encrypted_strings(self): + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, str]: logger.debug("Decrypting encrypted strings...") + + if self.key is None: + try: + raw_key_field = self._payload.field_name_from_rva(self._aes_key_rva) + self.key = self._derive_aes_key(encrypted_strings[raw_key_field]) + except Exception as e: + raise ConfigParserException(f"Failed to derive AES key: {e}") + decrypted_config_strings = {} - for k, v in self.config_strings.items(): + for k, v in encrypted_strings.items(): # Leave empty strings as they are if len(v) == 0: logger.debug(f"Key: {k}, Value: {v}") decrypted_config_strings[k] = v continue + # Check if base64-encoded string b64_exception = False try: @@ -92,38 +113,44 @@ def decrypt_encrypted_strings(self): logger.debug(f"Key: {k}, Value: {v}") decrypted_config_strings[k] = v 
continue + ciphertext = decoded_val result, last_exc = None, None try: - result = decode_bytes(self.decrypt(ciphertext)) + result = decode_bytes(self._decrypt(ciphertext)) except ConfigParserException as e: last_exc = e + if result is None: logger.debug(f"Decryption failed for item {v}: {last_exc}") result = v + logger.debug(f"Key: {k}, Value: {result}") decrypted_config_strings[k] = result + logger.debug("Successfully decrypted strings") return decrypted_config_strings - # Extracts AES key candidates from the payload - def get_aes_key(self): - logger.debug("Extracting possible AES key value...") - key_hit = search( - self.PATTERN_MD5_HASH, - self.payload.data, - DOTALL, - ) - if key_hit is None: - raise ConfigParserException("Could not find AES key pattern") - key_rva = bytes_to_int(key_hit.groups()[0]) - logger.debug(f"AES key RVA: {hex(key_rva)}") - key_unhashed = self.config_strings[key_rva] + # Given the raw bytes that will become the key value, derives the AES key + def _derive_aes_key(self, key_unhashed: str) -> bytes: # Generate the MD5 hash md5_hash = md5() md5_hash.update(key_unhashed.encode("utf-8")) md5_digest = md5_hash.digest() + # Key is a 32-byte value made up of the MD5 hash overlaying itself, # tailed with one null byte - self.key = md5_digest[:15] + md5_digest[:16] + b"\x00" - logger.debug(f"AES key derived: {self.key}") + key = md5_digest[:15] + md5_digest[:16] + b"\x00" + logger.debug(f"AES key derived: {key}") + return key + + # Extracts the AES key RVA from the payload + def _get_aes_key_rva(self) -> int: + logger.debug("Extracting AES key value...") + key_hit = search(self._PATTERN_MD5_HASH, self._payload.data) + if key_hit is None: + raise ConfigParserException("Could not find AES key pattern") + + key_rva = bytes_to_int(key_hit.groups()[0]) + logger.debug(f"AES key RVA: {hex(key_rva)}") + return key_rva diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py 
b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py
new file mode 100644
index 00000000000..74311e5e7a2
--- /dev/null
+++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+#
+# config_decryptor_decrypt_xor.py
+#
+# Author: jeFF0Falltrades
+#
+# Provides a custom decryptor for RAT payloads utilizing the DecryptXOR
+# method of embedding config strings
+#
+# Example Hash: 6e5671dec52db7f64557ba8ef70caf53cf0c782795236b03655623640f9e6a83
+#
+# MIT License
+#
+# Copyright (c) 2024 Jeff Archer
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+from logging import getLogger +from re import DOTALL, compile, findall, search + +from ...config_parser_exception import ConfigParserException +from ..data_utils import bytes_to_int, decode_bytes +from ..dotnet_constants import PATTERN_LDSTR_OP +from ..dotnetpe_payload import DotNetPEPayload +from .config_decryptor import ConfigDecryptor, IncompatibleDecryptorException +from .config_decryptor_plaintext import ConfigDecryptorPlaintext + +logger = getLogger(__name__) + + +class ConfigDecryptorDecryptXOR(ConfigDecryptor): + _KEY_XOR_DECODED_STRINGS = "xor_decoded_strings" + + # Pattern to detect usage of DecryptXOR Method + _PATTERN_DECRYPT_XOR_BLOCK = compile( + rb"(\x2d.\x72.{3}\x70\x28.{3}\x06\x2a(?:\x02[\x16-\x1f].?\x33.\x72.{3}\x70\x28.{3}\x06\x2a){7,}.+?\x72.{3}\x70)", + flags=DOTALL, + ) + + def __init__(self, payload: DotNetPEPayload) -> None: + super().__init__(payload) + # Filled in _get_xor_metadata() + self._xor_strings: list[str] = [] + try: + self._get_xor_metadata() + except Exception as e: + raise IncompatibleDecryptorException(e) + + # Returns a list of decoded XOR-encoded strings found in the payload + def _decode_encoded_strings(self) -> list[str]: + decoded_strings = [] + + for string in self._xor_strings: + decoded = [] + # Do not modify unencoded strings + if ":" not in string: + decoded_strings.append(string) + continue + + # Split encoded string by ':' and run XOR decoding + arr, arr2 = (bytes.fromhex(arr) for arr in string.split(":")) + for idx, byte in enumerate(arr2): + decoded.append(byte ^ self.key[idx % len(self.key)] ^ arr[idx]) + decoded_strings.append(decode_bytes(bytes(decoded))) + + logger.debug(f"Decoded {len(decoded_strings)} strings") + return decoded_strings + + # Parses the config, adds decoded XOR strings, and returns the decoded + # config + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, list[str] | str]: + config = {} + # Pass off plaintext config to a ConfigDecryptorPlaintext + 
ptcd = ConfigDecryptorPlaintext(self._payload) + config.update(ptcd.decrypt_encrypted_strings(encrypted_strings)) + config[self._KEY_XOR_DECODED_STRINGS] = self._decode_encoded_strings() + return config + + # Gathers XOR metadata from the payload + def _get_xor_metadata(self): + dxor_block = search(self._PATTERN_DECRYPT_XOR_BLOCK, self._payload.data) + if dxor_block is None: + raise ConfigParserException("Could not identify DecryptXOR block") + logger.debug(f"DecryptXOR block found at offset {hex(dxor_block.start())}") + + # Derive all XOR-encoded string references in the DecryptXOR block + xor_string_rvas = findall(PATTERN_LDSTR_OP, dxor_block.groups()[0]) + self._xor_strings = list( + filter( + None, + [ + self._payload.user_string_from_rva(bytes_to_int(rva)) + for rva in xor_string_rvas + ], + ) + ) + logger.debug(f"{len(self._xor_strings)} XOR strings found") + + # Get the static constructor containing the XOR key + xor_key_cctor = self._payload.method_from_instruction_offset( + dxor_block.start(), step=1, by_token=True + ) + xor_key_cctor_body = self._payload.method_body_from_method(xor_key_cctor) + + # Derive the XOR key RVA and value + xor_rva = search(PATTERN_LDSTR_OP, xor_key_cctor_body) + if xor_rva is None: + raise ConfigParserException("Could not identify XOR key RVA") + xor_rva = bytes_to_int(xor_rva.groups()[0]) + self.key = bytes(self._payload.user_string_from_rva(xor_rva), encoding="utf-8") + logger.debug(f"XOR key found at {hex(xor_rva)} : {self.key}") diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py index c6e71f8a350..ebd87c49ee8 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py @@ -5,7 +5,8 @@ # Author: jeFF0Falltrades # # Provides a fall-through decryptor that will attempt to return the plaintext -# values of a found config when 
all other decryptors fail +# values of a found config when all other decryptors fail by matching known +# config field names from supported RAT families # # MIT License # @@ -30,18 +31,98 @@ # SOFTWARE. from logging import getLogger +from ...config_parser_exception import ConfigParserException +from ..dotnetpe_payload import DotNetPEPayload from .config_decryptor import ConfigDecryptor logger = getLogger(__name__) +KNOWN_CONFIG_FIELD_NAMES = set( + [ + "AUTHKEY", + "An_ti", + "Anti", + "Anti_Process", + "BDOS", + "BS_OD", + "Certifi_cate", + "Certificate", + "DIRECTORY", + "De_lay", + "Delay", + "DoStartup", + "ENABLELOGGER", + "EncryptionKey", + "Groub", + "Group", + "HIDEFILE", + "HIDEINSTALLSUBDIRECTORY", + "HIDELOGDIRECTORY", + "HOSTS", + "Hos_ts", + "Hosts", + "Hw_id", + "Hwid", + "INSTALL", + "INSTALLNAME", + "In_stall", + "Install", + "InstallDir", + "InstallFile", + "InstallFolder", + "InstallStr", + "Install_File", + "Install_Folder", + "Install_path", + "KEY", + "Key", + "LOGDIRECTORYNAME", + "MTX", + "MUTEX", + "Mutex", + "Paste_bin", + "Pastebin", + "Por_ts", + "Port", + "Ports", + "RECONNECTDELAY", + "SPL", + "STARTUP", + "STARTUPKEY", + "SUBDIRECTORY", + "ServerIp", + "ServerPort", + "Server_signa_ture", + "Serversignature", + "Sleep", + "TAG", + "USBNM", + "VERSION", + "Ver_sion", + "Version", + "delay", + "mutex_string", + "startup_name", + ] +) + class ConfigDecryptorPlaintext(ConfigDecryptor): - def __init__(self, payload, config_strings): - super().__init__(payload, config_strings) + # Minimum threshold for matching Field names + MIN_THRESHOLD_MATCH = 3 - def decrypt(self, ciphertext): - return ciphertext + def __init__(self, payload: DotNetPEPayload) -> None: + super().__init__(payload) - def decrypt_encrypted_strings(self): - logger.debug("Could not find applicable decryptor, returning found config as plaintext...") - return self.config_strings + # Calculates whether the config meets the minimum threshold for known Field + # Names and returns 
it if it does + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, str]: + field_names = set(encrypted_strings.keys()) + num_overlapping_field_names = len(KNOWN_CONFIG_FIELD_NAMES & field_names) + if num_overlapping_field_names < self.MIN_THRESHOLD_MATCH: + raise ConfigParserException( + "Plaintext threshold of known config items not met" + ) + return encrypted_strings diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py new file mode 100644 index 00000000000..747364b8a9d --- /dev/null +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python3 +# +# config_decryptor_random_hardcoded.py +# +# Author: jeFF0Falltrades +# +# Provides a custom decryptor for RAT payloads utilizing the method of +# randomly selecting from an embedded list of C2 domains/supradomains +# +# Example hash: a2817702fecb280069f0723cd2d0bfdca63763b9cdc833941c4f33bbe383d93e +# +# MIT License +# +# Copyright (c) 2024 Jeff Archer +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +from logging import getLogger +from re import DOTALL, compile, findall, search + +from ...config_parser_exception import ConfigParserException +from ..data_utils import bytes_to_int +from ..dotnet_constants import PATTERN_LDSTR_OP +from ..dotnetpe_payload import DotNetPEMethod, DotNetPEPayload +from .config_decryptor import ConfigDecryptor, IncompatibleDecryptorException +from .config_decryptor_plaintext import ConfigDecryptorPlaintext + +logger = getLogger(__name__) + + +class ConfigDecryptorRandomHardcoded(ConfigDecryptor): + _KEY_HARDCODED_HOSTS = "hardcoded_hosts" + + # Pattern to find the Method that retrieves a random domain + _PATTERN_RANDOM_DOMAIN = compile( + rb"(?:\x73.{3}\x0a){2}\x25.+?\x0a\x06(?:\x6f.{3}\x0a){2}\x0b", flags=DOTALL + ) + + def __init__(self, payload: DotNetPEPayload) -> None: + super().__init__(payload) + try: + self._random_domain_method = self._get_random_domain_method() + except Exception as e: + raise IncompatibleDecryptorException(e) + + # Returns a combined config containing config fields + hardcoded hosts + def decrypt_encrypted_strings( + self, encrypted_strings: dict[str, str] + ) -> dict[str, list[str] | str]: + config = {} + # Pass off plaintext config to a ConfigDecryptorPlaintext + ptcd = ConfigDecryptorPlaintext(self._payload) + config.update(ptcd.decrypt_encrypted_strings(encrypted_strings)) + config[self._KEY_HARDCODED_HOSTS] = self._get_hardcoded_hosts() + return config + + # Retrieves and returns a list of hardcoded hosts + def _get_hardcoded_hosts(self) -> list[str]: + random_domain_method_body = self._payload.method_body_from_method( + self._random_domain_method + ) + hardcoded_host_rvas = findall(PATTERN_LDSTR_OP, random_domain_method_body) + + 
hardcoded_hosts = [] + for rva in hardcoded_host_rvas: + try: + harcoded_host = self._payload.user_string_from_rva(bytes_to_int(rva)) + if harcoded_host != ".": + hardcoded_hosts.append(harcoded_host) + except Exception as e: + logger.error(f"Error translating hardcoded host at {hex(rva)}: {e}") + continue + + logger.debug(f"Hardcoded hosts found: {hardcoded_hosts}") + return hardcoded_hosts + + # Retrieves the Method that randomly selects from a list of embedded hosts + def _get_random_domain_method(self) -> DotNetPEMethod: + logger.debug("Searching for random domain method") + random_domain_marker = search(self._PATTERN_RANDOM_DOMAIN, self._payload.data) + if random_domain_marker is None: + raise ConfigParserException( + "Could not identify random domain generator method" + ) + + random_domain_method = self._payload.method_from_instruction_offset( + random_domain_marker.start() + ) + + logger.debug( + f"Random domain generator found at offset {hex(random_domain_method.offset)}" + ) + return random_domain_method diff --git a/lib/parsers_aux/ratking/utils/dotnet_constants.py b/lib/parsers_aux/ratking/utils/dotnet_constants.py index 2de1a34027a..84f82e14619 100644 --- a/lib/parsers_aux/ratking/utils/dotnet_constants.py +++ b/lib/parsers_aux/ratking/utils/dotnet_constants.py @@ -28,16 +28,19 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
from enum import IntEnum +from re import DOTALL, compile -# Notable CIL Opcodes and Base RVAs +# Notable CIL Opcodes and Tokens OPCODE_LDC_I4_0 = b"\x16" -OPCODE_LDC_I4_1 = b"\x17" OPCODE_LDSTR = b"\x72" OPCODE_LDTOKEN = b"\xd0" -OPCODE_RET = b"\x2a" MDT_FIELD_DEF = 0x04000000 MDT_METHOD_DEF = 0x06000000 MDT_STRING = 0x70000000 +PATTERN_LDSTR_OP = compile( + rb"\x72(.{3}\x70)", + flags=DOTALL, +) # IntEnum derivative used for translating a SpecialFolder ID to its name diff --git a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py b/lib/parsers_aux/ratking/utils/dotnetpe_payload.py index be66c0433f6..d704b3397d2 100644 --- a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py +++ b/lib/parsers_aux/ratking/utils/dotnetpe_payload.py @@ -5,7 +5,7 @@ # Author: jeFF0Falltrades # # Provides a wrapper class for accessing metadata from a DotNetPE object and -# performing RVA to data offset conversions +# performing data conversions # # MIT License # @@ -28,124 +28,182 @@ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
+from dataclasses import dataclass from hashlib import sha256 from logging import getLogger from dnfile import dnPE +from yara import Rules -from .config_parser_exception import ConfigParserException +from ..config_parser_exception import ConfigParserException +from .data_utils import bytes_to_int from .dotnet_constants import MDT_FIELD_DEF, MDT_METHOD_DEF, MDT_STRING logger = getLogger(__name__) +# Helper class representing a single Method +@dataclass +class DotNetPEMethod: + name: str + offset: int + rva: int + size: int + token: int + + class DotNetPEPayload: - def __init__(self, file_data, yara_rule=None): + # def __init__(self, file_path: str, yara_rule: Rules = None) -> None: + def __init__(self, file_data: bytes, yara_rule: Rules = None) -> None: # self.file_path = file_path - self.data = file_data # self.get_file_data() - self.sha256 = self.calculate_sha256() - self.dotnetpe = None + self.data = file_data # self._get_file_data() + + # Calculate SHA256 + sha256_obj = sha256() + sha256_obj.update(self.data) + self.sha256 = sha256_obj.hexdigest() + + self.dotnetpe: dnPE = None try: + # self.dotnetpe = dnPE(self.file_path, clr_lazy_load=True) self.dotnetpe = dnPE(data=file_data, clr_lazy_load=True) - except Exception as e: - logger.exception(e) + except Exception: + raise ConfigParserException("Failed to load project as dotnet executable") + self.yara_match = "" if yara_rule is not None: - self.yara_match = self.match_yara(yara_rule) + self.yara_match = self._match_yara(yara_rule) + + # Pre-sort Method table for efficient lookups + self._methods = self._generate_method_list() + self._methods_by_offset = sorted(self._methods, key=lambda m: m.offset) + self._methods_by_token = sorted(self._methods, key=lambda m: m.token) # Given a byte array's size and RVA, translates the RVA to the offset of # the byte array and returns the bytes of the array as a byte string - def byte_array_from_size_and_rva(self, arr_size, arr_rva): + def byte_array_from_size_and_rva(self, 
arr_size: int, arr_rva: int) -> bytes: arr_field_rva = self.fieldrva_from_rva(arr_rva) arr_offset = self.offset_from_rva(arr_field_rva) - arr_value = self.data[arr_offset : arr_offset + arr_size] - return arr_value - - # Calculates the SHA256 hash of file data - def calculate_sha256(self): - sha256_hash = sha256() - sha256_hash.update(self.data) - return sha256_hash.hexdigest() - - # Given an RVA, derives the corresponding Field name from the RVA - def field_name_from_rva(self, rva): - return self.dotnetpe.net.mdtables.Field.rows[(rva ^ MDT_FIELD_DEF) - 1].Name.value + return self.data[arr_offset : arr_offset + arr_size] + + # Given an offset, and either a terminating offset or delimiter, extracts + # the byte string + def byte_string_from_offset( + self, offset_start: int, offstart_end: int = -1, delimiter: bytes = b"\0" + ) -> bytes: + if offstart_end != -1: + try: + return self.data[offset_start:offstart_end] + except Exception: + raise ConfigParserException( + f"Could not extract string value from offset range [{hex(offset_start)}:{offstart_end}]" + ) + try: + return self.data[offset_start:].partition(delimiter)[0] + except Exception: + raise ConfigParserException( + f"Could not extract string value from offset {hex(offset_start)} with delimiter {delimiter}" + ) - # Given an RVA, derives the corresponding FieldRVA value from the RVA - def fieldrva_from_rva(self, rva): + # Given an RVA, derives the corresponding Field name + def field_name_from_rva(self, rva: int) -> str: + try: + return self.dotnetpe.net.mdtables.Field.rows[ + (rva ^ MDT_FIELD_DEF) - 1 + ].Name.value + except Exception: + raise ConfigParserException(f"Could not find Field for RVA {rva}") + + # Given an RVA, derives the corresponding FieldRVA value + def fieldrva_from_rva(self, rva: int) -> int: field_id = rva ^ MDT_FIELD_DEF for row in self.dotnetpe.net.mdtables.FieldRva: if row.struct.Field_Index == field_id: return row.struct.Rva - raise ConfigParserException(f"Could not find FieldRVA for 
address {rva}") + raise ConfigParserException(f"Could not find FieldRVA for RVA {rva}") + + # Generates a list of DotNetPEMethod objects for efficient lookups of method + # metadata in other operations + def _generate_method_list( + self, + ) -> list[DotNetPEMethod]: + method_objs = [] + + for idx, method in enumerate(self.dotnetpe.net.mdtables.MethodDef.rows): + method_offset = self.offset_from_rva(method.Rva) + + # Parse size from flags + flags = self.data[method_offset] + method_size = 0 + if flags & 3 == 2: # Tiny format + method_size = flags >> 2 + elif flags & 3 == 3: # Fat format (add 12-byte header) + method_size = 12 + bytes_to_int( + self.data[method_offset + 4 : method_offset + 8] + ) + + method_objs.append( + DotNetPEMethod( + method.Name.value, + method_offset, + method.Rva, + method_size, + (MDT_METHOD_DEF ^ idx) + 1, + ) + ) + return method_objs - # Reads in payload binary content - def get_file_data(self): + # Returns payload binary content + def _get_file_data(self) -> bytes: logger.debug(f"Reading contents from: {self.file_path}") try: with open(self.file_path, "rb") as fp: data = fp.read() - except Exception as e: - raise ConfigParserException(f"Error reading from path: {self.file_path}") from e - logger.debug("Successfully read data") + except Exception: + raise ConfigParserException(f"Error reading from path: {self.file_path}") + logger.debug(f"Successfully read {len(data)} bytes") return data - # Tests a given YARA rule object against the file at file_path - def match_yara(self, rule): + # Tests a given YARA rule object against the file at self.file_path, + # returning the matching rule's name, or "No match" + def _match_yara(self, rule: Rules) -> str: try: - match = rule.match(data=self.file_data) + match = rule.match(self.file_path) return str(match[0]) if len(match) > 0 else "No match" except Exception as e: logger.exception(e) return f"Exception encountered: {e}" - # Given a method name, returns RVAs of methods matching that name - def 
method_rvas_from_name(self, name): - return [row.Rva for row in self.dotnetpe.net.mdtables.MethodDef if row.Name.value == name] + # Given a DotNetPEMethod, returns its body as raw bytes + def method_body_from_method(self, method: DotNetPEMethod) -> bytes: + return self.byte_string_from_offset(method.offset, method.offset + method.size) + + # Given a Method name, returns a list of DotNetPEMethods matching that name + def methods_from_name(self, name: str) -> list[DotNetPEMethod]: + return [method for method in self._methods if method.name == name] # Given the offset to an instruction, reverses the instruction to its - # parent Method, and then finds the subsequent Method in the MethodDef - # table and returns its offset or index - def next_method_from_instruction_offset(self, ins_offset, step_back=0, by_token=False): - # Translate the instruction offset to RVA - ins_rva = self.dotnetpe.get_rva_from_offset(ins_offset) - # Get both the regular MethodDef table and a sorted (by RVA) copy - # This is because the table is not guaranteed to be ordered by RVA - methods = self.dotnetpe.net.mdtables.MethodDef.rows - sorted_methods = sorted(methods, key=lambda m: m.Rva) - # Go through the sorted table and find the Method RVA that is greater - # than the instruction RVA (the subsequent function), and use step_back - # to get the function containing the instruction if necessary - for idx, method in enumerate(sorted_methods): - if method.Rva > ins_rva: + # parent Method, optionally returning an adjacent Method using step to + # signify the direction of adjacency, and using by_token to determine + # whether to calculate adjacency by token or offset + def method_from_instruction_offset( + self, ins_offset: int, step: int = 0, by_token: bool = False + ) -> DotNetPEMethod: + for idx, method in enumerate(self._methods_by_offset): + if method.offset <= ins_offset < method.offset + method.size: return ( - # Add 1 to token ID as table starts at index 1, not 0 - 
methods.index(sorted_methods[idx - step_back]) + 1 + MDT_METHOD_DEF + self._methods_by_token[self._methods_by_token.index(method) + step] if by_token - else self.offset_from_rva(methods[methods.index(sorted_methods[idx - step_back])].Rva) + else self._methods_by_offset[idx + step] ) - raise ConfigParserException(f"Could not find method from instruction offset {ins_offset}") + raise ConfigParserException( + f"Could not find method from instruction offset {hex(ins_offset)}" + ) # Given an RVA, returns a data/file offset - def offset_from_rva(self, rva): + def offset_from_rva(self, rva: int) -> int: return self.dotnetpe.get_offset_from_rva(rva) - # Given a string offset, and, optionally, a delimiter, extracts the string - def string_from_offset(self, str_offset, delimiter=b"\0"): - try: - result = self.data[str_offset:].partition(delimiter)[0] - except Exception as e: - raise ConfigParserException( - f"Could not extract string value from offset {hex(str_offset)} with delimiter {delimiter}" - ) from e - return result - - def string_from_range(self, start_offset, end_offset): - try: - return self.data[start_offset, end_offset] - except Exception as e: - raise ConfigParserException(f"Could not extract string value from range {hex(start_offset)}:{hex(end_offset)}") from e - # Given an RVA, derives the corresponding User String - def user_string_from_rva(self, rva): + def user_string_from_rva(self, rva: int) -> str: return self.dotnetpe.net.user_strings.get(rva ^ MDT_STRING).value From bcc76e21ae44a1b21abb31ac30093f4ebe53ec92 Mon Sep 17 00:00:00 2001 From: jeFF0Falltrades <8444166+jeFF0Falltrades@users.noreply.github.com> Date: Fri, 27 Sep 2024 09:42:37 -0400 Subject: [PATCH 03/33] Minor fix to escape RVAs which may contain regex escape sequences --- .../ratking/utils/decryptors/config_decryptor_aes_cbc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py 
b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py index 8389886bcfc..13bd9b4a359 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py @@ -31,7 +31,7 @@ # SOFTWARE. from base64 import b64decode from logging import getLogger -from re import DOTALL, compile, search +from re import DOTALL, compile, escape, search from typing import Tuple from cryptography.hazmat.backends import default_backend @@ -229,7 +229,7 @@ def _get_aes_key_rva(self, metadata_ins_offset: int) -> int: # Insert this RVA into the KEY_BASE pattern to find where the AES key # is initialized key_hit = search( - self._PATTERN_AES_KEY_BASE % int_to_bytes(metadata_method_token), + self._PATTERN_AES_KEY_BASE % escape(int_to_bytes(metadata_method_token)), self._payload.data, DOTALL, ) @@ -270,7 +270,7 @@ def _get_aes_salt(self, salt_rva: int) -> bytes: # stsfld uint8[] Client.Algorithm.Aes256::Salt # ret aes_salt_initialization = self._payload.data.find( - self._PATTERN_AES_SALT_INIT % salt_rva + self._PATTERN_AES_SALT_INIT % escape(salt_rva) ) if aes_salt_initialization == -1: raise ConfigParserException("Could not identify AES salt initialization") @@ -308,3 +308,4 @@ def _get_aes_salt(self, salt_rva: int) -> bytes: logger.debug(f"Found salt value: {salt.hex()}") return salt + From a2962b117746e6a745c4eebabfc9244010cb73dc Mon Sep 17 00:00:00 2001 From: IridiumXOR Date: Tue, 8 Oct 2024 13:31:07 +0200 Subject: [PATCH 04/33] Change Creator ID and OEM Table ID in ACPI Tables (#2341) Patch to hide QEMU from ACPI table IDs --- installer/kvm-qemu.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/installer/kvm-qemu.sh b/installer/kvm-qemu.sh index 8323e26dab8..d25ac23d046 100644 --- a/installer/kvm-qemu.sh +++ b/installer/kvm-qemu.sh @@ -799,6 +799,7 @@ function replace_qemu_clues_public() { _sed_aux 's/"bochs"/"'"$BOCHS_BLOCK_REPLACER"'"/g' qemu*/block/bochs.c 'BOCHS was not replaced in 
block/bochs.c' _sed_aux 's/"BOCHS "/"ALASKA"/g' qemu*/include/hw/acpi/aml-build.h 'BOCHS was not replaced in block/bochs.c' _sed_aux 's/Bochs Pseudo/Intel RealTime/g' qemu*/roms/ipxe/src/drivers/net/pnic.c 'Bochs Pseudo was not replaced in roms/ipxe/src/drivers/net/pnic.c' + _sed_aux 's/BXPC/'"$BXPC_REPLACER"'/g' qemu*/include/hw/acpi/aml-build.h 'BXPC was not replaced in include/hw/acpi/aml-build.h' } function replace_seabios_clues_public() { From 852335c7c2d132631b2552255014677502ff88c9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 08:16:44 +0200 Subject: [PATCH 05/33] Bump django from 4.2.15 to 4.2.16 (#2342) Bumps [django](https://github.com/django/django) from 4.2.15 to 4.2.16. - [Commits](https://github.com/django/django/compare/4.2.15...4.2.16) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 31 +++++++++++++++++++++++++++---- pyproject.toml | 2 +- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d817b4c22af..cec3d267ee9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -839,13 +839,13 @@ files = [ [[package]] name = "django" -version = "4.2.15" +version = "4.2.16" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
optional = false python-versions = ">=3.8" files = [ - {file = "Django-4.2.15-py3-none-any.whl", hash = "sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30"}, - {file = "Django-4.2.15.tar.gz", hash = "sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a"}, + {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"}, + {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"}, ] [package.dependencies] @@ -2628,6 +2628,8 @@ files = [ {file = "pydeep2-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2283893e25826b547dd1e5c71a010e86ddfd7270e2f2b8c90973c1d7984c7eb7"}, {file = "pydeep2-0.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f248e3161deb53d46a9368a7c164e36d83004faf2f11625d47a5cf23a6bdd2cb"}, {file = "pydeep2-0.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a13fca9be89a9fa8d92a4f49d7b9191eef94555f8ddf030fb2be4c8c15ad618c"}, + {file = "pydeep2-0.5.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cb4757db97ac15ddf034c21cd6bab984f841586b6d53984e63c9a7803b2cd4"}, + {file = "pydeep2-0.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7809a1d6640bdbee68f075d53229d05229e11b4711f232728dd540f68e6483a4"}, {file = "pydeep2-0.5.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fedc1c9660cb5d0b73ad0b5f1dbffe16990e6721cbfc6454571a4b9882d0ea4"}, {file = "pydeep2-0.5.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca68f7d63e2ef510d410d20b223e8e97df41707fb50c4c526b6dd1d8698d9e6"}, {file = "pydeep2-0.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:199d05d8b4b7544509a2ba4802ead4b41dfe7859e0ecea9d9be9e41939f11660"}, @@ -3672,30 +3674,51 @@ description = "Database 
Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ + {file = "SQLAlchemy-1.4.50-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-win32.whl", hash = "sha256:6c78e3fb4a58e900ec433b6b5f4efe1a0bf81bbb366ae7761c6e0051dd310ee3"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-win_amd64.whl", hash = "sha256:d55f7a33e8631e15af1b9e67c9387c894fedf6deb1a19f94be8731263c51d515"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:324b1fdd50e960a93a231abb11d7e0f227989a371e3b9bd4f1259920f15d0304"}, {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-win32.whl", hash = 
"sha256:8bdab03ff34fc91bfab005e96f672ae207d87e0ac7ee716d74e87e7046079d8b"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-win_amd64.whl", hash = "sha256:52e01d60b06f03b0a5fc303c8aada405729cbc91a56a64cead8cb7c0b9b13c1a"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:77fde9bf74f4659864c8e26ac08add8b084e479b9a18388e7db377afc391f926"}, {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-win32.whl", hash = "sha256:e86c920b7d362cfa078c8b40e7765cbc34efb44c1007d7557920be9ddf138ec7"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-win_amd64.whl", hash = "sha256:6b3df20fbbcbcd1c1d43f49ccf3eefb370499088ca251ded632b8cbaee1d497d"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-win32.whl", hash = "sha256:1b9c4359d3198f341480e57494471201e736de459452caaacf6faa1aca852bd8"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-win_amd64.whl", hash = "sha256:35e4520f7c33c77f2636a1e860e4f8cafaac84b0b44abe5de4c6c8890b6aaa6d"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-win32.whl", hash = "sha256:7b4396452273aedda447e5aebe68077aa7516abf3b3f48408793e771d696f397"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-win_amd64.whl", hash = "sha256:752f9df3dddbacb5f42d8405b2d5885675a93501eb5f86b88f2e47a839cf6337"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:35c7ed095a4b17dbc8813a2bfb38b5998318439da8e6db10a804df855e3a9e3a"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, {file = 
"SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-win32.whl", hash = "sha256:8a219688297ee5e887a93ce4679c87a60da4a5ce62b7cb4ee03d47e9e767f558"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-win_amd64.whl", hash = "sha256:a648770db002452703b729bdcf7d194e904aa4092b9a4d6ab185b48d13252f63"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:4be4da121d297ce81e1ba745a0a0521c6cf8704634d7b520e350dce5964c71ac"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-win32.whl", hash = 
"sha256:a7c9b9dca64036008962dd6b0d9fdab2dfdbf96c82f74dbd5d86006d8d24a30f"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-win_amd64.whl", hash = "sha256:df200762efbd672f7621b253721644642ff04a6ff957236e0e2fe56d9ca34d2c"}, {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, ] @@ -4518,4 +4541,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = ">=3.10, <4.0" -content-hash = "74bde9cd19ea301395e0dbfdcc24884dc7e34f5735beaa685fb0be0c6d9f0860" +content-hash = "60bf927752538b220c3156c89605a163fd272ecca872edc4926885ab68a1477d" diff --git a/pyproject.toml b/pyproject.toml index 9f43c919121..8d6e5a5d6eb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,7 +34,7 @@ pyzipper = "0.3.5" flare-capa = "7.3.0" Cython = "0.29.24" # pyre2 = "0.3.6" # Dead for python3.11 -Django = "4.2.15" +Django = "4.2.16" SQLAlchemy = "1.4.50" SQLAlchemy-Utils = "0.41.1" Jinja2 = "^3.1.4" From a8c6c6881c15cb454fe986555836a40a7f311703 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Wed, 9 Oct 2024 06:17:18 +0000 Subject: [PATCH 06/33] ci: Update requirements.txt --- requirements.txt | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index fc3b1acafa9..8cc40264dac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -356,9 +356,9 @@ django-recaptcha==3.0.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:253197051288923cae675d7eff91b619e3775311292a5dbaf27a8a55ffebc670 django-settings-export==1.2.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:fceeae49fc597f654c1217415d8e049fc81c930b7154f5d8f28c432db738ff79 -django==4.2.15 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:61ee4a130efb8c451ef3467c67ca99fdce400fedd768634efc86a68c18d80d30 \ - --hash=sha256:c77f926b81129493961e19c0e02188f8d07c112a1162df69bfab178ae447f94a 
+django==4.2.16 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898 \ + --hash=sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad djangorestframework==3.15.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20 \ --hash=sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad @@ -1187,11 +1187,13 @@ pydeep2==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2283893e25826b547dd1e5c71a010e86ddfd7270e2f2b8c90973c1d7984c7eb7 \ --hash=sha256:44ce447e3253a69d3393f3cc53e3a87a48fe3ff9861793736a7bc218a1b95d77 \ --hash=sha256:4bf00de2fe1918e4d698fe8195a5c0a3a0c3050a2e3e15583748cfd20b427153 \ + --hash=sha256:7809a1d6640bdbee68f075d53229d05229e11b4711f232728dd540f68e6483a4 \ --hash=sha256:7ca68f7d63e2ef510d410d20b223e8e97df41707fb50c4c526b6dd1d8698d9e6 \ --hash=sha256:a13fca9be89a9fa8d92a4f49d7b9191eef94555f8ddf030fb2be4c8c15ad618c \ --hash=sha256:add24d7aa0386b285fd3e99632719714efabeb13d7b03a015b7c64d1f588f815 \ --hash=sha256:c2063cbb053e5ce684cc45fff3e72c063b26aa85e41e6435cab0c658ad9e3e1e \ --hash=sha256:c65dc910d782fa2bc97e1b28a78d77c4bada037d14b63e3e75a1fa5918d642c5 \ + --hash=sha256:d1cb4757db97ac15ddf034c21cd6bab984f841586b6d53984e63c9a7803b2cd4 \ --hash=sha256:e14b310b820d895a7354be7fd025de874892df249cbfb3ad8a524459e1511fd8 \ --hash=sha256:ef00ca5681a2c4ad5dc744db5f8ae5406d3f13121b38d84cc58dfb8fce4c3dc2 \ --hash=sha256:f248e3161deb53d46a9368a7c164e36d83004faf2f11625d47a5cf23a6bdd2cb @@ -1654,27 +1656,48 @@ sqlalchemy==1.4.50 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131 \ --hash=sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3 \ --hash=sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7 \ + 
--hash=sha256:1b9c4359d3198f341480e57494471201e736de459452caaacf6faa1aca852bd8 \ --hash=sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace \ --hash=sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a \ --hash=sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab \ + --hash=sha256:324b1fdd50e960a93a231abb11d7e0f227989a371e3b9bd4f1259920f15d0304 \ --hash=sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814 \ + --hash=sha256:35c7ed095a4b17dbc8813a2bfb38b5998318439da8e6db10a804df855e3a9e3a \ + --hash=sha256:35e4520f7c33c77f2636a1e860e4f8cafaac84b0b44abe5de4c6c8890b6aaa6d \ --hash=sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf \ --hash=sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49 \ + --hash=sha256:4be4da121d297ce81e1ba745a0a0521c6cf8704634d7b520e350dce5964c71ac \ + --hash=sha256:52e01d60b06f03b0a5fc303c8aada405729cbc91a56a64cead8cb7c0b9b13c1a \ + --hash=sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8 \ + --hash=sha256:6b3df20fbbcbcd1c1d43f49ccf3eefb370499088ca251ded632b8cbaee1d497d \ + --hash=sha256:6c78e3fb4a58e900ec433b6b5f4efe1a0bf81bbb366ae7761c6e0051dd310ee3 \ + --hash=sha256:752f9df3dddbacb5f42d8405b2d5885675a93501eb5f86b88f2e47a839cf6337 \ + --hash=sha256:77fde9bf74f4659864c8e26ac08add8b084e479b9a18388e7db377afc391f926 \ + --hash=sha256:7b4396452273aedda447e5aebe68077aa7516abf3b3f48408793e771d696f397 \ --hash=sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c \ --hash=sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796 \ + --hash=sha256:8a219688297ee5e887a93ce4679c87a60da4a5ce62b7cb4ee03d47e9e767f558 \ --hash=sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75 \ --hash=sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18 \ + --hash=sha256:8bdab03ff34fc91bfab005e96f672ae207d87e0ac7ee716d74e87e7046079d8b \ 
--hash=sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c \ + --hash=sha256:a648770db002452703b729bdcf7d194e904aa4092b9a4d6ab185b48d13252f63 \ + --hash=sha256:a7c9b9dca64036008962dd6b0d9fdab2dfdbf96c82f74dbd5d86006d8d24a30f \ --hash=sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516 \ --hash=sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9 \ --hash=sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293 \ --hash=sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc \ --hash=sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5 \ --hash=sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb \ + --hash=sha256:d55f7a33e8631e15af1b9e67c9387c894fedf6deb1a19f94be8731263c51d515 \ --hash=sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5 \ --hash=sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4 \ + --hash=sha256:df200762efbd672f7621b253721644642ff04a6ff957236e0e2fe56d9ca34d2c \ + --hash=sha256:e86c920b7d362cfa078c8b40e7765cbc34efb44c1007d7557920be9ddf138ec7 \ --hash=sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436 \ --hash=sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be \ + --hash=sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7 \ + --hash=sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030 \ --hash=sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4 \ From 2283cfb4f16524d9cf23da591d79e6b72c27ac05 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Wed, 9 Oct 2024 11:44:25 +0000 Subject: [PATCH 07/33] style: Automatic code formatting --- lib/parsers_aux/ratking/__init__.py | 77 +++++-------------- lib/parsers_aux/ratking/utils/config_item.py 
| 4 +- lib/parsers_aux/ratking/utils/data_utils.py | 4 +- .../utils/decryptors/config_decryptor.py | 4 +- .../decryptors/config_decryptor_aes_cbc.py | 41 +++------- .../decryptors/config_decryptor_aes_ecb.py | 8 +- .../config_decryptor_decrypt_xor.py | 13 +--- .../decryptors/config_decryptor_plaintext.py | 8 +- .../config_decryptor_random_hardcoded.py | 24 ++---- .../ratking/utils/dotnetpe_payload.py | 20 ++--- 10 files changed, 51 insertions(+), 152 deletions(-) diff --git a/lib/parsers_aux/ratking/__init__.py b/lib/parsers_aux/ratking/__init__.py index 6b724e46bbf..a73ff2deaa7 100644 --- a/lib/parsers_aux/ratking/__init__.py +++ b/lib/parsers_aux/ratking/__init__.py @@ -34,17 +34,14 @@ from re import DOTALL, compile, search from typing import Any, Tuple -# from yara import Rules - from .config_parser_exception import ConfigParserException from .utils import config_item -from .utils.decryptors import ( - SUPPORTED_DECRYPTORS, - ConfigDecryptor, - IncompatibleDecryptorException, -) +from .utils.decryptors import SUPPORTED_DECRYPTORS, ConfigDecryptor, IncompatibleDecryptorException from .utils.dotnetpe_payload import DotNetPEPayload +# from yara import Rules + + logger = getLogger(__name__) @@ -54,9 +51,7 @@ class RATConfigParser: _MIN_CONFIG_LEN_CEILING = 9 # Pattern to find the VerifyHash() method - _PATTERN_VERIFY_HASH = compile( - rb"\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01", DOTALL - ) + _PATTERN_VERIFY_HASH = compile(rb"\x7e.{3}\x04(?:\x6f.{3}\x0a){2}\x74.{3}\x01", DOTALL) # def __init__(self, file_path: str, yara_rule: Rules = None) -> None: def __init__(self, file_data: bytes = None) -> None: @@ -77,14 +72,10 @@ def __init__(self, file_data: bytes = None) -> None: self._decryptor: ConfigDecryptor = None self.report["config"] = self._get_config() self.report["key"] = ( - self._decryptor.key.hex() - if self._decryptor is not None and self._decryptor.key is not None - else "None" + self._decryptor.key.hex() if self._decryptor is not None and 
self._decryptor.key is not None else "None" ) self.report["salt"] = ( - self._decryptor.salt.hex() - if self._decryptor is not None and self._decryptor.salt is not None - else "None" + self._decryptor.salt.hex() if self._decryptor is not None and self._decryptor.salt is not None else "None" ) except Exception as e: # self.report["config"] = f"Exception encountered for {file_path}: {e}" @@ -92,18 +83,13 @@ def __init__(self, file_data: bytes = None) -> None: # Decrypts/decodes values from an encrypted config and returns the # decrypted/decoded config - def _decrypt_and_decode_config( - self, encrypted_config: bytes, min_config_len: int - ) -> dict[str, Any]: + def _decrypt_and_decode_config(self, encrypted_config: bytes, min_config_len: int) -> dict[str, Any]: decoded_config = {} for item_class in config_item.SUPPORTED_CONFIG_ITEMS: item = item_class() # Translate config Field RVAs to Field names - item_data = { - self._dnpp.field_name_from_rva(k): v - for k, v in item.parse_from(encrypted_config).items() - } + item_data = {self._dnpp.field_name_from_rva(k): v for k, v in item.parse_from(encrypted_config).items()} if len(item_data) > 0: if type(item) is config_item.EncryptedStringConfigItem: @@ -122,22 +108,16 @@ def _decrypt_and_decode_config( try: self._decryptor = decryptor(self._dnpp) except IncompatibleDecryptorException as ide: - logger.debug( - f"Decryptor incompatible {decryptor} : {ide}" - ) + logger.debug(f"Decryptor incompatible {decryptor} : {ide}") self._incompatible_decryptors.append(decryptor) continue try: # Try to decrypt the encrypted strings # Continue to next compatible decryptor on failure - item_data = self._decryptor.decrypt_encrypted_strings( - item_data - ) + item_data = self._decryptor.decrypt_encrypted_strings(item_data) break except Exception as e: - logger.debug( - f"Decryption failed with decryptor {decryptor} : {e}" - ) + logger.debug(f"Decryption failed with decryptor {decryptor} : {e}") self._decryptor = None if self._decryptor is 
None: @@ -146,16 +126,12 @@ def _decrypt_and_decode_config( elif type(item) is config_item.ByteArrayConfigItem: for k in item_data: arr_size, arr_rva = item_data[k] - item_data[k] = self._dnpp.byte_array_from_size_and_rva( - arr_size, arr_rva - ).hex() + item_data[k] = self._dnpp.byte_array_from_size_and_rva(arr_size, arr_rva).hex() decoded_config.update(item_data) if len(decoded_config) < min_config_len: - raise ConfigParserException( - f"Minimum threshold of config items not met: {len(decoded_config)}/{min_config_len}" - ) + raise ConfigParserException(f"Minimum threshold of config items not met: {len(decoded_config)}/{min_config_len}") return decoded_config # Searches for the RAT configuration section, using the VerifyHash() marker @@ -185,10 +161,7 @@ def _get_config_cctor_brute_force(self) -> Tuple[int, dict[str, Any]]: raise ConfigParserException("No .cctor method could be found") # For each .cctor method, map its RVA and body (in raw bytes) - candidate_cctor_data = { - method.rva: self._dnpp.method_body_from_method(method) - for method in candidates - } + candidate_cctor_data = {method.rva: self._dnpp.method_body_from_method(method) for method in candidates} config_start, decrypted_config = None, None # Start at our ceiling value for number of config items @@ -196,9 +169,7 @@ def _get_config_cctor_brute_force(self) -> Tuple[int, dict[str, Any]]: while decrypted_config is None and min_config_len >= self._MIN_CONFIG_LEN_FLOOR: for method_rva, method_body in candidate_cctor_data.items(): - logger.debug( - f"Attempting brute force at .cctor method at {hex(method_rva)}" - ) + logger.debug(f"Attempting brute force at .cctor method at {hex(method_rva)}") try: config_start, decrypted_config = ( method_rva, @@ -206,17 +177,13 @@ def _get_config_cctor_brute_force(self) -> Tuple[int, dict[str, Any]]: ) break except Exception as e: - logger.debug( - f"Brute force failed for method at {hex(method_rva)}: {e}" - ) + logger.debug(f"Brute force failed for method at 
{hex(method_rva)}: {e}") continue # Reduce the minimum config length until we reach our floor min_config_len -= 1 if decrypted_config is None: - raise ConfigParserException( - "No valid configuration could be parsed from any .cctor methods" - ) + raise ConfigParserException("No valid configuration could be parsed from any .cctor methods") return config_start, decrypted_config # Attempts to retrieve the config via looking for a config section preceded @@ -230,16 +197,12 @@ def _get_config_verify_hash_method(self) -> Tuple[int, dict[str, Any]]: # Reverse the hit to find the VerifyHash() method, then grab the # subsequent function - config_method = self._dnpp.method_from_instruction_offset( - verify_hash_hit.start(), 1 - ) + config_method = self._dnpp.method_from_instruction_offset(verify_hash_hit.start(), 1) encrypted_config = self._dnpp.method_body_from_method(config_method) min_config_len = self._MIN_CONFIG_LEN_CEILING while True: try: - decrypted_config = self._decrypt_and_decode_config( - encrypted_config, min_config_len - ) + decrypted_config = self._decrypt_and_decode_config(encrypted_config, min_config_len) return config_method.rva, decrypted_config except Exception as e: # Reduce the minimum config length until we reach our floor diff --git a/lib/parsers_aux/ratking/utils/config_item.py b/lib/parsers_aux/ratking/utils/config_item.py index cba36ed272c..e466c018b5e 100644 --- a/lib/parsers_aux/ratking/utils/config_item.py +++ b/lib/parsers_aux/ratking/utils/config_item.py @@ -68,9 +68,7 @@ def parse_from(self, data: bytes) -> dict[int, Any]: fields[field_rva] = field_value found_items += 1 else: - logger.debug( - f"Overlapping Field RVAs detected in config at {hex(field_rva)}" - ) + logger.debug(f"Overlapping Field RVAs detected in config at {hex(field_rva)}") logger.debug(f"Parsed {found_items} {self._label} values") return fields diff --git a/lib/parsers_aux/ratking/utils/data_utils.py b/lib/parsers_aux/ratking/utils/data_utils.py index 
34d96ce3964..1f0ec88cd02 100644 --- a/lib/parsers_aux/ratking/utils/data_utils.py +++ b/lib/parsers_aux/ratking/utils/data_utils.py @@ -50,9 +50,7 @@ def decode_bytes(byte_str: bytes | str) -> str: else: result = byte_str.decode("utf-8") except Exception: - raise ConfigParserException( - f"Error decoding bytes object to Unicode: {byte_str}" - ) + raise ConfigParserException(f"Error decoding bytes object to Unicode: {byte_str}") return result diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py index 16084742d56..8b4827d38b3 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor.py @@ -49,7 +49,5 @@ def __init__(self, payload: DotNetPEPayload) -> None: # Abstract method to take in a map representing a configuration of config # Field names and values and return a decoded/decrypted configuration @abstractmethod - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, list[str] | str]: + def decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, list[str] | str]: pass diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py index 13bd9b4a359..886a804eb68 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_cbc.py @@ -56,9 +56,7 @@ class ConfigDecryptorAESCBC(ConfigDecryptor): _MIN_CIPHERTEXT_LEN = 48 # Patterns for identifying AES metadata - _PATTERN_AES_KEY_AND_BLOCK_SIZE = compile( - b"[\x06-\x09]\x20(.{4})\x6f.{4}[\x06-\x09]\x20(.{4})", DOTALL - ) + _PATTERN_AES_KEY_AND_BLOCK_SIZE = compile(b"[\x06-\x09]\x20(.{4})\x6f.{4}[\x06-\x09]\x20(.{4})", DOTALL) # Do not compile in-line replacement patterns _PATTERN_AES_KEY_BASE = b"(.{3}\x04).%b" 
_PATTERN_AES_SALT_INIT = b"\x80%b\x2a" @@ -79,9 +77,7 @@ def __init__(self, payload: DotNetPEPayload) -> None: # Given an initialization vector and ciphertext, creates a Cipher # object with the AES key and specified IV and decrypts the ciphertext def _decrypt(self, iv: bytes, ciphertext: bytes) -> bytes: - logger.debug( - f"Decrypting {ciphertext} with key {self.key.hex()} and IV {iv.hex()}..." - ) + logger.debug(f"Decrypting {ciphertext} with key {self.key.hex()} and IV {iv.hex()}...") aes_cipher = Cipher(AES(self.key), CBC(iv), backend=default_backend()) decryptor = aes_cipher.decryptor() # Use a PKCS7 unpadder to remove padding from decrypted value @@ -113,9 +109,7 @@ def _derive_aes_passphrase_candidates(self, key_val: str) -> list[bytes]: return passphrase_candidates # Decrypts encrypted config values with the provided cipher data - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, str]: + def decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, str]: logger.debug("Decrypting encrypted strings...") if self._key_candidates is None: self._key_candidates = self._get_aes_key_candidates(encrypted_strings) @@ -157,9 +151,7 @@ def decrypt_encrypted_strings( last_exc = e if result is None: - logger.debug( - f"Decryption failed for item {v}: {last_exc}; Leaving as original value..." 
- ) + logger.debug(f"Decryption failed for item {v}: {last_exc}; Leaving as original value...") result = v logger.debug(f"Key: {k}, Value: {result}") @@ -174,9 +166,7 @@ def _get_aes_key_candidates(self, encrypted_strings: dict[str, str]) -> list[byt keys = [] # Use the key Field name to index into our existing config - key_raw_value = encrypted_strings[ - self._payload.field_name_from_rva(self._key_rva) - ] + key_raw_value = encrypted_strings[self._payload.field_name_from_rva(self._key_rva)] passphrase_candidates = self._derive_aes_passphrase_candidates(key_raw_value) for candidate in passphrase_candidates: @@ -196,9 +186,7 @@ def _get_aes_key_candidates(self, encrypted_strings: dict[str, str]) -> list[byt continue if len(keys) == 0: - raise ConfigParserException( - f"Could not derive key from passphrase candidates: {passphrase_candidates}" - ) + raise ConfigParserException(f"Could not derive key from passphrase candidates: {passphrase_candidates}") return keys # Extracts the AES key and block size from the payload @@ -222,9 +210,7 @@ def _get_aes_key_rva(self, metadata_ins_offset: int) -> int: logger.debug("Extracting AES key RVA...") # Get the RVA of the method that sets up AES256 metadata - metadata_method_token = self._payload.method_from_instruction_offset( - metadata_ins_offset, by_token=True - ).token + metadata_method_token = self._payload.method_from_instruction_offset(metadata_ins_offset, by_token=True).token # Insert this RVA into the KEY_BASE pattern to find where the AES key # is initialized @@ -269,9 +255,7 @@ def _get_aes_salt(self, salt_rva: int) -> bytes: # # stsfld uint8[] Client.Algorithm.Aes256::Salt # ret - aes_salt_initialization = self._payload.data.find( - self._PATTERN_AES_SALT_INIT % escape(salt_rva) - ) + aes_salt_initialization = self._payload.data.find(self._PATTERN_AES_SALT_INIT % escape(salt_rva)) if aes_salt_initialization == -1: raise ConfigParserException("Could not identify AES salt initialization") @@ -283,9 +267,7 @@ def 
_get_aes_salt(self, salt_rva: int) -> bytes: salt_op = bytes([self._payload.data[salt_op_offset]]) # Get the salt RVA from the 4 bytes following the initialization op - salt_strings_rva_packed = self._payload.data[ - salt_op_offset + 1 : salt_op_offset + 5 - ] + salt_strings_rva_packed = self._payload.data[salt_op_offset + 1 : salt_op_offset + 5] salt_strings_rva = bytes_to_int(salt_strings_rva_packed) # If the op is a ldstr op, just get the bytes value of the string being @@ -300,12 +282,9 @@ def _get_aes_salt(self, salt_rva: int) -> bytes: # byte array value from the FieldRVA table elif salt_op == OPCODE_LDTOKEN: salt_size = self._payload.data[salt_op_offset - 7] - salt = self._payload.byte_array_from_size_and_rva( - salt_size, salt_strings_rva - ) + salt = self._payload.byte_array_from_size_and_rva(salt_size, salt_strings_rva) else: raise ConfigParserException(f"Unknown salt opcode found: {salt_op.hex()}") logger.debug(f"Found salt value: {salt.hex()}") return salt - diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py index 75e54f3f4c6..668c18de4e6 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_aes_ecb.py @@ -74,17 +74,13 @@ def _decrypt(self, ciphertext: bytes) -> bytes: padded_text = decryptor.update(ciphertext) + decryptor.finalize() unpadded_text = unpadder.update(padded_text) + unpadder.finalize() except Exception as e: - raise ConfigParserException( - f"Error decrypting ciphertext {ciphertext} with key {self.key.hex()}: {e}" - ) + raise ConfigParserException(f"Error decrypting ciphertext {ciphertext} with key {self.key.hex()}: {e}") logger.debug(f"Decryption result: {unpadded_text}") return unpadded_text # Decrypts encrypted config values with the provided cipher data - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, 
str]: + def decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, str]: logger.debug("Decrypting encrypted strings...") if self.key is None: diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py index 74311e5e7a2..7ea52618ceb 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_decrypt_xor.py @@ -83,9 +83,7 @@ def _decode_encoded_strings(self) -> list[str]: # Parses the config, adds decoded XOR strings, and returns the decoded # config - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, list[str] | str]: + def decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, list[str] | str]: config = {} # Pass off plaintext config to a ConfigDecryptorPlaintext ptcd = ConfigDecryptorPlaintext(self._payload) @@ -105,18 +103,13 @@ def _get_xor_metadata(self): self._xor_strings = list( filter( None, - [ - self._payload.user_string_from_rva(bytes_to_int(rva)) - for rva in xor_string_rvas - ], + [self._payload.user_string_from_rva(bytes_to_int(rva)) for rva in xor_string_rvas], ) ) logger.debug(f"{len(self._xor_strings)} XOR strings found") # Get the static constructor containing the XOR key - xor_key_cctor = self._payload.method_from_instruction_offset( - dxor_block.start(), step=1, by_token=True - ) + xor_key_cctor = self._payload.method_from_instruction_offset(dxor_block.start(), step=1, by_token=True) xor_key_cctor_body = self._payload.method_body_from_method(xor_key_cctor) # Derive the XOR key RVA and value diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py index ebd87c49ee8..b24d2b387fd 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py +++ 
b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_plaintext.py @@ -116,13 +116,9 @@ def __init__(self, payload: DotNetPEPayload) -> None: # Calculates whether the config meets the minimum threshold for known Field # Names and returns it if it does - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, str]: + def decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, str]: field_names = set(encrypted_strings.keys()) num_overlapping_field_names = len(KNOWN_CONFIG_FIELD_NAMES & field_names) if num_overlapping_field_names < self.MIN_THRESHOLD_MATCH: - raise ConfigParserException( - "Plaintext threshold of known config items not met" - ) + raise ConfigParserException("Plaintext threshold of known config items not met") return encrypted_strings diff --git a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py index 747364b8a9d..e5d598f47b7 100644 --- a/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py +++ b/lib/parsers_aux/ratking/utils/decryptors/config_decryptor_random_hardcoded.py @@ -47,9 +47,7 @@ class ConfigDecryptorRandomHardcoded(ConfigDecryptor): _KEY_HARDCODED_HOSTS = "hardcoded_hosts" # Pattern to find the Method that retrieves a random domain - _PATTERN_RANDOM_DOMAIN = compile( - rb"(?:\x73.{3}\x0a){2}\x25.+?\x0a\x06(?:\x6f.{3}\x0a){2}\x0b", flags=DOTALL - ) + _PATTERN_RANDOM_DOMAIN = compile(rb"(?:\x73.{3}\x0a){2}\x25.+?\x0a\x06(?:\x6f.{3}\x0a){2}\x0b", flags=DOTALL) def __init__(self, payload: DotNetPEPayload) -> None: super().__init__(payload) @@ -59,9 +57,7 @@ def __init__(self, payload: DotNetPEPayload) -> None: raise IncompatibleDecryptorException(e) # Returns a combined config containing config fields + hardcoded hosts - def decrypt_encrypted_strings( - self, encrypted_strings: dict[str, str] - ) -> dict[str, list[str] | str]: + def 
decrypt_encrypted_strings(self, encrypted_strings: dict[str, str]) -> dict[str, list[str] | str]: config = {} # Pass off plaintext config to a ConfigDecryptorPlaintext ptcd = ConfigDecryptorPlaintext(self._payload) @@ -71,9 +67,7 @@ def decrypt_encrypted_strings( # Retrieves and returns a list of hardcoded hosts def _get_hardcoded_hosts(self) -> list[str]: - random_domain_method_body = self._payload.method_body_from_method( - self._random_domain_method - ) + random_domain_method_body = self._payload.method_body_from_method(self._random_domain_method) hardcoded_host_rvas = findall(PATTERN_LDSTR_OP, random_domain_method_body) hardcoded_hosts = [] @@ -94,15 +88,9 @@ def _get_random_domain_method(self) -> DotNetPEMethod: logger.debug("Searching for random domain method") random_domain_marker = search(self._PATTERN_RANDOM_DOMAIN, self._payload.data) if random_domain_marker is None: - raise ConfigParserException( - "Could not identify random domain generator method" - ) + raise ConfigParserException("Could not identify random domain generator method") - random_domain_method = self._payload.method_from_instruction_offset( - random_domain_marker.start() - ) + random_domain_method = self._payload.method_from_instruction_offset(random_domain_marker.start()) - logger.debug( - f"Random domain generator found at offset {hex(random_domain_method.offset)}" - ) + logger.debug(f"Random domain generator found at offset {hex(random_domain_method.offset)}") return random_domain_method diff --git a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py b/lib/parsers_aux/ratking/utils/dotnetpe_payload.py index d704b3397d2..d2d9d3f60f3 100644 --- a/lib/parsers_aux/ratking/utils/dotnetpe_payload.py +++ b/lib/parsers_aux/ratking/utils/dotnetpe_payload.py @@ -88,9 +88,7 @@ def byte_array_from_size_and_rva(self, arr_size: int, arr_rva: int) -> bytes: # Given an offset, and either a terminating offset or delimiter, extracts # the byte string - def byte_string_from_offset( - self, offset_start: 
int, offstart_end: int = -1, delimiter: bytes = b"\0" - ) -> bytes: + def byte_string_from_offset(self, offset_start: int, offstart_end: int = -1, delimiter: bytes = b"\0") -> bytes: if offstart_end != -1: try: return self.data[offset_start:offstart_end] @@ -108,9 +106,7 @@ def byte_string_from_offset( # Given an RVA, derives the corresponding Field name def field_name_from_rva(self, rva: int) -> str: try: - return self.dotnetpe.net.mdtables.Field.rows[ - (rva ^ MDT_FIELD_DEF) - 1 - ].Name.value + return self.dotnetpe.net.mdtables.Field.rows[(rva ^ MDT_FIELD_DEF) - 1].Name.value except Exception: raise ConfigParserException(f"Could not find Field for RVA {rva}") @@ -138,9 +134,7 @@ def _generate_method_list( if flags & 3 == 2: # Tiny format method_size = flags >> 2 elif flags & 3 == 3: # Fat format (add 12-byte header) - method_size = 12 + bytes_to_int( - self.data[method_offset + 4 : method_offset + 8] - ) + method_size = 12 + bytes_to_int(self.data[method_offset + 4 : method_offset + 8]) method_objs.append( DotNetPEMethod( @@ -186,9 +180,7 @@ def methods_from_name(self, name: str) -> list[DotNetPEMethod]: # parent Method, optionally returning an adjacent Method using step to # signify the direction of adjacency, and using by_token to determine # whether to calculate adjacency by token or offset - def method_from_instruction_offset( - self, ins_offset: int, step: int = 0, by_token: bool = False - ) -> DotNetPEMethod: + def method_from_instruction_offset(self, ins_offset: int, step: int = 0, by_token: bool = False) -> DotNetPEMethod: for idx, method in enumerate(self._methods_by_offset): if method.offset <= ins_offset < method.offset + method.size: return ( @@ -196,9 +188,7 @@ def method_from_instruction_offset( if by_token else self._methods_by_offset[idx + step] ) - raise ConfigParserException( - f"Could not find method from instruction offset {hex(ins_offset)}" - ) + raise ConfigParserException(f"Could not find method from instruction offset {hex(ins_offset)}") 
# Given an RVA, returns a data/file offset def offset_from_rva(self, rva: int) -> int: From b0f2da4fd06a1eb130d0baf9d8e2cdd5bb31362a Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Wed, 9 Oct 2024 13:43:11 +0100 Subject: [PATCH 08/33] Rat King families: fix naming --- data/yara/CAPE/{AsyncRat.yar => AsyncRAT.yar} | 10 +++++----- .../parsers/CAPE/{AsyncRat.py => AsyncRAT.py} | 0 .../parsers/CAPE/{Quasarrat.py => QuasarRAT.py} | 0 .../parsers/CAPE/{VenomRat.py => VenomRAT.py} | 0 .../processing/parsers/CAPE/{Xenorat.py => XenoRAT.py} | 0 5 files changed, 5 insertions(+), 5 deletions(-) rename data/yara/CAPE/{AsyncRat.yar => AsyncRAT.yar} (90%) rename modules/processing/parsers/CAPE/{AsyncRat.py => AsyncRAT.py} (100%) rename modules/processing/parsers/CAPE/{Quasarrat.py => QuasarRAT.py} (100%) rename modules/processing/parsers/CAPE/{VenomRat.py => VenomRAT.py} (100%) rename modules/processing/parsers/CAPE/{Xenorat.py => XenoRAT.py} (100%) diff --git a/data/yara/CAPE/AsyncRat.yar b/data/yara/CAPE/AsyncRAT.yar similarity index 90% rename from data/yara/CAPE/AsyncRat.yar rename to data/yara/CAPE/AsyncRAT.yar index 936299acdfb..84a02f65c2e 100644 --- a/data/yara/CAPE/AsyncRat.yar +++ b/data/yara/CAPE/AsyncRAT.yar @@ -1,9 +1,9 @@ -rule AsyncRat +rule AsyncRAT { meta: author = "kevoreilly, JPCERT/CC Incident Response Group" - description = "AsyncRat Payload" - cape_type = "AsyncRat Payload" + description = "AsyncRAT Payload" + cape_type = "AsyncRAT Payload" strings: $salt = {BF EB 1E 56 FB CD 97 3B B2 19 02 24 30 A5 78 43 00 3D 56 44 D2 1E 62 B9 D4 F1 80 E7 E6 C3 39 41} $b1 = {00 00 00 0D 53 00 48 00 41 00 32 00 35 00 36 00 00} @@ -16,10 +16,10 @@ rule AsyncRat uint16(0) == 0x5A4D and not $kitty and ($salt and (2 of ($str*) or 1 of ($b*))) or (all of ($b*) and 2 of ($str*)) } -rule asyncrat_kingrat { +rule AsyncRAT_kingrat { meta: author = "jeFF0Falltrades" - cape_type = "AsyncRat Payload" + cape_type = "AsyncRAT Payload" strings: $str_async = "AsyncClient" wide ascii 
nocase diff --git a/modules/processing/parsers/CAPE/AsyncRat.py b/modules/processing/parsers/CAPE/AsyncRAT.py similarity index 100% rename from modules/processing/parsers/CAPE/AsyncRat.py rename to modules/processing/parsers/CAPE/AsyncRAT.py diff --git a/modules/processing/parsers/CAPE/Quasarrat.py b/modules/processing/parsers/CAPE/QuasarRAT.py similarity index 100% rename from modules/processing/parsers/CAPE/Quasarrat.py rename to modules/processing/parsers/CAPE/QuasarRAT.py diff --git a/modules/processing/parsers/CAPE/VenomRat.py b/modules/processing/parsers/CAPE/VenomRAT.py similarity index 100% rename from modules/processing/parsers/CAPE/VenomRat.py rename to modules/processing/parsers/CAPE/VenomRAT.py diff --git a/modules/processing/parsers/CAPE/Xenorat.py b/modules/processing/parsers/CAPE/XenoRAT.py similarity index 100% rename from modules/processing/parsers/CAPE/Xenorat.py rename to modules/processing/parsers/CAPE/XenoRAT.py From c083d182a8fc85d4fc8ff83b7e2fd88add4f2a4a Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Wed, 9 Oct 2024 15:54:09 +0100 Subject: [PATCH 09/33] Move RAT King family yara sigs from community to main repo --- data/yara/CAPE/DCRat.yar | 87 ++++++++++++++++++++++++++++++++++++ data/yara/CAPE/QuasarRAT.yar | 43 ++++++++++++++++++ data/yara/CAPE/XWorm.yar | 46 +++++++++++++++++++ data/yara/CAPE/XenoRAT.yar | 14 ++++++ 4 files changed, 190 insertions(+) create mode 100644 data/yara/CAPE/DCRat.yar create mode 100644 data/yara/CAPE/QuasarRAT.yar create mode 100644 data/yara/CAPE/XWorm.yar create mode 100644 data/yara/CAPE/XenoRAT.yar diff --git a/data/yara/CAPE/DCRat.yar b/data/yara/CAPE/DCRat.yar new file mode 100644 index 00000000000..4ca7696dfd0 --- /dev/null +++ b/data/yara/CAPE/DCRat.yar @@ -0,0 +1,87 @@ +rule DCRat { + meta: + author = "ditekSHen" + description = "DCRat payload" + cape_type = "DCRat Payload" + strings: + // DCRat + $dc1 = "DCRatBuild" ascii + $dc2 = "DCStlr" ascii + $x1 = "px\">
DCRat Keylogger" wide + $x2 = "DCRat-Log#" wide + $x3 = "DCRat.Code" wide + $string1 = "CaptureBrowsers" fullword ascii + $string2 = "DecryptBrowsers" fullword ascii + $string3 = "Browsers.IE10" ascii + $string4 = "Browsers.Chromium" ascii + $string5 = "WshShell" ascii + $string6 = "SysMngmts" fullword ascii + $string7 = "LoggerData" fullword ascii + // DCRat Plugins/Libraries + $plugin = "DCRatPlugin" fullword ascii + // AntiVM + $av1 = "AntiVM" ascii wide + $av2 = "vmware" fullword wide + $av3 = "VirtualBox" fullword wide + $av4 = "microsoft corporation" fullword wide + $av5 = "VIRTUAL" fullword wide + $av6 = "DetectVirtualMachine" fullword ascii + $av7 = "Select * from Win32_ComputerSystem" fullword wide + // Plugin_AutoStealer, Plugin_AutoKeylogger + $pl1 = "dcratAPI" fullword ascii + $pl2 = "dsockapi" fullword ascii + $pl3 = "file_get_contents" fullword ascii + $pl4 = "classthis" fullword ascii + $pl5 = "typemdt" fullword ascii + $pl6 = "Plugin_AutoStealer" ascii wide + $pl7 = "Plugin_AutoKeylogger" ascii wide + // variant + $v1 = "Plugin couldn't process this action!" wide + $v2 = "Unknown command!" wide + $v3 = "PLUGINCONFIGS" wide + $v4 = "Saving log..." 
wide + $v5 = "~Work.log" wide + $v6 = "MicrophoneNum" fullword wide + $v7 = "WebcamNum" fullword wide + $v8 = "%SystemDrive% - Slow" wide + $v9 = "%UsersFolder% - Fast" wide + $v10 = "%AppData% - Very Fast" wide + $v11 = /\[(Up|Down|Enter|ESC|CTRL|Shift|Win|Tab|CAPSLOCK: (ON|OFF))\]<\/span>/ wide + $px1 = "[Browsers] Scanned elements: " wide + $px2 = "[Browsers] Grabbing cookies" wide + $px3 = "[Browsers] Grabbing passwords" wide + $px4 = "[Browsers] Grabbing forms" wide + $px5 = "[Browsers] Grabbing CC" wide + $px6 = "[Browsers] Grabbing history" wide + $px7 = "[StealerPlugin] Invoke: " wide + $px8 = "[Other] Grabbing steam" wide + $px9 = "[Other] Grabbing telegram" wide + $px10 = "[Other] Grabbing discord tokens" wide + $px11 = "[Other] Grabbing filezilla" wide + $px12 = "[Other] Screenshots:" wide + $px13 = "[Other] Clipboard" wide + $px14 = "[Other] Saving system information" wide + condition: + uint16(0) == 0x5a4d and (all of ($dc*) or all of ($string*) or 2 of ($x*) or 6 of ($v*) or 5 of ($px*)) or ($plugin and (4 of ($av*) or 5 of ($pl*))) +} + +rule dcrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "DCRat Payload" + strings: + $venom_1 = "VenomRAT" wide ascii nocase + $venom_2 = "HVNC_REPLY_MESSAGE" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $str_b64_amsi = "YW1zaS5kbGw=" wide ascii + $str_b64_virtual_protect = "VmlydHVhbFByb3RlY3Q=" wide ascii + $str_dcrat = "dcrat" wide ascii nocase + $str_plugin = "save_Plugin" wide ascii + $str_qwqdan = "qwqdan" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 80 } + $patt_config = { 72 [3] 70 80 [3] 04 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + + condition: + (not any of ($venom*)) and 5 of them and #patt_config >= 10 +} diff --git a/data/yara/CAPE/QuasarRAT.yar b/data/yara/CAPE/QuasarRAT.yar new file mode 100644 index 00000000000..8877430d23c --- /dev/null +++ b/data/yara/CAPE/QuasarRAT.yar @@ -0,0 +1,43 @@ +rule QuasarRAT { + meta: 
+ author = "ditekshen" + description = "QuasarRAT payload" + cape_type = "QuasarRAT Payload" + strings: + $s1 = "GetKeyloggerLogsResponse" fullword ascii + $s2 = "GetKeyloggerLogs" fullword ascii + $s3 = "/>Log created on" wide + $s4 = "User: {0}{3}Pass: {1}{3}Host: {2}" wide + $s5 = "Domain: {1}{0}Cookie Name: {2}{0}Value: {3}{0}Path: {4}{0}Expired: {5}{0}HttpOnly: {6}{0}Secure: {7}" wide + $s6 = "grabber_" wide + $s7 = "" ascii + $s8 = "k__BackingField" fullword ascii + $s9 = "" ascii + $s10 = "add_OnHotKeysDown" ascii + $mutex = "QSR_MUTEX_" ascii wide + $ua1 = "Mozilla/5.0 (Windows NT 6.3; rv:48.0) Gecko/20100101 Firefox/48.0" fullword wide + $us2 = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A" fullword wide + condition: + uint16(0) == 0x5a4d and ($mutex or (all of ($ua*) and 2 of them) or 6 of ($s*)) +} + +rule quasarrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "QuasarRAT Payload" + strings: + $str_quasar = "Quasar." wide ascii + $str_hidden = "set_Hidden" wide ascii + $str_shell = "DoShellExecuteResponse" wide ascii + $str_close = "echo DONT CLOSE THIS WINDOW!" 
wide ascii + $str_pause = "ping -n 10 localhost > nul" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 25 } + $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } + $byte_special_folder = { 7e 73 [4] 28 [4] 80 } + $patt_config = { 72 [3] 70 80 [3] 04 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + + condition: + 6 of them and #patt_config >= 10 +} diff --git a/data/yara/CAPE/XWorm.yar b/data/yara/CAPE/XWorm.yar new file mode 100644 index 00000000000..76e401a3e47 --- /dev/null +++ b/data/yara/CAPE/XWorm.yar @@ -0,0 +1,46 @@ +rule XWorm { + meta: + author = "ditekSHen" + description = "Detects XWorm" + cape_type = "XWorm Payload" + strings: + $x1 = "XWorm " wide nocase + $x2 = /XWorm\s(V|v)\d+\.\d+/ fullword wide + $s1 = "RunBotKiller" fullword wide + $s2 = "XKlog.txt" fullword wide + $s3 = /(shell|reg)fuc/ fullword wide + $s4 = "closeshell" fullword ascii + $s5 = { 62 00 79 00 70 00 73 00 73 00 00 ?? 63 00 61 00 6c 00 6c 00 75 00 61 00 63 00 00 ?? 73 00 63 00 } + $s6 = { 44 00 44 00 6f 00 73 00 54 00 00 ?? 43 00 69 00 6c 00 70 00 70 00 65 00 72 00 00 ?? 50 00 45 00 } + $s7 = { 69 00 6e 00 6a 00 52 00 75 00 6e 00 00 ?? 73 00 74 00 61 00 72 00 74 00 75 00 73 00 62 } + $s8 = { 48 6f 73 74 00 50 6f 72 74 00 75 70 6c 6f 61 64 65 72 00 6e 61 6d 65 65 65 00 4b 45 59 00 53 50 4c 00 4d 75 74 65 78 78 00 } + $v2_1 = "PING!" 
fullword wide + $v2_2 = "Urlhide" fullword wide + $v2_3 = /PC(Restart|Shutdown)/ fullword wide + $v2_4 = /(Start|Stop)(DDos|Report)/ fullword wide + $v2_5 = /Offline(Get|Keylogger)/ wide + $v2_6 = "injRun" fullword wide + $v2_7 = "Xchat" fullword wide + $v2_8 = "UACFunc" fullword ascii wide + condition: + uint16(0) == 0x5a4d and ((1 of ($x*) and (3 of ($s*) or 3 of ($v2*))) or 6 of them) +} + +rule xworm_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "XWorm payload" + strings: + $str_xworm = "xworm" wide ascii nocase + $str_xwormmm = "Xwormmm" wide ascii + $str_xclient = "XClient" wide ascii + $str_default_log = "\\Log.tmp" wide ascii + $str_create_proc = "/create /f /RL HIGHEST /sc minute /mo 1 /t" wide ascii + $str_ddos_start = "StartDDos" wide ascii + $str_ddos_stop = "StopDDos" wide ascii + $str_timeout = "timeout 3 > NUL" wide ascii + $byte_md5_hash = { 7e [3] 04 28 [3] 06 6f } + $patt_config = { 72 [3] 70 80 [3] 04 } + condition: + 5 of them and #patt_config >= 7 + } diff --git a/data/yara/CAPE/XenoRAT.yar b/data/yara/CAPE/XenoRAT.yar new file mode 100644 index 00000000000..4208eb54c91 --- /dev/null +++ b/data/yara/CAPE/XenoRAT.yar @@ -0,0 +1,14 @@ +rule XenoRAT { + meta: + author = "jeFF0Falltrades" + cape_type = "XenoRAT payload" + strings: + $str_xeno_rat_1 = "xeno rat" wide ascii nocase + $str_xeno_rat_2 = "xeno_rat" wide ascii nocase + $str_xeno_update_mgr = "XenoUpdateManager" wide ascii + $str_nothingset = "nothingset" wide ascii + $byte_enc_dec_pre = { 1f 10 8d [4] (0a | 0b) } + $patt_config = { 72 [3] 70 80 [3] 04 } + condition: + 4 of them and #patt_config >= 5 + } From da5694aff488bb80e8713a63a7cb7cd42264fbe0 Mon Sep 17 00:00:00 2001 From: xiangchen96 Date: Fri, 11 Oct 2024 11:04:22 +0200 Subject: [PATCH 10/33] Only upload_to_host once in browsermonitor (#2346) --- analyzer/windows/modules/auxiliary/browsermonitor.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/analyzer/windows/modules/auxiliary/browsermonitor.py 
b/analyzer/windows/modules/auxiliary/browsermonitor.py index e46e8fbe52c..6989f190b20 100644 --- a/analyzer/windows/modules/auxiliary/browsermonitor.py +++ b/analyzer/windows/modules/auxiliary/browsermonitor.py @@ -27,8 +27,6 @@ def __init__(self, options=None, config=None): self.startupinfo = subprocess.STARTUPINFO() self.startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW self.browser_logfile = "" - self.last_modification = 0.0 - self._is_first_save = True def _find_browser_extension(self): temp_dir = tempfile.gettempdir() @@ -54,22 +52,12 @@ def _find_browser_extension(self): time.sleep(1) def _collect_browser_logs(self): - if not self._is_first_save and self.last_modification != os.path.getmtime(self.browser_logfile): - return - self.last_modification = os.path.getmtime(self.browser_logfile) upload_to_host(self.browser_logfile, "browser/requests.log") - self._is_first_save = False def run(self): self.do_run = True if self.enabled: self._find_browser_extension() - self.last_modification = os.path.getmtime(self.browser_logfile) - while self.do_run: - self._collect_browser_logs() - time.sleep(1) - return True - return False def stop(self): if self.enabled: From b9a4f95dbd703558e4c3111f48271d662dd08a54 Mon Sep 17 00:00:00 2001 From: Josh Feather <142008135+josh-feather@users.noreply.github.com> Date: Fri, 11 Oct 2024 10:05:00 +0100 Subject: [PATCH 11/33] Adds private auxiliary module configuration option (#2345) This adds a new `configure_from_data` method to analyzer.windows.lib.common.abstracts.Auxiliary. This optional method provides the ability to run private auxiliary-specific configuration code from `data.packages.`. If an auxiliary module doesn't provide a `configure_from_data` method, it's logged but ignored. 
--- analyzer/windows/analyzer.py | 16 ++++++++ analyzer/windows/lib/common/abstracts.py | 26 ++++++++++++ docs/book/src/customization/auxiliary.rst | 49 +++++++++++++++++++++++ 3 files changed, 91 insertions(+) diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py index 7c5694e84e5..c4543a2d419 100644 --- a/analyzer/windows/analyzer.py +++ b/analyzer/windows/analyzer.py @@ -509,6 +509,21 @@ def run(self): except ImportError as e: log.warning('Unable to import the auxiliary module "%s": %s', name, e) + def configure_aux_from_data(instance): + # Do auxiliary module configuration stored in 'data/auxiliary/' + _class = type(instance) + try: + log.debug("attempting to configure '%s' from data", _class.__name__) + instance.configure_from_data() + except ModuleNotFoundError: + # let it go, not every module is configurable from data + log.debug("module %s does not support data configuration, ignoring", _class.__name__) + except ImportError as iexc: + # let it go but emit a warning; assume a dependency is missing + log.warning("configuration error for module %s: %s", _class.__name__, iexc) + except Exception as exc: + log.error("error configuring module %s: %s", _class.__name__, exc) + # Walk through the available auxiliary modules. 
aux_modules = [] @@ -517,6 +532,7 @@ def run(self): aux = module(self.options, self.config) log.debug('Initialized auxiliary module "%s"', module.__name__) aux_modules.append(aux) + configure_aux_from_data(aux) log.debug('Trying to start auxiliary module "%s"...', module.__module__) aux.start() except (NotImplementedError, AttributeError) as e: diff --git a/analyzer/windows/lib/common/abstracts.py b/analyzer/windows/lib/common/abstracts.py index 6cd05214b10..595ec6d2860 100644 --- a/analyzer/windows/lib/common/abstracts.py +++ b/analyzer/windows/lib/common/abstracts.py @@ -322,3 +322,29 @@ def add_pid(self, pid): def del_pid(self, pid): pass + + def configure_from_data(self): + """Do private auxiliary module-specific configuration. + + Auxiliary modules can implement this method to perform pre-analysis + configuration based on runtime data contained in "data/auxiliary/". + + This method raises: + - ImportError when any exception occurs during import + - AttributeError if the module configure function is invalid + - ModuleNotFoundError if the module does not support configuration from data + """ + package_module_name = self.__class__.__module__.split(".")[-1] + module_name = f"data.auxiliary.{package_module_name}" + try: + mod = importlib.import_module(module_name) + except ModuleNotFoundError as exc: + raise exc + except Exception as exc: + raise ImportError(f"error importing {module_name}: {exc}") from exc + + spec = inspect.getfullargspec(mod.configure) + if len(spec.args) != 1: + err_msg = f"{module_name}.configure: expected 1 arguments, got {len(spec.args)}" + raise AttributeError(err_msg) + mod.configure(self) diff --git a/docs/book/src/customization/auxiliary.rst b/docs/book/src/customization/auxiliary.rst index e858f4c8ba8..48b98801f7a 100644 --- a/docs/book/src/customization/auxiliary.rst +++ b/docs/book/src/customization/auxiliary.rst @@ -27,3 +27,52 @@ very end of the analysis process, before launching the processing and reporting For example, an 
auxiliary module provided by default in CAPE is called *sniffer.py* and takes care of executing **tcpdump** in order to dump the generated network traffic. + +Auxiliary Module Configuration +============================== + +Auxiliary modules can be "configured" before being started. This allows data to be added +at runtime, whilst also allowing for the configuration to be stored separately from the +CAPE python code. + +Private Auxiliary Module Configuration +-------------------------------------- + +Private auxiliary module configuration is stored outside the auxiliary class, in a module +under the same name as the auxiliary module. This is useful when managing configuration +of auxiliary modules separately if desired, for privacy reasons or otherwise. + +Here is a configuration module example that installs some software prior to the auxiliary +module starting: + + .. code-block:: python + :linenos: + + # data/auxiliary/example.py + import subprocess + import logging + from pathlib import Path + + log = logging.getLogger(__name__) + BIN_PATH = Path.cwd() / "bin" + + + def configure(aux_instance): + # here "example" refers to modules.auxiliary.example.Example + if not aux_instance.enabled: + return + msi = aux_instance.options.get("example_msi") + if not msi: + return + msi_path = BIN_PATH / msi + if not msi_path.exists(): + log.warning("missing MSI %s", msi_path) + return + cmd = ["msiexec", "/i", msi_path, "/quiet"] + try: + log.info("Executing msi package...") + subprocess.check_output(cmd) + log.info("Installation succesful") + except subprocess.CalledProcessError as exc: + log.error("Installation failed: %s", exc) + return From d7ed557496908345ec4a2cedc25a91ff05b0d7b5 Mon Sep 17 00:00:00 2001 From: Josh Feather <142008135+josh-feather@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:26:25 +0100 Subject: [PATCH 12/33] Disable analysis log truncation if buffer is configured to 0 (#2347) --- lib/cuckoo/common/utils.py | 12 ++++++++++++ 
modules/processing/debug.py | 3 ++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/lib/cuckoo/common/utils.py b/lib/cuckoo/common/utils.py index ab5be1d231a..4446d280843 100644 --- a/lib/cuckoo/common/utils.py +++ b/lib/cuckoo/common/utils.py @@ -342,6 +342,18 @@ def convert_to_printable_and_truncate(s: str, buf: int, cache=None): return convert_to_printable(f"{s[:buf]} " if len(s) > buf else s, cache=cache) +def truncate_str(s: str, max_length: int, marker=" "): + """Truncate a string if its length exceeds the configured `max_length`. + + If `max_length` is less than or equal to 0, the string is not modified. + If the string is truncated, `marker` is added to the end.""" + truncate_size = min(max_length, len(s)) + if truncate_size > 0 and truncate_size < len(s): + return f"{s[:truncate_size]}{marker}" + else: + return s + + def convert_filename_char(c): """Escapes filename characters. @param c: dirty char. diff --git a/modules/processing/debug.py b/modules/processing/debug.py index 61ce5c3a1cf..6d861dbf6fa 100644 --- a/modules/processing/debug.py +++ b/modules/processing/debug.py @@ -7,6 +7,7 @@ from lib.cuckoo.common.abstracts import Processing from lib.cuckoo.common.exceptions import CuckooProcessingError from lib.cuckoo.common.path_utils import path_exists +from lib.cuckoo.common.utils import truncate_str from lib.cuckoo.core.database import Database @@ -24,7 +25,7 @@ def run(self): try: buf_size = self.options.get("buffer", 8192) content = codecs.open(self.log_path, "rb", "utf-8").read() - debug["log"] = content[:buf_size] + " " if len(content) > buf_size else content + debug["log"] = truncate_str(content, buf_size) except ValueError as e: raise CuckooProcessingError(f"Error decoding {self.log_path}: {e}") from e except (IOError, OSError) as e: From ef469a9afcbf10de532cbadcb07f488c95fdfd5c Mon Sep 17 00:00:00 2001 From: Josh Feather <142008135+josh-feather@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:26:39 +0100 Subject: [PATCH 13/33] 
Contextual window titles (#2343) * Make window titles more contextual * Add resubmit link to status page when a task is 'failed_analysis' * Remove unnecessary f-string --- data/html/base-report.html | 4 ++++ web/analysis/views.py | 26 +++++++++++++++++--------- web/apiv2/views.py | 2 +- web/compare/views.py | 7 ++++++- web/dashboard/views.py | 6 +++++- web/submission/views.py | 18 ++++++++++++++++-- web/templates/header.html | 4 ++++ web/templates/submission/status.html | 7 ++++++- 8 files changed, 59 insertions(+), 15 deletions(-) diff --git a/data/html/base-report.html b/data/html/base-report.html index b7dddd52bff..60fcb615486 100644 --- a/data/html/base-report.html +++ b/data/html/base-report.html @@ -22,7 +22,11 @@ +{% if title %} +{{ title|add:" · CAPE Sandbox" }} +{% else %} CAPE Sandbox +{% endif %} diff --git a/web/analysis/views.py b/web/analysis/views.py index 1e7b699dd41..58ddd693fd1 100644 --- a/web/analysis/views.py +++ b/web/analysis/views.py @@ -502,6 +502,7 @@ def index(request, page=1): request, "analysis/index.html", { + "title": "Recent Analysis", "files": analyses_files, "static": analyses_static, "urls": analyses_urls, @@ -534,7 +535,12 @@ def pending(request): } ) - return render(request, "analysis/pending.html", {"tasks": pending, "count": len(pending)}) + data = { + "tasks": pending, + "count": len(pending), + "title": "Pending Tasks" + } + return render(request, "analysis/pending.html", data) # @require_safe @@ -1701,6 +1707,7 @@ def report(request, task_id): request, "analysis/report.html", { + "title": "Analysis Report", "analysis": report, # ToDo test "file": report.get("target", {}).get("file", {}), @@ -2183,7 +2190,7 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": searched, "error": "Search term too short, minimum 3 characters required"}, + {"title": "Search", "analyses": None, "term": searched, "error": "Search term too short, minimum 3 characters required"}, ) # 
name:foo or name: foo @@ -2210,7 +2217,7 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": searched, "error": "Not all values are integers"}, + {"title": "Search", "analyses": None, "term": searched, "error": "Not all values are integers"}, ) # Escape forward slash characters @@ -2226,13 +2233,13 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"analyses": None, "term": searched, "error": "Invalid search term: %s" % term}, + {"title": "Search", "analyses": None, "term": searched, "error": "Invalid search term: %s" % term}, ) else: return render( request, "analysis/search.html", - {"analyses": None, "term": None, "error": "Unable to recognize the search syntax"}, + {"title": "Search", "analyses": None, "term": None, "error": "Unable to recognize the search syntax"}, ) analyses = [] @@ -2252,6 +2259,7 @@ def search(request, searched=""): request, "analysis/search.html", { + "title": "Search Results", "analyses": analyses, "config": enabledconf, "term": searched, @@ -2260,7 +2268,7 @@ def search(request, searched=""): "value_only": value_only, }, ) - return render(request, "analysis/search.html", {"analyses": None, "term": None, "error": None}) + return render(request, "analysis/search.html", {"title": "Search", "analyses": None, "term": None, "error": None}) @require_safe @@ -2461,10 +2469,10 @@ def statistics_data(request, days=7): # psycopg2.OperationalError print(e) return render( - request, "error.html", {"error": "Please restart your database. Probably it had an update or it just down"} + request, "error.html", {"title": "Statistics", "error": "Please restart your database. 
Probably it had an update or it just down"} ) - return render(request, "statistics.html", {"statistics": details, "days": days}) - return render(request, "error.html", {"error": "Provide days as number"}) + return render(request, "statistics.html", {"title": "Statistics", "statistics": details, "days": days}) + return render(request, "error.html", {"title": "Statistics", "error": "Provide days as number"}) on_demand_config_mapper = { diff --git a/web/apiv2/views.py b/web/apiv2/views.py index 087c9a7794f..ff1bf410c16 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -170,7 +170,7 @@ def index(request): parsed[key]["rps"] = "None" parsed[key]["rpm"] = "None" - return render(request, "apiv2/index.html", {"config": parsed}) + return render(request, "apiv2/index.html", {"title":"API", "config": parsed}) @csrf_exempt diff --git a/web/compare/views.py b/web/compare/views.py index 94ee2626946..abfb5d1ab98 100644 --- a/web/compare/views.py +++ b/web/compare/views.py @@ -84,7 +84,12 @@ def left(request, left_id): for item in results: records.append(item["_source"]) - return render(request, "compare/left.html", {"left": left, "records": records}) + data = { + "title": "Compare", + "left": left, + "records": records + } + return render(request, "compare/left.html", data) @require_safe diff --git a/web/dashboard/views.py b/web/dashboard/views.py index 76191a8fd7b..ecec4c540b2 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -91,4 +91,8 @@ def index(request): report["estimate_day"] = format_number_with_space(int(24 * hourly)) report["top_detections"] = top_detections() - return render(request, "dashboard/index.html", {"report": report}) + data = { + "title": "Dashboard", + "report": report + } + return render(request, "dashboard/index.html", data) diff --git a/web/submission/views.py b/web/submission/views.py index 44827af8a98..6aeb4cf4332 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -646,6 +646,7 @@ def index(request, 
task_id=None, resubmit_hash=None): tasks_count = 0 if tasks_count > 0: data = { + "title": "Submission", "tasks": details["task_ids"], "tasks_count": tasks_count, "errors": details["errors"], @@ -654,7 +655,12 @@ def index(request, task_id=None, resubmit_hash=None): } return render(request, "submission/complete.html", data) else: - return render(request, "error.html", {"error": "Error adding task(s) to CAPE's database.", "errors": details["errors"]}) + err_data = { + "error": "Error adding task(s) to CAPE's database.", + "errors": details["errors"], + "title": "Submission Failure" + } + return render(request, "error.html", err_data) else: enabledconf = {} enabledconf["vt"] = settings.VTDL_ENABLED @@ -753,6 +759,7 @@ def index(request, task_id=None, resubmit_hash=None): request, "submission/index.html", { + "title": "Submit", "packages": sorted(packages, key=lambda i: i["name"].lower()), "machines": machines, "vpns": vpns_data, @@ -785,7 +792,14 @@ def status(request, task_id): if status == "completed": status = "processing" - response = {"completed": completed, "status": status, "task_id": task_id, "session_data": ""} + response = { + "title": "Task Status", + "completed": completed, + "status": status, + "task_id": task_id, + "session_data": "", + "target": task.sample.sha256 if task.sample.sha256 else task.target, + } if settings.REMOTE_SESSION: machine = db.view_machine_by_label(task.machine) if machine: diff --git a/web/templates/header.html b/web/templates/header.html index 52f4ce26f13..033d8508aec 100644 --- a/web/templates/header.html +++ b/web/templates/header.html @@ -2,7 +2,11 @@ +{% if title %} +{{ title|add:" · CAPE Sandbox" }} +{% else %} CAPE Sandbox +{% endif %} diff --git a/web/templates/submission/status.html b/web/templates/submission/status.html index fbc2933a96e..cc5d42217d0 100644 --- a/web/templates/submission/status.html +++ b/web/templates/submission/status.html @@ -3,10 +3,15 @@ {% if completed %}

Good news! :-)

The analysis is completed, you can view it here.
+{% elif status == "failed_analysis" %} +
+

Status for task {{task_id}} - {{ target }}

+

The analysis failed with status '{{status}}'. Click here to resubmit.

+
{% else %}
-

Hang on...

+

Status for task {{task_id}} - {{ target }}

The analysis is not finished yet, it's still {{status}}. This page will refresh every 30 seconds.

{% if session_data %}

To view the Remote Session - click here.

From b8f54518f9d5089da1ce608588d1050f6a5bbad0 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Fri, 11 Oct 2024 12:28:23 +0200 Subject: [PATCH 14/33] Update action.yml --- .github/actions/python-setup/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/actions/python-setup/action.yml b/.github/actions/python-setup/action.yml index 83d8f6c35f7..4608efbbd9c 100644 --- a/.github/actions/python-setup/action.yml +++ b/.github/actions/python-setup/action.yml @@ -16,7 +16,7 @@ runs: - name: Install poetry shell: bash - run: pip install poetry + run: PIP_BREAK_SYSTEM_PACKAGES=1 pip install poetry - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v5 @@ -27,4 +27,4 @@ runs: - name: Install requirements shell: bash run: | - poetry install --no-interaction --no-root + PIP_BREAK_SYSTEM_PACKAGES=1 poetry install --no-interaction --no-root From 0127eceef8e522668fd283e06baebc362ff38551 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Fri, 11 Oct 2024 10:30:16 +0000 Subject: [PATCH 15/33] style: Automatic code formatting --- analyzer/windows/analyzer.py | 2 +- lib/cuckoo/common/utils.py | 18 +++++++++--------- web/analysis/views.py | 17 ++++++++++------- web/apiv2/views.py | 2 +- web/compare/views.py | 6 +----- web/dashboard/views.py | 5 +---- web/submission/views.py | 2 +- 7 files changed, 24 insertions(+), 28 deletions(-) diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py index c4543a2d419..deb7a583b87 100644 --- a/analyzer/windows/analyzer.py +++ b/analyzer/windows/analyzer.py @@ -510,7 +510,7 @@ def run(self): log.warning('Unable to import the auxiliary module "%s": %s', name, e) def configure_aux_from_data(instance): - # Do auxiliary module configuration stored in 'data/auxiliary/' + # Do auxiliary module configuration stored in 'data/auxiliary/' _class = type(instance) try: log.debug("attempting to configure '%s' from data", _class.__name__) diff --git a/lib/cuckoo/common/utils.py 
b/lib/cuckoo/common/utils.py index 4446d280843..829aa5006a4 100644 --- a/lib/cuckoo/common/utils.py +++ b/lib/cuckoo/common/utils.py @@ -343,15 +343,15 @@ def convert_to_printable_and_truncate(s: str, buf: int, cache=None): def truncate_str(s: str, max_length: int, marker=" "): - """Truncate a string if its length exceeds the configured `max_length`. - - If `max_length` is less than or equal to 0, the string is not modified. - If the string is truncated, `marker` is added to the end.""" - truncate_size = min(max_length, len(s)) - if truncate_size > 0 and truncate_size < len(s): - return f"{s[:truncate_size]}{marker}" - else: - return s + """Truncate a string if its length exceeds the configured `max_length`. + + If `max_length` is less than or equal to 0, the string is not modified. + If the string is truncated, `marker` is added to the end.""" + truncate_size = min(max_length, len(s)) + if truncate_size > 0 and truncate_size < len(s): + return f"{s[:truncate_size]}{marker}" + else: + return s def convert_filename_char(c): diff --git a/web/analysis/views.py b/web/analysis/views.py index 58ddd693fd1..fd3c7158516 100644 --- a/web/analysis/views.py +++ b/web/analysis/views.py @@ -535,11 +535,7 @@ def pending(request): } ) - data = { - "tasks": pending, - "count": len(pending), - "title": "Pending Tasks" - } + data = {"tasks": pending, "count": len(pending), "title": "Pending Tasks"} return render(request, "analysis/pending.html", data) @@ -2190,7 +2186,12 @@ def search(request, searched=""): return render( request, "analysis/search.html", - {"title": "Search", "analyses": None, "term": searched, "error": "Search term too short, minimum 3 characters required"}, + { + "title": "Search", + "analyses": None, + "term": searched, + "error": "Search term too short, minimum 3 characters required", + }, ) # name:foo or name: foo @@ -2469,7 +2470,9 @@ def statistics_data(request, days=7): # psycopg2.OperationalError print(e) return render( - request, "error.html", {"title": 
"Statistics", "error": "Please restart your database. Probably it had an update or it just down"} + request, + "error.html", + {"title": "Statistics", "error": "Please restart your database. Probably it had an update or it just down"}, ) return render(request, "statistics.html", {"title": "Statistics", "statistics": details, "days": days}) return render(request, "error.html", {"title": "Statistics", "error": "Provide days as number"}) diff --git a/web/apiv2/views.py b/web/apiv2/views.py index ff1bf410c16..0fb0babfa49 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -170,7 +170,7 @@ def index(request): parsed[key]["rps"] = "None" parsed[key]["rpm"] = "None" - return render(request, "apiv2/index.html", {"title":"API", "config": parsed}) + return render(request, "apiv2/index.html", {"title": "API", "config": parsed}) @csrf_exempt diff --git a/web/compare/views.py b/web/compare/views.py index abfb5d1ab98..0ab3b65f048 100644 --- a/web/compare/views.py +++ b/web/compare/views.py @@ -84,11 +84,7 @@ def left(request, left_id): for item in results: records.append(item["_source"]) - data = { - "title": "Compare", - "left": left, - "records": records - } + data = {"title": "Compare", "left": left, "records": records} return render(request, "compare/left.html", data) diff --git a/web/dashboard/views.py b/web/dashboard/views.py index ecec4c540b2..c31e2d332df 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -91,8 +91,5 @@ def index(request): report["estimate_day"] = format_number_with_space(int(24 * hourly)) report["top_detections"] = top_detections() - data = { - "title": "Dashboard", - "report": report - } + data = {"title": "Dashboard", "report": report} return render(request, "dashboard/index.html", data) diff --git a/web/submission/views.py b/web/submission/views.py index 6aeb4cf4332..dcf6d42d411 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -658,7 +658,7 @@ def index(request, task_id=None, resubmit_hash=None): err_data 
= { "error": "Error adding task(s) to CAPE's database.", "errors": details["errors"], - "title": "Submission Failure" + "title": "Submission Failure", } return render(request, "error.html", err_data) else: From eb1570dcd2d7519d6f40f7e3c166bd4fe566cdb8 Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Fri, 11 Oct 2024 16:33:10 +0100 Subject: [PATCH 16/33] Fixes #2339 --- analyzer/windows/data/yara/Formbook.yar | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/analyzer/windows/data/yara/Formbook.yar b/analyzer/windows/data/yara/Formbook.yar index 732310fc320..a1d3d50adf6 100644 --- a/analyzer/windows/data/yara/Formbook.yar +++ b/analyzer/windows/data/yara/Formbook.yar @@ -18,13 +18,14 @@ rule FormhookB meta: author = "kevoreilly" description = "Formbook Anti-hook Bypass" - cape_options = "clear,bp0=$decode,action0=scan,hc0=1,bp1=$remap_ntdll+6,action1=setdst:ntdll,count=0,force-sleepskip=1" + cape_options = "clear,bp0=$entry,action0=scan,hc0=1,bp1=$new_remap+6,action1=setdst:ntdll,count=0,force-sleepskip=1" packed = "08c5f44d57f5ccc285596b3d9921bf7fbbbf7f9a827bb3285a800e4c9faf6731" strings: - $decode = {55 8B EC 83 EC 24 53 56 57 [480-520] 8B E5 5D C3} - $remap_ntdll = {90 90 90 90 90 90 8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} + $remap_ntdll = {33 96 [2] 00 00 8D 86 [2] 00 00 68 F0 00 00 00 50 89 [2-5] E8 [4-10] 6A 00 6A 0? 8D 4D ?? 
51 6A} + $entry = {55 8B EC 83 EC ?4 53 56 57 [480-520] 8B E5 5D C3} + $new_remap = {90 90 90 90 90 90 8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} condition: - any of them + 2 of them } rule FormconfA From d43245f28a73020d39b67bbe6349ccb0f2e2b8ff Mon Sep 17 00:00:00 2001 From: doomedraven Date: Sun, 13 Oct 2024 21:28:46 +0200 Subject: [PATCH 17/33] Update disguise.py --- analyzer/windows/modules/auxiliary/disguise.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/analyzer/windows/modules/auxiliary/disguise.py b/analyzer/windows/modules/auxiliary/disguise.py index 062dd22bd5c..aaa19e8f454 100644 --- a/analyzer/windows/modules/auxiliary/disguise.py +++ b/analyzer/windows/modules/auxiliary/disguise.py @@ -244,9 +244,9 @@ def randomizeUUID(self): SetValueEx(key, "MachineGuid", 0, REG_SZ, createdUUID) def add_persistent_route(self): - self.run_as_system(["C:\\Windows\\System32\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY]) + self.run_as_system(["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY]) self.run_as_system( - ["C:\\Windows\\System32\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] + ["C:\\Windows\\System32\\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] ) def start(self): From 19bf9ec0e45068d0092e5765a008218e2baa4219 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Sun, 13 Oct 2024 19:29:40 +0000 Subject: [PATCH 18/33] style: Automatic code formatting --- analyzer/windows/modules/auxiliary/disguise.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/analyzer/windows/modules/auxiliary/disguise.py b/analyzer/windows/modules/auxiliary/disguise.py index aaa19e8f454..9f8745dbe4c 100644 --- a/analyzer/windows/modules/auxiliary/disguise.py +++ b/analyzer/windows/modules/auxiliary/disguise.py @@ -244,7 +244,9 @@ def randomizeUUID(self): SetValueEx(key, 
"MachineGuid", 0, REG_SZ, createdUUID) def add_persistent_route(self): - self.run_as_system(["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY]) + self.run_as_system( + ["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] + ) self.run_as_system( ["C:\\Windows\\System32\\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] ) From 6db9ea74400c04ce8704fb9feacaa1d6033d500f Mon Sep 17 00:00:00 2001 From: doomedraven Date: Mon, 14 Oct 2024 15:37:23 +0200 Subject: [PATCH 19/33] expose demux error msgs (#2349) --- conf/default/web.conf.default | 4 +- lib/cuckoo/common/demux.py | 77 ++++++++++++++----- .../common/integrations/file_extra_info.py | 3 + lib/cuckoo/common/web_utils.py | 2 +- lib/cuckoo/core/database.py | 19 ++++- modules/processing/CAPE.py | 2 +- tests/test_demux.py | 5 +- tests/test_objects.py | 4 +- utils/submit.py | 1 + web/apiv2/views.py | 27 ++++++- web/submission/views.py | 25 +++++- 11 files changed, 134 insertions(+), 35 deletions(-) diff --git a/conf/default/web.conf.default b/conf/default/web.conf.default index 476e9164ee1..5d9538a18e4 100644 --- a/conf/default/web.conf.default +++ b/conf/default/web.conf.default @@ -78,8 +78,10 @@ enabled = no #enable linux fields on webgui [linux] -#For advanced users only, can be buggy, linux analysis is work in progress for fun +# For advanced users only, can be buggy, linux analysis is work in progress for fun enabled = no +# independent of enabled or not. 
To not show linux options, but process statically those files +static_only = no [malscore] enabled = no diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py index d1287553b8e..2960d19fe30 100644 --- a/lib/cuckoo/common/demux.py +++ b/lib/cuckoo/common/demux.py @@ -32,7 +32,7 @@ cuckoo_conf = Config() web_cfg = Config("web") tmp_path = cuckoo_conf.cuckoo.get("tmppath", "/tmp") -linux_enabled = web_cfg.linux.get("enabled", False) +linux_enabled = web_cfg.linux.get("enabled", False) or web_cfg.linux.get("static_only", False) demux_extensions_list = { b".accdr", @@ -162,7 +162,8 @@ def is_valid_package(package: str) -> bool: return any(ptype in package for ptype in VALID_PACKAGES) -def _sf_children(child: sfFile) -> bytes: +# ToDo fix return type +def _sf_children(child: sfFile): # -> bytes: path_to_extract = "" _, ext = os.path.splitext(child.filename) ext = ext.lower() @@ -184,15 +185,17 @@ def _sf_children(child: sfFile) -> bytes: _ = path_write_file(path_to_extract, child.contents) except Exception as e: log.error(e, exc_info=True) - return path_to_extract.encode() + return (path_to_extract.encode(), child.platform, child.get_type(), child.get_size()) -def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> List[bytes]: +# ToDo fix typing need to add str as error msg +def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True): # -> List[bytes]: retlist = [] # do not extract from .bin (downloaded from us) if os.path.splitext(filename)[1] == b".bin": - return retlist + return retlist, "" + # ToDo need to introduce error msgs here try: password = options2passwd(options) or "infected" try: @@ -201,9 +204,13 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> unpacked = unpack(filename, check_shellcode=check_shellcode) if unpacked.package in whitelist_extensions: - return [filename] + file = File(filename) + magic_type = file.get_type() + platform = file.get_platform() + 
file_size = file.get_size() + return [filename, platform, magic_type, file_size], "" if unpacked.package in blacklist_extensions: - return [filename] + return [], "blacklisted package" for sf_child in unpacked.children: if sf_child.to_dict().get("children"): retlist.extend(_sf_children(ch) for ch in sf_child.children) @@ -214,7 +221,7 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True) -> retlist.append(_sf_children(sf_child)) except Exception as e: log.error(e, exc_info=True) - return list(filter(None, retlist)) + return list(filter(None, retlist)), "" def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = True, platform: str = ""): # -> tuple[bytes, str]: @@ -227,21 +234,29 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = if isinstance(filename, str) and use_sflock: filename = filename.encode() + error_list = [] retlist = [] # if a package was specified, trim if allowed and required if package: - if package in ("msix",): retlist.append((filename, "windows")) else: if File(filename).get_size() <= web_cfg.general.max_sample_size or ( web_cfg.general.allow_ignore_size and "ignore_size_check" in options ): - retlist.append((filename, platform)) + retlist.append((filename, platform, "")) else: if web_cfg.general.enable_trim and trim_file(filename): retlist.append((trimmed_path(filename), platform)) - return retlist + else: + error_list.append( + { + os.path.basename( + filename + ): "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" + } + ) + return retlist, error_list # handle quarantine files tmp_path = unquarantine(filename) @@ -259,9 +274,16 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = if use_sflock: if HAS_SFLOCK: retlist = demux_office(filename, password, platform) - return retlist + return retlist, error_list else: log.error("Detected password protected office file, but no sflock is installed: 
poetry install") + error_list.append( + { + os.path.basename( + filename + ): "Detected password protected office file, but no sflock is installed or correct password provided" + } + ) # don't try to extract from Java archives or executables if ( @@ -279,7 +301,14 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = else: if web_cfg.general.enable_trim and trim_file(filename): retlist.append((trimmed_path(filename), platform)) - return retlist + else: + error_list.append( + { + os.path.basename(filename), + "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + } + ) + return retlist, error_list new_retlist = [] @@ -288,26 +317,34 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = check_shellcode = False # all in one unarchiver - retlist = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else [] + retlist, error_msg = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else [] # if it isn't a ZIP or an email, or we aren't able to obtain anything interesting from either, then just submit the # original file if not retlist: + if error_msg: + error_list.append({os.path.basename(filename), error_msg}) new_retlist.append((filename, platform)) else: - for filename in retlist: + for filename, platform, magic_type, file_size in retlist: # verify not Windows binaries here: - file = File(filename) - magic_type = file.get_type() - platform = file.get_platform() if platform == "linux" and not linux_enabled and "Python" not in magic_type: + error_list.append({os.path.basename(filename): "Linux processing is disabled"}) continue - if file.get_size() > web_cfg.general.max_sample_size and not ( + if file_size > web_cfg.general.max_sample_size and not ( web_cfg.general.allow_ignore_size and "ignore_size_check" in options ): if web_cfg.general.enable_trim: # maybe identify here if trim_file(filename): filename = 
trimmed_path(filename) + else: + error_list.append( + { + os.path.basename(filename), + "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + } + ) new_retlist.append((filename, platform)) - return new_retlist[:10] + + return new_retlist[:10], error_list diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py index 25555f87359..f582aac1bd0 100644 --- a/lib/cuckoo/common/integrations/file_extra_info.py +++ b/lib/cuckoo/common/integrations/file_extra_info.py @@ -820,6 +820,9 @@ def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: ): return + if all([pattern in file_data for pattern in (b"AndroidManifest.xml", b"classes.dex")]): + return + password = "" # Only for real 7zip, breaks others password = options.get("password", "infected") diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index 4b0de852f48..f47fa29f80a 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -766,7 +766,7 @@ def download_file(**kwargs): if not onesuccess: return "error", {"error": f"Provided hash not found on {kwargs['service']}"} - return "ok", kwargs["task_ids"] + return "ok", kwargs["task_ids"], extra_details.get("erros", []) def save_script_to_storage(task_ids, kwargs): diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py index eb643b56d75..a549b1b870f 100644 --- a/lib/cuckoo/core/database.py +++ b/lib/cuckoo/core/database.py @@ -119,6 +119,7 @@ distconf = Config("distributed") web_conf = Config("web") LINUX_ENABLED = web_conf.linux.enabled +LINUX_STATIC = web_conf.linux.static_only DYNAMIC_ARCH_DETERMINATION = web_conf.general.dynamic_arch_determination if repconf.mongodb.enabled: @@ -1538,7 +1539,7 @@ def demux_sample_and_add_to_db( package, _ = self._identify_aux_func(file_path, package, check_shellcode=check_shellcode) # extract files from the (potential) archive - extracted_files = 
demux_sample(file_path, package, options, platform=platform) + extracted_files, demux_error_msgs = demux_sample(file_path, package, options, platform=platform) # check if len is 1 and the same file, if diff register file, and set parent if extracted_files and (file_path, platform) not in extracted_files: sample_parent_id = self.register_sample(File(file_path), source_url=source_url) @@ -1547,6 +1548,18 @@ def demux_sample_and_add_to_db( # create tasks for each file in the archive for file, platform in extracted_files: + # ToDo we lose package here and send APKs to windows + if platform in ("linux", "darwin") and LINUX_STATIC: + task_ids += self.add_static( + file_path=file_path, + priority=priority, + tlp=tlp, + user_id=user_id, + username=username, + options=options, + package=package, + ) + continue if static: # On huge loads this just become a bottleneck config = False @@ -1621,6 +1634,8 @@ def demux_sample_and_add_to_db( if config and isinstance(config, dict): details = {"config": config.get("cape_config", {})} + if demux_error_msgs: + details["errors"] = demux_error_msgs # this is aim to return custom data, think of this as kwargs return task_ids, details @@ -1694,7 +1709,7 @@ def add_static( user_id=0, username=False, ): - extracted_files = demux_sample(file_path, package, options) + extracted_files, demux_error_msgs = demux_sample(file_path, package, options) sample_parent_id = None # check if len is 1 and the same file, if diff register file, and set parent if not isinstance(file_path, bytes): diff --git a/modules/processing/CAPE.py b/modules/processing/CAPE.py index f257444236b..a9bc97cd57f 100644 --- a/modules/processing/CAPE.py +++ b/modules/processing/CAPE.py @@ -113,7 +113,7 @@ def _cape_type_string(self, type_strings, file_info, append_file): elif type_strings[0] == "MS-DOS": file_info["cape_type"] = "DOS MZ image: executable" else: - file_info["cape_type"] = file_info["cape_type"] or "PE image" + file_info["cape_type"] = file_info["cape_type"] or 
"unknown" return append_file def _metadata_processing(self, metadata, file_info, append_file): diff --git a/tests/test_demux.py b/tests/test_demux.py index 605974ee71e..e3f4aa3e946 100644 --- a/tests/test_demux.py +++ b/tests/test_demux.py @@ -85,8 +85,9 @@ def test_demux_sample_pe32(self, grab_sample): def test_demux_package(self): empty_file = tempfile.NamedTemporaryFile() - assert demux.demux_sample(filename=empty_file.name, package="Emotet", options="foo", use_sflock=False) == [ - (empty_file.name, "") + demuxed, _ = demux.demux_sample(filename=empty_file.name, package="Emotet", options="foo", use_sflock=False) + demuxed == [ + (empty_file.name, "", "") ] empty_file.close() diff --git a/tests/test_objects.py b/tests/test_objects.py index ed0966aab7e..45af7ea7b06 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -223,7 +223,9 @@ def test_get_type(self, test_files): [ ("temp_pe32", "PE32 executable (GUI) Intel 80386, for MS Windows", True), # emulated magic type ("temp_pe64", "PE32+ executable (GUI) x86-64, for MS Windows", True), # emulated magic type - ("temp_pe_aarch64", "MS-DOS executable PE32 executable Aarch64, for MS Windows", True), + # Broken we remove "MS-DOS executable" + # ("temp_pe_aarch64", "MS-DOS executable PE32 executable Aarch64, for MS Windows", True), + ("temp_pe_aarch64", "PE32 executable Aarch64, for MS Windows", True), ("temp_elf32", "ELF 32-bit LSB", False), ("temp_elf64", "ELF 64-bit LSB", False), ("temp_macho_arm64", "Mach-O 64-bit arm64 executable", False), diff --git a/utils/submit.py b/utils/submit.py index c66378f8ce3..8785b58f5c8 100644 --- a/utils/submit.py +++ b/utils/submit.py @@ -345,6 +345,7 @@ def main(): try: tmp_path = store_temp_file(open(file_path, "rb").read(), sanitize_filename(os.path.basename(file_path))) with db.session.begin(): + # ToDo expose extra_details["errors"] task_ids, extra_details = db.demux_sample_and_add_to_db( file_path=tmp_path, package=args.package, diff --git a/web/apiv2/views.py 
b/web/apiv2/views.py index 0fb0babfa49..293c968d090 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -187,7 +187,7 @@ def tasks_create_static(request): options = request.data.get("options", "") priority = force_int(request.data.get("priority")) - resp["error"] = False + resp["error"] = [] files = request.FILES.getlist("file") extra_details = {} task_ids = [] @@ -203,6 +203,8 @@ def tasks_create_static(request): user_id=request.user.id or 0, ) task_ids.extend(task_id) + if extra_details.get("erros"): + resp["errors"].extend(extra_details["errors"]) except CuckooDemuxError as e: resp = {"error": True, "error_value": e} return Response(resp) @@ -226,7 +228,6 @@ def tasks_create_static(request): resp["url"].append("{0}/submit/status/{1}".format(apiconf.api.get("url"), tid)) else: resp = {"error": True, "error_value": "Error adding task to database"} - return Response(resp) @@ -341,12 +342,22 @@ def tasks_create_file(request): if tmp_path: details["path"] = tmp_path details["content"] = content - status, task_ids_tmp = download_file(**details) + demux_error_msgs = [] + + result = download_file(**details) + if len(result) == 2: + status, task_ids_tmp = result + elif len(result) == 3: + status, task_ids_tmp, demux_error_msgs = result + if status == "error": details["errors"].append({os.path.basename(tmp_path).decode(): task_ids_tmp}) else: details["task_ids"] = task_ids_tmp + if demux_error_msgs: + details["errors"].extend(demux_error_msgs) + if details["task_ids"]: tasks_count = len(details["task_ids"]) else: @@ -565,12 +576,20 @@ def tasks_create_dlnexec(request): "user_id": request.user.id or 0, } - status, task_ids_tmp = download_file(**details) + result = download_file(**details) + if len(result) == 2: + status, task_ids_tmp = result + elif len(result) == 3: + status, task_ids_tmp, demux_error_msgs = result + if status == "error": details["errors"].append({os.path.basename(path).decode(): task_ids_tmp}) else: details["task_ids"] = task_ids_tmp + if 
demux_error_msgs: + details["errors"].extend(demux_error_msgs) + if details["task_ids"]: tasks_count = len(details["task_ids"]) else: diff --git a/web/submission/views.py b/web/submission/views.py index dcf6d42d411..0efc0db2ba0 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -508,7 +508,13 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - status, task_ids_tmp = download_file(**details) + result = download_file(**details) + if len(result) == 2: + status, task_ids_tmp = result + elif len(result) == 3: + status, task_ids_tmp, demux_error_msg = result + if demux_error_msg: + details["errors"].extend(demux_error_msg) if status == "error": details["errors"].append({os.path.basename(filename): task_ids_tmp}) else: @@ -537,7 +543,13 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - status, task_ids_tmp = download_file(**details) + result = download_file(**details) + if len(result) == 2: + status, task_ids_tmp = result + elif len(result) == 3: + status, task_ids_tmp, demux_error_msg = result + if demux_error_msg: + details["errors"].extend(demux_error_msg) if status == "error": details["errors"].append({os.path.basename(path): task_ids_tmp}) else: @@ -619,7 +631,14 @@ def index(request, task_id=None, resubmit_hash=None): details["content"] = content details["service"] = "DLnExec" details["source_url"] = samples - status, task_ids_tmp = download_file(**details) + result = download_file(**details) + if len(result) == 2: + status, task_ids_tmp = result + elif len(result) == 3: + status, task_ids_tmp, demux_error_msg = result + if demux_error_msg: + details["errors"].extend(demux_error_msg) + if status == "error": details["errors"].append({os.path.basename(path): task_ids_tmp}) else: From 9f660a18595c3ce0fddf69e3b285a8eb615e15a5 Mon Sep 17 00:00:00 2001 From: GitHub Actions Date: Mon, 14 Oct 2024 13:38:22 +0000 Subject: [PATCH 20/33] 
style: Automatic code formatting --- tests/test_demux.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/tests/test_demux.py b/tests/test_demux.py index e3f4aa3e946..e24a9664c41 100644 --- a/tests/test_demux.py +++ b/tests/test_demux.py @@ -86,9 +86,7 @@ def test_demux_package(self): empty_file = tempfile.NamedTemporaryFile() demuxed, _ = demux.demux_sample(filename=empty_file.name, package="Emotet", options="foo", use_sflock=False) - demuxed == [ - (empty_file.name, "", "") - ] + demuxed == [(empty_file.name, "", "")] empty_file.close() def test_options2passwd(self): From cf7e325b3961fffa9a0c946345d203bc10d2cfa5 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Mon, 14 Oct 2024 16:24:50 +0200 Subject: [PATCH 21/33] demux --- lib/cuckoo/common/demux.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py index 2960d19fe30..ddfb7954a1e 100644 --- a/lib/cuckoo/common/demux.py +++ b/lib/cuckoo/common/demux.py @@ -244,7 +244,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = if File(filename).get_size() <= web_cfg.general.max_sample_size or ( web_cfg.general.allow_ignore_size and "ignore_size_check" in options ): - retlist.append((filename, platform, "")) + retlist.append((filename, platform)) else: if web_cfg.general.enable_trim and trim_file(filename): retlist.append((trimmed_path(filename), platform)) From 432ca07268290ce8b7fcff43eba02c5d0da66b26 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Tue, 15 Oct 2024 08:10:29 +0200 Subject: [PATCH 22/33] dashboard fix --- web/dashboard/views.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/web/dashboard/views.py b/web/dashboard/views.py index c31e2d332df..5a7a09787e1 100644 --- a/web/dashboard/views.py +++ b/web/dashboard/views.py @@ -76,6 +76,8 @@ def index(request): tasks = db.count_tasks(status=TASK_COMPLETED) tasks += db.count_tasks(status=TASK_REPORTED) + data = 
{"title": "Dashboard", "report": {}} + if tasks: # Get the time when the first task started and last one ended. started, completed = db.minmax_tasks() @@ -91,5 +93,5 @@ def index(request): report["estimate_day"] = format_number_with_space(int(24 * hourly)) report["top_detections"] = top_detections() - data = {"title": "Dashboard", "report": report} - return render(request, "dashboard/index.html", data) + data["report"] = report + return render(request, "dashboard/index.html", data) From 9ec12d3cd1c313b28433e5902ba22bf757cb8f35 Mon Sep 17 00:00:00 2001 From: Tommy Beadle Date: Tue, 15 Oct 2024 12:48:12 -0400 Subject: [PATCH 23/33] Fix test where the `file` utility changes its output based on the installed version. (#2352) Co-authored-by: Tommy Beadle --- tests/test_objects.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tests/test_objects.py b/tests/test_objects.py index 45af7ea7b06..d6859890375 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -223,9 +223,14 @@ def test_get_type(self, test_files): [ ("temp_pe32", "PE32 executable (GUI) Intel 80386, for MS Windows", True), # emulated magic type ("temp_pe64", "PE32+ executable (GUI) x86-64, for MS Windows", True), # emulated magic type - # Broken we remove "MS-DOS executable" - # ("temp_pe_aarch64", "MS-DOS executable PE32 executable Aarch64, for MS Windows", True), - ("temp_pe_aarch64", "PE32 executable Aarch64, for MS Windows", True), + ( + "temp_pe_aarch64", + ( + "PE32 executable Aarch64, for MS Windows", + "MS-DOS executable PE32 executable Aarch64, for MS Windows", + ), + True, + ), ("temp_elf32", "ELF 32-bit LSB", False), ("temp_elf64", "ELF 64-bit LSB", False), ("temp_macho_arm64", "Mach-O 64-bit arm64 executable", False), @@ -234,7 +239,9 @@ def test_get_type(self, test_files): def test_get_type_pe(self, file_fixture, expected, is_pe, request): path = request.getfixturevalue(file_fixture) file = File(path) - assert file.get_type() == expected + if 
isinstance(expected, str): + expected = (expected,) + assert file.get_type() in expected assert bool(file.pe) == is_pe def test_get_yara(self, hello_file, yara_compiled): From 0b7eee8f6ccac5cdc2103eda999298a01d8a52b0 Mon Sep 17 00:00:00 2001 From: Tommy Beadle Date: Tue, 15 Oct 2024 13:23:24 -0400 Subject: [PATCH 24/33] Fix typo getting demux errors. (#2354) Co-authored-by: Tommy Beadle --- lib/cuckoo/common/web_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index f47fa29f80a..60b4eff6c70 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -766,7 +766,7 @@ def download_file(**kwargs): if not onesuccess: return "error", {"error": f"Provided hash not found on {kwargs['service']}"} - return "ok", kwargs["task_ids"], extra_details.get("erros", []) + return "ok", kwargs["task_ids"], extra_details.get("errors", []) def save_script_to_storage(task_ids, kwargs): From 6b66d4791272ca7d2272ca096327c2ad802fdf8b Mon Sep 17 00:00:00 2001 From: Tommy Beadle Date: Wed, 16 Oct 2024 01:58:27 -0400 Subject: [PATCH 25/33] Fix bug when download_file returns success. 
(#2356) --- lib/cuckoo/common/web_utils.py | 14 +++++++---- web/apiv2/views.py | 35 ++++++++------------------- web/submission/views.py | 44 ++++++++++++---------------------- 3 files changed, 34 insertions(+), 59 deletions(-) diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index 60b4eff6c70..eb10064548c 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -766,7 +766,7 @@ def download_file(**kwargs): if not onesuccess: return "error", {"error": f"Provided hash not found on {kwargs['service']}"} - return "ok", kwargs["task_ids"], extra_details.get("errors", []) + return "ok", {"task_ids": kwargs["task_ids"], "errors": extra_details.get("errors", [])} def save_script_to_storage(task_ids, kwargs): @@ -1324,15 +1324,19 @@ def thirdpart_aux(samples, prefix, opt_filename, details, settings): if content: details["content"] = content + errors = {} if not details.get("content", False): - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) else: details["service"] = "Local" - status, task_ids_tmp = download_file(**details) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({h: task_ids_tmp}) + details["errors"].append({h: tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids", []) + errors = tasks_details.get("errors") + if errors: + details["errors"].extend(errors) return details diff --git a/web/apiv2/views.py b/web/apiv2/views.py index 293c968d090..b744aafdb0a 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -287,7 +287,6 @@ def tasks_create_file(request): "user_id": request.user.id or 0, } - task_ids_tmp = [] task_machines = [] vm_list = [vm.label for vm in db.list_machines()] @@ -342,21 +341,13 @@ def tasks_create_file(request): if tmp_path: details["path"] = tmp_path details["content"] = content - demux_error_msgs = [] - - result = 
download_file(**details) - if len(result) == 2: - status, task_ids_tmp = result - elif len(result) == 3: - status, task_ids_tmp, demux_error_msgs = result - + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(tmp_path).decode(): task_ids_tmp}) + details["errors"].append({os.path.basename(tmp_path).decode(): tasks_details}) else: - details["task_ids"] = task_ids_tmp - - if demux_error_msgs: - details["errors"].extend(demux_error_msgs) + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if details["task_ids"]: tasks_count = len(details["task_ids"]) @@ -576,19 +567,13 @@ def tasks_create_dlnexec(request): "user_id": request.user.id or 0, } - result = download_file(**details) - if len(result) == 2: - status, task_ids_tmp = result - elif len(result) == 3: - status, task_ids_tmp, demux_error_msgs = result - + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(path).decode(): task_ids_tmp}) + details["errors"].append({os.path.basename(path).decode(): tasks_details}) else: - details["task_ids"] = task_ids_tmp - - if demux_error_msgs: - details["errors"].extend(demux_error_msgs) + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if details["task_ids"]: tasks_count = len(details["task_ids"]) diff --git a/web/submission/views.py b/web/submission/views.py index 0efc0db2ba0..39c25136c3a 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -366,7 +366,6 @@ def index(request, task_id=None, resubmit_hash=None): opt_apikey = opts.get("apikey", False) status = "ok" - task_ids_tmp = [] existent_tasks = {} details = { "errors": [], @@ -508,17 +507,13 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - result 
= download_file(**details) - if len(result) == 2: - status, task_ids_tmp = result - elif len(result) == 3: - status, task_ids_tmp, demux_error_msg = result - if demux_error_msg: - details["errors"].extend(demux_error_msg) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(filename): task_ids_tmp}) + details["errors"].append({os.path.basename(filename): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if web_conf.web_reporting.get("enabled", False) and web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", hash, search_limit=5) if records: @@ -543,23 +538,19 @@ def index(request, task_id=None, resubmit_hash=None): details["path"] = path details["content"] = content - result = download_file(**details) - if len(result) == 2: - status, task_ids_tmp = result - elif len(result) == 3: - status, task_ids_tmp, demux_error_msg = result - if demux_error_msg: - details["errors"].extend(demux_error_msg) + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(path): task_ids_tmp}) + details["errors"].append({os.path.basename(path): tasks_details}) else: + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) if web_conf.general.get("existent_tasks", False): records = perform_search("target_sha256", sha256, search_limit=5) if records: for record in records: if record.get("target").get("file", {}).get("sha256"): existent_tasks.setdefault(record["target"]["file"]["sha256"], []).append(record) - details["task_ids"] = task_ids_tmp elif task_category == "static": for content, path, sha256 in list_of_tasks: @@ -631,18 +622,13 @@ def index(request, task_id=None, resubmit_hash=None): 
details["content"] = content details["service"] = "DLnExec" details["source_url"] = samples - result = download_file(**details) - if len(result) == 2: - status, task_ids_tmp = result - elif len(result) == 3: - status, task_ids_tmp, demux_error_msg = result - if demux_error_msg: - details["errors"].extend(demux_error_msg) - + status, tasks_details = download_file(**details) if status == "error": - details["errors"].append({os.path.basename(path): task_ids_tmp}) + details["errors"].append({os.path.basename(path): tasks_details}) else: - details["task_ids"] = task_ids_tmp + details["task_ids"] = tasks_details.get("task_ids") + if tasks_details.get("errors"): + details["errors"].extend(tasks_details["errors"]) elif task_category == "vtdl": if not settings.VTDL_KEY: From 4c915fd35b0e936c42e2d315e5f2b16e188860e4 Mon Sep 17 00:00:00 2001 From: Tommy Beadle Date: Wed, 16 Oct 2024 08:30:16 -0400 Subject: [PATCH 26/33] Fix return types in demux_sflock/demux_sample. (#2351) Also fix handling of one case where sample size was smaller than the limit but was still being reported as an error saying the sample is too big. 
Co-authored-by: Tommy Beadle --- lib/cuckoo/common/demux.py | 35 +++++++++++++++++------------------ 1 file changed, 17 insertions(+), 18 deletions(-) diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py index ddfb7954a1e..fa42507f698 100644 --- a/lib/cuckoo/common/demux.py +++ b/lib/cuckoo/common/demux.py @@ -208,7 +208,7 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True): magic_type = file.get_type() platform = file.get_platform() file_size = file.get_size() - return [filename, platform, magic_type, file_size], "" + return [[filename, platform, magic_type, file_size]], "" if unpacked.package in blacklist_extensions: return [], "blacklisted package" for sf_child in unpacked.children: @@ -253,7 +253,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = { os.path.basename( filename - ): "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" + ): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" } ) return retlist, error_list @@ -305,7 +305,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = error_list.append( { os.path.basename(filename), - "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", } ) return retlist, error_list @@ -317,7 +317,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = check_shellcode = False # all in one unarchiver - retlist, error_msg = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else [] + retlist, error_msg = demux_sflock(filename, options, check_shellcode) if HAS_SFLOCK and use_sflock else ([], "") # if it isn't a ZIP or an email, or we aren't able to obtain anything interesting from either, then just submit the # original file if not retlist: @@ -331,20 +331,19 @@ 
def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = error_list.append({os.path.basename(filename): "Linux processing is disabled"}) continue - if file_size > web_cfg.general.max_sample_size and not ( - web_cfg.general.allow_ignore_size and "ignore_size_check" in options - ): - if web_cfg.general.enable_trim: - # maybe identify here - if trim_file(filename): - filename = trimmed_path(filename) - else: - error_list.append( - { - os.path.basename(filename), - "File too bit, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", - } - ) + if file_size > web_cfg.general.max_sample_size: + if web_cfg.general.allow_ignore_size and "ignore_size_check" in options: + if web_cfg.general.enable_trim: + # maybe identify here + if trim_file(filename): + filename = trimmed_path(filename) + else: + error_list.append( + { + os.path.basename(filename), + "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + } + ) new_retlist.append((filename, platform)) return new_retlist[:10], error_list From 45b0099934b58c259563323f6433d8f3c80a5ab0 Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Wed, 16 Oct 2024 16:23:32 +0100 Subject: [PATCH 27/33] Remove obsolete 'Unpacker' packages --- .../modules/packages/Shellcode-Unpacker.py | 48 ----- analyzer/windows/modules/packages/Unpacker.py | 42 ---- .../windows/modules/packages/Unpacker_dll.py | 64 ------ .../windows/modules/packages/Unpacker_js.py | 47 ----- .../windows/modules/packages/Unpacker_ps1.py | 37 ---- .../modules/packages/Unpacker_regsvr.py | 46 ----- .../windows/modules/packages/Unpacker_zip.py | 182 ------------------ .../packages/{Shellcode.py => shellcode.py} | 0 .../{Shellcode_x64.py => shellcode_x64.py} | 0 9 files changed, 466 deletions(-) delete mode 100644 analyzer/windows/modules/packages/Shellcode-Unpacker.py delete mode 100644 analyzer/windows/modules/packages/Unpacker.py delete mode 100644 
analyzer/windows/modules/packages/Unpacker_dll.py delete mode 100644 analyzer/windows/modules/packages/Unpacker_js.py delete mode 100644 analyzer/windows/modules/packages/Unpacker_ps1.py delete mode 100644 analyzer/windows/modules/packages/Unpacker_regsvr.py delete mode 100644 analyzer/windows/modules/packages/Unpacker_zip.py rename analyzer/windows/modules/packages/{Shellcode.py => shellcode.py} (100%) rename analyzer/windows/modules/packages/{Shellcode_x64.py => shellcode_x64.py} (100%) diff --git a/analyzer/windows/modules/packages/Shellcode-Unpacker.py b/analyzer/windows/modules/packages/Shellcode-Unpacker.py deleted file mode 100644 index 28e4cc377ad..00000000000 --- a/analyzer/windows/modules/packages/Shellcode-Unpacker.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import logging -import os -import shutil - -from lib.common.abstracts import Package -from lib.common.constants import OPT_PROCDUMP, OPT_UNPACKER - -log = logging.getLogger(__name__) - -_OPT_DUMP_CALLER_REGIONS = "dump-caller-regions" - - -class Shellcode_Unpacker(Package): - """32-bit Shellcode Unpacker package.""" - - summary = "Executes 32-bit Shellcode using loader.exe with the unpacker option." - description = f"""Uses 'bin\\loader.exe shellcode ' to execute 32-bit Shellcode. - Turns off '{OPT_PROCDUMP}' and '{_OPT_DUMP_CALLER_REGIONS}'. 
- Turns on '{OPT_UNPACKER}'.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[_OPT_DUMP_CALLER_REGIONS] = "0" - - def start(self, path): - loaderpath = "bin\\loader.exe" - arguments = f"shellcode {path}" - - # we need to move out of the analyzer directory - # due to a check in monitor dll - basepath = os.path.dirname(path) - newpath = os.path.join(basepath, os.path.basename(loaderpath)) - shutil.copy(loaderpath, newpath) - - log.info("[-] newpath : %s", newpath) - log.info("[-] arguments : %s", arguments) - - return self.execute(newpath, arguments, newpath) diff --git a/analyzer/windows/modules/packages/Unpacker.py b/analyzer/windows/modules/packages/Unpacker.py deleted file mode 100644 index 35d6f47264f..00000000000 --- a/analyzer/windows/modules/packages/Unpacker.py +++ /dev/null @@ -1,42 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_ARGUMENTS, OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker(Package): - """CAPE Unpacker analysis package.""" - - # PATHS = [ - # ("SystemRoot", "system32"), - # ] - summary = "Executes a .exe file with the unpacker option." - description = f"""Executes the sample passing arguments if any. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. 
- The .exe filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS,) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.pids = [] - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - arguments = self.options.get(OPT_ARGUMENTS) - - # If the file doesn't have an extension, add .exe - # See CWinApp::SetCurrentHandles(), it will throw - # an exception that will crash the app if it does - # not find an extension on the main exe's filename - path = check_file_extension(path, ".exe") - return self.execute(path, arguments, path) diff --git a/analyzer/windows/modules/packages/Unpacker_dll.py b/analyzer/windows/modules/packages/Unpacker_dll.py deleted file mode 100644 index 09e5a5ed16d..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_dll.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import os -import shutil - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import ( - DLL_OPTION_TEXT, - DLL_OPTIONS, - OPT_ARGUMENTS, - OPT_DLLLOADER, - OPT_FUNCTION, - OPT_INJECTION, - OPT_UNPACKER, -) - - -class Unpacker_dll(Package): - """CAPE Unpacker DLL analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "rundll32.exe"), - ] - summary = "Unpacks a .dll file using rundll32.exe as the loader." - description = f"""Uses rundll32.exe with the '/wait' option to run a .lnk file. - {DLL_OPTION_TEXT} - Turns off '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. 
- The .dll filename extension will be added automatically.""" - option_names = DLL_OPTIONS - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - rundll32 = self.get_path("rundll32.exe") - function = self.options.get(OPT_FUNCTION, "#1") - arguments = self.options.get(OPT_ARGUMENTS) - dllloader = self.options.get(OPT_DLLLOADER) - - # If the file doesn't have the proper .dll extension force it - # and rename it. This is needed for rundll32 to execute correctly. - # See ticket #354 for details. - path = check_file_extension(path, ".dll") - - args = f"{path},{function}" - if arguments: - args += f" {arguments}" - - if dllloader: - newname = os.path.join(os.path.dirname(rundll32), dllloader) - shutil.copy(rundll32, newname) - rundll32 = newname - - return self.execute(rundll32, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_js.py b/analyzer/windows/modules/packages/Unpacker_js.py deleted file mode 100644 index 18875faa347..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_js.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright (C) 2015 Optiv, Inc. (brad.spengler@optiv.com) -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import os - -from lib.common.abstracts import Package -from lib.common.constants import OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker_JS(Package): - """JavaScript analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "wscript.exe"), - ] - summary = "Executes a .JS file using wscript.exe." - description = f"""Uses wscript.exe to run a .js/.jse file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. 
- The appropriate filename extension will be added automatically.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - wscript = self.get_path("wscript.exe") - args = f'"{path}"' - ext = os.path.splitext(path)[-1].lower() - if ext not in (".js", ".jse"): - with open(path, "r") as tmpfile: - magic_bytes = tmpfile.read(4) - if magic_bytes == "#@~^": - os.rename(path, f"{path}.jse") - path = f"{path}.jse" - else: - os.rename(path, f"{path}.js") - path = f"{path}.js" - args = f'"{path}"' - return self.execute(wscript, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_ps1.py b/analyzer/windows/modules/packages/Unpacker_ps1.py deleted file mode 100644 index 1b1243f850d..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_ps1.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class PS1(Package): - """PowerShell Unpacker analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "WindowsPowerShell", "v*.0", "powershell.exe"), - ] - summary = "Executes a sample file with powershell." - description = f"""Uses 'powershell -NoProfile -ExecutionPolicy bypass -File ' - to run a .ps1 file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. 
- The .ps1 filename extension will be added automatically.""" - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - powershell = self.get_path_glob("PowerShell") - path = check_file_extension(path, ".ps1") - args = f'-NoProfile -ExecutionPolicy bypass -File "{path}"' - return self.execute(powershell, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_regsvr.py b/analyzer/windows/modules/packages/Unpacker_regsvr.py deleted file mode 100644 index dd357ca7169..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_regsvr.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_ARGUMENTS, OPT_INJECTION, OPT_PROCDUMP, OPT_UNPACKER - - -class Unpacker_Regsvr(Package): - """CAPE Unpacker DLL analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "regsvr32.exe"), - ] - summary = "Executes function(s) in a DLL file using regsvr32.exe." - description = f"""Uses regsvr32.exe to run one or more functions in a .dll file. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. 
- The .dll filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS,) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def start(self, path): - regsvr32 = self.get_path("regsvr32.exe") - arguments = self.options.get(OPT_ARGUMENTS) - - # If the file doesn't have the proper .dll extension force it - # and rename it. This is needed for rundll32 to execute correctly. - # See ticket #354 for details. - path = check_file_extension(path, ".dll") - - args = path - if arguments: - args += f" {arguments}" - - return self.execute(regsvr32, args, path) diff --git a/analyzer/windows/modules/packages/Unpacker_zip.py b/analyzer/windows/modules/packages/Unpacker_zip.py deleted file mode 100644 index d1bd8f5b85f..00000000000 --- a/analyzer/windows/modules/packages/Unpacker_zip.py +++ /dev/null @@ -1,182 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import logging -import os -import shutil -from zipfile import BadZipfile, ZipFile - -try: - import re2 as re -except ImportError: - import re - -from lib.common.abstracts import Package -from lib.common.constants import ( - ARCHIVE_OPTIONS, - DLL_OPTIONS, - OPT_ARGUMENTS, - OPT_DLLLOADER, - OPT_FILE, - OPT_FUNCTION, - OPT_INJECTION, - OPT_PASSWORD, - OPT_PROCDUMP, - OPT_UNPACKER, -) -from lib.common.exceptions import CuckooPackageError - -log = logging.getLogger(__name__) - - -class Unpacker_zip(Package): - """CAPE Unpacker zip analysis package.""" - - PATHS = [ - ("SystemRoot", "system32", "cmd.exe"), - ] - summary = "Unzips a file with the supplied password, execute its contents." - description = f"""Extracts the sample from a zip file. 
If the file name is not - supplied in the '{OPT_FILE}" option, the first file in the zip is taken. - Turns off '{OPT_PROCDUMP}' and '{OPT_INJECTION}'. - Turns on '{OPT_UNPACKER}'. - The execution method is chosen based on the filename extension.""" - option_names = sorted(set(ARCHIVE_OPTIONS + DLL_OPTIONS)) - - def __init__(self, options=None, config=None): - """@param options: options dict.""" - if options is None: - options = {} - self.config = config - self.options = options - self.pids = [] - self.options[OPT_UNPACKER] = "1" - self.options[OPT_PROCDUMP] = "0" - self.options[OPT_INJECTION] = "0" - - def extract_zip(self, zip_path, extract_path, password, recursion_depth): - """Extracts a nested ZIP file. - @param zip_path: ZIP path - @param extract_path: where to extract - @param password: ZIP password - @param recursion_depth: how deep we are in a nested archive - """ - # Test if zip file contains a file named as itself. - if self.is_overwritten(zip_path): - log.debug("ZIP file contains a file with the same name, original is going to be overwritten") - # TODO: add random string. - new_zip_path = f"{zip_path}.old" - shutil.move(zip_path, new_zip_path) - zip_path = new_zip_path - - # Unpacker. - with ZipFile(zip_path, "r") as archive: - try: - archive.extractall(path=extract_path, pwd=password) - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - except RuntimeError: - try: - archive.extractall(path=extract_path, pwd="infected") - except RuntimeError as e: - raise CuckooPackageError(f"Unable to extract Zip file: {e}") from e - finally: - if recursion_depth < 4: - # Extract nested archives. - for name in archive.namelist(): - if name.endswith(".zip"): - # Recurse. 
- try: - self.extract_zip(os.path.join(extract_path, name), extract_path, password, recursion_depth + 1) - except BadZipfile: - log.warning( - "Nested zip file '%s' name end with 'zip' extension is not a valid zip, skipping extraction", - name, - ) - except RuntimeError as run_err: - log.error("Error to extract nested zip file %s with details: %s", name, run_err) - - def is_overwritten(self, zip_path): - """Checks if the ZIP file contains another file with the same name, so it is going to be overwritten. - @param zip_path: zip file path - @return: comparison boolean - """ - with ZipFile(zip_path, "r") as archive: - # Test if zip file contains a file named as itself. - try: - return any(name == os.path.basename(zip_path) for name in archive.namelist()) - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - - def get_infos(self, zip_path): - """Get information from ZIP file. - @param zip_path: zip file path - @return: ZipInfo class - """ - try: - with ZipFile(zip_path, "r") as archive: - return archive.infolist() - except BadZipfile as e: - raise CuckooPackageError("Invalid Zip file") from e - - def start(self, path): - root = os.environ["TEMP"] - password = self.options.get(OPT_PASSWORD) - exe_regex = re.compile(r"(\.exe|\.scr|\.msi|\.bat|\.lnk|\.js|\.jse|\.vbs|\.vbe|\.wsf\.ps1)$", flags=re.IGNORECASE) - dll_regex = re.compile(r"(\.dll|\.ocx)$", flags=re.IGNORECASE) - zipinfos = self.get_infos(path) - self.extract_zip(path, root, password, 0) - - file_name = self.options.get(OPT_FILE) - # If no file name is provided via option, take the first file. - if file_name is None: - # No name provided try to find a better name. 
- if not len(zipinfos): - raise CuckooPackageError("Empty ZIP archive") - - # Attempt to find a valid exe extension in the archive - for f in zipinfos: - if exe_regex.search(f.filename): - file_name = f.filename - break - if file_name is None: - for f in zipinfos: - if dll_regex.search(f.filename): - file_name = f.filename - break - # Default to the first one if none found - file_name = file_name or zipinfos[0].filename - log.debug("Missing file option, auto executing: %s", file_name) - file_path = os.path.join(root, file_name) - log.debug('file_name: "%s"', file_name) - if file_name.lower().endswith(".lnk"): - cmd_path = self.get_path("cmd.exe") - cmd_args = f'/c start /wait "" "{file_path}"' - return self.execute(cmd_path, cmd_args, file_path) - elif file_name.lower().endswith(".msi"): - msi_path = self.get_path("msiexec.exe") - msi_args = f'/I "{file_path}"' - return self.execute(msi_path, msi_args, file_path) - elif file_name.lower().endswith((".js", ".jse", ".vbs", ".vbe", ".wsf")): - wscript = self.get_path_app_in_path("wscript.exe") - wscript_args = f'"{file_path}"' - return self.execute(wscript, wscript_args, file_path) - elif file_name.lower().endswith((".dll", ".ocx")): - rundll32 = self.get_path_app_in_path("rundll32.exe") - function = self.options.get(OPT_FUNCTION, "#1") - arguments = self.options.get(OPT_ARGUMENTS) - dllloader = self.options.get(OPT_DLLLOADER) - dll_args = f'"{file_path}",{function}' - if arguments: - dll_args += f" {arguments}" - if dllloader: - newname = os.path.join(os.path.dirname(rundll32), dllloader) - shutil.copy(rundll32, newname) - rundll32 = newname - return self.execute(rundll32, dll_args, file_path) - elif file_name.lower().endswith(".ps1"): - powershell = self.get_path_app_in_path("powershell.exe") - args = f'-NoProfile -ExecutionPolicy bypass -File "{path}"' - return self.execute(powershell, args, file_path) - return self.execute(file_path, self.options.get(OPT_ARGUMENTS), file_path) diff --git 
a/analyzer/windows/modules/packages/Shellcode.py b/analyzer/windows/modules/packages/shellcode.py similarity index 100% rename from analyzer/windows/modules/packages/Shellcode.py rename to analyzer/windows/modules/packages/shellcode.py diff --git a/analyzer/windows/modules/packages/Shellcode_x64.py b/analyzer/windows/modules/packages/shellcode_x64.py similarity index 100% rename from analyzer/windows/modules/packages/Shellcode_x64.py rename to analyzer/windows/modules/packages/shellcode_x64.py From a9682ffe02a2f60331bbb3f3cca9cc585cace782 Mon Sep 17 00:00:00 2001 From: Kevin O'Reilly Date: Wed, 16 Oct 2024 16:28:07 +0100 Subject: [PATCH 28/33] Remove references to'o 'Unpacker' packages from docs --- docs/book/src/usage/packages.rst | 46 -------------------------------- 1 file changed, 46 deletions(-) diff --git a/docs/book/src/usage/packages.rst b/docs/book/src/usage/packages.rst index 83eb2efbaa3..59152c7cd2e 100644 --- a/docs/book/src/usage/packages.rst +++ b/docs/book/src/usage/packages.rst @@ -163,7 +163,6 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``offset``: specify the offset to run with the 64-bit CAPE loader. - * ``shellcode-unpacker``: used to run and analyze **Shellcode** via the 32-bit CAPE loader, with unpacking! * ``shellcode``: used to run and analyze **Shellcode** via the 32-bit CAPE loader, with unpacking! **Options**: @@ -173,51 +172,6 @@ The following is a list of the existing packages in alphabetical order: *NB*: You need to have ``flashplayer.exe`` in the analyzer's ``bin`` folder. - * ``unpacker_dll``: used to run and analyze **Dynamically Linked Libraries** via ``flashplayer.exe``, with unpacking! - - *NB*: You need to have ``flashplayer.exe`` in the analyzer's ``bin`` folder. - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. 
- * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``function``: specify the function to be executed. If none is specified, CAPE will try to run all available functions, - up to the limit found in the `max_dll_exports` task option. - - * ``unpacker_js``: used to run and analyze **JavaScript and JScript Encoded files** via ``wscript.exe``, with unpacking! - * ``unpacker_ps1``: used to run and analyze **PowerShell scripts** via ``powershell.exe``, with unpacking! - * ``unpacker_regsvr``: used to run and analyze **Dynamically Linked Libraries** via ``regsvr.exe``, with unpacking! - - **Options**: - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - - * ``unpacker_zip``: used to run and analyze **Zip archives** via the zipfile Python package, and runs an executable file (if it exists), with ``cmd.exe``. Also unpacking! - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. - * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. - * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. - * ``password``: specify the password of the archive. If none is specified, CAPE will try to extract the archive without password or use the password "*infected*". - - * ``unpacker``: used to run and analyze generic **Windows executables**, with unpacking! - - **Options**: - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. 
- - * ``upx_dll``: used to run and analyze **Dynamically Linked Libraries** packed with **Ultimate Packer for eXecutables**. - - **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. - * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). - * ``function``: specify the function to be executed. If none is specified, CAPE will try to run all available functions, - up to the limit found in the `max_dll_exports` task option. - - * ``upx``: used to run and analyze generic **Windows executables** packed with **Ultimate Packer for eXecutables**. - - **Options**: - * ``appdata``: *[yes/no]* if enabled, run the executable from the APPDATA directory. - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - * ``vawtrak``: used to run and analyze **Vawtrak malware** with ``iexplore.exe``. 
*NB*: https://www.microsoft.com/en-us/wdsi/threats/malware-encyclopedia-description?Name=Backdoor:Win32/Vawtrak.A From 2ec86640798a3bd86a96a3680a729fe4719272b4 Mon Sep 17 00:00:00 2001 From: doomedraven Date: Thu, 17 Oct 2024 12:41:01 +0200 Subject: [PATCH 29/33] dotNet reactor --- conf/default/selfextract.conf.default | 2 +- lib/cuckoo/common/integrations/file_extra_info.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/conf/default/selfextract.conf.default b/conf/default/selfextract.conf.default index 6175734093e..638bc1b4f49 100644 --- a/conf/default/selfextract.conf.default +++ b/conf/default/selfextract.conf.default @@ -75,7 +75,7 @@ binary = /usr/bin/de4dot extra_args = timeout = 60 -# https://github.com/SychicBoy/NETReactorSlayer/releases +# https://github.com/otavepto/NETReactorSlayer/releases [eziriz_deobfuscate] enabled = yes binary = data/NETReactorSlayer.CLI diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py index f582aac1bd0..4e65c4bd18c 100644 --- a/lib/cuckoo/common/integrations/file_extra_info.py +++ b/lib/cuckoo/common/integrations/file_extra_info.py @@ -576,7 +576,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet if file.endswith("_Slayed"): return - if all("Eziriz .NET Reactor" not in string for string in data_dictionary.get("die", [])): + if all(".NET Reactor" not in string for string in data_dictionary.get("die", [])): return binary = shlex.split(selfextract_conf.eziriz_deobfuscate.binary.strip())[0] @@ -587,7 +587,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet if not path_exists(binary): log.error( - "Missing dependency: Download from https://github.com/SychicBoy/NETReactorSlayer/releases and place under %s.", + "Missing dependency: Download from https://github.com/otavepto/NETReactorSlayer/releases and place under %s.", binary, ) return From a057c61a1e6462f574e94fd441ca0acbd89e1ce7 
Mon Sep 17 00:00:00 2001 From: Josh Feather <142008135+josh-feather@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:05:50 +0100 Subject: [PATCH 30/33] Package consolidation fix (#2357) * Fix unit tests for package changes in 45b0099 * Remove vawtrack package --- analyzer/windows/modules/packages/vawtrak.py | 51 ------------ .../windows/tests/test_analysis_packages.py | 49 +---------- analyzer/windows/tests/test_analyzer.py | 81 ++----------------- docs/book/src/usage/packages.rst | 15 +--- lib/cuckoo/common/integrations/floss.py | 4 +- lib/cuckoo/core/database.py | 4 +- tests/web/test_submission_views.py | 8 +- 7 files changed, 18 insertions(+), 194 deletions(-) delete mode 100644 analyzer/windows/modules/packages/vawtrak.py diff --git a/analyzer/windows/modules/packages/vawtrak.py b/analyzer/windows/modules/packages/vawtrak.py deleted file mode 100644 index 709ff56736d..00000000000 --- a/analyzer/windows/modules/packages/vawtrak.py +++ /dev/null @@ -1,51 +0,0 @@ -# Andriy :P - -import os -import shutil -from subprocess import call - -from lib.common.abstracts import Package -from lib.common.common import check_file_extension -from lib.common.constants import OPT_APPDATA, OPT_ARGUMENTS, OPT_RUNASX86 - - -class IE(Package): - """Internet Explorer analysis package.""" - - PATHS = [ - ("ProgramFiles", "Internet Explorer", "iexplore.exe"), - ] - summary = "Runs the supplied executable." - description = f"""First runs 'iexplore.exe about:blank' to open Internet Explorer. - Next executes the given sample, passing '{OPT_ARGUMENTS}' if specified. - Use the '{OPT_APPDATA}' option to run the executable from the APPDATA directory. - Use the '{OPT_RUNASX86}' option to set the 32BITREQUIRED flag in the PE header, - using 'CorFlags.exe /32bit+'. 
- The .exe filename extension will be added automatically.""" - option_names = (OPT_ARGUMENTS, OPT_APPDATA, OPT_RUNASX86) - - def start(self, path): - iexplore = self.get_path("iexplore.exe") - # pass the URL instead of a filename in this case - self.execute(iexplore, '"about:blank"', "about:blank") - - args = self.options.get(OPT_ARGUMENTS) - appdata = self.options.get(OPT_APPDATA) - runasx86 = self.options.get(OPT_RUNASX86) - - # If the file doesn't have an extension, add .exe - # See CWinApp::SetCurrentHandles(), it will throw - # an exception that will crash the app if it does - # not find an extension on the main exe's filename - path = check_file_extension(path, ".exe") - - if appdata: - # run the executable from the APPDATA directory, required for some malware - basepath = os.getenv("APPDATA") - newpath = os.path.join(basepath, os.path.basename(path)) - shutil.copy(path, newpath) - path = newpath - if runasx86: - # ignore the return value, user must have CorFlags.exe installed in the guest VM - call(["CorFlags.exe", path, "/32bit+"]) - return self.execute(path, args, path) diff --git a/analyzer/windows/tests/test_analysis_packages.py b/analyzer/windows/tests/test_analysis_packages.py index a86bc0b33ce..5c86fba6220 100644 --- a/analyzer/windows/tests/test_analysis_packages.py +++ b/analyzer/windows/tests/test_analysis_packages.py @@ -36,56 +36,17 @@ def test_has_summary_description(self): self.assertGreater(len(subclass.summary), 0) self.assertGreater(len(subclass.description), 0) - def test_choose_package_Shellcode_Unpacker(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode-Unpacker") - pkg_class() - def test_Shellcode(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode") + pkg_class = self.class_from_analysis_package("modules.packages.shellcode") obj = pkg_class() self.assertEqual("offset", obj.option_names[0]) expected_summary = "Executes 32-bit Shellcode using loader.exe." 
self.assertEqual(expected_summary, obj.summary) - def test_Shellcode_x64(self): - pkg_class = self.class_from_analysis_package("modules.packages.Shellcode_x64") + def test_shellcode_x64(self): + pkg_class = self.class_from_analysis_package("modules.packages.shellcode_x64") pkg_class() - def test_Unpacker(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker") - obj = pkg_class() - expected_summary = "Executes a .exe file with the unpacker option." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_dll(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_dll") - obj = pkg_class() - self.assertEqual("arguments", obj.option_names[0]) - self.assertEqual("dllloader", obj.option_names[1]) - self.assertEqual("function", obj.option_names[2]) - - def test_Unpacker_js(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_js") - obj = pkg_class() - expected_summary = "Executes a .JS file using wscript.exe." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_ps1(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_ps1") - obj = pkg_class() - expected_summary = "Executes a sample file with powershell." - self.assertEqual(expected_summary, obj.summary) - - def test_Unpacker_regsvr(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_regsvr") - pkg_class() - - def test_Unpacker_zip(self): - pkg_class = self.class_from_analysis_package("modules.packages.Unpacker_zip") - obj = pkg_class() - expected_summary = "Unzips a file with the supplied password, execute its contents." 
- self.assertEqual(expected_summary, obj.summary) - def test_access(self): pkg_class = self.class_from_analysis_package("modules.packages.access") pkg_class() @@ -315,10 +276,6 @@ def test_swf(self): pkg_class = self.class_from_analysis_package("modules.packages.swf") pkg_class() - def test_vawtrak(self): - pkg_class = self.class_from_analysis_package("modules.packages.vawtrak") - pkg_class() - def test_vbejse(self): pkg_class = self.class_from_analysis_package("modules.packages.vbejse") pkg_class() diff --git a/analyzer/windows/tests/test_analyzer.py b/analyzer/windows/tests/test_analyzer.py index c633a4a7d73..a8db4ec4241 100644 --- a/analyzer/windows/tests/test_analyzer.py +++ b/analyzer/windows/tests/test_analyzer.py @@ -81,87 +81,25 @@ def test_prepare(self, set_lock, init_logging, config, pipeserver): class TestAnalyzerChoosePackage(unittest.TestCase): - def test_choose_package_Shellcode_Unpacker(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Shellcode-Unpacker" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode-Unpacker", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Shellcode_Unpacker") - def test_choose_package_Shellcode(self): + def test_choose_package_shellcode(self): test = analyzer.Analyzer() test.config = MagicMock() test.options = MagicMock() - test.config.package = "Shellcode" + test.config.package = "shellcode" pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode", pkg_name) + self.assertEqual("modules.packages.shellcode", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "Shellcode") def test_choose_package_Shellcode_x64(self): test = analyzer.Analyzer() test.config = MagicMock() test.options = MagicMock() - test.config.package = "Shellcode_x64" + test.config.package = "shellcode_x64" pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Shellcode_x64", 
pkg_name) + self.assertEqual("modules.packages.shellcode_x64", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "Shellcode_x64") - def test_choose_package_Unpacker(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker") - - def test_choose_package_Unpacker_dll(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_dll" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_dll", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_dll") - - def test_choose_package_Unpacker_js(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_js" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_js", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_JS") - - def test_choose_package_Unpacker_ps1(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_ps1" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_ps1", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "PS1") - - def test_choose_package_Unpacker_regsvr(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "Unpacker_regsvr" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_regsvr", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_Regsvr") - - def test_choose_package_Unpacker_zip(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - 
test.config.package = "Unpacker_zip" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.Unpacker_zip", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "Unpacker_zip") - def test_choose_package_access(self): test = analyzer.Analyzer() test.config = MagicMock() @@ -603,15 +541,6 @@ def test_choose_package_swf(self): self.assertEqual("modules.packages.swf", pkg_name) self.assertEqual(pkg_class.__class__.__name__, "SWF") - def test_choose_package_vawtrak(self): - test = analyzer.Analyzer() - test.config = MagicMock() - test.options = MagicMock() - test.config.package = "vawtrak" - pkg_name, pkg_class = test.choose_package() - self.assertEqual("modules.packages.vawtrak", pkg_name) - self.assertEqual(pkg_class.__class__.__name__, "IE") - def test_choose_package_vbejse(self): test = analyzer.Analyzer() test.config = MagicMock() diff --git a/docs/book/src/usage/packages.rst b/docs/book/src/usage/packages.rst index 59152c7cd2e..bf622f1d7d7 100644 --- a/docs/book/src/usage/packages.rst +++ b/docs/book/src/usage/packages.rst @@ -30,7 +30,7 @@ The following is a list of the existing packages in alphabetical order: prior to executing files of interest within in the extracted folder. **Options**: - * ``arguments``: specify arguments to pass to the DLL through commandline. + * ``arguments``: specify arguments to pass to the DLL through commandline. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. 
@@ -172,15 +172,6 @@ The following is a list of the existing packages in alphabetical order: *NB*: You need to have ``flashplayer.exe`` in the analyzer's ``bin`` folder. - * ``vawtrak``: used to run and analyze **Vawtrak malware** with ``iexplore.exe``. - - *NB*: https://www.microsoft.com/en-us/wdsi/threats/malware-encyclopedia-description?Name=Backdoor:Win32/Vawtrak.A - - **Options**: - * ``appdata``: *[yes/no]* if enabled, run the executable from the APPDATA directory. - * ``arguments``: specify any command line argument to pass to the initial process of the submitted malware. - * ``runasx86``: *[yes/no]* if enabled, run ``CorFlags.exe`` with ``/32bit+`` prior to execution. - * ``vbejse``: used to run and analyze **VBScript Encoded and JScript Encoded files** via ``wscript.exe``. * ``vbs``: used to run and analyze **VBScript and VBScript Encoded files** via ``wscript.exe``. * ``wsf``: used to run and analyze **Windows Script Files** via ``wscript.exe``. @@ -205,7 +196,7 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``appdata``: *[yes/no]* if enabled, create custom folders in the APPDATA directory. - * ``arguments``: specify arguments to pass to the DLL through commandline. + * ``arguments``: specify arguments to pass to the DLL through commandline. * ``curdir``: specify the directory to create custom folders. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. If none is specified, a ``__configuration.json`` file must be present in the zip file. @@ -216,7 +207,7 @@ The following is a list of the existing packages in alphabetical order: **Options**: * ``appdata``: *[yes/no]* if enabled, create custom folders in the APPDATA directory. - * ``arguments``: specify arguments to pass to the DLL through commandline. 
+ * ``arguments``: specify arguments to pass to the DLL through commandline. * ``dllloader``: specify a process name to use to fake the DLL launcher name instead of ``rundll32.exe`` (this is used to fool possible anti-sandboxing tricks of certain malware). * ``file``: specify the name of the file contained in the archive to execute. If none is specified, CAPE will try to execute *sample.exe*. * ``function``: specify the function to be executed. If none is specified, CAPE will try to run the entry at ordinal 1. diff --git a/lib/cuckoo/common/integrations/floss.py b/lib/cuckoo/common/integrations/floss.py index f146ff7e9c9..5bc55331822 100644 --- a/lib/cuckoo/common/integrations/floss.py +++ b/lib/cuckoo/common/integrations/floss.py @@ -48,9 +48,9 @@ def run(self): try: if not fm.is_supported_file_type(Path(self.file_path)): - if self.package == "Shellcode": + if self.package == "shellcode": fileformat = "sc32" - elif self.package == "Shellcode_x64": + elif self.package == "shellcode_x64": fileformat = "sc64" else: return results diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py index a549b1b870f..0efb16f49e1 100644 --- a/lib/cuckoo/core/database.py +++ b/lib/cuckoo/core/database.py @@ -103,8 +103,8 @@ "msbuild", "sct", "xslt", - "Shellcode", - "Shellcode_x64", + "shellcode", + "shellcode_x64", "generic", "iso", "vhd", diff --git a/tests/web/test_submission_views.py b/tests/web/test_submission_views.py index 007fe0dca85..5896cf79428 100644 --- a/tests/web/test_submission_views.py +++ b/tests/web/test_submission_views.py @@ -71,7 +71,6 @@ def test_submission_page(self): self.assertEqual('value="" title="">Detect Automatically', options[0]) self.one_should_match('value="exe" title=".*">exe - .*', options) - self.one_should_match('value="Unpacker" title="[^"]*">Unpacker', options) self.one_should_match(".*ichitaro.*", options) self.one_should_match(".*chromium.*", options) self.assertGreater(len(options), 10) @@ -80,7 +79,7 @@ def 
test_submission_page(self): def test_package_exclusion(self): """Pick a couple of packages to exclude, to test exclusion""" - web_conf.package_exclusion.packages = "chromium,chromium_ext,ichitaro,Shellcode" + web_conf.package_exclusion.packages = "chromium,chromium_ext,ichitaro,shellcode" submission_page = self.client.get("/submit/#file") self.assertIsNotNone(submission_page.content) self.assertIn("Analysis Package", submission_page.content.decode()) @@ -90,9 +89,8 @@ def test_package_exclusion(self): # excluded packages should not be listed self.none_should_match(".*ichitaro.*", options) self.none_should_match(".*chromium.*", options) - # Package 'Shellcode' was excluded, but not 'Shellcode-Unpacker'. - self.none_should_match('.*"Shellcode".*', options) - self.one_should_match('.*"Shellcode-Unpacker".*', options) + # Package 'shellcode' was excluded. + self.none_should_match('.*"shellcode".*', options) def test_get_package_exe_info(self): """Get the package info from exe.py.""" From 72c5289838b3d41dcb6db6b18ef27a700ea3a8eb Mon Sep 17 00:00:00 2001 From: Josh Feather <142008135+josh-feather@users.noreply.github.com> Date: Thu, 17 Oct 2024 12:19:19 +0100 Subject: [PATCH 31/33] Rename test_Shellcode test to test_shellcode (#2358) --- analyzer/windows/tests/test_analysis_packages.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/analyzer/windows/tests/test_analysis_packages.py b/analyzer/windows/tests/test_analysis_packages.py index 5c86fba6220..d1bd1202198 100644 --- a/analyzer/windows/tests/test_analysis_packages.py +++ b/analyzer/windows/tests/test_analysis_packages.py @@ -36,7 +36,7 @@ def test_has_summary_description(self): self.assertGreater(len(subclass.summary), 0) self.assertGreater(len(subclass.description), 0) - def test_Shellcode(self): + def test_shellcode(self): pkg_class = self.class_from_analysis_package("modules.packages.shellcode") obj = pkg_class() self.assertEqual("offset", obj.option_names[0]) From 
a411d423d2badcb5ac5f344345770d902b053287 Mon Sep 17 00:00:00 2001 From: qux-bbb <1147635419@qq.com> Date: Thu, 17 Oct 2024 23:28:36 +0800 Subject: [PATCH 32/33] Save stdout and stderr to log (#2359) --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f0c302fd6ef..5bc6459548d 100644 --- a/README.md +++ b/README.md @@ -162,10 +162,10 @@ A huge thank you to @D00m3dR4v3n for single-handedly porting CAPE to Python 3. * Replace `` with a real pattern. * You need to replace all `` inside! * Read it! You must understand what it does! It has configuration in header of the script. - * `sudo ./kvm-qemu.sh all | tee kvm-qemu.log` + * `sudo ./kvm-qemu.sh all 2>&1 | tee kvm-qemu.log` 4. To install CAPE itself, [cape2.sh](https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh) with all optimizations * Read and understand what it does! This is not a silver bullet for all your problems! It has configuration in header of the script. - * `sudo ./cape2.sh base | tee cape.log` + * `sudo ./cape2.sh base 2>&1 | tee cape.log` 5. After installing everything save both installation logs as gold! 6. Configure CAPE by doing mods to config files inside `conf` folder. 7. Restart all CAPE services to pick config changes and run CAPE properly! 
From da36b34377f3697b40a86ee4fbfa048bd69205b9 Mon Sep 17 00:00:00 2001 From: Maxime Berthault <2143487+Maxou56800@users.noreply.github.com> Date: Fri, 18 Oct 2024 07:59:50 +0200 Subject: [PATCH 33/33] Update mongodb to 8.0 (#2361) --- installer/cape2.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/installer/cape2.sh b/installer/cape2.sh index d89faff9ab8..055dfcfa5ef 100644 --- a/installer/cape2.sh +++ b/installer/cape2.sh @@ -785,7 +785,7 @@ function install_mongo(){ echo "[+] Installing MongoDB" # Mongo >=5 requires CPU AVX instruction support https://www.mongodb.com/docs/manual/administration/production-notes/#x86_64 if grep -q ' avx ' /proc/cpuinfo; then - MONGO_VERSION="7.0" + MONGO_VERSION="8.0" else echo "[-] Mongo >= 5 is not supported" MONGO_VERSION="4.4"