Skip to content

Commit 35d3ead

Browse files
authored
Merge branch 'develop' into phunter
2 parents 300cb3b + ae4e4e4 commit 35d3ead

File tree

34 files changed

+1100
-384
lines changed

34 files changed

+1100
-384
lines changed
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,150 @@
1+
# This file is a part of IntelOwl https://github.com/intelowlproject/IntelOwl
2+
# See the file 'LICENSE' for copying permission.
3+
4+
import hashlib
5+
import logging
6+
import os
7+
import sys
8+
from base64 import b64encode
9+
from tempfile import TemporaryDirectory
10+
11+
import pefile
12+
from debloat.processor import process_pe
13+
14+
from api_app.analyzers_manager.classes import FileAnalyzer
15+
from api_app.analyzers_manager.exceptions import AnalyzerRunException
16+
from tests.mock_utils import MockUpResponse, if_mock_connections, patch
17+
18+
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


# Custom logger to handle the debloat library's logging: debloat expects a
# print()-like callable, so this adapter accepts the print signature
# (*args, end=..., flush=...) and forwards messages to the module logger.
def log_message(*args, end="\n", flush=False, **kwargs):
    """Bridge debloat's print-style ``log_message`` callback onto ``logger``.

    Positional args are stringified and joined with spaces (like ``print``);
    ``end`` is appended when non-empty. Only keyword arguments that
    ``Logger.info`` actually accepts are forwarded -- the previous
    allow-list included keys such as ``level``/``msg``/``args`` that would
    make ``logger.info`` raise ``TypeError`` if ever supplied.
    """
    message = " ".join(map(str, args))
    if end:
        message += end
    # Logger.info() only supports these keyword arguments; silently drop
    # anything else (e.g. print()'s "sep"/"file") instead of crashing.
    valid_kwargs = {
        key: value
        for key, value in kwargs.items()
        if key in ("exc_info", "stack_info", "stacklevel", "extra")
    }
    logger.info(message, **valid_kwargs)
    # Emulate print(flush=True): flush the first flushable handler; if the
    # logger has no flushable handlers, fall back to flushing stdout.
    if flush:
        for handler in logger.handlers:
            if hasattr(handler, "flush"):
                handler.flush()
                break
        else:
            # Fallback to stdout flush if no flushable handlers
            sys.stdout.flush()
49+
50+
51+
class Debloat(FileAnalyzer):
    """Run the ``debloat`` library on a PE sample to strip inflated junk.

    Produces the debloated binary (base64-encoded) together with size
    statistics and MD5/SHA256 hashes of the result.
    """

    def run(self):
        # Fast-load: headers only; debloat's process_pe parses the rest.
        try:
            binary = pefile.PE(self.filepath, fast_load=True)
        except pefile.PEFormatError as e:
            raise AnalyzerRunException(f"Invalid PE file: {e}") from e

        with TemporaryDirectory() as temp_dir:
            output_path = os.path.join(temp_dir, "debloated.exe")
            original_size = os.path.getsize(self.filepath)

            try:
                debloat_code = process_pe(
                    binary,
                    out_path=output_path,
                    last_ditch_processing=True,
                    cert_preservation=True,
                    log_message=log_message,
                    beginning_file_size=original_size,
                )
            except OSError as e:
                raise AnalyzerRunException(
                    f"File operation failed during Debloat processing: {e}"
                ) from e
            except ValueError as e:
                raise AnalyzerRunException(
                    f"Invalid parameter in Debloat processing: {e}"
                ) from e
            except AttributeError as e:
                raise AnalyzerRunException(
                    f"Debloat library error, possibly malformed PE object: {e}"
                ) from e

            logger.info(
                "Debloat processed %s with code %s", self.filepath, debloat_code
            )

            # A zero return code with no output file means debloat found
            # nothing it could strip: report a soft failure, don't raise.
            if debloat_code == 0 and not os.path.exists(output_path):
                return {
                    "success": False,
                    "error": "No solution found",
                }

            # isfile() implies existence, so a single check suffices.
            if not os.path.isfile(output_path):
                raise AnalyzerRunException(
                    "Debloat did not produce a valid output file"
                )

            debloated_size = os.path.getsize(output_path)
            # Guard against division by zero for degenerate empty inputs.
            size_reduction = (
                (original_size - debloated_size) / original_size * 100
                if original_size > 0
                else 0
            )

            with open(output_path, "rb") as f:
                output = f.read()
            debloated_hash = hashlib.md5(output).hexdigest()
            debloated_sha256 = hashlib.sha256(output).hexdigest()

            encoded_output = b64encode(output).decode("utf-8")

            # TemporaryDirectory would clean this up anyway; remove eagerly
            # to free disk space before returning the (large) report.
            os.remove(output_path)
            logger.debug("Cleaned up temporary file.")

            return {
                "success": True,
                "original_size": original_size,
                "debloated_size": debloated_size,
                "debloated_file": encoded_output,
                "size_reduction_percentage": size_reduction,
                "debloated_hash": debloated_hash,
                "debloated_sha256": debloated_sha256,
            }

    @classmethod
    def update(cls) -> bool:
        # Nothing to update: the analyzer has no external data dependency.
        pass

    @classmethod
    def _monkeypatch(cls, patches: list = None):
        # NOTE(review): this patches "debloat.processor.process_pe", but run()
        # calls the name imported into this module at import time -- confirm
        # the patch target actually intercepts the call in mocked tests.
        patches = [
            if_mock_connections(
                patch(
                    "debloat.processor.process_pe",
                    return_value=MockUpResponse(
                        {
                            "success": True,
                            "original_size": 3840392,
                            "debloated_file": "TVqQAAMAAAAEAAAA//",
                            "debloated_hash": "f7f92eadfb444e7fce27efa2007a955a",
                            "debloated_size": 813976,
                            "size_reduction_percentage": 78.80487200264973,
                            "debloated_sha256": "f7f92eadfb444e7fce27efa2007a955a",
                        },
                        200,
                    ),
                )
            ),
        ]
        return super()._monkeypatch(patches)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
from django.db import migrations
2+
from django.db.models.fields.related_descriptors import (
3+
ForwardManyToOneDescriptor,
4+
ForwardOneToOneDescriptor,
5+
ManyToManyDescriptor,
6+
ReverseManyToOneDescriptor,
7+
ReverseOneToOneDescriptor,
8+
)
9+
10+
# Serialized AnalyzerConfig for the Debloat file analyzer; consumed by
# migrate()/_create_object() below.
plugin = {
    "python_module": {
        "health_check_schedule": None,
        "update_schedule": None,
        "module": "debloat.Debloat",
        "base_path": "api_app.analyzers_manager.file_analyzers",
    },
    "name": "Debloat",
    # NOTE: the original value was wrapped in an extra pair of literal double
    # quotes that would have been stored verbatim in the DB; they are dropped.
    "description": "Analyzer for debloating PE files using the [Debloat](https://github.com/Squiblydoo/debloat) tool. Reduces file size for easier malware analysis.",
    "disabled": False,
    "soft_time_limit": 300,
    "routing_key": "default",
    "health_check_status": True,
    "type": "file",
    "docker_based": False,
    "maximum_tlp": "CLEAR",
    "observable_supported": [],
    "supported_filetypes": ["application/vnd.microsoft.portable-executable"],
    "run_hash": False,
    "run_hash_type": "",
    "not_supported_filetypes": [],
    "mapping_data_model": {},
    "model": "analyzers_manager.AnalyzerConfig",
}

# This analyzer declares no parameters and no default plugin-config values.
params = []

values = []
38+
39+
40+
def _get_real_obj(Model, field, value):
    """Resolve a serialized migration value into a real related object.

    ``value`` may be a dict of field values (resolved recursively and
    materialized via ``get_or_create``), an int (treated as a primary key,
    except for PluginConfig where it maps to the plugin name), or a name
    string. Non-relational values are returned unchanged.
    """

    def _get_obj(Model, other_model, value):
        if isinstance(value, dict):
            real_vals = {}
            for key, real_val in value.items():
                real_vals[key] = _get_real_obj(other_model, key, real_val)
            value = other_model.objects.get_or_create(**real_vals)[0]
        # it is just the primary key serialized
        else:
            if isinstance(value, int):
                if Model.__name__ == "PluginConfig":
                    value = other_model.objects.get(name=plugin["name"])
                else:
                    value = other_model.objects.get(pk=value)
            # a name string
            else:
                value = other_model.objects.get(name=value)
        return value

    # Single-valued relations (FK / one-to-one, forward or reverse):
    # resolve one related object.
    if (
        type(getattr(Model, field))
        in [
            ForwardManyToOneDescriptor,
            ReverseManyToOneDescriptor,
            ReverseOneToOneDescriptor,
            ForwardOneToOneDescriptor,
        ]
        and value
    ):
        other_model = getattr(Model, field).get_queryset().model
        value = _get_obj(Model, other_model, value)
    # Many-to-many relations: resolve every element of the list.
    elif type(getattr(Model, field)) in [ManyToManyDescriptor] and value:
        other_model = getattr(Model, field).rel.model
        value = [_get_obj(Model, other_model, val) for val in value]
    return value
74+
75+
76+
def _create_object(Model, data):
    """Create a ``Model`` row from serialized ``data`` if it doesn't exist.

    Returns ``True`` when an identical object already exists and ``False``
    when a new one was created (the caller uses this to decide whether to
    also create the related parameters/values).
    """
    mtm, no_mtm = {}, {}
    # Many-to-many values cannot be assigned before the instance is saved,
    # so split them from the plain fields.
    for field, value in data.items():
        value = _get_real_obj(Model, field, value)
        if type(getattr(Model, field)) is ManyToManyDescriptor:
            mtm[field] = value
        else:
            no_mtm[field] = value
    try:
        o = Model.objects.get(**no_mtm)
    except Model.DoesNotExist:
        o = Model(**no_mtm)
        # Validate before saving: a broken serialized plugin should fail
        # the migration loudly rather than store invalid data.
        o.full_clean()
        o.save()
        # Now that the row exists, attach the many-to-many relations.
        for field, value in mtm.items():
            attribute = getattr(o, field)
            if value is not None:
                attribute.set(value)
        return False
    return True
96+
97+
98+
def migrate(apps, schema_editor):
    """Forward migration: insert the plugin config and its params/values."""
    Parameter = apps.get_model("api_app", "Parameter")
    PluginConfig = apps.get_model("api_app", "PluginConfig")
    # "model" is consumed here: the remaining dict holds only model fields.
    app_label, model_name = plugin.pop("model").split(".")
    Model = apps.get_model(app_label, model_name)
    if Model.objects.filter(name=plugin["name"]).exists():
        return
    already_present = _create_object(Model, plugin)
    if already_present:
        return
    # Only a freshly created config gets its parameters and default values.
    for param_data in params:
        _create_object(Parameter, param_data)
    for value_data in values:
        _create_object(PluginConfig, value_data)
110+
111+
112+
def reverse_migrate(apps, schema_editor):
    """Backward migration: delete the plugin config created by ``migrate``."""
    # NOTE(review): pops "model" like migrate() does -- assumes the two
    # functions never run in the same process; verify against the runner.
    app_label, model_name = plugin.pop("model").split(".")
    Model = apps.get_model(app_label, model_name)
    Model.objects.get(name=plugin["name"]).delete()
116+
117+
118+
class Migration(migrations.Migration):
    # Data migration run via RunPython; not wrapped in a single transaction.
    atomic = False
    dependencies = [
        ("api_app", "0071_delete_last_elastic_report"),
        ("analyzers_manager", "0154_analyzer_config_bbot"),
    ]

    operations = [migrations.RunPython(migrate, reverse_migrate)]

api_app/serializers/job.py

+22
Original file line numberDiff line numberDiff line change
@@ -503,6 +503,20 @@ class JobTreeSerializer(ModelSerializer):
503503
evaluation = rfs.CharField(
504504
source="data_model.evaluation", allow_null=True, read_only=True
505505
)
506+
reliability = rfs.IntegerField(
507+
source="data_model.reliability", allow_null=True, read_only=True
508+
)
509+
tags = rfs.ListField(
510+
source="data_model.tags",
511+
allow_null=True,
512+
child=rfs.CharField(read_only=True),
513+
read_only=True,
514+
default=[],
515+
)
516+
isp = rfs.CharField(source="data_model.isp", allow_null=True, read_only=True)
517+
country = rfs.CharField(
518+
source="data_model.country_code", allow_null=True, read_only=True
519+
)
506520
is_sample = rfs.BooleanField(read_only=True)
507521

508522
playbook = rfs.SlugRelatedField(
@@ -513,6 +527,9 @@ class JobTreeSerializer(ModelSerializer):
513527
required=False,
514528
)
515529
analyzed_object_name = rfs.CharField(source="analyzable.name", read_only=True)
530+
mimetype = rfs.CharField(
531+
source="analyzable.mimetype", allow_null=True, read_only=True
532+
)
516533

517534
class Meta:
518535
model = Job
@@ -525,6 +542,11 @@ class Meta:
525542
"received_request_time",
526543
"is_sample",
527544
"evaluation",
545+
"reliability",
546+
"tags",
547+
"mimetype",
548+
"isp",
549+
"country",
528550
]
529551

530552
def to_representation(self, instance):

api_app/visualizers_manager/classes.py

+4-1
Original file line numberDiff line numberDiff line change
@@ -89,8 +89,11 @@ def __init__(
8989
self.icon = icon
9090
self.bold = bold
9191
self.italic = italic
92-
self.copy_text = copy_text or value
9392
self.description = description
93+
if link:
94+
self.copy_text = copy_text or link
95+
else:
96+
self.copy_text = copy_text or value
9497

9598
@property
9699
def attributes(self) -> List[str]:

0 commit comments

Comments
 (0)