Mirror of https://github.com/DarrylNixon/binhop, synced 2024-04-22 12:37:06 -07:00
Still deciding on output

commit 02e660538f (parent 60524bf2dc)
2 changed files with 99 additions and 11 deletions
binhop.py (66 changes):
@@ -1,10 +1,33 @@
+#!/usr/bin/env python3
+
 import asyncio
 import hashlib
-import math
 import time
+import tempfile
+import binwalk
+import os
 
+
 from aiohttp import web
 
+async def scan_file(filename, base_dir):
+    try:
+        scan = binwalk.scan(filename, signature=True, quiet=True, extract=True, matryoshka=True, remove_after_execute=False, directory=base_dir)
+        return scan
+    except binwalk.ModuleException as e:
+        print("Critical failure: ", e)
+
+async def build_listing(path):
+    result = {}
+    for item in os.listdir(path):
+        item_path = os.path.join(path, item)
+        if os.path.isdir(item_path):
+            result[item] = await build_listing(item_path)
+        else:
+            result[item] = {"s": os.path.getsize(item_path)}
+            print(item)
+    return result
+
 async def upload_file(request):
     reader = await request.multipart()
     field = await reader.next()
@@ -16,28 +39,53 @@ async def upload_file(request):
     sha1_hash = hashlib.sha1()
     md5_hash = hashlib.md5()
 
+
+    temp_file = tempfile.NamedTemporaryFile(mode='ab', delete=False)
+
     while True:
         chunk = await field.read_chunk()
         if not chunk:
             break
+        temp_file.write(chunk)
         file_size += len(chunk)
         sha1_hash.update(chunk)
         md5_hash.update(chunk)
 
+    try:
+        working_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True)
+        scan = await scan_file(temp_file.name, working_dir.name)
+        sigs = scan[0]
+        extractor = sigs.extractor.output
+    finally:
+        os.unlink(temp_file.name)
 
-    size_suffixes = ['B', 'KB', 'MB', 'GB', 'TB']
-    size_suffix_index = math.floor(math.log(file_size, 1024))
-    human_readable_size = f'{file_size / (1024 ** size_suffix_index):.2f} {size_suffixes[size_suffix_index]}'
-    # await asyncio.sleep(2)
+    carved = []
+    for sig in sigs.results:
+        tmp_path = sig.file.path
+        if tmp_path in extractor:
+            if sig.offset in extractor[tmp_path].carved:
+                end_offset = sig.offset + os.path.getsize(extractor[tmp_path].carved[sig.offset])
+                print("Carved data from offsets 0x%X-0x%X to %s" % (sig.offset, end_offset, extractor[tmp_path].carved[sig.offset]))
+                carved.append({"start": sig.offset, "end": end_offset, "d": sig.description})
+            if sig.offset in extractor[tmp_path].extracted:
+                extracted_files = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isfile(x)]
+                extracted_dirs = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isdir(x)]
+                print("Extracted %d files and %d directories from offset 0x%X to '%s' using '%s'" % (len(extracted_files), len(extracted_dirs), sig.offset, extractor[tmp_path].extracted[sig.offset].files[0], sigs.extractor.output[tmp_path].extracted[sig.offset].command))
+                for i in extractor[tmp_path].extracted[sig.offset].files:
+                    print(f" File: {i}")
+    # listing = await build_listing(working_dir.name)
+    # print(listing)
+    working_dir.cleanup()
 
     response_data = {
         'meta': {
             'name': filename,
             'sizeb': file_size,
-            'sizeh': human_readable_size,
             'sha1': sha1_hash.hexdigest(),
-            'md5': md5_hash.hexdigest()
-        }
+            'md5': md5_hash.hexdigest(),
+            'sig_quant': len(sigs.magic.signatures)
+        },
+        'offsets': carved
     }
 
     processing_time = time.time() - start_time
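The new scan_file() and build_listing() helpers can also be exercised outside the aiohttp handler for quick local testing. The snippet below is a hypothetical harness, not part of this commit: it assumes the two functions from the diff above are in scope (for example, pasted into a REPL alongside the same imports), and "firmware.bin" is a placeholder path for whatever file you want binwalk to scan.

# Hypothetical harness (not in the commit): drives scan_file() and build_listing()
# from the diff above against a local file.
import asyncio
import tempfile

async def demo(path):
    with tempfile.TemporaryDirectory() as work_dir:
        scan = await scan_file(path, work_dir)       # binwalk signature scan + extraction
        if scan:                                     # scan_file returns None on ModuleException
            print(f"{len(scan[0].results)} signature hits")
        listing = await build_listing(work_dir)      # nested {name: {"s": size_in_bytes}} tree
        print(listing)

# asyncio.run(demo("firmware.bin"))                  # "firmware.bin" is a placeholder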
Second changed file (JavaScript front end):

@@ -1,3 +1,42 @@
+function draw_bytes(data) {
+    const canvas = document.getElementById("blob");
+    const ctx = canvas.getContext("2d");
+
+    // canvas.width = canvas.parentNode.width;
+    // canvas.height = canvas.parentNode.height;
+
+    const blockSize = 2;
+    const blockPadding = 1;
+
+    ctx.fillStyle = "#ddd";
+
+    const numBlocks = Math.ceil(data.meta.sizeb / (blockSize + blockPadding));
+    for (let i = 0; i < numBlocks; i++) {
+        const x = i * (blockSize + blockPadding);
+        ctx.fillRect(x, 0, blockSize, canvas.height);
+    }
+
+    ctx.fillStyle = "blue";
+    data.offsets.forEach((offset) => {
+        const start = Math.floor(offset.start / (blockSize + blockPadding));
+        const end = Math.ceil(offset.end / (blockSize + blockPadding));
+        for (let i = start; i < end; i++) {
+            const x = i * (blockSize + blockPadding);
+            ctx.fillRect(x, 0, blockSize, canvas.height);
+        }
+    });
+}
+
+function bytes_to_human(bytes) {
+    const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
+    let i = 0;
+    while (bytes >= 1024 && i < units.length - 1) {
+        bytes /= 1024;
+        i++;
+    }
+    return bytes.toFixed(2) + " " + units[i];
+}
+
 async function process_upload(file) {
     const formData = new FormData();
     formData.append('file', file);
@@ -39,7 +78,7 @@ async function process_upload(file) {
             </tr>
             <tr>
                 <th><strong>size:</strong></th>
-                <td>${data.meta.sizeb} (${data.meta.sizeh})</td>
+                <td>${data.meta["sizeb"]} (${bytes_to_human(data.meta.sizeb)})</td>
             </tr>
             <tr>
                 <th><strong>sha1:</strong></th>
@@ -66,6 +105,7 @@ async function process_upload(file) {
                 </small>
             </div>
         </div>`;
+        draw_bytes(data);
     } else {
         console.error('error uploading file');
     }
@@ -79,4 +119,4 @@ async function process_upload(file) {
         process_upload(file);
     });
     fileInput.click();
 }
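Neither file's hunks show how the front end's POST reaches the upload_file handler; that wiring lives outside this diff. As a rough sketch only, under stated assumptions (the "/upload" route name, port, and app setup are placeholders and not taken from this commit), an aiohttp application would expose the handler along these lines:

# Minimal sketch, not binhop's actual setup: "/upload" and port 8080 are
# placeholders, and upload_file is the handler from binhop.py above
# (assumed to be importable or otherwise in scope).
from aiohttp import web

def make_app():
    app = web.Application()
    app.add_routes([web.post("/upload", upload_file)])
    return app

# web.run_app(make_app(), port=8080)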