mirror of https://github.com/DarrylNixon/binhop, synced 2024-04-22 12:37:06 -07:00
Still deciding on output
This commit is contained in:
parent 60524bf2dc
commit 02e660538f
2 changed files with 99 additions and 11 deletions
binhop.py 64
@@ -1,10 +1,33 @@
#!/usr/bin/env python3

import asyncio
import hashlib
import math
import time
import tempfile
import binwalk
import os

from aiohttp import web


async def scan_file(filename, base_dir):
    try:
        scan = binwalk.scan(filename, signature=True, quiet=True, extract=True, matryoshka=True, remove_after_execute=False, directory=base_dir)
        return scan
    except binwalk.ModuleException as e:
        print("Critical failure: ", e)


async def build_listing(path):
    result = {}
    for item in os.listdir(path):
        item_path = os.path.join(path, item)
        if os.path.isdir(item_path):
            result[item] = await build_listing(item_path)
        else:
            result[item] = {"s": os.path.getsize(item_path)}
            print(item)
    return result


async def upload_file(request):
    reader = await request.multipart()
    field = await reader.next()
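The diff does not show how upload_file is registered with aiohttp; the following is a minimal sketch of typical wiring, where the route path and the standalone web.run_app runner are assumptions rather than anything taken from this commit:

# Sketch only: "/upload" and the runner below are assumed, not part of this commit.
app = web.Application()
app.add_routes([web.post("/upload", upload_file)])

if __name__ == "__main__":
    web.run_app(app)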
@@ -16,28 +39,53 @@ async def upload_file(request):
    sha1_hash = hashlib.sha1()
    md5_hash = hashlib.md5()

    temp_file = tempfile.NamedTemporaryFile(mode='ab', delete=False)

    while True:
        chunk = await field.read_chunk()
        if not chunk:
            break
        temp_file.write(chunk)
        file_size += len(chunk)
        sha1_hash.update(chunk)
        md5_hash.update(chunk)

    size_suffixes = ['B', 'KB', 'MB', 'GB', 'TB']
    size_suffix_index = math.floor(math.log(file_size, 1024))
    human_readable_size = f'{file_size / (1024 ** size_suffix_index):.2f} {size_suffixes[size_suffix_index]}'
    try:
        working_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True)
        scan = await scan_file(temp_file.name, working_dir.name)
        sigs = scan[0]
        extractor = sigs.extractor.output
    finally:
        os.unlink(temp_file.name)

    # await asyncio.sleep(2)
    carved = []
    for sig in sigs.results:
        tmp_path = sig.file.path
        if tmp_path in extractor:
            if sig.offset in extractor[tmp_path].carved:
                end_offset = sig.offset + os.path.getsize(extractor[tmp_path].carved[sig.offset])
                print("Carved data from offsets 0x%X-0x%X to %s" % (sig.offset, end_offset, extractor[tmp_path].carved[sig.offset]))
                carved.append({"start": sig.offset, "end": end_offset, "d": sig.description})
            if sig.offset in extractor[tmp_path].extracted:
                extracted_files = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isfile(x)]
                extracted_dirs = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isdir(x)]
                print("Extracted %d files and %d directories from offset 0x%X to '%s' using '%s'" % (len(extracted_files), len(extracted_dirs), sig.offset, extractor[tmp_path].extracted[sig.offset].files[0], sigs.extractor.output[tmp_path].extracted[sig.offset].command))
                for i in extractor[tmp_path].extracted[sig.offset].files:
                    print(f" File: {i}")
    # listing = await build_listing(working_dir.name)
    # print(listing)
    working_dir.cleanup()

    response_data = {
        'meta': {
            'name': filename,
            'sizeb': file_size,
            'sizeh': human_readable_size,
            'sha1': sha1_hash.hexdigest(),
            'md5': md5_hash.hexdigest(),
            'sig_quant': len(sigs.magic.signatures)
        },
        'offsets': carved
    }

    processing_time = time.time() - start_time
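Note that math.log(file_size, 1024) raises ValueError when the uploaded file is empty; below is a hedged sketch of a guarded variant of the size formatting above (the helper name is invented and is not part of this commit):

import math

# Sketch only: guard the zero-byte case before taking the logarithm,
# and clamp the suffix index so very large files do not run past 'TB'.
def human_size(file_size):
    size_suffixes = ['B', 'KB', 'MB', 'GB', 'TB']
    if file_size <= 0:
        return '0.00 B'
    idx = min(math.floor(math.log(file_size, 1024)), len(size_suffixes) - 1)
    return f'{file_size / (1024 ** idx):.2f} {size_suffixes[idx]}'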
@@ -1,3 +1,42 @@
function draw_bytes(data) {
    const canvas = document.getElementById("blob");
    const ctx = canvas.getContext("2d");

    // canvas.width = canvas.parentNode.width;
    // canvas.height = canvas.parentNode.height;

    const blockSize = 2;
    const blockPadding = 1;

    ctx.fillStyle = "#ddd";

    const numBlocks = Math.ceil(data.meta.sizeb / (blockSize + blockPadding));
    for (let i = 0; i < numBlocks; i++) {
        const x = i * (blockSize + blockPadding);
        ctx.fillRect(x, 0, blockSize, canvas.height);
    }

    ctx.fillStyle = "blue";
    data.offsets.forEach((offset) => {
        const start = Math.floor(offset.start / (blockSize + blockPadding));
        const end = Math.ceil(offset.end / (blockSize + blockPadding));
        for (let i = start; i < end; i++) {
            const x = i * (blockSize + blockPadding);
            ctx.fillRect(x, 0, blockSize, canvas.height);
        }
    });
}

function bytes_to_human(bytes) {
    const units = ["B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"];
    let i = 0;
    while (bytes >= 1024 && i < units.length - 1) {
        bytes /= 1024;
        i++;
    }
    return bytes.toFixed(2) + " " + units[i];
}

async function process_upload(file) {
    const formData = new FormData();
    formData.append('file', file);
@@ -39,7 +78,7 @@ async function process_upload(file) {
                </tr>
                <tr>
                    <th><strong>size:</strong></th>
                    <td>${data.meta["sizeb"]} (${bytes_to_human(data.meta.sizeb)})</td>
                </tr>
                <tr>
                    <th><strong>sha1:</strong></th>
@@ -66,6 +105,7 @@ async function process_upload(file) {
                </small>
            </div>
        </div>`;
        draw_bytes(data);
    } else {
        console.error('error uploading file');
    }
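For orientation, draw_bytes and the table markup above consume the JSON built as response_data in binhop.py; here is a small example payload with invented values, written as a Python literal:

# All values invented for illustration; only the key layout mirrors response_data above.
example_payload = {
    "meta": {
        "name": "firmware.bin",
        "sizeb": 4194304,
        "sizeh": "4.00 MB",
        "sha1": "<hex digest>",
        "md5": "<hex digest>",
        "sig_quant": 3,
    },
    "offsets": [
        {"start": 0, "end": 2048, "d": "uImage header"},
    ],
}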