mirror of https://github.com/DarrylNixon/binhop
synced 2024-04-22 12:37:06 -07:00
better temp dir cleanup
This commit is contained in:
parent bc4f021fa9
commit 1f5afc3000

2 changed files with 35 additions and 36 deletions
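The binhop.py change below replaces a manually managed `tempfile.TemporaryDirectory(...)` plus a trailing `working_dir.cleanup()` with a `with` block, so the scratch directory is removed even if scanning raises partway through. A minimal standalone sketch of the before/after pattern (the `do_work` helper is a hypothetical stand-in, not binhop code):

```python
import os
import tempfile


def do_work(path: str) -> None:
    # Hypothetical stand-in for binhop's scan/extract step.
    with open(os.path.join(path, "scratch.bin"), "wb") as fh:
        fh.write(b"\x00" * 16)


def before_this_commit() -> None:
    # Old pattern: the TemporaryDirectory object exposes its path as .name and
    # must be cleaned up explicitly; if do_work() raises, cleanup() is never
    # reached and removal is left to the object's finalizer.
    working_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True)  # Python 3.10+
    do_work(working_dir.name)
    working_dir.cleanup()


def after_this_commit() -> None:
    # New pattern: the context manager yields the path string directly and
    # removes the directory on exit from the block, exception or not.
    with tempfile.TemporaryDirectory() as working_dir:
        do_work(working_dir)


if __name__ == "__main__":
    before_this_commit()
    after_this_commit()
```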
README.md

@@ -17,9 +17,9 @@ take action on the parts that didn't.<br />
 
 ## Installation and Usage
 
-To use this script, you need Python 3 and a functioning and "recent" version of `binwalk` installed on your system.
+To use this script, you need Python 3 and a functioning and "recent" version of `binwalk` installed on your system. In practice, this means you're *probably* going to need to be on an x86/x86_64 Linux, but maybe you have better karma than I do.
 
-You'll probably also want to install optional `binwalk` dependencies such as `sasquatch`, `jefferson`, and others. You can learn how to do that in [binwalk's INSTALL.md](https://github.com/ReFirmLabs/binwalk/blob/master/INSTALL.md). `binhop` only "requires" `binwalk`, but it'll fail on binaries for which `binwalk` is dependent on optional modules.
+You'll probably also want to install optional `binwalk` dependencies such as `sasquatch`, `jefferson`, and others, depending on the binaries you want to submit. You can learn how to do that in [binwalk's INSTALL.md](https://github.com/ReFirmLabs/binwalk/blob/master/INSTALL.md). `binhop` only "requires" `binwalk`, but it'll fail on binaries for which `binwalk` is dependent on optional modules.
 
 When that's done, get `binhop` running with something like:
 
@@ -30,7 +30,7 @@ pip install -r requirements.txt
 ./binhop.py
 ```
 
-Once running, browse to [http://localhost:8080](http://localhost:8080) to upload a blob.
+Once running, browse to [http://localhost:8080](http://localhost:8080) and upload a blob.
 
 ## FAQ
 
@@ -42,7 +42,7 @@ Once running, browse to [http://localhost:8080](http://localhost:8080) to upload
 
 This repository is part of my coursework for CSC 842 - Security Tool Development at Dakota State University. Consequently, I may choose not to maintain this tool beyond the length of the course, but have selected a license that enables open contributions in any case.
 
-For aesthetics, the interface is browser-based. It'd be ideal to make it command-line only, but I ran out of time trying to summarize an arbitrarily large number of bytes and sections into a human-consumable CLI output. I'm open to ideas.
+For aesthetics, the interface is browser-based. It'd be ideal to make it command-line accessible, but I ran out of time trying to summarize an arbitrarily large number of bytes and sections into a human-consumable CLI output. I'm open to ideas.
 
 **Why did you select GPLv3? MIT is so much better.**
 
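The README edits above are wording only, but for the requirements they restate (Python 3, an importable `binwalk`, and optional extractors such as `sasquatch` and `jefferson`), a throwaway pre-flight check could look like the sketch below; it is illustrative only and not part of binhop:

```python
#!/usr/bin/env python3
# Rough pre-flight check for the dependencies binhop's README mentions.
import shutil
import sys


def preflight() -> int:
    problems = []
    try:
        import binwalk  # noqa: F401 -- binhop drives binwalk's Python API
    except ImportError:
        problems.append("the binwalk Python module is not importable")
    # sasquatch and jefferson are optional binwalk extractors; a missing one
    # only matters for blobs that actually need it.
    for tool in ("sasquatch", "jefferson"):
        if shutil.which(tool) is None:
            problems.append("optional extractor '%s' not found on PATH" % tool)
    for problem in problems:
        print("warning:", problem)
    return 1 if problems else 0


if __name__ == "__main__":
    sys.exit(preflight())
```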
binhop.py (63 changed lines)

@@ -63,40 +63,39 @@ async def upload_file(request):
     sha1_hash.update(chunk)
     md5_hash.update(chunk)
 
-    try:
-        working_dir = tempfile.TemporaryDirectory(ignore_cleanup_errors=True)
-        scan = await scan_file(temp_file.name, working_dir.name)
-        sigs = scan[0]
-        extractor = sigs.extractor.output
-    finally:
-        os.unlink(temp_file.name)
+    with tempfile.TemporaryDirectory() as working_dir:
+        try:
+            scan = await scan_file(temp_file.name, working_dir)
+            sigs = scan[0]
+            extractor = sigs.extractor.output
+        finally:
+            os.unlink(temp_file.name)
 
-    carved, summary = [], []
-    for sig in sigs.results:
-        tmp_path = sig.file.path
-        if tmp_path in extractor:
-            if sig.offset in extractor[tmp_path].carved:
-                end_offset = sig.offset + os.path.getsize(extractor[tmp_path].carved[sig.offset])
-                summary.append(
-                    "Carved data from offsets 0x%X-0x%X to %s"
-                    % (sig.offset, end_offset, extractor[tmp_path].carved[sig.offset])
-                )
-                carved.append({"start": sig.offset, "end": end_offset, "d": sig.description})
-            if sig.offset in extractor[tmp_path].extracted:
-                extracted_files = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isfile(x)]
-                extracted_dirs = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isdir(x)]
-                summary.append(
-                    "Extracted %d files and %d directories from offset 0x%X to '%s' using '%s'"
-                    % (
-                        len(extracted_files),
-                        len(extracted_dirs),
-                        sig.offset,
-                        extractor[tmp_path].extracted[sig.offset].files[0],
-                        sigs.extractor.output[tmp_path].extracted[sig.offset].command,
-                    )
-                )
-    num_files, num_dirs, listing = await build_listing(working_dir.name)
-    working_dir.cleanup()
+        carved, summary = [], []
+        for sig in sigs.results:
+            tmp_path = sig.file.path
+            summary.append("%s 0x%.8X %s" % (sig.file.path, sig.offset, sig.description))
+            if tmp_path in extractor:
+                if sig.offset in extractor[tmp_path].carved:
+                    end_offset = sig.offset + os.path.getsize(extractor[tmp_path].carved[sig.offset])
+                    summary.append(
+                        "Carved data from offsets 0x%X-0x%X to %s"
+                        % (sig.offset, end_offset, extractor[tmp_path].carved[sig.offset])
+                    )
+                    carved.append({"start": sig.offset, "end": end_offset, "d": sig.description})
+                if sig.offset in extractor[tmp_path].extracted:
+                    extracted_files = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isfile(x)]
+                    extracted_dirs = [x for x in extractor[tmp_path].extracted[sig.offset].files if os.path.isdir(x)]
+                    summary.append(
+                        "Extracted %d files and %d directories from offset 0x%X to using '%s'"
+                        % (
+                            len(extracted_files),
+                            len(extracted_dirs),
+                            sig.offset,
+                            sigs.extractor.output[tmp_path].extracted[sig.offset].command,
+                        )
+                    )
+        num_files, num_dirs, listing = await build_listing(working_dir)
 
     response_data = {
         "meta": {
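For readers without binwalk's Python API in their head: the `sigs` and `extractor` objects walked above come from a `binwalk.scan(...)` call wrapped by binhop's `scan_file`, which this hunk does not show. A rough sketch of that API follows; the keyword options mirror binwalk's CLI flags and are assumptions, not copied from binhop:

```python
import binwalk


def sketch_scan(path: str, extract_dir: str):
    # Signature-scan `path` and let binwalk carve/extract what it recognizes.
    # Keyword names follow binwalk's CLI options (--signature, --extract,
    # --quiet, --directory) and are assumptions, not binhop's actual call.
    modules = binwalk.scan(
        path,
        signature=True,
        extract=True,
        quiet=True,
        directory=extract_dir,
    )
    sigs = modules[0]  # signature module results, as iterated in the hunk above
    for result in sigs.results:
        # The same fields the loop above reads: file.path, offset, description.
        print("%s 0x%.8X %s" % (result.file.path, result.offset, result.description))
    return sigs
```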