Mirror of https://github.com/DarrylNixon/melamine.git (synced 2024-04-22 06:27:20 -07:00)
ignoredir
parent 737822fd99
commit 3282d5a655
2 changed files with 32 additions and 8 deletions
@@ -60,9 +60,13 @@ class ShredDir(AsyncObject):
                 yield from item.enumerate_mount_points()
         yield self.mount_point

-    async def shred(self, hash: bool = False, dryrun: bool = False) -> bool:
+    async def shred(self, hash: bool = False, dryrun: bool = False, ignoredirs: Set[AsyncPath] = None) -> bool:
+        if ignoredirs is None:
+            ignoredirs = set()
         tasks = []
         for item in self.contents:
+            if any(str(item).startswith(str(path)) for path in ignoredirs):
+                continue
             tasks.append(item.shred(hash, dryrun))
         try:
             if all(await asyncio.gather(*tasks)):
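A note on the skip test added in this hunk: `str(item).startswith(str(path))` compares raw strings, so an ignore entry of `/data/logs` would also match a sibling such as `/data/logs-old`, and the recursive `item.shred(hash, dryrun)` call in the loop does not forward `ignoredirs` to nested entries (whether that is intentional is not clear from the diff alone). A component-aware containment check is sketched below; the `is_under` helper is illustrative only and not part of this commit.

```python
# Illustrative helper, not part of the commit: a component-aware
# "is this path inside an ignored directory?" test. Plain string prefixes
# also match sibling names ("/data/logs-old" starts with "/data/logs");
# comparing path components avoids that.
from pathlib import PurePath


def is_under(candidate: str, ignoredirs: set) -> bool:
    """Return True if candidate lies at or below any ignored directory."""
    cand = PurePath(candidate)
    for ignored in ignoredirs:
        ign = PurePath(str(ignored))
        # candidate is "under" ign when ign's parts are a prefix of its parts
        if cand.parts[: len(ign.parts)] == ign.parts:
            return True
    return False


if __name__ == "__main__":
    ignore = {"/data/logs"}
    print(is_under("/data/logs/app.log", ignore))      # True
    print(is_under("/data/logs-old/app.log", ignore))  # False: sibling, not contained
```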
@@ -105,9 +109,15 @@ class ShredFile(AsyncObject):
         self.fs_handler = await mount_to_fs_handler(self.mount_point)
         self.hardlinks = None

-    async def shred(self, hash: bool = False, dryrun: bool = False) -> Union[bool, bytes]:
+    async def shred(
+        self, hash: bool = False, dryrun: bool = False, ignoredirs: Set[AsyncPath] = None
+    ) -> Union[bool, bytes]:
         """Shred the file with a single file descriptor."""
+        if ignoredirs is None:
+            ignoredirs = set()
         try:
+            if any(str(path).startswith(str(path)) for path in ignoredirs):
+                return False
             logger.info(f"Shredding: {self.absolute_path}")

             async with aiofiles.open(self.absolute_path, "rb+") as file:
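One detail worth flagging in this hunk: the new guard `if any(str(path).startswith(str(path)) for path in ignoredirs)` compares each ignored path against itself, so it is true (and the method returns False) whenever the ignore set is non-empty, regardless of where the file actually lives. The presumed intent, mirroring the `ShredDir` hunk above, is to test the file's own path, roughly as sketched below (the `should_skip` helper is hypothetical, not part of the commit).

```python
# Hypothetical sketch of the presumed intent: test the file's own absolute
# path against the ignore prefixes, not each ignored path against itself.
def should_skip(absolute_path: str, ignoredirs: set) -> bool:
    """True when the file's path falls under any ignored directory prefix."""
    return any(absolute_path.startswith(str(path)) for path in ignoredirs)


if __name__ == "__main__":
    print(should_skip("/home/user/keep.txt", {"/proc"}))  # False
    print(should_skip("/proc/1/environ", {"/proc"}))      # True
```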
@@ -161,7 +171,13 @@ class ShredFile(AsyncObject):
             if self.hardlinks:
                 log_buf = f"[5/4] Unlinking {len(self.hardlinks)} hardlinks"
                 if not dryrun:
-                    tasks = [link.unlink() for link in self.hardlinks if link.name != "." and link.name != ".."]
+                    tasks = []
+                    for link in self.hardlinks:
+                        if link.name == "." and link.name == "..":
+                            continue
+                        if any(str(await link.absolute()).startswith(str(path)) for path in ignoredirs):
+                            return False
+                        tasks.append(link.unlink())
                     done, _ = await asyncio.wait(tasks)
                     for task in done:
                         e = task.exception()
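Two small points about the rewritten hardlink loop: a single name can never equal both "." and "..", so the `link.name == "." and link.name == ".."` test is always false and the dot-entry filter from the removed one-liner (which used `!=` with `and`) is effectively lost; and `return False` abandons the whole hardlink pass when one link sits under an ignored directory, where a `continue` would skip just that link. Which behaviour is intended is not clear from the diff; a minimal sketch of the conventional dot-entry check follows (illustrative only).

```python
# Illustrative sketch, not the committed code: membership in a tuple
# expresses the "." / ".." filter directly and cannot silently become
# a no-op the way the chained equality test does.
def is_dot_entry(name: str) -> bool:
    """True for the special directory entries '.' and '..'."""
    return name in (".", "..")


if __name__ == "__main__":
    print([n for n in (".", "..", "file.txt") if not is_dot_entry(n)])  # ['file.txt']
```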
@@ -2,6 +2,8 @@ import argparse
 import asyncio
 from collections import defaultdict

+from aiopath import AsyncPath
+
 from .classes import get_all_hardlinks
 from .classes import ShredDir
 from .classes import ShredFile
@@ -30,13 +32,17 @@ async def main(job: argparse.Namespace) -> bool:
     # Try to delete hardlinks based on the filesystem type
     job.paths = await get_all_hardlinks(new_paths)

+    tasks = [path.absolute() for path in job.ignoredir]
+    tasks.append(AsyncPath("/proc").absolute())
+    job.ignoredir = set(await asyncio.gather(*tasks))
+
     # Shred all physical files including hardlinks
     for path in job.paths:
         tasks = []
         if isinstance(path, ShredFile):
-            tasks.append(path.shred(hash=job.exhaustive, dryrun=job.dryrun))
+            tasks.append(path.shred(hash=job.exhaustive, dryrun=job.dryrun, ignoredirs=job.ignoredir))
         elif isinstance(path, ShredDir):
-            tasks.append(path.shred(hash=job.exhaustive, dryrun=job.dryrun))
+            tasks.append(path.shred(hash=job.exhaustive, dryrun=job.dryrun, ignoredirs=job.ignoredir))
         done, _ = await asyncio.wait(tasks)
         for task in done:
             e = task.exception()
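For context, the block added at the top of this hunk resolves every entry of `job.ignoredir` to an absolute path concurrently (each `absolute()` call on an `aiopath.AsyncPath` is awaitable, so `asyncio.gather` can drive them all at once) and always adds `/proc` to the ignore set. The argument parser itself is not part of this diff, so the following sketch of how `--ignoredir` might be declared is an assumption; the flag name, `action`, and defaults are illustrative only.

```python
# Hypothetical argparse wiring for the ignore list; not shown in this diff.
import argparse

from aiopath import AsyncPath

parser = argparse.ArgumentParser(prog="melamine")
parser.add_argument(
    "--ignoredir",
    action="append",        # allow the flag to be repeated
    type=AsyncPath,         # argparse accepts any str -> object callable
    default=[],
    help="directory tree to leave untouched (may be given multiple times)",
)

if __name__ == "__main__":
    job = parser.parse_args(["--ignoredir", "/mnt/backups", "--ignoredir", "/var/lib"])
    print(len(job.ignoredir))  # 2 AsyncPath entries, later resolved in main()
```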
@@ -54,9 +60,9 @@ async def main(job: argparse.Namespace) -> bool:
     # on an entire filesystem might be a bit burdensome
     semaphore = asyncio.Semaphore(1024)

-    async def check_inode_and_unlink(item):
+    async def check_inode_and_unlink(item, inodes):
         async with semaphore:
-            if await item.stat().st_ino in inodes_in_mount_points[item.mount_point]:
+            if await item.stat().st_ino in inodes:
                 log_buf = f"Deleting hardlink: {item.path}"
                 if not job.dryrun:
                     log_buf = "DRY RUN " + log_buf
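Independent of the change itself (the lookup table is swapped for the new `inodes` parameter), the condition `await item.stat().st_ino` is worth a second look: attribute access binds tighter than `await`, so `.st_ino` is read from the un-awaited result of `item.stat()`. Assuming `item` is an `aiopath.AsyncPath` (as the async `rglob` in the next hunk suggests), `stat()` returns a coroutine with no `st_ino` attribute, and the awaited result needs parentheses. A minimal sketch under that assumption:

```python
# Sketch of the precedence point, using a stand-in with an awaitable stat()
# to mimic an async path object; aiopath itself is not required to run this.
import asyncio
import os


class FakeAsyncPath:
    def __init__(self, path: str) -> None:
        self._path = path

    async def stat(self) -> os.stat_result:
        return os.stat(self._path)


async def main() -> None:
    item = FakeAsyncPath(".")
    st = await item.stat()   # await first...
    print(st.st_ino)         # ...then read the attribute
    # `await item.stat().st_ino` would fail: a coroutine has no st_ino


asyncio.run(main())
```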
@@ -67,7 +73,9 @@ async def main(job: argparse.Namespace) -> bool:
         # checking for . and .. should not be neccessary w/ rglob
         tasks = []
         async for item in mount_point.rglob("*"):
-            tasks.append(check_inode_and_unlink(item))
+            if any(str(item).startswith(path) for path in job.ignoredir):
+                continue
+            tasks.append(check_inode_and_unlink(item, inodes))
         done, _ = await asyncio.wait(tasks)
         for task in done:
             e = task.exception()
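Two assumptions behind this last hunk are worth spelling out: `inodes` must be supplied by the surrounding loop over mount points (not shown here), and `job.ignoredir` holds path objects after the `asyncio.gather` earlier in `main()`, so passing an entry straight to `str.startswith()` would raise a `TypeError`; the other hunks cast with `str(path)` first. A quick illustration of that cast, using `PurePath` as a dependency-free stand-in for `AsyncPath`:

```python
# str.startswith() only accepts a str (or tuple of str) as its prefix, so a
# path object from the ignore set needs an explicit str() cast.
from pathlib import PurePath

item = "/proc/1/environ"
ignored = PurePath("/proc")  # stand-in for an AsyncPath ignore entry

try:
    item.startswith(ignored)  # type: ignore[arg-type]
except TypeError as exc:
    print(f"TypeError: {exc}")

print(item.startswith(str(ignored)))  # True, matching the casts used elsewhere
```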