Move FS discovery into new class coros

Darryl Nixon 2023-07-16 11:14:03 -07:00
parent ef7b1b95f6
commit d8b3fb7f17
2 changed files with 17 additions and 19 deletions

View file

@@ -10,13 +10,14 @@ from typing import Union
 import aiofiles
 
 from .fileops import find_mount
+from .fileops import mount_to_fs_handler
 from .logs import logger
 
 
 async def get_all_hardlinks(paths: Set[Path]) -> None:
     for path in paths:
         if isinstance(path, ShredFile):
-            logger.info("Getting hardlinks for {path}")
+            logger.info(f"Getting hardlinks for {path}")
             hardlink_count = 0
             path.hardlinks = set()
             async for link in path.fs_handler.get_hardlinks(path):
@@ -32,25 +33,25 @@ async def get_all_hardlinks(paths: Set[Path]) -> None:
 class ShredDir:
     """Class for tracking each directory to be shredded, and its contents."""
 
-    def __init__(self, path: Path) -> None:
+    async def __init__(self, path: Path) -> None:
         self.absolute_path = path.resolve()
-        self.contents = self._get_contents()
         self.byte_size = sum(item.byte_size for item in self.contents)
         self.mount_point = find_mount(self.absolute_path)
+        self.contents = await self._get_contents()
         self.mount_points = set(m for m in self.get_mount_points())
         self.mount_points.add(self.mount_point)
-        self.fs_handler = None
+        self.fs_handler = mount_to_fs_handler(self.mount_point)
 
-    def _get_contents(self) -> List:
+    async def _get_contents(self) -> List:
         contents = []
         for subpath in self.absolute_path.glob("*"):
            if subpath.is_dir():
                if subpath.is_symlink():
                    logger.warning(f"Symlink subdirectory found: {subpath}, skipping")
                    continue
-               contents.append(ShredDir(subpath))
+               contents.append(await ShredDir(subpath))
            elif subpath.is_file():
-               contents.append(ShredFile(subpath))
+               contents.append(await ShredFile(subpath))
         return contents
 
     def get_mount_points(self) -> Generator:
@@ -72,11 +73,11 @@ class ShredDir:
 class ShredFile:
     """Class for tracking each file to be shredded."""
 
-    def __init__(self, path: Path) -> None:
+    async def __init__(self, path: Path) -> None:
         self.absolute_path = path.resolve()
         self.byte_size = path.stat().st_size
         self.mount_point = find_mount(self.absolute_path)
-        self.fs_handler = None
+        self.fs_handler = await mount_to_fs_handler(self.mount_point)
         self.hardlinks = None
 
     async def shred(self, hash: bool = False, dryrun: bool = False) -> Union[bool, bytes]:
@@ -118,14 +119,14 @@ class ShredFile:
             logger.info(f"[4/4] Unlinking {self.absolute_path}")
             if not dryrun:
-                file.unlink()
+                await file.unlink()
 
             # Remove any hardlinks
             if self.hardlinks:
                 logger.info(f"[5/4] Unlinking {len(self.hardlinks)} hardlinks")
                 if not dryrun:
                     for link in self.hardlinks:
-                        link.unlink()
+                        await link.unlink()
 
         return True
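
Awaiting the constructors (`await ShredFile(path)`, `await ShredDir(subpath)`) only works if something makes `__init__` awaitable: plain CPython rejects an `async def __init__` when the class is instantiated through the normal `type.__call__` path. A minimal sketch of one way to support this call style, assuming an async-aware metaclass (the `AsyncInit` name and the demo class below are illustrative, not taken from this repository):

import asyncio


class AsyncInit(type):
    """Metaclass whose __call__ is a coroutine, so `await SomeClass(...)` works."""

    async def __call__(cls, *args, **kwargs):
        # Bypass type.__call__ (which would reject a coroutine __init__),
        # create the instance by hand, then await the async __init__.
        self = cls.__new__(cls)
        await self.__init__(*args, **kwargs)
        return self


class Example(metaclass=AsyncInit):
    """Illustrative stand-in for ShredFile/ShredDir; not the project's code."""

    async def __init__(self, path: str) -> None:
        # Stand-in for async discovery work such as resolving a filesystem handler.
        await asyncio.sleep(0)
        self.path = path


async def demo() -> None:
    obj = await Example("/tmp/example")  # constructor is awaited, matching the diff
    print(obj.path)


if __name__ == "__main__":
    asyncio.run(demo())

An equivalent alternative is an async classmethod factory (e.g. `await ShredFile.create(path)`), which avoids the metaclass at the cost of a different call site.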

View file

@@ -1,11 +1,12 @@
+import argparse
+
 from .classes import get_all_hardlinks
 from .classes import ShredDir
 from .classes import ShredFile
-from .fileops import mount_to_fs_handler
 from .logs import logger
 
 
-async def main(job) -> bool:
+async def main(job: argparse.Namespace) -> bool:
     """
     This is the main function for processing a shred request.
     It is called by the CLI and builds a job queue based on the arguments passed.
@@ -17,15 +18,11 @@ async def main(job) -> bool:
     for path in job.paths:
         if path.is_file():
             logger.info(f"Adding file: {path}")
-            shred_file = ShredFile(path)
-            shred_file.fs_handler = await mount_to_fs_handler(shred_file.mount_point)
-            new_paths.add(shred_file)
+            new_paths.add(await ShredFile(path))
         elif path.is_dir():
             if job.recursive:
                 logger.info(f"Adding directory: {path}")
-                shred_dir = ShredDir(path)
-                shred_dir.fs_handler = await mount_to_fs_handler(shred_dir.mount_point)
-                new_paths.add(shred_dir)
+                new_paths.add(await ShredDir(path))
             else:
                 logger.info(f"Skipping directory: {path} (try -r/--recursive)")
         else:
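
With FS discovery moved into the class coroutines, `main()` no longer wires up `fs_handler` itself; it just awaits the constructors. A rough driver sketch showing how such a coroutine would be invoked with a parsed `argparse.Namespace` (the `main()` body below is a stand-in and the flag names are assumptions, not taken from this repository):

import argparse
import asyncio
from pathlib import Path


async def main(job: argparse.Namespace) -> bool:
    # Stand-in body: the real coroutine awaits ShredFile(...) / ShredDir(...)
    # and queues them for shredding.
    for path in job.paths:
        print(f"would queue {path} (recursive={job.recursive})")
    return True


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(description="Queue paths for shredding")
    parser.add_argument("paths", nargs="+", type=Path)
    parser.add_argument("-r", "--recursive", action="store_true")
    return parser.parse_args()


if __name__ == "__main__":
    # main() is a coroutine taking an argparse.Namespace, so drive it with asyncio.
    ok = asyncio.run(main(parse_args()))
    raise SystemExit(0 if ok else 1)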