Mirror of https://github.com/DarrylNixon/melamine.git (synced 2024-04-22 06:27:20 -07:00)
mvp
This commit is contained in:
parent 2a773f6f1b
commit 0d61346b3c

3 changed files with 21 additions and 4 deletions
@@ -53,6 +53,15 @@ class ShredDir(AsyncObject):
             elif await subpath.is_file():
                 tasks.append(ShredFile(subpath))
         return set(await asyncio.gather(*tasks))
+
+    async def get_inodes(self, recursive: bool) -> Generator:
+        for path in self.contents:
+            if isinstance(path, ShredDir):
+                if recursive:
+                    async for sub_inode in path.get_inodes(recursive):
+                        yield sub_inode
+            elif isinstance(path, ShredFile):
+                yield path.inode
 
     def enumerate_mount_points(self) -> Generator:
         for item in self.contents:
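The new ShredDir.get_inodes is an async generator that recurses into nested ShredDir entries and yields the inode of each ShredFile. A minimal sketch of a consumer, assuming the class layout above; collect_inodes is a hypothetical helper, not part of the commit:

import asyncio

# Hypothetical consumer: drain the async generator into a set of inode numbers.
async def collect_inodes(shred_dir: "ShredDir", recursive: bool = True) -> set:
    inodes = set()
    async for inode in shred_dir.get_inodes(recursive):
        inodes.add(inode)
    return inodes

# Example use, assuming an already-initialized ShredDir instance `root`:
# all_inodes = asyncio.run(collect_inodes(root))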
@@ -196,6 +205,9 @@ class ShredFile(AsyncObject):
     def __hash__(self) -> int:
         return hash(self.absolute_path)
 
+    async def get_inodes(self, recursive: bool) -> Generator:
+        return self.inode
+
     async def delete_hardlinks_by_inode(self) -> None:
         proc = await asyncio.create_subprocess_exec("find", str(self.mount_point), "-inum", str(self.inode), "-delete")
         stdout, _ = await proc.communicate()
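For context, delete_hardlinks_by_inode shells out to find(1) to delete every directory entry under the file's mount point that shares its inode number. A standalone sketch of that technique, with placeholder path and inode values and no handling beyond the exit code:

import asyncio

# Remove every hardlink to a given inode below mount_point via find(1).
# Sketch only; mount_point and inode are hypothetical example values.
async def delete_by_inode(mount_point: str, inode: int) -> int:
    proc = await asyncio.create_subprocess_exec(
        "find", mount_point, "-inum", str(inode), "-delete",
    )
    await proc.communicate()
    return proc.returncode

# asyncio.run(delete_by_inode("/mnt/example", 123456))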
@@ -29,15 +29,18 @@ async def get_all_mounts() -> Generator:
 async def mount_bound_rglob(path: AsyncPath, mount: AsyncPath, pattern: str, ignoredirs: List[AsyncPath]) -> Generator:
     """Recursively glob a path, but stop at mount points."""
     path = await path.absolute()
+    if str(path).startswith("/home/parallels"):
+        logger.warning(f"Processing {str(path)}")
+    path_mount = await find_mount(path)
+    if path_mount != mount:
+        logger.info(f"Skipping due to mount diff: {path} (wanted {mount})")
+        return
     # Skip any ignored directories/files with a sanity check
     for ignore in ignoredirs:
         if str(path).startswith(str(ignore)) and await path.is_relative_to(ignore):
             logger.info(f"Skipping ignored subdir: {path}")
             return
     if await path.is_dir():
-        if await find_mount(path) != mount:
-            logger.info(f"Skipping differently mounted subdir: {path} (wanted {mount})")
-            return
         async for subpath in path.glob(pattern):
             async for subitem in mount_bound_rglob(subpath, mount, pattern, ignoredirs):
                 yield subitem
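This hunk moves the mount check to the top of mount_bound_rglob, so any path that resolves to a different mount than the one being walked is skipped before globbing. One common way to test "same filesystem" is comparing stat() device IDs; this is only a sketch of the general idea, and melamine's find_mount helper may resolve actual mount points rather than compare st_dev:

import os

# Two paths are on the same filesystem when their stat() device IDs match.
# Hypothetical helper for illustration, not the project's find_mount.
def on_same_mount(path: str, mount: str) -> bool:
    return os.stat(path).st_dev == os.stat(mount).st_dev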
@@ -58,7 +58,8 @@ async def main(job: argparse.Namespace) -> bool:
     logger.info("Deleting remaining hardlinks using find")
     inodes_in_mount_points = defaultdict(set)
     for path in job.paths:
-        inodes_in_mount_points[path.mount_point].add(path.inode)
+        async for inode in path.get_inodes(job.recursive):
+            inodes_in_mount_points[path.mount_point].add(inode)
 
     # We'll also limit concurrency to something reasonable since stat
     # on an entire filesystem might be a bit burdensome
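The surrounding comments mention capping concurrency for the filesystem-wide stat work. A common asyncio pattern for that is a semaphore wrapped around each task; a sketch, where the limit of 64 is an arbitrary example value not taken from the commit:

import asyncio

# Run coroutines with at most `limit` in flight at once.
async def bounded_gather(coros, limit: int = 64):
    sem = asyncio.Semaphore(limit)

    async def _run(coro):
        async with sem:
            return await coro

    return await asyncio.gather(*(_run(c) for c in coros))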
@@ -81,6 +82,7 @@ async def main(job: argparse.Namespace) -> bool:
     # checking for . and .. should not be neccessary w/ rglob
     # scandir/glob/rglob doesn't play nice with FileNotFound errors,
     # so let's avoid them in dynamic fs areas
+    logger.warning(f"Scanning {mount_point} for inodes: {inodes}")
     if str(mount_point) == "/":
         logger.info("Root filesystem mount processing")
         async for item in mount_point.glob("*"):
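A rough, synchronous sketch of what scanning a mount point for a set of inodes can look like, using plain pathlib rather than melamine's async helpers, and tolerating entries that vanish mid-scan as the comment above warns:

from pathlib import Path
from typing import Iterator, Set

# Yield entries directly under mount_point whose inode is in `inodes`.
# Hypothetical helper for illustration only.
def matching_entries(mount_point: str, inodes: Set[int]) -> Iterator[Path]:
    for item in Path(mount_point).glob("*"):
        try:
            if item.stat().st_ino in inodes:
                yield item
        except FileNotFoundError:
            continue  # dynamic fs areas can disappear between glob and stat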