Mirror of https://github.com/DarrylNixon/melamine.git (synced 2024-04-22 06:27:20 -07:00)

Trying out aiopath

parent e04d86d3cb
commit cd91b960dd

6 changed files with 29 additions and 28 deletions
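The change is the same mechanical pattern in every file: pathlib.Path is swapped for aiopath's Path, blocking calls are awaited, and directory iteration becomes async for. Below is a minimal standalone sketch of that pattern for context; the demo coroutine is illustrative only (not code from this repository), and it reuses the same aiopath import and awaitable methods (resolve, stat, glob, is_dir) that the diff itself relies on.

    import asyncio

    from aiopath import Path  # the import this commit switches to


    async def demo(raw: str) -> None:
        path = Path(raw)
        resolved = await path.resolve()    # pathlib: path.resolve()
        stat = await resolved.stat()       # pathlib: path.stat()
        print(resolved, stat.st_ino)
        # pathlib: for child in path.glob("*")
        async for child in resolved.glob("*"):
            if await child.is_dir():       # pathlib: child.is_dir()
                print("dir:", child)


    asyncio.run(demo("."))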
@@ -1,13 +1,13 @@
 import asyncio
 import hashlib
 from collections.abc import Generator
-from pathlib import Path
 from secrets import token_bytes
 from typing import List
 from typing import Set
 from typing import Union

 import aiofiles
+from aiopath import Path

 from .fileops import find_mount
 from .fileops import mount_to_fs_handler
@@ -28,20 +28,21 @@ class ShredDir(AsyncObject):
     """Class for tracking each directory to be shredded, and its contents."""

     async def __init__(self, path: Path) -> None:
-        self.absolute_path = path.resolve()
+        self.absolute_path = await path.resolve()
         self.mount_point = find_mount(self.absolute_path)
         self.contents = await self._get_contents()
         self.mount_points = set(m for m in self.get_mount_points())
         self.mount_points.add(self.mount_point)
         self.fs_handler = await mount_to_fs_handler(self.mount_point)
         self.byte_size = sum(item.byte_size for item in self.contents)
-        self.inode = path.stat().st_ino
+        stat = await path.stat()
+        self.inode = stat.st_ino

     async def _get_contents(self) -> List:
         contents = []
-        for subpath in self.absolute_path.glob("*"):
-            if subpath.is_dir():
-                if subpath.is_symlink():
+        async for subpath in self.absolute_path.glob("*"):
+            if await subpath.is_dir():
+                if await subpath.is_symlink():
                     logger.warning(f"Symlink subdirectory found: {subpath}, skipping")
                     continue
                 contents.append(await ShredDir(subpath))
@@ -84,8 +85,8 @@ class ShredFile(AsyncObject):
     """Class for tracking each file to be shredded."""

     async def __init__(self, path: Path) -> None:
-        self.absolute_path = path.resolve().absolute()
-        stat = path.stat()
+        self.absolute_path = await path.resolve().absolute()
+        stat = await path.stat()
         self.byte_size = stat.st_size
         self.inode = stat.st_ino
         self.mount_point = find_mount(self.absolute_path)
@@ -139,7 +140,7 @@ class ShredFile(AsyncObject):
         # Remove the file
         log_buf = f"[4/4] Unlinking {self.absolute_path}"
         if not dryrun:
-            self.absolute_path.unlink()
+            await self.absolute_path.unlink()
         else:
             log_buf = "DRY RUN (no changes made) " + log_buf
         logger.info(log_buf)

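Both classes above declare async def __init__ and are instantiated with await ShredDir(subpath); that relies on the repository's AsyncObject base class, which is not part of this diff. The following is a minimal sketch of the usual pattern such a base uses, offered as an assumption about its implementation rather than the actual source.

    class AsyncObject:
        """Sketch (assumption): lets subclasses define `async def __init__`."""

        async def __new__(cls, *args, **kwargs):
            # Because __new__ is a coroutine function, calling the class returns
            # a coroutine; awaiting it builds the instance and awaits the
            # subclass's async __init__.
            instance = super().__new__(cls)
            await instance.__init__(*args, **kwargs)
            return instance

        async def __init__(self) -> None:
            pass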
@@ -1,7 +1,7 @@
 import asyncio
-from pathlib import Path
 from typing import List

+from aiopath import Path
 from asyncstdlib.functools import lru_cache

 from .filesystems import FSHandlers

@@ -1,6 +1,9 @@
 import ctypes
 from collections.abc import Generator
-from pathlib import Path
+
+from aiopath import Path
+
+from melamine.classes import AsyncObject


 class ext2_filsys(ctypes.Structure):
@@ -54,19 +57,16 @@ class EXT23Handler:
         self.libext2fs.ext2fs_get_next_inode.argtypes = [ext2_inode_scan, ext2_inode_large_p]
         self.libext2fs.ext2fs_get_next_inode.restype = ctypes.c_int

-    async def get_hardlinks(self, path: Path) -> Generator:
-        path = path.resolve().absolute()
-        inode = path.stat().st_ino
-
+    async def get_hardlinks(self, path: AsyncObject) -> Generator:
         fs = ext2_filsys()
-        if self.libext2fs.ext2fs_open(bytes(path), 0, 0, 0, ctypes.byref(fs)) == 0:
+        if self.libext2fs.ext2fs_open(bytes(path.absolute_path), 0, 0, 0, ctypes.byref(fs)) == 0:
             try:
                 scan = ext2_inode_scan()
                 try:
                     if self.libext2fs.ext2fs_open_inode_scan(fs, ctypes.byref(scan)) == 0:
                         inode_large = ext2_inode_large()
                         while self.libext2fs.ext2fs_get_next_inode(scan, ctypes.byref(inode_large)) == 0:
-                            if inode_large.i_links_count > 1 and inode_large.i_file_acl == inode:
+                            if inode_large.i_links_count > 1 and inode_large.i_file_acl == path.inode:
                                 yield Path(fs.fs_mount_point) / scan.name.decode()
                 finally:
                     self.libext2fs.ext2fs_close_inode_scan(scan)

@@ -1,25 +1,26 @@
 from collections.abc import Generator
-from pathlib import Path

 import libzfs
+from aiopath import Path
+
+from melamine.classes import AsyncObject


 class ZFSHandler:
     def __init__(self) -> None:
         self.fs = "zfs"

-    async def get_hardlinks(self, path: Path) -> Generator:
-        path = path.resolve().absolute()
-        inode = path.stat().st_ino
+    async def get_hardlinks(self, path: AsyncObject) -> Generator:
+        path_str = str(path.absolute_path)

         zfs = libzfs.ZFS()
-        dataset = zfs.get_dataset_by_path(str(path))
+        dataset = zfs.get_dataset_by_path(path_str)
         if dataset is not None:
             pool = dataset.pool
             filesystem = dataset.filesystem
             fs = pool.open(filesystem)

             for snapshot in fs.snapshots():
-                for entry in snapshot.ls(str(path)):
-                    if entry.inode() == inode:
+                for entry in snapshot.ls(path_str):
+                    if entry.inode() == path.inode:
                         yield Path(entry.path())

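With this commit, both filesystem handlers take the shred object itself (anything exposing .absolute_path and .inode) rather than a raw path, and get_hardlinks is an async generator of matching paths. A hedged sketch of how a caller might consume it (hypothetical helper, not from the repository):

    from typing import List


    async def collect_hardlinks(handler, shred_obj) -> List:
        """Drain a handler's get_hardlinks() async generator into a list."""
        links = []
        async for link in handler.get_hardlinks(shred_obj):
            links.append(link)  # each yielded item is an aiopath Path
        return links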
@@ -1,8 +1,6 @@
 import argparse
 from collections import defaultdict

-import aiofiles
-
 from .classes import get_all_hardlinks
 from .classes import ShredDir
 from .classes import ShredFile
@@ -41,14 +39,14 @@ async def main(job: argparse.Namespace) -> bool:
             inodes_in_mount_points[path.mount_point].add(path.inode)

     for mount_point, inodes in inodes_in_mount_points.items():
-        async for item in aiofiles.os.scandir(mount_point):
+        async for item in mount_point.rglob("*"):
             if item.name == "." or item.name == "..":
                 continue
             if item.stat().st_ino in inodes:
                 log_buf = f"Deleting hardlink: {item.path}"
                 if not job.dryrun:
                     log_buf = "DRY RUN " + log_buf
-                    await aiofiles.os.unlink(item.path)
+                    await item.path.unlink()
                 logger.info(log_buf)

     # Shred all physical files including hardlinks

@@ -16,6 +16,7 @@ dependencies = [
     "uvloop==0.17.0",
     "asyncstdlib==3.10.8",
     "psutil==5.9.5",
+    "aiopath==0.6.11",
 ]

 [project.scripts]