一、临时文件的智能识别系统
1. 文件指纹识别引擎
import os
import hashlib
import magic  # third-party "python-magic": libmagic bindings for MIME sniffing
from pathlib import Path
from datetime import datetime, timedelta
from typing import Dict, List, Set, Optional
import mimetypes
import json
class filefingerprint:
    """File fingerprint engine: decides whether a file looks temporary.

    Combines extension/path pattern tables, filename heuristics, a
    size-plus-staleness heuristic, a sampled content hash, and libmagic
    MIME detection.
    """

    def __init__(self):
        # Pattern table. Convention:
        #   '.ext'  -> matches the file suffix exactly
        #   'dir/'  -> matches anywhere in the path string
        #   other   -> matches as a substring of the file name
        self.temp_patterns = {
            'compilation': ['.o', '.obj', '.class', '.pyc', '.pyo'],
            'cache': ['.cache', '.tmp', '.swp', '.swo', '.swn'],
            'log': ['.log', '.out', '.err', '.trace'],
            'download': ['.part', '.crdownload', '.download'],
            'backup': ['~', '.bak', '.backup', '.old'],
            'ide': ['.idea/', '.vscode/', '.vs/', 'Thumbs.db'],
            'build': ['node_modules/', '__pycache__/', 'dist/', 'build/']
        }
        # python-magic detector configured to return MIME type strings.
        self.mime = magic.Magic(mime=True)
        self.safe_extensions = {'.py', '.js', '.java', '.cpp', '.md', '.txt'}

    def analyze_file(self, filepath: Path) -> dict:
        """Build a full fingerprint dict for *filepath*.

        Raises OSError (e.g. PermissionError) if the file cannot be
        stat'ed; callers are expected to handle that.
        """
        stats = filepath.stat()
        fingerprint = {
            'path': str(filepath),
            'size': stats.st_size,
            'created': datetime.fromtimestamp(stats.st_ctime),
            'modified': datetime.fromtimestamp(stats.st_mtime),
            'accessed': datetime.fromtimestamp(stats.st_atime),
            'extension': filepath.suffix.lower(),
            'is_temp': False,
            'category': 'unknown',
            'risk_level': 'low',
            'content_hash': self._calculate_hash(filepath),
            'mime_type': self._detect_mime(filepath)
        }
        # Overwrite category/is_temp with the classifier's verdict.
        fingerprint.update(self._classify_file(filepath, fingerprint))
        return fingerprint

    def _classify_file(self, filepath: Path, fp: dict) -> dict:
        """Classify a file; returns {'category': ..., 'is_temp': ...}."""
        result = {'category': 'other', 'is_temp': False}
        # Pattern-based identification. Note each pattern kind is matched
        # exclusively: an extension pattern that does not match the suffix
        # must NOT fall through to substring matching (otherwise '.o'
        # would flag 'foo.old').
        for category, patterns in self.temp_patterns.items():
            for pattern in patterns:
                if pattern.startswith('.'):
                    if fp['extension'] == pattern:
                        return {'category': category, 'is_temp': True}
                elif pattern.endswith('/'):
                    # NOTE(review): uses '/' literally; on Windows str(Path)
                    # yields backslashes, so directory patterns may not match.
                    if pattern in str(filepath):
                        return {'category': category, 'is_temp': True}
                elif pattern in filepath.name:
                    return {'category': category, 'is_temp': True}
        # Filename-based heuristics.
        filename = filepath.name.lower()
        temp_markers = ['temp', 'tmp', 'cache', 'swap', 'dump']
        if any(marker in filename for marker in temp_markers):
            result.update({'category': 'temp_pattern', 'is_temp': True})
        # Content-based heuristic: large file (>100MB) untouched for 30 days.
        if (fp['size'] > 100 * 1024 * 1024 and
                datetime.now() - fp['accessed'] > timedelta(days=30)):
            result.update({'category': 'large_inactive', 'is_temp': True})
        return result

    def _calculate_hash(self, filepath: Path) -> str:
        """MD5 of the file content (head+middle+tail sample for big files).

        MD5 is used for change detection only, not security.
        Returns the string 'error' when the file cannot be read.
        """
        try:
            size = filepath.stat().st_size
            if size > 10 * 1024 * 1024:  # sample large files instead of full read
                with open(filepath, 'rb') as f:
                    head = f.read(4096)
                    f.seek(max(0, size // 2 - 2048))
                    middle = f.read(4096)
                    f.seek(max(0, size - 4096))
                    tail = f.read(4096)
                data = head + middle + tail
            else:
                data = filepath.read_bytes()
            return hashlib.md5(data).hexdigest()
        except OSError:
            return 'error'

    def _detect_mime(self, filepath: Path) -> str:
        """Detect the real MIME type; falls back to extension guessing."""
        try:
            return self.mime.from_file(str(filepath))
        except Exception:
            return mimetypes.guess_type(str(filepath))[0] or 'unknown'
# Section 2: directory scanning and monitoring system (目录扫描与监控系统)
import psutil
import platform
from dataclasses import dataclass
from collections import defaultdict
import shutil
@dataclass
class scanresult:
    """Aggregate result of one temp-file directory scan."""
    total_size: int                     # total bytes of all files examined
    file_count: int                     # number of files examined
    temp_files: list[dict]              # fingerprints flagged as is_temp
    by_category: dict[str, list[dict]]  # temp fingerprints grouped by category
    disk_usage: dict                    # psutil-style disk usage snapshot
class tempfilescanner:
    """Scans directories for temporary files using filefingerprint."""

    def __init__(self):
        self.fingerprint = filefingerprint()
        # Platform-specific temp directories, resolved once.
        self.system_temp_dirs = self._get_system_temp_dirs()

    def _get_system_temp_dirs(self) -> list[Path]:
        """Return the system and dev-tool temp directories for this OS."""
        system = platform.system()  # 'Windows' / 'Linux' / 'Darwin'
        temp_dirs = []
        if system == 'Windows':
            temp_dirs.extend([
                Path(os.environ.get('TEMP', 'C:\\Windows\\Temp')),
                Path(os.environ.get('TMP', 'C:\\Windows\\Temp')),
                Path('C:\\Users\\') / os.environ['USERNAME'] / 'AppData' / 'Local' / 'Temp'
            ])
        elif system in ('Linux', 'Darwin'):
            temp_dirs.extend([
                Path('/tmp'),
                Path('/var/tmp'),
                Path.home() / '.cache',
                Path.home() / '.tmp'
            ])
        # Well-known developer-tool caches; only kept if they exist.
        dev_tools = [
            Path.home() / '.npm',             # npm cache
            Path.home() / '.m2',              # Maven repository
            Path.home() / '.gradle',          # Gradle cache
            Path.home() / '.cache/pip',       # pip cache
            Path.home() / '.cargo/registry',  # Rust registry cache
            Path.home() / 'Library/Caches',   # macOS app caches
        ]
        temp_dirs.extend(d for d in dev_tools if d.exists())
        return temp_dirs

    def scan_directory(self, directory: Path, recursive: bool = True) -> "scanresult":
        """Walk *directory* and fingerprint every file.

        Unreadable files (permission/IO errors) are skipped silently.
        """
        temp_files = []
        by_category = defaultdict(list)
        total_size = 0
        file_count = 0
        scan_method = directory.rglob if recursive else directory.glob
        for filepath in scan_method('*'):
            if not filepath.is_file():
                continue
            try:
                fp = self.fingerprint.analyze_file(filepath)
            except (PermissionError, OSError):
                continue
            file_count += 1
            total_size += fp['size']
            if fp['is_temp']:
                temp_files.append(fp)
                by_category[fp['category']].append(fp)
        return scanresult(
            total_size=total_size,
            file_count=file_count,
            temp_files=temp_files,
            by_category=dict(by_category),
            disk_usage=self._get_disk_usage(directory)
        )

    def _get_disk_usage(self, path: Path) -> dict:
        """Disk usage for the filesystem containing *path*."""
        usage = psutil.disk_usage(str(path))
        return {
            'total': usage.total,
            'used': usage.used,
            'free': usage.free,
            'percent': usage.percent,
            'threshold': 85  # warning threshold in percent
        }

    def find_largest_temp_files(self, directory: Path, top_n: int = 20) -> list[dict]:
        """Return the *top_n* biggest temp files under *directory*."""
        scan = self.scan_directory(directory)
        return sorted(scan.temp_files, key=lambda f: f['size'], reverse=True)[:top_n]

    def _format_size(self, size_bytes: int) -> str:
        """Human-readable size string; the CLI's run_scan relies on this."""
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_bytes < 1024.0:
                return f"{size_bytes:.2f}{unit}"
            size_bytes /= 1024.0
        return f"{size_bytes:.2f}PB"
# Section: rule-based cleanup strategy engine (二、智能清理策略引擎)
1. 基于规则的清理策略
from abc import ABC, abstractmethod
from typing import List, Tuple
import heapq
class cleanupstrategy(ABC):
    """Abstract base class for cleanup decision strategies."""

    @abstractmethod
    def should_clean(self, file_info: dict) -> bool:
        """Return True if the file described by *file_info* should be removed."""

    @abstractmethod
    def get_priority(self, file_info: dict) -> int:
        """Return an integer priority; higher means clean sooner."""
class agebasedstrategy(cleanupstrategy):
    """Clean files whose mtime is older than a fixed age."""

    def __init__(self, max_age_days: int = 7):
        self.max_age = timedelta(days=max_age_days)

    def should_clean(self, file_info: dict) -> bool:
        age = datetime.now() - file_info['modified']
        return age > self.max_age

    def get_priority(self, file_info: dict) -> int:
        # Hours since last modification: older files score higher.
        age_hours = (datetime.now() - file_info['modified']).total_seconds() / 3600
        return int(age_hours)
class sizebasedstrategy(cleanupstrategy):
    """Clean files above a minimum size."""

    def __init__(self, min_size_mb: int = 10):
        self.min_size = min_size_mb * 1024 * 1024  # bytes

    def should_clean(self, file_info: dict) -> bool:
        return file_info['size'] > self.min_size

    def get_priority(self, file_info: dict) -> int:
        # One point per 100MB of file size.
        return file_info['size'] // (100 * 1024 * 1024)
class accessbasedstrategy(cleanupstrategy):
    """Clean files that have not been accessed recently."""

    def __init__(self, min_access_days: int = 30):
        self.min_access = timedelta(days=min_access_days)

    def should_clean(self, file_info: dict) -> bool:
        idle = datetime.now() - file_info['accessed']
        return idle > self.min_access

    def get_priority(self, file_info: dict) -> int:
        # Whole days since last access: the staler, the higher.
        return (datetime.now() - file_info['accessed']).days
class compositestrategy(cleanupstrategy):
    """Weighted combination of age, size and access strategies."""

    def __init__(self):
        # (strategy, weight) pairs; weights sum to 1.0.
        self.strategies = [
            (agebasedstrategy(max_age_days=7), 0.4),        # 40% weight
            (sizebasedstrategy(min_size_mb=50), 0.3),       # 30% weight
            (accessbasedstrategy(min_access_days=14), 0.3)  # 30% weight
        ]

    def should_clean(self, file_info: dict) -> bool:
        # Clean as soon as ANY sub-strategy votes yes.
        return any(strategy.should_clean(file_info)
                   for strategy, _ in self.strategies)

    def get_priority(self, file_info: dict) -> int:
        # Weighted sum of sub-strategy scores, scaled to integers.
        total_score = 0
        for strategy, weight in self.strategies:
            if strategy.should_clean(file_info):
                total_score += int(strategy.get_priority(file_info) * weight * 100)
        return total_score
# Section: smart cleanup manager (2. 智能清理管理器)
class smartcleanupmanager:
    """Applies a cleanup strategy to scanned files, with backup + history."""

    def __init__(self, strategy: "cleanupstrategy" = None):
        self.strategy = strategy or compositestrategy()
        self.scanner = tempfilescanner()
        self.cleaned_files = []
        # Deleted files are copied here first as a safety net.
        self.backup_dir = Path.home() / '.temp_cleanup_backup'
        self.backup_dir.mkdir(exist_ok=True)
        # Persistent cleanup history (JSON, capped at 1000 entries on save).
        self.history_file = self.backup_dir / 'cleanup_history.json'
        self.history = self._load_history()

    def _load_history(self) -> list:
        """Load cleanup history from disk; empty list if none exists."""
        if self.history_file.exists():
            with open(self.history_file, 'r') as f:
                return json.load(f)
        return []

    def _save_history(self):
        """Persist the most recent 1000 history entries."""
        with open(self.history_file, 'w') as f:
            json.dump(self.history[-1000:], f, indent=2, default=str)

    def analyze_and_clean(self, directory: Path,
                          dry_run: bool = True,
                          max_size_to_free: int = 0) -> dict:
        """Scan *directory*, pick files per strategy, and clean them.

        With dry_run=True nothing is deleted. If max_size_to_free > 0,
        stop selecting files once that many bytes would be freed.
        Returns the cleanup result dict (also appended to history).
        """
        print(f"🔍 扫描目录: {directory}")
        scan_result = self.scanner.scan_directory(directory)
        print(f"📊 扫描结果:")
        print(f"  总文件数: {scan_result.file_count}")
        print(f"  总大小: {self._format_size(scan_result.total_size)}")
        print(f"  临时文件数: {len(scan_result.temp_files)}")
        # Apply the strategy to each temp-file fingerprint.
        files_to_clean = []
        for file_info in scan_result.temp_files:
            if self.strategy.should_clean(file_info):
                priority = self.strategy.get_priority(file_info)
                files_to_clean.append((priority, file_info))
        # Sort by priority only — comparing the dicts on ties would raise.
        files_to_clean.sort(key=lambda pf: pf[0], reverse=True)
        if max_size_to_free > 0:
            files_to_clean = self._select_files_to_free_space(
                files_to_clean, max_size_to_free
            )
        cleanup_result = self._execute_cleanup(
            [file_info for _, file_info in files_to_clean],
            dry_run
        )
        self.history.append({
            'timestamp': datetime.now().isoformat(),
            'directory': str(directory),
            'dry_run': dry_run,
            'result': cleanup_result
        })
        self._save_history()
        return cleanup_result

    def _select_files_to_free_space(self, files: list[tuple],
                                    target_size: int) -> list[tuple]:
        """Greedily pick highest-priority files until target_size is reached."""
        selected = []
        freed_size = 0
        # Key on priority only (dicts are not orderable on ties).
        for priority, file_info in sorted(files, key=lambda pf: pf[0], reverse=True):
            if freed_size >= target_size:
                break
            selected.append((priority, file_info))
            freed_size += file_info['size']
        return selected

    def _execute_cleanup(self, files: list[dict], dry_run: bool) -> dict:
        """Delete (or simulate deleting) *files*; returns a summary dict."""
        total_freed = 0
        cleaned_count = 0
        errors = []
        print(f"\n{'🧪 模拟运行' if dry_run else '🧹 开始清理'}:")
        for file_info in files:
            filepath = Path(file_info['path'])
            try:
                if dry_run:
                    action = "将删除"
                else:
                    # Back up first, then delete.
                    self._backup_file(filepath)
                    if filepath.is_file():
                        filepath.unlink()
                    elif filepath.is_dir():
                        shutil.rmtree(filepath)
                    action = "已删除"  # was undefined on this path: NameError fix
                size_mb = file_info['size'] / (1024 * 1024)
                total_freed += file_info['size']
                cleaned_count += 1
                status = "✅" if not dry_run else "📝"
                print(f"{status} {action} {filepath.name} ({size_mb:.1f}MB)")
            except Exception as e:
                errors.append(str(e))
                print(f"❌ 失败: {filepath.name} - {e}")
        result = {
            'total_freed': total_freed,
            'cleaned_count': cleaned_count,
            'error_count': len(errors),
            'errors': errors,
            'dry_run': dry_run
        }
        print(f"\n📈 清理总结:")
        print(f"  释放空间: {self._format_size(total_freed)}")
        print(f"  清理文件: {cleaned_count}个")
        if errors:
            print(f"  错误: {len(errors)}个")
        return result

    def _backup_file(self, filepath: Path) -> "Path | None":
        """Copy *filepath* into the backup dir; returns the backup path.

        Backup failure is deliberately non-fatal: cleanup proceeds anyway.
        """
        if not filepath.exists():
            return None
        # e.g. 20240101_235959_home_user_foo.tmp
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        relative_path = filepath.relative_to(filepath.anchor)
        safe_name = str(relative_path).replace(os.sep, '_')
        backup_path = self.backup_dir / f"{timestamp}_{safe_name}"
        try:
            if filepath.is_file():
                shutil.copy2(filepath, backup_path)
            elif filepath.is_dir():
                shutil.copytree(filepath, backup_path)
        except (OSError, shutil.Error):
            pass  # best-effort backup; do not block the cleanup
        return backup_path

    def _format_size(self, size_bytes: int) -> str:
        """Human-readable size, e.g. 1536 -> '1.50KB'."""
        for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
            if size_bytes < 1024.0:
                return f"{size_bytes:.2f}{unit}"
            size_bytes /= 1024.0
        return f"{size_bytes:.2f}PB"

    def restore_from_backup(self, backup_filename: str) -> bool:
        """Restore a file from the backup directory by its backup name."""
        backup_path = self.backup_dir / backup_filename
        if not backup_path.exists():
            return False
        # TODO: parse the original path from the backup file name
        # and copy the backup back into place.
        return True
# Section 3: automated monitoring and scheduling (三、自动化监控与调度系统)
1. 实时监控守护进程
import time
import threading
from watchdog.observers import Observer           # third-party: watchdog
from watchdog.events import FileSystemEventHandler
import schedule                                   # third-party: schedule
class tempfilemonitor(FileSystemEventHandler):
    """watchdog handler that tracks newly created temp files."""

    def __init__(self, cleanup_manager: "smartcleanupmanager"):
        self.manager = cleanup_manager
        self.temp_extensions = {'.tmp', '.temp', '.cache', '.log'}
        # path -> last creation/modification timestamp (epoch seconds)
        self.recent_creations = {}

    def on_created(self, event):
        """Record new temp files and schedule a staleness check in 1h."""
        if event.is_directory:
            return
        filepath = Path(event.src_path)
        if filepath.suffix in self.temp_extensions:
            self.recent_creations[filepath] = time.time()
            print(f"📁 检测到临时文件: {filepath.name}")
            # Re-check after one hour; if untouched, it is considered stale.
            threading.Timer(3600, self._check_if_stale, args=[filepath]).start()

    def on_modified(self, event):
        """Refresh the timestamp when a tracked file is modified."""
        if event.is_directory:
            return
        filepath = Path(event.src_path)
        if filepath in self.recent_creations:
            self.recent_creations[filepath] = time.time()

    def _check_if_stale(self, filepath: Path):
        """If *filepath* was untouched for 1h, trigger a cleanup of its dir."""
        if filepath not in self.recent_creations:
            return
        create_time = self.recent_creations[filepath]
        if time.time() - create_time > 3600:  # one hour
            if filepath.exists():
                print(f"⏰ 文件已过期: {filepath.name}")
                # Real (non-dry-run) cleanup of the parent directory.
                self.manager.analyze_and_clean(
                    filepath.parent,
                    dry_run=False,
                    max_size_to_free=0
                )
class automatedcleanupscheduler:
    """Runs the file monitor plus nightly/hourly scheduled cleanups."""

    def __init__(self):
        self.manager = smartcleanupmanager()
        self.monitor = tempfilemonitor(self.manager)
        self.observer = Observer()
        # Directories watched for temp-file churn.
        self.watch_dirs = [
            Path.home() / 'Downloads',
            Path.home() / 'Desktop',
            Path('/tmp') if platform.system() != 'Windows'
            else Path(os.environ.get('TEMP', 'C:\\Windows\\Temp'))
        ]

    def start_monitoring(self):
        """Start watchdog observers and the schedule loop (blocks forever)."""
        for directory in self.watch_dirs:
            if directory.exists():
                self.observer.schedule(
                    self.monitor,
                    str(directory),
                    recursive=True
                )
                print(f"👀 开始监控: {directory}")
        self.observer.start()
        # Scheduled jobs: nightly cleanup and hourly disk check.
        schedule.every().day.at("02:00").do(self._nightly_cleanup)
        schedule.every().hour.do(self._check_disk_usage)
        print("🚀 临时文件监控器已启动")
        try:
            while True:
                schedule.run_pending()
                time.sleep(60)
        except KeyboardInterrupt:
            self.observer.stop()
            self.observer.join()

    def _nightly_cleanup(self):
        """Nightly job: free up to 1GB in each watched directory."""
        print("🌙 执行夜间清理...")
        for directory in self.watch_dirs:
            if directory.exists():
                self.manager.analyze_and_clean(
                    directory,
                    dry_run=False,
                    max_size_to_free=1024 * 1024 * 1024  # try to free 1GB
                )

    def _check_disk_usage(self):
        """Hourly job: emergency cleanup when disk usage exceeds 85%."""
        for directory in self.watch_dirs:
            if directory.exists():
                usage = psutil.disk_usage(str(directory))
                if usage.percent > 85:
                    print(f"⚠️ 磁盘空间不足: {directory} ({usage.percent}%)")
                    self.manager.analyze_and_clean(
                        directory,
                        dry_run=False,
                        max_size_to_free=1024 * 1024 * 1024 * 5  # try to free 5GB
                    )
# Section: command-line tool integration (2. 命令行工具集成)
import argparse
import sys
from rich.console import Console   # third-party: rich
from rich.table import Table
from rich.progress import Progress

# Shared console used by every CLI subcommand below.
console = Console()
def main():
    """CLI entry point: parse arguments and dispatch to a subcommand."""
    parser = argparse.ArgumentParser(
        description='智能临时文件清理工具',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
使用示例:
  %(prog)s scan ~/downloads              # 扫描目录
  %(prog)s clean ~/downloads --dry-run   # 模拟清理
  %(prog)s clean ~/downloads --force     # 实际清理
  %(prog)s monitor                       # 启动监控守护进程
  %(prog)s stats                         # 显示统计信息
"""
    )
    subparsers = parser.add_subparsers(dest='command', help='命令')
    # scan
    scan_parser = subparsers.add_parser('scan', help='扫描临时文件')
    scan_parser.add_argument('directory', help='要扫描的目录')
    scan_parser.add_argument('--recursive', '-r', action='store_true',
                             help='递归扫描')
    scan_parser.add_argument('--top', type=int, default=20,
                             help='显示最大的n个文件')
    # clean
    clean_parser = subparsers.add_parser('clean', help='清理临时文件')
    clean_parser.add_argument('directory', help='要清理的目录')
    clean_parser.add_argument('--dry-run', '-d', action='store_true',
                              help='模拟运行,不实际删除')
    clean_parser.add_argument('--force', '-f', action='store_true',
                              help='强制清理,无需确认')
    clean_parser.add_argument('--free-size', type=int,
                              help='要释放的空间大小(mb)')
    # monitor
    subparsers.add_parser('monitor', help='启动监控守护进程')
    # stats
    stats_parser = subparsers.add_parser('stats', help='显示统计信息')
    stats_parser.add_argument('--days', type=int, default=7,
                              help='显示最近n天的统计')
    args = parser.parse_args()
    if args.command == 'scan':
        run_scan(args)
    elif args.command == 'clean':
        run_clean(args)
    elif args.command == 'monitor':
        run_monitor(args)
    elif args.command == 'stats':
        run_stats(args)
    else:
        parser.print_help()
def run_scan(args):
    """`scan` subcommand: scan a directory and print category/size tables."""
    scanner = tempfilescanner()
    directory = Path(args.directory).expanduser()
    if not directory.exists():
        console.print(f"[red]目录不存在: {directory}[/red]")
        return
    console.print(f"[bold blue]扫描目录: {directory}[/bold blue]")
    with Progress() as progress:
        # Indeterminate task: the scan itself is not incremental.
        task = progress.add_task("[cyan]扫描中...", total=None)
        result = scanner.scan_directory(directory, args.recursive)
        progress.update(task, completed=100)
    # Per-category summary table.
    table = Table(title="临时文件分析结果")
    table.add_column("分类", style="cyan")
    table.add_column("文件数", justify="right")
    table.add_column("总大小", justify="right")
    table.add_column("占比", justify="right")
    for category, files in result.by_category.items():
        category_size = sum(f['size'] for f in files)
        percentage = (category_size / result.total_size * 100) if result.total_size > 0 else 0
        table.add_row(
            category,
            str(len(files)),
            scanner._format_size(category_size),
            f"{percentage:.1f}%"
        )
    console.print(table)
    # Top-N biggest temp files.
    if args.top > 0:
        largest_files = scanner.find_largest_temp_files(directory, args.top)
        if largest_files:
            console.print(f"\n[bold yellow]最大的 {args.top} 个临时文件:[/bold yellow]")
            file_table = Table()
            file_table.add_column("文件名", style="green")
            file_table.add_column("大小", justify="right")
            file_table.add_column("修改时间", justify="right")
            file_table.add_column("分类", style="cyan")
            for file_info in largest_files:
                file_table.add_row(
                    Path(file_info['path']).name,
                    scanner._format_size(file_info['size']),
                    # %Y/%H/%M — the lowercase codes printed wrong values
                    file_info['modified'].strftime('%Y-%m-%d %H:%M'),
                    file_info['category']
                )
            console.print(file_table)
def run_clean(args):
    """`clean` subcommand: confirm (unless --force/--dry-run) then clean."""
    manager = smartcleanupmanager()
    directory = Path(args.directory).expanduser()
    if not directory.exists():
        console.print(f"[red]目录不存在: {directory}[/red]")
        return
    # Interactive confirmation guard for real deletions.
    if not args.force and not args.dry_run:
        console.print(f"[bold yellow]警告: 将清理目录: {directory}[/bold yellow]")
        response = input("确定继续吗? (y/n): ")
        if response.lower() != 'y':
            console.print("[red]操作已取消[/red]")
            return
    console.print(f"[bold blue]开始清理: {directory}[/bold blue]")
    result = manager.analyze_and_clean(
        directory,
        dry_run=args.dry_run,
        max_size_to_free=(args.free_size * 1024 * 1024) if args.free_size else 0
    )
    if result['dry_run']:
        console.print(f"[yellow]模拟运行完成,可释放 {manager._format_size(result['total_freed'])}[/yellow]")
    else:
        console.print(f"[green]清理完成,已释放 {manager._format_size(result['total_freed'])}[/green]")
def run_monitor(args):
    """`monitor` subcommand: run the monitoring daemon until Ctrl+C."""
    scheduler = automatedcleanupscheduler()
    console.print("[bold green]启动临时文件监控守护进程...[/bold green]")
    console.print("按 Ctrl+C 停止监控")
    scheduler.start_monitoring()  # blocks until interrupted
def run_stats(args):
    """`stats` subcommand: print cleanup history for the last N days."""
    manager = smartcleanupmanager()
    if not manager.history:
        console.print("[yellow]暂无清理历史记录[/yellow]")
        return
    console.print("[bold blue]清理历史统计:[/bold blue]")
    table = Table()
    table.add_column("时间", style="cyan")
    table.add_column("目录")
    table.add_column("释放空间", justify="right")
    table.add_column("清理文件", justify="right")
    # Filter by timestamp: --days means "last N days", not "last N records".
    cutoff = datetime.now() - timedelta(days=args.days)
    recent = [r for r in manager.history
              if datetime.fromisoformat(r['timestamp']) >= cutoff]
    for record in recent:
        table.add_row(
            record['timestamp'][:16],
            Path(record['directory']).name,
            manager._format_size(record['result']['total_freed']),
            str(record['result']['cleaned_count'])
        )
    console.print(table)
    # All-time totals across the whole history.
    total_freed = sum(r['result']['total_freed'] for r in manager.history)
    total_files = sum(r['result']['cleaned_count'] for r in manager.history)
    console.print(f"\n[bold green]总计:[/bold green]")
    console.print(f"  释放空间: {manager._format_size(total_freed)}")
    console.print(f"  清理文件: {total_files}个")
if __name__ == '__main__':
    main()
# Section 4: usage examples and best practices (四、使用示例与最佳实践)
1. 基本使用示例
# Example 1: basic scan of the Downloads directory.
from pathlib import Path

manager = smartcleanupmanager()
downloads = Path.home() / 'Downloads'
# scan_directory lives on the manager's scanner, not the manager itself.
result = manager.scanner.scan_directory(downloads)
print(f"找到 {len(result.temp_files)} 个临时文件")
print(f"总大小: {manager._format_size(result.total_size)}")
2. 智能清理配置
# Example 2: custom strategy — clean files over 100MB unused for 7 days.
custom_strategy = compositestrategy()
manager = smartcleanupmanager(custom_strategy)
3. 自动化监控
# Example 3: automated monitoring with an extra watched directory.
scheduler = automatedcleanupscheduler()
scheduler.watch_dirs.append(Path.home() / 'projects' / 'builds')
4. 安全备份与恢复
# Example 4: safe backup and restore.
# Redirect backups to a dedicated directory before cleaning.
manager.backup_dir = Path.home() / '.safe_cleanup_backups'
# List restorable backups.
backup_files = list(manager.backup_dir.glob('*.backup'))
for backup in backup_files[:5]:
    print(f"备份: {backup.name}")
# Section: integrate into the development workflow (5. 集成到开发工作流)
# Example 5: hook cleanup into a build script.
def build_project():
    """Clean temp files before and after a project build."""
    cleanup_tool = smartcleanupmanager()
    # Pre-build: clear stale artifacts from build/.
    cleanup_tool.analyze_and_clean(
        Path('build'),
        dry_run=False
    )
    # ... run the actual build here ...
    # Post-build: clear packaging leftovers from dist/.
    cleanup_tool.analyze_and_clean(
        Path('dist'),
        dry_run=False
    )
# Section 5: safety considerations (五、安全注意事项)
class safecleanupvalidator:
    """Safety validator — prevents deletion of important-looking files."""

    # Patterns that mark a path as protected (matched as substrings).
    safe_patterns = {
        'git': ['.git/', '.gitignore', '.gitmodules'],
        'config': ['.env', 'config.', 'settings.', 'secret'],
        'database': ['.db', '.sqlite', '.mdb'],
        'project': ['package.json', 'requirements.txt', 'pom.xml']
    }

    def __init__(self):
        self.whitelist = self._load_whitelist()

    def _load_whitelist(self):
        """Load user-protected paths from ~/.cleanup_whitelist.txt."""
        whitelist_file = Path.home() / '.cleanup_whitelist.txt'
        if whitelist_file.exists():
            return set(whitelist_file.read_text().splitlines())
        return set()

    def is_safe_to_delete(self, filepath: Path) -> bool:
        """Return False if *filepath* looks important; True otherwise."""
        # User whitelist always wins.
        if str(filepath) in self.whitelist:
            return False
        # Protected path patterns.
        for category, patterns in self.safe_patterns.items():
            for pattern in patterns:
                if pattern in str(filepath):
                    return False
        # Simple content heuristic for small files: refuse anything that
        # looks like it holds credentials.
        try:
            if filepath.stat().st_size < 1024:
                content = filepath.read_text()[:500]
                dangerous_keywords = ['password', 'secret', 'key', 'token']
                if any(keyword in content.lower() for keyword in dangerous_keywords):
                    return False
        except (OSError, UnicodeDecodeError):
            pass  # unreadable/binary small files: fall through to allow
        return True

    def add_to_whitelist(self, filepath: Path):
        """Permanently protect *filepath* from cleanup."""
        self.whitelist.add(str(filepath))
        self._save_whitelist()

    def _save_whitelist(self):
        """Persist the whitelist, one absolute path per line."""
        whitelist_file = Path.home() / '.cleanup_whitelist.txt'
        whitelist_file.write_text('\n'.join(sorted(self.whitelist)))
# The complete tool's feature summary follows (这个完整的临时文件管理工具提供了:)
- 智能识别 - 准确识别临时文件
- 安全清理 - 多重验证防止误删
- 自动化监控 - 实时监控和定时清理
- 可视化报告 - 清晰的统计信息
- 备份恢复 - 安全网机制
以上就是Python实现自动化清理临时文件的全攻略的详细内容,更多关于Python清理文件的资料请关注代码网其它相关文章!
发表评论