Lao Wang manages the servers at a small company. The most annoying part of his day is the pile of log files the developers dump on him, with names that are all over the place: some called log1.txt, some 1212.log, and some literally named 新建文本文档(1).log ("New Text Document (1).log"). Worse, every Friday he has to back up the config files by hand and make sure the backup directory stays in sync with the source. Lao Wang decided to write a few Python scripts to automate the drudgery.

## Scene 1: Batch renaming, so filenames behave

Start with the log files. Lao Wang wants to rename every .log file to a format like 2024-12-01_001.log, numbered in order of modification time.

```python
import os
from datetime import datetime

def rename_logs(folder_path):
    log_files = [f for f in os.listdir(folder_path) if f.endswith(".log")]
    # Sort by modification time so the index follows chronological order
    log_files.sort(key=lambda f: os.path.getmtime(os.path.join(folder_path, f)))
    for index, filename in enumerate(log_files, 1):
        old_path = os.path.join(folder_path, filename)
        # Get the modification time
        mtime = os.path.getmtime(old_path)
        date_str = datetime.fromtimestamp(mtime).strftime("%Y-%m-%d")
        new_name = f"{date_str}_{index:03d}.log"
        new_path = os.path.join(folder_path, new_name)
        os.rename(old_path, new_path)
        print(f"{filename} -> {new_name}")

rename_logs("/var/logs")
```

One run and 新建文本文档(1).log becomes 2024-12-15_005.log. Much clearer. But os.rename has a gotcha: when the target file already exists, it raises an error on Windows and silently overwrites on Linux. A collision check makes it safer:

```python
def safe_rename(old_path, new_path):
    if os.path.exists(new_path):
        # Append a counter until the name is free
        base, ext = os.path.splitext(new_path)
        counter = 1
        while os.path.exists(f"{base}_{counter}{ext}"):
            counter += 1
        new_path = f"{base}_{counter}{ext}"
    os.rename(old_path, new_path)
    return new_path
```

For more flexible pattern matching, the glob module selects files by rule, say only report_*.xlsx:

```python
import glob

for filepath in glob.glob("/data/report_*.xlsx"):
    dirname, filename = os.path.split(filepath)
    # Extract the month, e.g. report_202412.xlsx -> 202412
    month = filename.split("_")[1].split(".")[0]
    new_name = f"财务报告_{month}.xlsx"  # 财务报告 = "financial report"
    os.rename(filepath, os.path.join(dirname, new_name))
```

Regex batch replacement handles names that follow a pattern but are messy, like IMG_001 (1).jpg and IMG_001 (2).jpg; those spaces and parentheses are a headache.

```python
import re

def clean_filenames(folder, pattern, replacement):
    for filename in os.listdir(folder):
        new_name = re.sub(pattern, replacement, filename)
        if new_name != filename:
            old_path = os.path.join(folder, filename)
            new_path = os.path.join(folder, new_name)
            os.rename(old_path, new_path)
            print(f"Fixed: {filename} -> {new_name}")

# Strip spaces and parentheses from filenames
clean_filenames("./photos", r"[\(\s\)]", "")
# IMG_001 (1).jpg -> IMG_0011.jpg
```

Not exactly perfect, but at least the spaces are gone.
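A result like IMG_0011.jpg is ambiguous (image 11, or image 001 copy 1?). A capturing group can keep the copy number distinct; here is a minimal sketch of one possible refinement, where the pattern is ours, not from the original:

```python
import re

# Hypothetical refinement: keep the copy number, joined by an underscore,
# so "IMG_001 (1).jpg" becomes "IMG_001_1.jpg" instead of "IMG_0011.jpg"
print(re.sub(r"\s*\((\d+)\)", r"_\1", "IMG_001 (1).jpg"))  # IMG_001_1.jpg
```

That pattern/replacement pair plugs straight into clean_filenames above.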
## Scene 2: Smart backups, not full copies every time

Lao Wang used to do full backups with cp -r. With tens of gigabytes of config files, it kept getting slower. What he needs is an incremental backup: copy only the files that changed.

```python
import os
import json
import shutil
import hashlib

def md5_file(filepath):
    """Compute a file's MD5, used to detect content changes."""
    hash_md5 = hashlib.md5()
    with open(filepath, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()

def incremental_backup(src_dir, dst_dir, state_file="backup_state.json"):
    # Load the state from the previous backup
    if os.path.exists(state_file):
        with open(state_file, "r") as f:
            last_state = json.load(f)
    else:
        last_state = {}

    new_state = {}
    copied_count = 0
    for root, dirs, files in os.walk(src_dir):
        rel_path = os.path.relpath(root, src_dir)
        dst_root = os.path.join(dst_dir, rel_path)
        os.makedirs(dst_root, exist_ok=True)
        for file in files:
            src_file = os.path.join(root, file)
            dst_file = os.path.join(dst_root, file)
            file_md5 = md5_file(src_file)
            file_key = os.path.relpath(src_file, src_dir)
            new_state[file_key] = file_md5
            # Copy only if the file is new or its content changed
            if file_key not in last_state or last_state[file_key] != file_md5:
                shutil.copy2(src_file, dst_file)  # copy2 preserves metadata
                copied_count += 1
                print(f"Backed up: {file_key}")

    # Save the new state
    with open(state_file, "w") as f:
        json.dump(new_state, f, indent=2)
    print(f"Done, backed up {copied_count} file(s)")

incremental_backup("/etc/nginx", "/backup/nginx")
```

The first run backs up everything; after that, only changed files get copied. The state file backup_state.json records each file's MD5; delete it to force a full backup.

For scheduled automatic backups, pair it with the schedule library to run at 2 a.m. every day.

```python
import time
from datetime import datetime

import schedule

def auto_backup_job():
    print(f"{datetime.now()} starting automatic backup")
    incremental_backup("/data/mysql", "/backup/mysql")
    print("Backup finished")

schedule.every().day.at("02:00").do(auto_backup_job)

while True:
    schedule.run_pending()
    time.sleep(60)
```

## Scene 3: Directory sync, working like rsync

Lao Wang also maintains file sync between two servers. rsync is great, but some environments forbid installing extra software, so he wrote a lightweight version himself.

```python
import os
import shutil
import filecmp

def sync_dirs(src, dst, delete_extra=False):
    """Sync directories so dst ends up identical to src.

    With delete_extra=True, files that exist only in dst are removed.
    """
    if not os.path.exists(dst):
        os.makedirs(dst)

    # Compare the two directories
    comparison = filecmp.dircmp(src, dst)

    # Copy what src has and dst lacks
    for file in comparison.left_only:
        src_path = os.path.join(src, file)
        dst_path = os.path.join(dst, file)
        if os.path.isdir(src_path):
            shutil.copytree(src_path, dst_path)
        else:
            shutil.copy2(src_path, dst_path)
        print(f"Added: {file}")

    # Copy files whose content differs
    for file in comparison.diff_files:
        src_path = os.path.join(src, file)
        dst_path = os.path.join(dst, file)
        shutil.copy2(src_path, dst_path)
        print(f"Updated: {file}")

    # Delete what only dst has
    if delete_extra:
        for file in comparison.right_only:
            dst_path = os.path.join(dst, file)
            if os.path.isdir(dst_path):
                shutil.rmtree(dst_path)
            else:
                os.remove(dst_path)
            print(f"Deleted: {file}")

    # Recurse into common subdirectories
    for common_dir in comparison.common_dirs:
        sync_dirs(
            os.path.join(src, common_dir),
            os.path.join(dst, common_dir),
            delete_extra
        )

sync_dirs("/home/user/project", "/backup/project", delete_extra=True)
```

filecmp.dircmp reports three kinds of difference in one pass: left_only (only in the source), right_only (only in the target), and diff_files (content differs). Note that its file comparison is shallow by default, based on os.stat signatures (size and mtime) rather than byte-by-byte content, which is exactly the cheap check you want for sync. The recursive call then covers the whole directory tree.

For real-time sync, listen for filesystem events and copy the moment a file changes.

```python
import os
import time
import shutil

from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class SyncHandler(FileSystemEventHandler):
    def __init__(self, src, dst):
        self.src = src
        self.dst = dst

    def on_modified(self, event):
        if not event.is_directory:
            rel_path = os.path.relpath(event.src_path, self.src)
            dst_path = os.path.join(self.dst, rel_path)
            os.makedirs(os.path.dirname(dst_path), exist_ok=True)
            shutil.copy2(event.src_path, dst_path)
            print(f"Live sync: {rel_path}")

    on_created = on_modified  # treat newly created files the same way

observer = Observer()
handler = SyncHandler("/watch/src", "/watch/dst")
observer.schedule(handler, "/watch/src", recursive=True)
observer.start()
try:
    while True:  # keep the main thread alive; the observer runs in the background
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
```

Once it's running, any file created or modified under /watch/src is copied to /watch/dst almost instantly. A good fit for hot-syncing config files.

## Scene 4: Batch-cleaning old files

Log files filling the disk is a fact of life. A script that deletes files older than 30 days:

```python
import os
import time
import fnmatch
from datetime import datetime

def clean_old_files(folder, days=30, pattern="*"):
    now = time.time()
    cutoff = now - (days * 86400)
    for root, dirs, files in os.walk(folder):
        for file in files:
            if not fnmatch.fnmatch(file, pattern):
                continue
            filepath = os.path.join(root, file)
            mtime = os.path.getmtime(filepath)
            if mtime < cutoff:
                os.remove(filepath)
                print(f"Deleted old file: {filepath} ({datetime.fromtimestamp(mtime).date()})")
        # Remove empty directories (top-down walk, so this only catches
        # directories that were already empty before this run)
        for dir in dirs:
            dirpath = os.path.join(root, dir)
            try:
                os.rmdir(dirpath)
                print(f"Deleted empty directory: {dirpath}")
            except OSError:
                pass  # not empty, leave it

clean_old_files("/var/log/nginx", days=7, pattern="*.log*")
```

A safer dry-run mode previews first and deletes only after confirmation.

```python
def dry_run_clean(folder, days=30):
    now = time.time()
    cutoff = now - (days * 86400)
    to_delete = []
    for root, dirs, files in os.walk(folder):
        for file in files:
            filepath = os.path.join(root, file)
            if os.path.getmtime(filepath) < cutoff:
                to_delete.append(filepath)

    print(f"Will delete {len(to_delete)} file(s):")
    for path in to_delete[:10]:  # show only the first 10
        print(f"  {path}")
    if input("Confirm deletion (y/n): ").lower() == "y":
        for path in to_delete:
            os.remove(path)
        print("Deletion complete")
```

## Pitfalls and advanced tips

Filename encoding. Windows and Linux encode filenames differently; when working across platforms, handle names with os.fsencode and os.fsdecode.

```python
def safe_listdir(path):
    try:
        return os.listdir(path)
    except UnicodeEncodeError:
        # Fall back to the bytes API and decode with the filesystem encoding
        return [os.fsdecode(f) for f in os.listdir(os.fsencode(path))]
```

Performance with huge directories. os.listdir is slow in directories holding hundreds of thousands of files; use os.scandir instead.

```python
with os.scandir("/big_folder") as entries:
    for entry in entries:
        if entry.is_file() and entry.name.endswith(".log"):
            print(entry.name, entry.stat().st_size)
```

scandir returns file attributes together with the names, avoiding extra stat calls, which is typically a 2-5x speedup.

Move instead of copy. When archiving within the same disk, moving files is much faster than copying: on the same filesystem a move is just a rename, so no data is rewritten.

```python
def move_to_archive(src, dst):
    os.makedirs(dst, exist_ok=True)
    for filename in os.listdir(src):
        shutil.move(os.path.join(src, filename), os.path.join(dst, filename))
```

Logging. Record every operation so problems can be traced afterwards.

```python
import logging

logging.basicConfig(
    filename="file_ops.log",
    level=logging.INFO,
    format="%(asctime)s - %(message)s"
)

def log_op(op, src, dst=None):
    msg = f"{op}: {src}"
    if dst:
        msg += f" -> {dst}"
    logging.info(msg)
    print(msg)
```

## One script to do it all

Package the common features into a command-line tool:

```python
import shutil
import argparse

def main():
    parser = argparse.ArgumentParser(description="Batch file-processing tool")
    subparsers = parser.add_subparsers(dest="command")

    rename_parser = subparsers.add_parser("rename")
    rename_parser.add_argument("folder")
    rename_parser.add_argument("--pattern", default="*")
    rename_parser.add_argument("--template", default="{date}_{index}")

    backup_parser = subparsers.add_parser("backup")
    backup_parser.add_argument("src")
    backup_parser.add_argument("dst")
    backup_parser.add_argument("--incremental", action="store_true")

    sync_parser = subparsers.add_parser("sync")
    sync_parser.add_argument("src")
    sync_parser.add_argument("dst")
    sync_parser.add_argument("--delete", action="store_true")

    clean_parser = subparsers.add_parser("clean")
    clean_parser.add_argument("folder")
    clean_parser.add_argument("--days", type=int, default=30)

    args = parser.parse_args()
    if args.command == "rename":
        rename_files(args.folder, args.pattern, args.template)
    elif args.command == "backup":
        if args.incremental:
            incremental_backup(args.src, args.dst)
        else:
            shutil.copytree(args.src, args.dst)
    elif args.command == "sync":
        sync_dirs(args.src, args.dst, args.delete)
    elif args.command == "clean":
        clean_old_files(args.folder, args.days)

if __name__ == "__main__":
    main()

# Usage examples:
# python file_tool.py rename ./logs --pattern "*.log" --template "{date}_{index}"
# python file_tool.py backup /data /backup --incremental
# python file_tool.py sync /project /backup/project --delete
```
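One gap in the tool above: the rename subcommand dispatches to rename_files(folder, pattern, template), a function that never appears in the article (only rename_logs does). A minimal sketch of what it might look like, assuming it generalizes rename_logs with fnmatch filtering and a {date}/{index} template; the body below is a guess, not the author's code:

```python
import os
import fnmatch
from datetime import datetime

def rename_files(folder, pattern="*", template="{date}_{index}"):
    """Hypothetical helper: filter files by pattern, then rename them
    using a template with {date} and {index} placeholders."""
    names = [f for f in os.listdir(folder) if fnmatch.fnmatch(f, pattern)]
    # Keep the index chronological, matching rename_logs
    names.sort(key=lambda f: os.path.getmtime(os.path.join(folder, f)))
    for index, filename in enumerate(names, 1):
        old_path = os.path.join(folder, filename)
        date_str = datetime.fromtimestamp(os.path.getmtime(old_path)).strftime("%Y-%m-%d")
        ext = os.path.splitext(filename)[1]  # keep the original extension
        new_name = template.format(date=date_str, index=f"{index:03d}") + ext
        os.rename(old_path, os.path.join(folder, new_name))
```

With something like this in place, the rename subcommand works end to end.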
Lao Wang pulled these scripts together and set up scheduled tasks. Now the first thing he does each morning is a quick check: are the log files named to spec, did the backup succeed, is everything in sync. The Friday-overtime backup sessions are history; he brews a cup of tea and gets on with his day.
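The "scheduled tasks" at the end are left unspecified; on Linux the usual choice is cron. A minimal sketch of crontab entries, assuming the combined tool is saved as /opt/file_tool.py (the path and times here are illustrative, not from the article):

```
# Hypothetical crontab entries (edit with `crontab -e`)
0 2 * * *   python3 /opt/file_tool.py backup /data/mysql /backup/mysql --incremental
30 2 * * *  python3 /opt/file_tool.py clean /var/log/nginx --days 7
0 8 * * 5   python3 /opt/file_tool.py sync /home/user/project /backup/project --delete
```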