A neat little script that implements `find` in pure Python. It can be passed different patterns and directories: it walks each directory, matches filenames against the patterns, builds a list of matching files, and reads each file's ctime. Files whose ctime is older than a cutoff date you set are removed. This is great for cleaning up application logs that clog up the filesystem.
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 |
#!/usr/bin/python3.5
"""Remove stale files from configured directories.

Walks each directory in ``dirs_to_clean``, collects files matching any
pattern in ``patterns``, and deletes those whose ctime is older than a
cutoff (60 days by default).  Intended for cleaning up application logs
that clog up the filesystem.
"""
import fnmatch
import os
from datetime import datetime, timedelta
from pathlib import Path

# Directories to clean: the LOG_PATH env var and the invoking user's home.
# .get() with a default keeps the script importable when LOG_PATH is unset
# (the original raised KeyError at import); empty entries are skipped in main().
log_path = os.environ.get("LOG_PATH", "")
user_prod_home = str(Path.home())

dirs_to_clean = [log_path, user_prod_home]

# Filename patterns eligible for deletion.
patterns = ['*.log', 'app_*']


def find_files(dir_to_clean, days=60):
    """Delete files under *dir_to_clean* matching ``patterns`` older than *days*.

    Args:
        dir_to_clean: Root directory to walk recursively.
        days: Age threshold in days; files whose ctime predates
            ``now - days`` are removed.  Defaults to 60, matching the
            original hard-coded behavior.
    """
    cutoff = datetime.now() - timedelta(days=days)
    # A set avoids duplicate entries when a file matches more than one
    # pattern (e.g. 'app_x.log' matches both '*.log' and 'app_*'), which
    # previously queued a doomed second os.remove() for the same path.
    matched = set()
    for root, dirs, files in os.walk(dir_to_clean):
        for pattern in patterns:
            for filename in fnmatch.filter(files, pattern):
                matched.add(os.path.join(root, filename))
    for path in sorted(matched):
        try:
            # getctime is inside the try: the file may have vanished
            # between the walk and this stat call.
            file_ctime = datetime.fromtimestamp(os.path.getctime(path))
            if file_ctime < cutoff and os.path.isfile(path):
                print("Removing file :[{0}]".format(path))
                os.remove(path)
        except OSError as e:
            print('File Clean Up Failed: [{0}]'.format(e))


def main():
    """Clean each configured directory, skipping unset/empty entries."""
    for directory in dirs_to_clean:
        if directory:
            find_files(directory)


if __name__ == "__main__":
    main()