dev #2

Merged
nyaasuki merged 4 commits from dev into main 2024-06-27 01:27:45 +08:00
Showing only changes of commit 112ab8e56e - Show all commits

View File

@@ -5,12 +5,12 @@ import re
import time
def natural_sort_key(s):
    """Natural-sort helper: split *s* into digit runs (as ints) and lowercased text."""
    key = []
    for chunk in re.split('([0-9]+)', s):
        key.append(int(chunk) if chunk.isdigit() else chunk.lower())
    return key
def generate_urls(file_paths, base_url, sub_directory, min_size):
"""根据文件路径、基础URL、子目录和最小文件大小生成URL链接"""
urls = []
urls = {}
if not base_url.endswith('/'):
base_url += '/'
if sub_directory and not sub_directory.endswith('/'):
@@ -23,14 +23,23 @@ def generate_urls(file_paths, base_url, sub_directory, min_size):
relative_path = os.path.relpath(path, start='.')
encoded_path = urllib.parse.quote(relative_path)
url = f"{base_url}{sub_directory}{encoded_path}"
urls.append(f"{file_size_bytes}:{current_timestamp}:{url}")
dir_name = os.path.dirname(relative_path)
if dir_name not in urls:
urls[dir_name] = []
urls[dir_name].append(f"{file_size_bytes}:{current_timestamp}:{url}")
return urls
def save_urls(urls, output_file):
    """Write generated URL entries to a UTF-8 text file, grouped by directory.

    Args:
        urls: Mapping of directory name -> list of "size:timestamp:url"
              strings. Entries under '.' are written at top level; every
              other directory gets a "<dir>:" header line with its entries
              indented beneath it.
        output_file: Path of the text file to create/overwrite.
    """
    # NOTE: the old list-based loop (`for url in urls: f.write(url + '\n')`)
    # was left behind by a refactor to the dict-based format; with a dict it
    # would dump the directory KEYS as raw lines before the grouped output,
    # so it has been removed.
    with open(output_file, 'w', encoding='utf-8') as f:
        for dir_name, files in urls.items():
            if dir_name == '.':
                # Files in the current directory: no grouping header.
                for file in files:
                    f.write(f"{file}\n")
            else:
                # Directory header, then each entry indented under it.
                f.write(f"{dir_name}:\n")
                for file in files:
                    f.write(f"  {file}\n")
def parse_arguments():
"""解析命令行参数"""
@@ -42,7 +51,7 @@ def parse_arguments():
return parser.parse_args()
def list_files_recursive(start_path='.', exclude_files=None):
"""递归列出目录及其子目录中的所有文件,排除指定的文件"""
"""递归列出目录及其子目录中的所有文件排除指定的文件"""
if exclude_files is None:
exclude_files = set()
file_paths = []