#!/usr/bin/env python3
import subprocess
import os
import time
import traceback
import gzip
import shutil
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
import sys
import platform
class ChangeHandler(PatternMatchingEventHandler):
    """Watchdog event handler: recompiles a LaTeX project whenever a .tex file changes."""

    # Only .tex file events are dispatched to this handler.
    patterns = ["*.tex"]
    # Path of the most recently handled file, used for cheap debouncing.
    last_processed_file = None

    def process(self, event):
        """Locate the project containing the changed file and run latexmk on its sources.

        The project root is the nearest ancestor directory holding a
        ``latex_project.config`` file; that file lists the .tex sources to
        build. Compilation output goes to the project's ``compiled``
        subdirectory, and synctex paths are rewritten afterwards.
        """
        if event.is_directory:
            return
        modified_file = event.src_path
        # Cheap debounce: skip when the same file fires again within ~1s of its mtime.
        if self.last_processed_file == modified_file and (time.time() - os.path.getmtime(modified_file)) < 1:
            return
        self.last_processed_file = modified_file
        try:
            latex_project_filename = 'latex_project.config'
            compiled_path_relative_to_project_path = "compiled"
            # Find the project root by walking up from the modified file.
            modified_dir = os.path.dirname(modified_file)
            project_directory = self.get_project_directory(modified_dir, latex_project_filename)
            if project_directory is None:
                print('Python script: 未找到项目文件,跳过编译。')
                return
            # Collect the source files declared in the project config.
            src_files = self.get_source_files(project_directory, latex_project_filename)
            if not src_files:
                print('Python script: 未找到源文件,请检查 latex_project.config。')
                return
            for src_file in src_files:
                print(f'\n--- Python script: 开始处理文件 \'{os.path.basename(src_file)}\' ---')
                src_file_relative = os.path.relpath(src_file, project_directory)
                filename = os.path.splitext(os.path.basename(src_file))[0]
                compiled_dir = os.path.join(project_directory, compiled_path_relative_to_project_path)
                # Ensure the compilation output directory exists.
                os.makedirs(compiled_dir, exist_ok=True)
                # Build the latexmk command line.
                latexmk_cmd = [
                    "latexmk",
                    "-pdf",
                    "-recorder-",
                    f"-outdir={compiled_path_relative_to_project_path}",
                    f"-aux-directory={compiled_path_relative_to_project_path}",
                    "-pdflatex=pdflatex -synctex=1 -interaction=nonstopmode -file-line-error",
                    src_file_relative
                ]
                print(f"执行命令: {' '.join(latexmk_cmd)}")
                try:
                    # FIX: errors='replace' keeps non-UTF-8 bytes in latexmk
                    # output (common in TeX logs) from raising UnicodeDecodeError.
                    result = subprocess.run(
                        latexmk_cmd,
                        cwd=project_directory,
                        capture_output=True,
                        text=True,
                        encoding='utf-8',
                        errors='replace'
                    )
                    if result.returncode != 0:
                        print(f"编译出现错误,返回代码: {result.returncode}")
                    else:
                        print("编译成功")
                    # Locate and filter the log file from the output directory.
                    find_and_parse_log(compiled_dir, filename)
                except FileNotFoundError:
                    print("错误: 找不到 latexmk 命令。请确保它已安装并配置在系统 PATH 中。")
                except Exception as e:
                    print(f"命令执行异常: {str(e)}")
                    traceback.print_exc()
                # Rewrite absolute synctex paths so editors can resolve them.
                self.fix_synctex(project_directory, compiled_path_relative_to_project_path, filename)
            print('--- Python script: latexmk处理完成 ---')
        except Exception:
            # Never let a single bad event kill the watcher loop.
            traceback.print_exc()
def find_and_parse_log(compiled_dir, filename):
    """Locate the latexmk log for *filename* in *compiled_dir* and filter it.

    Falls back to the newest ``.log`` file in the directory when the
    expected ``<filename>.log`` is missing; returns silently when the
    directory contains no log at all.
    """
    # FIX: build the expected log name from the compiled source's basename —
    # the original f-string had lost its {filename} placeholder, so the
    # parameter was never used.
    log_file_path = os.path.join(compiled_dir, f"{filename}.log")
    if not os.path.exists(log_file_path):
        print(f"未找到 {filename}.log,正在搜索编译目录中的其他日志文件...")
        log_files = [os.path.join(compiled_dir, f) for f in os.listdir(compiled_dir) if f.endswith('.log')]
        if not log_files:
            print("在编译目录中未找到任何日志文件")
            return
        # Prefer the most recently modified log.
        log_files.sort(key=os.path.getmtime, reverse=True)
        log_file_path = log_files[0]
        print(f"使用最新的日志文件: {log_file_path}")
    # Filter the chosen log file.
    parse_log_with_texlogfilter(log_file_path)
def parse_log_with_texlogfilter(log_file_path):
    """Run texlogfilter on the given log file and save the filtered output.

    On success the filtered text is written next to the log as
    ``<name>_filtered.txt``; failures and a missing texlogfilter binary
    are reported but never raised.
    """
    try:
        # texlogfilter shows errors and warnings by default.
        # FIX: errors='replace' guards against non-UTF-8 bytes in TeX logs,
        # which would otherwise raise UnicodeDecodeError.
        result = subprocess.run(['texlogfilter', log_file_path],
                                capture_output=True, text=True, encoding='utf-8',
                                errors='replace')
        if result.returncode == 0:
            filtered_log = result.stdout
            # Save the filtered log beside the original file.
            output_file = log_file_path.rsplit('.', 1)[0] + '_filtered.txt'
            with open(output_file, 'w', encoding='utf-8') as f:
                f.write(filtered_log)
            print(f"已过滤的日志文件保存至: {output_file}")
        else:
            print(f"命令执行失败,返回代码: {result.returncode}")
            print(f"错误信息: {result.stderr}")
    except FileNotFoundError:
        print("未找到texlogfilter命令。请确保TeX Live已安装并添加到系统PATH。")
def print_raw_errors(log_file_path, include_warnings=True):
    """Print raw error context from a LaTeX log; optionally include warnings.

    For every line containing an error marker, two lines of context before
    and after are printed with 1-based line numbers, followed by a ``---``
    separator. Read failures are reported, not raised.
    """
    print("\n===== 原始日志信息 =====")
    try:
        # errors='ignore' tolerates the mixed encodings TeX logs often contain.
        with open(log_file_path, 'r', encoding='utf-8', errors='ignore') as fh:
            lines = fh.read().split('\n')
        markers = ['error', '!', 'undefined', 'missing'] + (
            ['warning'] if include_warnings else [])
        hit = False
        for idx, text in enumerate(lines):
            lowered = text.lower()
            if not any(marker in lowered for marker in markers):
                continue
            # Show a small window of context around the matching line.
            for ctx in range(max(0, idx - 2), min(len(lines), idx + 3)):
                print(f"第{ctx+1}行: {lines[ctx]}")
            hit = True
            print("---")
        if not hit:
            print("未发现明显的错误或警告")
    except Exception as e:
        print(f"读取日志文件失败: {str(e)}")
    def on_modified(self, event):
        # A watched .tex file changed on disk: trigger a recompile.
        self.process(event)

    def on_created(self, event):
        # A new .tex file appeared: compile it as well.
        self.process(event)

    def on_moved(self, event):
        # A .tex file was moved or renamed: recompile at the new location.
        self.process(event)
def get_project_directory(self, startDir, projectFilename):
"""查找包含项目文件的目录"""
current_dir = os.path.abspath(startDir)
while True:
if os.path.isfile(os.path.join(current_dir, projectFilename)):
return current_dir
parent_dir = os.path.dirname(current_dir)
if parent_dir == current_dir:
break
current_dir = parent_dir
return None
def get_source_files(self, projectDirectory, projectFilename):
"""从项目文件获取源文件列表"""
files_to_compile = []
project_file_path = os.path.join(projectDirectory, projectFilename)
if not os.path.exists(project_file_path):
print(f"项目配置文件不存在: {project_file_path}")
return []
with open(project_file_path, 'r', encoding='utf-8') as f:
for line in f:
line = line.strip()
if line and not line.startswith('#'):
files_to_compile.append(line)
src_files = []
for f in files_to_compile:
full_path = os.path.join(projectDirectory, f)
if os.path.isfile(full_path):
src_files.append(full_path)
else:
print(f'Python script: 无效文件 ({full_path})')
return src_files
def fix_synctex(self, project_directory, compiled_path_relative_to_project_path, filename):
"""修正synctex文件中的路径,兼容gz和普通文件"""
synctex_base_path = os.path.join(project_directory, compiled_path_relative_to_project_path, filename)
synctex_path = f"{synctex_base_path}.synctex"
synctex_gz_path = f"{synctex_base_path}.synctex.gz"
if os.path.isfile(synctex_path):
input_file = synctex_path
is_compressed = False
elif os.path.isfile(synctex_gz_path):
input_file = synctex_gz_path
is_compressed = True
else:
return # 没有synctex文件,跳过
try:
temp_file = f"{synctex_base_path}.synctex.tmp"
# 读取文件内容
if is_compressed:
with gzip.open(input_file, 'rt', encoding='utf-8') as f:
content = f.read()
else:
with open(input_file, 'r', encoding='utf-8') as f:
content = f.read()
# 修正路径
project_abs = os.path.abspath(project_directory)
compiled_abs = os.path.abspath(os.path.join(project_directory, compiled_path_relative_to_project_path))
project_rel = os.path.relpath(project_abs, compiled_abs)
modified_content = content.replace(project_abs, project_rel)
# 将修正后的内容写入新文件
if is_compressed:
with gzip.open(temp_file, 'wt', encoding='utf-8') as f:
f.write(modified_content)
os.replace(temp_file, synctex_gz_path)
else:
with open(temp_file, 'w', encoding='utf-8') as f:
f.write(modified_content)
os.replace(temp_file, synctex_path)
print(f"修正 synctex 文件成功: {os.path.basename(input_file)}")
except Exception as e:
print(f"修正synctex时出错: {e}")
traceback.print_exc()
if __name__ == '__main__':
    # Confirm the watchdog dependency is importable before doing anything else.
    try:
        import watchdog  # noqa: F401
    except ImportError:
        print("请安装必要的依赖:")
        print("pip install watchdog")
        sys.exit(1)

    handler = ChangeHandler()
    # Watch the directory given on the command line, defaulting to CWD.
    directory = sys.argv[1] if len(sys.argv) > 1 else './'
    if not os.path.exists(directory):
        os.makedirs(directory)
        print(f"创建监控目录: {os.path.abspath(directory)}")

    observer = Observer()
    observer.schedule(handler, directory, recursive=True)
    observer.start()
    print(f"开始监控目录: {os.path.abspath(directory)}")
    print("按Ctrl+C停止监控")
    try:
        # Idle loop: the observer does its work on background threads.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        print("\n正在停止监控...")
        observer.stop()
        observer.join()
        print('监控已停止')