feat(init): 添加初始化项目时的进度条显示

This commit is contained in:
songsenand 2026-03-18 06:57:14 +08:00
parent 2ffd124a28
commit d69c6ed171
5 changed files with 264 additions and 119 deletions

View File

@ -0,0 +1,41 @@
# 需求工单:增强交互性 - 添加进度条显示
name: 增强交互性:添加进度条显示
description: |
当前工具在执行耗时操作(如初始化项目时生成多个文件、运行并行检查、自动修复循环)时,终端上仅打印日志信息,用户无法直观了解当前进度和剩余时间,导致等待体验不佳。
希望利用 `rich` 库的进度条功能,在以下关键步骤中显示实时进度,提升用户体验:
1. **初始化项目(init)**
- 在解析 `README.md` 生成 `design.json` 后,开始生成文件时,显示文件生成进度条。
- 进度条显示已生成文件数 / 总文件数,每个文件生成时显示当前文件名。
- 若实现并发生成,进度条应动态更新已完成任务数,并可能显示每个文件的生成状态(如排队中、生成中、完成、失败)。
2. **增强/修复模式(enhance/fix)**
- 在分析受影响文件、生成代码变更时,显示处理进度(例如分析中的文件数、生成补丁的进度)。
- 在运行检查工具时(`run_parallel_checks`),显示检查工具运行的进度条(已完成检查的文件数 / 总文件数)。
- 在自动修复循环中,显示每次修复尝试的进度(如第几次重试、剩余错误数)。
3. **通用**
- 所有进度条应使用 `rich.progress` 实现,支持彩色输出,并能动态更新。
- 进度条应显示预计剩余时间(可选)。
- 错误信息应在进度条区域之外打印,避免干扰进度条显示(使用 `rich.console` 的 `print` 或日志集成)。
- 当操作完成时,进度条应自动消失或转为完成状态,并显示成功/失败统计。
需要修改的代码包括:
- `cli.py`:主命令入口,控制整体流程,应在此处创建进度条上下文。
- `core.py`:在 `CodeGenerator.generate_files` 方法(或类似方法)中,文件生成循环应集成进度条更新。
- `checker.py`:在 `run_parallel_checks`、`auto_fix` 和 `run_full_check_and_fix` 中,添加进度条。
- `utils.py`:可能添加辅助函数来统一创建进度条。
acceptance_criteria:
- 执行 `llm-codegen init` 时,终端显示一个清晰的文件生成进度条,实时更新已完成文件数。
- 执行 `llm-codegen enhance` 或 `fix` 时,在分析、生成、检查、修复各阶段均有对应的进度条提示。
- 所有进度条样式美观,不会与日志输出混乱。
- 并发生成时,进度条能够准确反映整体进度,并能区分不同文件的状态(可选)。
- 当某个文件生成失败时,进度条仍能继续更新,最终汇总显示成功/失败数量。
- 进度条显示不会显著影响性能(更新频率合理)。
affected_files:
- src/llm_codegen/cli.py
- src/llm_codegen/core.py
- src/llm_codegen/checker.py
- src/llm_codegen/utils.py

View File

@ -8,6 +8,7 @@ import os
import warnings import warnings
from loguru import logger from loguru import logger
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TimeElapsedColumn, TimeRemainingColumn
from .core import CodeGenerator from .core import CodeGenerator
# 尝试导入 pathspec用于精确解析 .gitignore # 尝试导入 pathspec用于精确解析 .gitignore
@ -267,15 +268,26 @@ class Checker:
logger.info(f"开始并行检查,文件数: {len(files)},工具: {tool}") logger.info(f"开始并行检查,文件数: {len(files)},工具: {tool}")
all_results = [] all_results = []
with ThreadPoolExecutor(max_workers=min(4, len(files))) as executor: with Progress(
futures = [executor.submit(self.run_check, tool, file_path) for file_path in files] SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
BarColumn(),
TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
TimeElapsedColumn(),
TimeRemainingColumn(),
) as progress:
task = progress.add_task("[cyan]Running parallel checks...", total=len(files))
with ThreadPoolExecutor(max_workers=min(4, len(files))) as executor:
futures = [executor.submit(self.run_check, tool, file_path) for file_path in files]
for future in as_completed(futures): for future in as_completed(futures):
try: try:
result = future.result() result = future.result()
all_results.append(result) all_results.append(result)
except Exception as e: except Exception as e:
logger.error(f"并行检查任务失败: {e}") logger.error(f"并行检查任务失败: {e}")
finally:
progress.update(task, advance=1)
# 保存结果到文件 # 保存结果到文件
self.save_results(all_results) self.save_results(all_results)
@ -391,24 +403,35 @@ class Checker:
description = result.get("description", "无描述") description = result.get("description", "无描述")
logger.info(f"LLM 生成修复补丁: {description}, 补丁数: {len(patches)}") logger.info(f"LLM 生成修复补丁: {description}, 补丁数: {len(patches)}")
# 应用补丁 # 应用补丁,使用进度条
success_count = 0 with Progress(
for patch in patches: SpinnerColumn(),
file_path = patch.get("file") TextColumn("[progress.description]{task.description}"),
code = patch.get("code") BarColumn(),
if not file_path or not code: TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
logger.warning(f"无效补丁: {patch}") TimeElapsedColumn(),
continue TimeRemainingColumn(),
) as progress:
task = progress.add_task("[cyan]Applying fixes...", total=len(patches))
success_count = 0
for patch in patches:
file_path = patch.get("file")
code = patch.get("code")
if not file_path or not code:
logger.warning(f"无效补丁: {patch}")
progress.update(task, advance=1)
continue
full_path = self.output_dir / file_path full_path = self.output_dir / file_path
try: try:
# 如果是完整代码,直接覆盖;如果是差异,这里简化处理为覆盖 with open(full_path, "w", encoding="utf-8") as f:
with open(full_path, "w", encoding="utf-8") as f: f.write(code)
f.write(code) logger.info(f"已应用修复到文件: {file_path}")
logger.info(f"已应用修复到文件: {file_path}") success_count += 1
success_count += 1 except Exception as e:
except Exception as e: logger.error(f"应用修复失败到文件 {file_path}: {e}")
logger.error(f"应用修复失败到文件 {file_path}: {e}") finally:
progress.update(task, advance=1)
logger.info(f"自动修复完成,成功修复 {success_count}/{len(patches)} 个补丁") logger.info(f"自动修复完成,成功修复 {success_count}/{len(patches)} 个补丁")
return success_count > 0 return success_count > 0
@ -426,32 +449,46 @@ class Checker:
Returns: Returns:
bool: 是否成功无错误或修复后无错误 bool: 是否成功无错误或修复后无错误
""" """
for attempt in range(max_retries): with Progress(
logger.info(f"检查与修复循环,尝试 {attempt + 1}/{max_retries}") SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
BarColumn(),
TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
TimeElapsedColumn(),
TimeRemainingColumn(),
) as progress:
task = progress.add_task("[cyan]Full check and fix cycle", total=max_retries)
for attempt in range(max_retries):
progress.update(task, description=f"[cyan]Attempt {attempt + 1}/{max_retries}")
logger.info(f"检查与修复循环,尝试 {attempt + 1}/{max_retries}")
# 运行并行检查 # 运行并行检查
results = self.run_parallel_checks()
errors = self.collect_errors(results)
if not errors:
progress.update(task, completed=max_retries)
logger.success("所有检查通过,无错误")
return True
logger.warning(f"发现 {len(errors)} 个错误,尝试自动修复")
success = self.auto_fix(errors)
if not success:
logger.error(f"{attempt + 1} 次修复失败")
progress.update(task, advance=1)
if attempt == max_retries - 1:
return False
else:
logger.info(f"{attempt + 1} 次修复成功,重新检查")
progress.update(task, advance=1)
# 最后一次检查
progress.update(task, description="[cyan]Final check...")
results = self.run_parallel_checks() results = self.run_parallel_checks()
errors = self.collect_errors(results) errors = self.collect_errors(results)
if errors:
if not errors: logger.error(f"修复后仍有 {len(errors)} 个错误")
logger.success("所有检查通过,无错误") return False
return True
logger.warning(f"发现 {len(errors)} 个错误,尝试自动修复")
success = self.auto_fix(errors)
if not success:
logger.error(f"{attempt + 1} 次修复失败")
if attempt == max_retries - 1:
return False
else: else:
logger.info(f"{attempt + 1} 次修复成功,重新检查") logger.success("修复后所有检查通过")
return True
# 最后一次检查
results = self.run_parallel_checks()
errors = self.collect_errors(results)
if errors:
logger.error(f"修复后仍有 {len(errors)} 个错误")
return False
else:
logger.success("修复后所有检查通过")
return True

View File

@ -10,6 +10,7 @@ import sys
import typer import typer
from rich.console import Console from rich.console import Console
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn
from loguru import logger from loguru import logger
from .core import CodeGenerator from .core import CodeGenerator
@ -51,15 +52,23 @@ def init(
# 处理致命错误检查README文件存在性已由typer处理其他错误在try块中捕获 # 处理致命错误检查README文件存在性已由typer处理其他错误在try块中捕获
try: try:
generator = CodeGenerator( with Progress(
api_key=api_key, SpinnerColumn(),
base_url=base_url, TextColumn("[progress.description]{task.description}"),
model=model, BarColumn(),
output_dir=str(output_dir), console=console
log_file=log_file_path, ) as progress:
max_concurrency=max_concurrency, task_id = progress.add_task("正在初始化项目...", total=None)
) generator = CodeGenerator(
generator.run(readme) api_key=api_key,
base_url=base_url,
model=model,
output_dir=str(output_dir),
log_file=log_file_path,
max_concurrency=max_concurrency,
)
generator.run(readme)
progress.update(task_id, description="初始化完成")
# 调用core.CodeGenerator.run并显示最终统计信息假设从日志或生成器状态获取 # 调用core.CodeGenerator.run并显示最终统计信息假设从日志或生成器状态获取
console.print("[green]生成完成。成功处理文件,详情请查看日志。[/green]") console.print("[green]生成完成。成功处理文件,详情请查看日志。[/green]")
except Exception as e: except Exception as e:
@ -99,15 +108,23 @@ def enhance(
raise typer.Exit(code=1) raise typer.Exit(code=1)
try: try:
generator = CodeGenerator( with Progress(
api_key=api_key, SpinnerColumn(),
base_url=base_url, TextColumn("[progress.description]{task.description}"),
model=model, BarColumn(),
output_dir=str(output_dir), console=console
log_file=log_file_path, ) as progress:
max_concurrency=max_concurrency, task_id = progress.add_task("正在增强项目...", total=None)
) generator = CodeGenerator(
success = generator.process_issue(issue_content, issue_type="enhance") api_key=api_key,
base_url=base_url,
model=model,
output_dir=str(output_dir),
log_file=log_file_path,
max_concurrency=max_concurrency,
)
success = generator.process_issue(issue_content, issue_type="enhance")
progress.update(task_id, description="增强处理完成")
if not success: if not success:
logger.error("增强处理失败") logger.error("增强处理失败")
raise typer.Exit(code=1) raise typer.Exit(code=1)
@ -149,15 +166,23 @@ def fix(
raise typer.Exit(code=1) raise typer.Exit(code=1)
try: try:
generator = CodeGenerator( with Progress(
api_key=api_key, SpinnerColumn(),
base_url=base_url, TextColumn("[progress.description]{task.description}"),
model=model, BarColumn(),
output_dir=str(output_dir), console=console
log_file=log_file_path, ) as progress:
max_concurrency=max_concurrency, task_id = progress.add_task("正在修复项目...", total=None)
) generator = CodeGenerator(
success = generator.process_issue(issue_content, issue_type="fix") api_key=api_key,
base_url=base_url,
model=model,
output_dir=str(output_dir),
log_file=log_file_path,
max_concurrency=max_concurrency,
)
success = generator.process_issue(issue_content, issue_type="fix")
progress.update(task_id, description="修复处理完成")
if not success: if not success:
logger.error("修复处理失败") logger.error("修复处理失败")
raise typer.Exit(code=1) raise typer.Exit(code=1)
@ -205,4 +230,4 @@ def check(
if __name__ == "__main__": if __name__ == "__main__":
app() app()

View File

@ -7,13 +7,12 @@ from typing import List, Dict, Optional, Any, Tuple
from pathlib import Path from pathlib import Path
from collections import deque from collections import deque
import typer
from rich.console import Console from rich.console import Console
from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskID from rich.progress import Progress, SpinnerColumn, TextColumn, BarColumn, TaskID
from loguru import logger from loguru import logger
from openai import OpenAI from openai import OpenAI
from .utils import is_dangerous_command, read_file, write_file, ensure_dir, safe_join from .utils import is_dangerous_command
from .models import DesignModel, StateModel, LLMResponse, FileModel from .models import DesignModel, StateModel, LLMResponse, FileModel
@ -27,6 +26,7 @@ class CodeGenerator:
model: str = "deepseek-reasoner", model: str = "deepseek-reasoner",
output_dir: str = "./generated", output_dir: str = "./generated",
log_file: Optional[str] = None, log_file: Optional[str] = None,
max_concurrency: int = 4
): ):
""" """
初始化生成器 初始化生成器
@ -49,6 +49,8 @@ class CodeGenerator:
self.state_file = self.output_dir / ".llm_generator_state.json" self.state_file = self.output_dir / ".llm_generator_state.json"
self.console = Console() # 添加console实例用于rich打印 self.console = Console() # 添加console实例用于rich打印
self.max_concurrency = max_concurrency
# 配置日志 # 配置日志
if log_file is None: if log_file is None:
log_file = self.output_dir / "generator.log" log_file = self.output_dir / "generator.log"
@ -135,7 +137,7 @@ class CodeGenerator:
system_prompt = ( system_prompt = (
"你是一个软件架构师。请根据README描述生成项目的中间设计文件design.json。" "你是一个软件架构师。请根据README描述生成项目的中间设计文件design.json。"
"design.json应包含项目名称、版本、描述、文件列表含路径、摘要、依赖、函数和类、建议命令和检查工具。" "design.json应包含项目名称、版本、描述、文件列表含路径、摘要、依赖、函数和类、建议命令和检查工具。"
"返回严格的JSON对象符合DesignModel结构。" "返回严格的 JSON 对象符合DesignModel结构。"
) )
user_prompt = f"README内容如下\n\n{self.readme_content}" user_prompt = f"README内容如下\n\n{self.readme_content}"
@ -492,8 +494,11 @@ class CodeGenerator:
total_task = progress.add_task("[cyan]整体进度...", total=len(remaining_files)) total_task = progress.add_task("[cyan]整体进度...", total=len(remaining_files))
progress.update(total_task, completed=len(processed_files) - len(generated_files_set)) progress.update(total_task, completed=len(processed_files) - len(generated_files_set))
# 初始化文件任务映射
file_tasks = {} # 局部字典映射文件到任务ID
# 并发任务调度 # 并发任务调度
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor: with concurrent.futures.ThreadPoolExecutor(max_workers=self.max_concurrency) as executor:
futures = {} futures = {}
while queue or futures: while queue or futures:
# 提交队列中的任务 # 提交队列中的任务
@ -501,13 +506,25 @@ class CodeGenerator:
file = queue.popleft() file = queue.popleft()
future = executor.submit(self._generate_file_task, file, dependencies.get(file, []), processed_files) future = executor.submit(self._generate_file_task, file, dependencies.get(file, []), processed_files)
futures[future] = file futures[future] = file
progress.add_task(f"生成 {file}", total=None) # 为每个文件添加独立进度任务并保存任务ID
task_id = progress.add_task(f"生成 {file}", total=1)
file_tasks[file] = task_id
# 等待任意任务完成 # 等待任意任务完成
done, not_done = concurrent.futures.wait(futures.keys(), return_when=concurrent.futures.FIRST_COMPLETED, timeout=1.0) done, not_done = concurrent.futures.wait(futures.keys(), return_when=concurrent.futures.FIRST_COMPLETED, timeout=1.0)
for future in done: for future in done:
file = futures.pop(future) file = futures.pop(future)
success, error_msg = future.result() success, error_msg = future.result()
# 更新文件进度任务
if file in file_tasks:
if success:
progress.update(file_tasks[file], completed=1)
progress.remove_task(file_tasks[file]) # 移除任务
else:
# 如果失败,标记为错误状态
progress.update(file_tasks[file], description=f"生成失败: {file}")
progress.remove_task(file_tasks[file])
del file_tasks[file] # 清理映射
if success: if success:
processed_files.add(file) processed_files.add(file)
# 更新入度:减少依赖该文件的节点的入度 # 更新入度:减少依赖该文件的节点的入度
@ -518,12 +535,11 @@ class CodeGenerator:
queue.append(other_file) queue.append(other_file)
# 保存状态 # 保存状态
self.save_state(list(processed_files), dependencies) self.save_state(list(processed_files), dependencies)
progress.update(total_task, advance=1) progress.update(total_task, advance=1) # 更新整体进度
else: else:
logger.error(f"文件 {file} 生成失败,错误: {error_msg}") logger.error(f"文件 {file} 生成失败,错误: {error_msg}")
self.console.print(f"[bold red]❌ 文件 {file} 生成失败,错误: {error_msg}[/bold red]") self.console.print(f"[bold red]❌ 文件 {file} 生成失败,错误: {error_msg}[/bold red]")
# 错误处理:继续处理其他文件,但记录失败 # 错误处理:继续处理其他文件,但记录失败
# 可以选择重试或跳过,这里简单记录并继续
logger.success("所有文件处理完成!") logger.success("所有文件处理完成!")
# 清理状态文件 # 清理状态文件
@ -727,40 +743,40 @@ class CodeGenerator:
result = self._call_llm(system_prompt, user_prompt, temperature=0.2) result = self._call_llm(system_prompt, user_prompt, temperature=0.2)
return result return result
def _update_design(self, generated_files: List[str], design_updates: Dict[str, Any]): def _update_design(self, generated_files: List[str], design_updates: Dict[str, Any]):
""" """
根据生成的变更更新 design.json 根据生成的变更更新 design.json
使用 FileModel 来处理文件信息 使用 FileModel 来处理文件信息
""" """
updated = False updated = False
# 处理新增文件 # 处理新增文件
for file_path in generated_files: for file_path in generated_files:
# 检查文件是否已在 design.files 中 # 检查文件是否已在 design.files 中
exists = any(f.path == file_path for f in self.design.files) exists = any(f.path == file_path for f in self.design.files)
if not exists: if not exists:
# 获取更新信息 # 获取更新信息
update_info = design_updates.get(file_path, {}) update_info = design_updates.get(file_path, {})
# 创建新文件条目FileModel实例 # 创建新文件条目FileModel实例
new_file = FileModel( new_file = FileModel(
path=file_path, path=file_path,
summary=update_info.get("summary", "自动生成的新文件"), summary=update_info.get("summary", "自动生成的新文件"),
dependencies=update_info.get("dependencies", []), dependencies=update_info.get("dependencies", []),
functions=update_info.get("functions", []), functions=update_info.get("functions", []),
classes=update_info.get("classes", []), classes=update_info.get("classes", []),
design_updates=update_info.get("design_updates", {}) design_updates=update_info.get("design_updates", {})
) )
self.design.files.append(new_file) self.design.files.append(new_file)
updated = True updated = True
logger.info(f"已将新文件 {file_path} 添加到 design.json") logger.info(f"已将新文件 {file_path} 添加到 design.json")
# 如果 design_updates 中提供了具体的更新信息,可以进一步处理(例如修改现有文件的摘要) # 如果 design_updates 中提供了具体的更新信息,可以进一步处理(例如修改现有文件的摘要)
# 这里可根据实际需求扩展,当前仅处理新增文件 # 这里可根据实际需求扩展,当前仅处理新增文件
if updated: if updated:
# 保存更新后的 design.json # 保存更新后的 design.json
design_path = self.output_dir / "design.json" design_path = self.output_dir / "design.json"
with open(design_path, "w", encoding="utf-8") as f: with open(design_path, "w", encoding="utf-8") as f:
json.dump(self.design.model_dump(), f, indent=2, ensure_ascii=False) json.dump(self.design.model_dump(), f, indent=2, ensure_ascii=False)
logger.info("design.json 已更新") logger.info("design.json 已更新")

View File

@ -3,6 +3,7 @@ import os
from pathlib import Path from pathlib import Path
import queue import queue
from loguru import logger # 添加导入 from loguru import logger # 添加导入
from rich.progress import Progress, TextColumn, BarColumn, TimeElapsedColumn, TaskProgressColumn
# 危险命令列表,可配置 # 危险命令列表,可配置
DANGEROUS_COMMANDS = ["rm", "sudo", "chmod", "dd", "mkfs", "> /dev/sda", "format"] DANGEROUS_COMMANDS = ["rm", "sudo", "chmod", "dd", "mkfs", "> /dev/sda", "format"]
@ -232,3 +233,28 @@ def add_implicit_dependency(file_content: str, current_deps: List[str], implicit
if implicit_dep_file not in updated_deps: if implicit_dep_file not in updated_deps:
updated_deps.append(implicit_dep_file) updated_deps.append(implicit_dep_file)
return updated_deps return updated_deps
def create_progress_bar(total: int = 100, description: str = "Processing",
                        columns: Optional[List] = None, auto_refresh: bool = True) -> Progress:
    """Create a pre-configured ``rich`` progress bar with one initial task.

    Fix over the previous version: ``total`` and ``description`` were accepted
    but silently ignored, which made the signature misleading. They are now
    used to register an initial task on the returned ``Progress``.

    Args:
        total: Total step count for the initial task (default 100).
        description: Description text for the initial task (default "Processing").
        columns: Custom list of progress-bar columns; ``None`` selects the
            default set (description, bar, percentage, elapsed time).
        auto_refresh: Whether the display refreshes automatically (default True).

    Returns:
        Progress: A configured ``Progress`` instance with one task already
        added (its id is available via ``progress.task_ids[0]``). Control it
        with ``start()`` / ``stop()`` or use it as a context manager.
    """
    if columns is None:
        # Default column set mirrors the bars used elsewhere in this project.
        columns = [
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            TaskProgressColumn(),
            TimeElapsedColumn(),
        ]
    progress = Progress(*columns, auto_refresh=auto_refresh)
    # Register the initial task so the caller's total/description take effect.
    progress.add_task(description, total=total)
    return progress