66 lines
2.8 KiB
Python
# [DEF:backend/src/plugins/git/llm_extension:Module]
|
|
# @TIER: STANDARD
|
|
# @SEMANTICS: git, llm, commit
|
|
# @PURPOSE: LLM-based extensions for the Git plugin, specifically for commit message generation.
|
|
# @LAYER: Domain
|
|
# @RELATION: DEPENDS_ON -> backend.src.plugins.llm_analysis.service.LLMClient
|
|
|
|
from typing import List
|
|
from tenacity import retry, stop_after_attempt, wait_exponential
|
|
from ..llm_analysis.service import LLMClient
|
|
from ...core.logger import belief_scope, logger
|
|
|
|
# [DEF:GitLLMExtension:Class]
# @PURPOSE: Provides LLM capabilities to the Git plugin.
class GitLLMExtension:
    """Adds LLM-backed helpers (commit message suggestion) to the Git plugin."""

    # Returned whenever the LLM response is unusable, so callers always get a
    # plain string instead of a crash. Kept generic: this plugin handles
    # arbitrary diffs, not any specific subsystem.
    _FALLBACK_MESSAGE = "chore: update files (LLM generation failed)"

    def __init__(self, client: LLMClient):
        # client wraps the underlying chat-completions API (LLMClient exposes
        # .client and .default_model — see llm_analysis.service).
        self.client = client

    # [DEF:suggest_commit_message:Function]
    # @PURPOSE: Generates a suggested commit message based on a diff and history.
    # @PARAM: diff (str) - The git diff of staged changes.
    # @PARAM: history (List[str]) - Recent commit messages for context.
    # @RETURN: str - The suggested commit message.
    @retry(
        stop=stop_after_attempt(2),
        wait=wait_exponential(multiplier=1, min=2, max=10),
        reraise=True
    )
    async def suggest_commit_message(self, diff: str, history: List[str]) -> str:
        """Ask the LLM for a Conventional Commits message for the staged diff.

        Args:
            diff: The git diff of staged changes.
            history: Recent commit messages, joined into the prompt as
                style/context hints.

        Returns:
            The suggested commit message, or a generic fallback string when
            the LLM response is empty or malformed (avoids crashing the UI).

        Raises:
            Whatever the underlying client raises after 2 retry attempts
            (tenacity is configured with reraise=True).
        """
        with belief_scope("suggest_commit_message"):
            history_text = "\n".join(history)
            prompt = f"""
Generate a concise and professional git commit message based on the following diff and recent history.
Use Conventional Commits format (e.g., feat: ..., fix: ..., docs: ...).

Recent History:
{history_text}

Diff:
{diff}

Commit Message:
"""

            logger.debug(f"[suggest_commit_message] Calling LLM with model: {self.client.default_model}")
            response = await self.client.client.chat.completions.create(
                model=self.client.default_model,
                messages=[{"role": "user", "content": prompt}],
                temperature=0.7
            )

            logger.debug(f"[suggest_commit_message] LLM Response: {response}")

            if not response or not hasattr(response, 'choices') or not response.choices:
                error_info = getattr(response, 'error', 'No choices in response')
                logger.error(f"[suggest_commit_message] Invalid LLM response. Error info: {error_info}")

                # A timeout/provider error could raise here to trigger the retry
                # decorator, but returning a safe fallback avoids a UI crash.
                return self._FALLBACK_MESSAGE

            # message.content may be None (e.g. refusal or empty completion);
            # guard before calling .strip() so the fallback path still works.
            content = response.choices[0].message.content
            if not content:
                logger.error("[suggest_commit_message] LLM returned empty message content")
                return self._FALLBACK_MESSAGE

            return content.strip()
    # [/DEF:suggest_commit_message:Function]
# [/DEF:GitLLMExtension:Class]
|
|
|
|
# [/DEF:backend/src/plugins/git/llm_extension:Module] |