from __future__ import annotations

import base64
from typing import Dict, List, Optional
from urllib.parse import urlparse

import requests

from setting import SETTINGS

# Optional provider SDKs
try:
    import openai  # type: ignore
except Exception:
    openai = None

try:
    import anthropic  # type: ignore
except Exception:
    anthropic = None

try:
    import google.generativeai as genai  # type: ignore
except Exception:
    genai = None


# ---------------- GitHub HTTP adapters -----------------
def github_request(
    url: str,
    token: str,
    params: Optional[Dict[str, str]] = None,
) -> Dict:
    """Issue an authenticated GET against the GitHub REST API and return the parsed JSON."""
    headers = {
        "Accept": "application/vnd.github.v3+json",
        "Authorization": f"token {token}",
    }
    response = requests.get(url, headers=headers, params=params, timeout=30)
    if response.status_code == 404:
        raise FileNotFoundError(f"GitHub resource not found: {url}")
    if response.status_code == 401:
        raise PermissionError("GitHub token is invalid or lacks the necessary scopes.")
    if response.status_code >= 400:
        raise RuntimeError(
            f"GitHub API request failed with status {response.status_code}: {response.text}"
        )
    return response.json()


def fetch_file_from_pr(
    repo_name: str,
    pr_number: int,
    path: str,
    head_sha: str,
    github_token: str,
) -> str:
    """Fetch a file's contents at the PR head commit and decode it as UTF-8 text."""
    url = f"{SETTINGS.github_api_base}/repos/{repo_name}/contents/{path}"
    data = github_request(url, github_token, params={"ref": head_sha})
    content = data.get("content")
    encoding = data.get("encoding")
    if content is None or encoding != "base64":
        raise ValueError(
            f"Unexpected content response for '{path}' (encoding={encoding!r})."
        )
    decoded = base64.b64decode(content)
    try:
        return decoded.decode("utf-8")
    except UnicodeDecodeError as exc:
        raise ValueError(
            f"File '{path}' in PR {pr_number} is not valid UTF-8 text"
        ) from exc


# ---------------- LLM provider adapters -----------------
def call_openai(
    token: str,
    system_prompt: str,
    user_prompt: str,
    model_name: str = "gpt-5",
) -> str:
    """Send the system and user prompts to the OpenAI Chat Completions API and return the reply text."""
    if openai is None:
        raise RuntimeError("openai package not installed. Install with `pip install openai`.")
    client = openai.OpenAI(api_key=token)
    params = {
        "model": model_name,
        "messages": [
            {"role": "system", "content": system_prompt},
            {"role": "user", "content": user_prompt},
        ],
    }
    # Some models (e.g., gpt-5) may not allow a custom temperature, so only set one
    # for models known to accept it.
    if model_name not in {"gpt-5"}:
        params["temperature"] = 0.2
    response = client.chat.completions.create(**params)
    return response.choices[0].message.content.strip()


def call_anthropic(
    token: str,
    system_prompt: str,
    user_prompt: str,
    model_name: str = "claude-3-5-sonnet-20240620",
) -> str:
    """Send the prompts to the Anthropic Messages API and return the concatenated text blocks."""
    if anthropic is None:
        raise RuntimeError("anthropic package not installed. Install with `pip install anthropic`.")
    client = anthropic.Anthropic(api_key=token)
    response = client.messages.create(
        model=model_name,
        system=system_prompt,
        max_tokens=1500,
        temperature=0.2,
        messages=[{"role": "user", "content": user_prompt}],
    )
    return "".join(block.text for block in response.content if hasattr(block, "text")).strip()


def call_gemini(
    token: str,
    system_prompt: str,
    user_prompt: str,
    model_name: str = "gemini-1.5-pro",
) -> str:
    """Send the prompts to the Gemini API; the system prompt is folded into a single prompt string."""
    if genai is None:
        raise RuntimeError(
            "google-generativeai package not installed. Install with `pip install google-generativeai`."
        )
    genai.configure(api_key=token)
    model = genai.GenerativeModel(model_name)
    prompt = f"{system_prompt}\n\n{user_prompt}"
    response = model.generate_content(prompt, generation_config={"temperature": 0.2})
    return response.text.strip()


PROVIDERS = {
    "openai": call_openai,
    "anthropic": call_anthropic,
    "gemini": call_gemini,
}


def dispatch_review(
    provider: str,
    token: str,
    system_prompt: str,
    user_prompt: str,
    model_name: str,
) -> str:
    """Route the review request to the adapter registered for the given provider."""
    if provider not in PROVIDERS:
        raise ValueError(
            f"Unknown provider '{provider}'. Choose from: {', '.join(PROVIDERS)}"
        )
    return PROVIDERS[provider](token, system_prompt, user_prompt, model_name)
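

# A minimal usage sketch for running this module directly. The repository name,
# PR number, file path, head SHA, provider, model name, and the GITHUB_TOKEN /
# LLM_API_KEY environment variables are hypothetical placeholders, not values
# defined by this module.
if __name__ == "__main__":
    import os

    file_text = fetch_file_from_pr(
        repo_name="octocat/hello-world",  # hypothetical repository
        pr_number=1,  # hypothetical PR number
        path="README.md",  # hypothetical file path
        head_sha="main",  # hypothetical ref / head commit SHA
        github_token=os.environ["GITHUB_TOKEN"],
    )
    review = dispatch_review(
        provider="openai",
        token=os.environ["LLM_API_KEY"],
        system_prompt="You are a careful code reviewer.",
        user_prompt=f"Review this file and point out issues:\n\n{file_text}",
        model_name="gpt-5",
    )
    print(review)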