Emperor555 committed on
Commit
2ce77eb
·
1 Parent(s): ed8fb90

Add better error messages for debugging

Browse files
Files changed (1) hide show
  1. src/agent.py +22 -17
src/agent.py CHANGED
@@ -95,23 +95,28 @@ def call_llm(messages: list[dict], max_tokens: int = 1500) -> str:
95
  """Call Nebius LLM API."""
96
  api_key = get_nebius_client()
97
 
98
- with httpx.Client(timeout=60.0) as client:
99
- resp = client.post(
100
- f"{NEBIUS_API_BASE}/chat/completions",
101
- headers={
102
- "Authorization": f"Bearer {api_key}",
103
- "Content-Type": "application/json",
104
- },
105
- json={
106
- "model": NEBIUS_MODEL,
107
- "messages": messages,
108
- "max_tokens": max_tokens,
109
- "temperature": 0.8,
110
- },
111
- )
112
- resp.raise_for_status()
113
- data = resp.json()
114
- return data["choices"][0]["message"]["content"]
 
 
 
 
 
115
 
116
 
117
  def research_topic(topic: str) -> tuple[str, list[dict]]:
 
95
def call_llm(messages: list[dict], max_tokens: int = 1500) -> str:
    """Call the Nebius chat-completions API and return the reply text.

    Args:
        messages: Chat history as OpenAI-style ``{"role": ..., "content": ...}``
            dicts, sent unmodified to the API.
        max_tokens: Upper bound on generated tokens (default 1500).

    Returns:
        The assistant message content of the first choice in the response.

    Raises:
        Exception: On an HTTP error status (message includes the status code
            and response body) or any other failure (network error, unexpected
            response shape, ...). The original exception is chained as
            ``__cause__`` so the full traceback is preserved.
    """
    api_key = get_nebius_client()

    try:
        with httpx.Client(timeout=60.0) as client:
            resp = client.post(
                f"{NEBIUS_API_BASE}/chat/completions",
                headers={
                    "Authorization": f"Bearer {api_key}",
                    "Content-Type": "application/json",
                },
                json={
                    "model": NEBIUS_MODEL,
                    "messages": messages,
                    "max_tokens": max_tokens,
                    "temperature": 0.8,
                },
            )
            resp.raise_for_status()
            data = resp.json()
            return data["choices"][0]["message"]["content"]
    except httpx.HTTPStatusError as e:
        # `from e` chains the HTTPStatusError so debugging keeps the original
        # traceback instead of only the wrapped message.
        raise Exception(
            f"Nebius API error: {e.response.status_code} - {e.response.text}"
        ) from e
    except Exception as e:
        # Broad catch is deliberate at this top-level LLM boundary; re-raise
        # with context chained rather than swallowing the cause.
        raise Exception(f"LLM call failed: {str(e)}") from e
120
 
121
 
122
  def research_topic(topic: str) -> tuple[str, list[dict]]: