import json
import os
from datetime import datetime
from zoneinfo import ZoneInfo

import gradio as gr
import requests
from dotenv import load_dotenv
from openai import OpenAI
from pypdf import PdfReader
from requests.exceptions import RequestException, Timeout

load_dotenv(override=True)

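# Environment variables read below (loaded from .env by load_dotenv above):
#   GOOGLE_API_KEY      - Gemini, via its OpenAI-compatible endpoint
#   OPENAI_API_KEY      - OpenAI fallback
#   OPEN_ROUTER_API_KEY - OpenRouter fallback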

def push(text: str) -> bool:
    """Send a feedback/notification message to the remote endpoint.

    Returns True when the request succeeds, False on any request failure.
    """
    if not text or not isinstance(text, str):
        raise ValueError("`text` must be a non-empty string")

    ts = datetime.now(ZoneInfo("Asia/Kolkata")).isoformat()

    payload = {
        "timestamp": ts,
        "message": text,
    }

    headers = {
        "Content-Type": "application/json",
        "Authorization": "3175641f-82e8-4243tinytools8e62-4a202127c2db",
    }

    try:
        response = requests.post(
            "https://converteasly.com/api/send-feedback",
            json=payload,
            headers=headers,
            timeout=20,
        )
        response.raise_for_status()
        return True
    except Timeout:
        print("Request timed out while sending feedback")
        return False
    except RequestException as e:
        print(f"Failed to send feedback: {e}")
        return False
    except Exception as e:
        print(f"Unexpected error: {e}")
        return False

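# push() is best-effort: it returns False rather than raising on network errors,
# so a failed notification does not interrupt a chat turn.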

def record_user_details(email, name="Name not provided", notes="not provided"):
    print(f"Recording {name} with email {email} and notes {notes}")
    push(f"Recording {name} with email {email} and notes {notes}")
    return {"recorded": "ok"}


def record_unknown_question(question):
    print(f"Recording {question}")
    push(f"Recording {question}")
    return {"recorded": "ok"}

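# JSON schemas describing the two helpers above in the OpenAI function-calling
# ("tools") format; they are passed with every chat.completions.create call below.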

record_user_details_json = {
    "name": "record_user_details",
    "description": "Use this tool to record that a user is interested in being in touch and provided an email address",
    "parameters": {
        "type": "object",
        "properties": {
            "email": {
                "type": "string",
                "description": "The email address of this user"
            },
            "name": {
                "type": "string",
                "description": "The user's name, if they provided it"
            },
            "notes": {
                "type": "string",
                "description": "Any additional information about the conversation that's worth recording to give context"
            }
        },
        "required": ["email"],
        "additionalProperties": False
    }
}

record_unknown_question_json = {
    "name": "record_unknown_question",
    "description": "Always use this tool to record any question that couldn't be answered as you didn't know the answer",
    "parameters": {
        "type": "object",
        "properties": {
            "question": {
                "type": "string",
                "description": "The question that couldn't be answered"
            }
        },
        "required": ["question"],
        "additionalProperties": False
    }
}

tools = [
    {"type": "function", "function": record_user_details_json},
    {"type": "function", "function": record_unknown_question_json},
]

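# Me encapsulates the persona: it loads the LinkedIn PDF and the summary text,
# builds the system prompt, and answers each chat turn, resolving tool calls.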

class Me:

    def __init__(self):
        self.openai = OpenAI()
        self.name = "Pawan Malhotra"
        reader = PdfReader("me/my-profile.pdf")
        self.linkedin = ""
        for page in reader.pages:
            text = page.extract_text()
            if text:
                self.linkedin += text
        with open("me/summary.txt", "r", encoding="utf-8") as f:
            self.summary = f.read()

    def handle_tool_call(self, tool_calls):
        results = []
        for tool_call in tool_calls:
            tool_name = tool_call.function.name
            arguments = json.loads(tool_call.function.arguments)
            print(f"Tool called: {tool_name}", flush=True)
            # Look the tool up by name in module globals and call it with the
            # model-supplied arguments; unknown tool names produce an empty result.
            tool = globals().get(tool_name)
            result = tool(**arguments) if tool else {}
            results.append({"role": "tool", "content": json.dumps(result), "tool_call_id": tool_call.id})
        return results

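    # The system prompt embeds the summary and the LinkedIn text extracted in
    # __init__, so the model can answer from that context while staying in character.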
    def system_prompt(self):
        system_prompt = (
            f"You are acting as {self.name}. You are answering questions on {self.name}'s website, "
            f"particularly questions related to {self.name}'s career, background, skills and experience. "
            f"Your responsibility is to represent {self.name} for interactions on the website as faithfully as possible. "
            f"You are given a summary of {self.name}'s background and LinkedIn profile which you can use to answer questions. "
            "Be professional and engaging, as if talking to a potential client or future employer who came across the website. "
            "If you don't know the answer to any question, use your record_unknown_question tool to record the question that you couldn't answer, even if it's about something trivial or unrelated to career. "
            "If the user is engaging in discussion, try to steer them towards getting in touch via email; ask for their email and record it using your record_user_details tool. "
        )

        system_prompt += f"\n\n## Summary:\n{self.summary}\n\n## LinkedIn Profile:\n{self.linkedin}\n\n"
        system_prompt += f"With this context, please chat with the user, always staying in character as {self.name}."
        return system_prompt

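    # chat() tries Gemini first, falls back to OpenAI on quota errors, and to
    # OpenRouter on OpenAI rate limits; tool calls trigger one follow-up request.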
    def chat(self, message, history):
        messages = [
            {"role": "system", "content": self.system_prompt()},
            *history[-6:],
            {"role": "user", "content": message}
        ]

        def run(client, model):
            # The OpenRouter request is sent without max_tokens; the other
            # providers get a 512-token cap on the response.
            if client is open_router_client:
                return client.chat.completions.create(
                    model=model,
                    messages=messages,
                    tools=tools
                )
            else:
                return client.chat.completions.create(
                    model=model,
                    messages=messages,
                    tools=tools,
                    max_tokens=512
                )

        gemini = OpenAI(
            api_key=os.getenv("GOOGLE_API_KEY"),
            base_url="https://generativelanguage.googleapis.com/v1beta/openai/"
        )
        openai_client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
        open_router_client = OpenAI(
            api_key=os.getenv("OPEN_ROUTER_API_KEY"),
            base_url="https://openrouter.ai/api/v1"
        )

        try:
            response = run(gemini, "gemini-2.0-flash")
        except Exception as e:
            if "quota" in str(e).lower() or "resource_exhausted" in str(e).lower():
                print("Google quota exceeded! Falling back to the OpenAI API")
                try:
                    response = run(openai_client, "gpt-4o-mini")
                except Exception as gpt:
                    if "rate limit" in str(gpt).lower() or "rate_limit_exceeded" in str(gpt).lower():
                        print("OpenAI rate limit exceeded! Falling back to the OpenRouter API")
                        response = run(open_router_client, "xiaomi/mimo-v2-flash:free")
                    else:
                        raise
            else:
                raise

        choice = response.choices[0]

        if choice.finish_reason == "tool_calls":
            # Execute the requested tools, append the assistant message and the
            # tool results, then route the follow-up completion through OpenRouter.
            tool_results = self.handle_tool_call(choice.message.tool_calls)
            messages.append(choice.message)
            messages.extend(tool_results)
            response = run(open_router_client, "xiaomi/mimo-v2-flash:free")

        return response.choices[0].message.content

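# Running this module directly starts a local Gradio chat UI wired to Me.chat.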

if __name__ == "__main__":
    me = Me()

    with gr.Blocks(
        theme=gr.themes.Soft(),
        css="""
        .gradio-container {
            width: 100%;
            margin: auto;
        }
        footer {display: none !important;}
        """
    ) as demo:
        gr.Markdown(
            """
            # Virtual Resume Assistant - Pawan Malhotra
            Ask anything about my skills, experience, or projects.
            """
        )

        gr.ChatInterface(
            fn=me.chat,
            type="messages",
            textbox=gr.Textbox(
                placeholder="Ask about experience, skills, projects...",
                scale=7
            ),
        )

    demo.launch()