Pulastya0 committed
Commit bda35d5 · Parent: 0c83855

Update main.py

Files changed (1)
  1. main.py +53 -38
main.py CHANGED
@@ -7,20 +7,17 @@ from typing import List
 import firebase_admin
 from firebase_admin import credentials, firestore
 
+# --- Local Imports ---
 from encoder import SentenceEncoder
-from populate_chroma import populate_vector_db
+from populate_chroma import populate_vector_db  # For the setup endpoint
+from llm_handler import initialize_llm, get_rag_response
+import llm_handler  # To pass global objects
 
 # --------------------------------------------------------------------
-# Cache setup (store HF models in /data for persistence on Hugging Face)
+# Cache & Root Path Setup
 # --------------------------------------------------------------------
 os.environ["HF_HOME"] = "/data/cache"
 os.environ["SENTENCE_TRANSFORMERS_HOME"] = "/data/cache"
-
-# --------------------------------------------------------------------
-# Determine root_path dynamically
-# Locally: root_path = ""
-# On Hugging Face Spaces: root_path = "/username/space-name"
-# --------------------------------------------------------------------
 root_path = os.getenv("HF_SPACE_ROOT_PATH", "")
 
 # --------------------------------------------------------------------
@@ -42,24 +39,31 @@ class InternshipData(BaseModel):
     createdAt: str
     stipend: int = None
 
+# --- NEW: More descriptive response models ---
+class InternshipRecommendation(InternshipData):
+    score: float
+
 class RecommendationResponse(BaseModel):
-    recommendations: List[dict]
+    recommendations: List[InternshipRecommendation]
 
 class StatusResponse(BaseModel):
     status: str
     internship_id: str
 
+class ChatMessage(BaseModel):
+    query: str
+
+class ChatResponse(BaseModel):
+    response: str
+
 # --------------------------------------------------------------------
 # FastAPI App
 # --------------------------------------------------------------------
 app = FastAPI(
-    title="Internship Recommendation API",
-    description="An API using Firestore for metadata, and ChromaDB for vector search.",
-    version="2.2.0",
-    docs_url="/docs",  # Swagger UI
-    redoc_url="/redoc",  # ReDoc
-    openapi_url="/openapi.json",  # OpenAPI schema
-    root_path=root_path  # ✅ Fix for Hugging Face Spaces subpath issue
+    title="Internship Recommendation & Chatbot API",
+    description="An API using Firestore for metadata, ChromaDB for vector search, and an LLM chatbot.",
+    version="3.0.0",
+    root_path=root_path
 )
 
 # --------------------------------------------------------------------
@@ -97,16 +101,20 @@ def load_model_and_data():
     print("🚀 Loading sentence encoder model...")
     encoder = SentenceEncoder()
 
-    # Point ChromaDB to the persistent /data storage path
    chroma_db_path = "/data/chroma_db"
-
    try:
        client = chromadb.PersistentClient(path=chroma_db_path)
        chroma_collection = client.get_or_create_collection(name="internships")
        print("✅ ChromaDB client initialized and collection is ready.")
        print(f" - Internships in DB: {chroma_collection.count()}")
+
+        # Pass the loaded models to the llm_handler module
+        llm_handler.encoder = encoder
+        llm_handler.chroma_collection = chroma_collection
+        initialize_llm()  # Initialize the LLM for the chatbot
+
    except Exception as e:
-        print(f"❌ Error initializing ChromaDB: {e}")
+        print(f"❌ Error initializing ChromaDB or LLM: {e}")
        raise
 
    # --------------------------------------------------------------------
@@ -116,20 +124,11 @@ def load_model_and_data():
 def read_root():
     return {"message": "Welcome to the Internship Recommendation API!"}
 
-@app.get("/healthz")
-def health_check():
-    return {"status": "ok"}
-
 @app.post("/setup")
 def run_initial_setup(secret_key: str = Query(..., example="your_secret_password")):
-    """
-    A secret endpoint to run the initial database setup.
-    This should only be run once after deployment.
-    """
     correct_key = os.getenv("SETUP_SECRET_KEY")
     if not correct_key or secret_key != correct_key:
         raise HTTPException(status_code=403, detail="Invalid secret key.")
-
     try:
         print("--- RUNNING DATABASE POPULATION SCRIPT ---")
         populate_vector_db()
@@ -155,35 +154,51 @@ def add_internship(internship: InternshipData, db_client: firestore.Client = Dep
     return {"status": "success", "internship_id": internship.id}
 
 @app.post("/profile-recommendations", response_model=RecommendationResponse)
-def get_profile_recommendations(profile: UserProfile):
+def get_profile_recommendations(profile: UserProfile, db_client: firestore.Client = Depends(get_db)):
     if chroma_collection is None or encoder is None:
         raise HTTPException(status_code=503, detail="Server is not ready.")
 
     query_text = f"Skills: {', '.join(profile.skills)}. Sectors: {', '.join(profile.sectors)}"
     query_embedding = encoder.encode([query_text])[0].tolist()
+
     results = chroma_collection.query(query_embeddings=[query_embedding], n_results=3)
 
     recommendations = []
     ids = results.get('ids', [[]])[0]
     distances = results.get('distances', [[]])[0]
-
+
     for i, internship_id in enumerate(ids):
-        recommendations.append({
-            "internship_id": internship_id,
-            "score": 1 - distances[i]
-        })
-
+        doc_ref = db_client.collection('internships').document(internship_id).get()
+        if doc_ref.exists:
+            internship_data = doc_ref.to_dict()
+            internship_data['score'] = 1 - distances[i]
+            recommendations.append(internship_data)
+
     return {"recommendations": recommendations}
 
 @app.post("/search", response_model=RecommendationResponse)
-def search_internships(search: SearchQuery):
+def search_internships(search: SearchQuery, db_client: firestore.Client = Depends(get_db)):
     if chroma_collection is None or encoder is None:
         raise HTTPException(status_code=503, detail="Server is not ready.")
+
     query_embedding = encoder.encode([search.query])[0].tolist()
+
     results = chroma_collection.query(query_embeddings=[query_embedding], n_results=3)
+
     recommendations = []
     ids = results.get('ids', [[]])[0]
     distances = results.get('distances', [[]])[0]
+
     for i, internship_id in enumerate(ids):
-        recommendations.append({"internship_id": internship_id, "score": 1 - distances[i]})
-    return {"recommendations": recommendations}
+        doc_ref = db_client.collection('internships').document(internship_id).get()
+        if doc_ref.exists:
+            internship_data = doc_ref.to_dict()
+            internship_data['score'] = 1 - distances[i]
+            recommendations.append(internship_data)
+
+    return {"recommendations": recommendations}
+
+@app.post("/chat", response_model=ChatResponse)
+def chat_with_bot(message: ChatMessage):
+    response = get_rag_response(message.query)
+    return {"response": response}