Update app.py
app.py CHANGED
@@ -1,102 +1,62 @@
-
-import fitz # PyMuPDF
-import numpy as np
-import faiss
-from sentence_transformers import SentenceTransformer
+# app.py
 import gradio as gr
-import spaces
-
-
-
-
-
-# PDF reader
-def extract_text_from_pdf(folder_path="meal_plans"):
-    documents = []
-    for filename in os.listdir(folder_path):
-        if filename.lower().endswith(".pdf"):
-            path = os.path.join(folder_path, filename)
-            try:
-                doc = fitz.open(path)
-                text = ""
-                for page in doc:
-                    text += page.get_text()
-                documents.append({"text": text, "source": filename})
-            except Exception as e:
-                print(f"Error reading {filename}: {e}")
-    return documents
-
-# Index builder
-def create_index(docs):
-    texts = [doc["text"] for doc in docs]
-    embeddings = model.encode(texts)
-    dim = embeddings[0].shape[0]
-    index = faiss.IndexFlatL2(dim)
-    index.add(np.array(embeddings).astype("float32"))
-    return index
-
-# Search logic
-def search_index(query, index, docs, include_source=True, verbose=False, top_k=3):
-    query_vec = model.encode([query])
-    D, I = index.search(np.array(query_vec).astype("float32"), top_k)
-    responses = []
-    for i in I[0]:
-        doc = docs[i]
-        snippet = doc["text"][:750 if verbose else 300].replace("\n", " ").strip()
-        label = f"**📄 {doc['source']}**\n" if include_source else ""
-        responses.append(f"{label}{snippet}...")
-    return "\n\n---\n\n".join(responses)
-
-# Setup
-model = SentenceTransformer("all-MiniLM-L6-v2")
-documents = extract_text_from_pdf("meal_plans")
-index = create_index(documents)
-
-def ingest(folder_path="meal_plans", index_file="mealplan.index"):
-    if os.path.exists(index_file) and os.path.exists("docstore.npy"):
-        print("📦 Loading existing FAISS index...")
-        index = faiss.read_index(index_file)
-        documents = np.load("docstore.npy", allow_pickle=True).tolist()
-    else:
-        print("📥 Ingesting PDFs from scratch...")
-        documents = extract_text_from_pdf(folder_path)
-        texts = [doc["text"] for doc in documents]
-        embeddings = model.encode(texts)
-        dim = embeddings[0].shape[0]
-        index = faiss.IndexFlatL2(dim)
-        index.add(np.array(embeddings).astype("float32"))
-
-        faiss.write_index(index, index_file)
-        np.save("docstore.npy", documents)
-        print("✅ Index and documents saved.")
-
-    return documents, index
-
-# Load model
-model = SentenceTransformer("all-MiniLM-L6-v2")
+import spaces
+from transformers import pipeline
+import os
+from PyPDF2 import PdfReader
+from dotenv import load_dotenv

-
-documents, index = ingest("meal_plans")
+load_dotenv()

-
-
-
+@spaces.GPU
+class ChatBot:
+    def __init__(self):
+        self.llm = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1", token=os.getenv("HF_TOKEN"))
+        self.context = ""
+
+    def read_meal_plans(self, folder="meal_plans"):
+        text = ""
+        for file in os.listdir(folder):
+            if file.endswith(".pdf"):
+                reader = PdfReader(os.path.join(folder, file))
+                for page in reader.pages:
+                    text += page.extract_text() + "\n"
+        return text
+
+    def reply(self, message, history, preferences):
+        diet, goal, allergens = preferences
+        if not self.context:
+            mealplan_text = self.read_meal_plans()
+            self.context = f"Meal Plans: {mealplan_text}\nUser Preferences: Diet={diet}, Goal={goal}, Allergens={allergens}"
+
+        prompt = f"{self.context}\nUser: {message}\nAI:"
+        response = self.llm(prompt, max_new_tokens=100, do_sample=True, temperature=0.7)[0]['generated_text'].split("AI:")[-1].strip()
+        return response
+
+bot = ChatBot()
+
+def chat(message, history, diet, goal, allergens):
+    return bot.reply(message, history, (diet, goal, allergens))
+
+diet_choices = ["Vegetarian", "Vegan", "Keto", "Paleo", "No Preference"]
+goal_choices = ["Weight Loss", "Muscle Gain", "Maintenance"]
+allergen_choices = ["Nuts", "Dairy", "Gluten", "Soy", "Eggs"]
+
+with gr.Blocks() as demo:
+    gr.Markdown("# 🥗 AI Meal Plan Assistant")
     with gr.Row():
-
-
-
-
-
-
-
-
-
-
-
-
-
-    send_btn.click(user_query,
-                   inputs=[user_input, chatbot, include_source, verbose],
-                   outputs=[chatbot, chatbot])
+        diet = gr.Dropdown(diet_choices, label="Diet Type")
+        goal = gr.Dropdown(goal_choices, label="Goal")
+        allergens = gr.CheckboxGroup(allergen_choices, label="Allergies")
+    chatbot = gr.Chatbot()
+    msg = gr.Textbox(placeholder="Ask me for meal ideas...", label="Message")
+    send = gr.Button("Send")
+
+    def user_input(message, chat_history):
+        response = chat(message, chat_history, diet.value, goal.value, allergens.value)
+        chat_history.append((message, response))
+        return chat_history, ""
+
+    send.click(user_input, [msg, chatbot], [chatbot, msg])

 demo.launch()
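
One wiring detail in the new user_input handler: diet.value, goal.value, and allergens.value return the components' initial values, not what the user has currently selected; Gradio delivers live component values to an event handler through the listener's inputs list. A minimal sketch of that wiring, assuming everything else in the committed file stays the same (the extra parameter names are illustrative):

    def user_input(message, chat_history, diet_choice, goal_choice, allergy_list):
        # diet_choice, goal_choice and allergy_list receive the current values of
        # the diet, goal and allergens components listed in `inputs` below.
        response = chat(message, chat_history, diet_choice, goal_choice, allergy_list)
        chat_history.append((message, response))
        return chat_history, ""

    send.click(
        user_input,
        inputs=[msg, chatbot, diet, goal, allergens],
        outputs=[chatbot, msg],
    )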
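
Also worth flagging: @spaces.GPU is applied to the ChatBot class, while the ZeroGPU decorator from the spaces package is documented for the functions that actually need the GPU. A sketch of that pattern, assuming the Space runs on ZeroGPU hardware (the generate helper is illustrative, not part of the commit):

    import spaces

    @spaces.GPU
    def generate(prompt):
        # Run the committed text-generation pipeline inside a GPU-allocated call.
        out = bot.llm(prompt, max_new_tokens=100, do_sample=True, temperature=0.7)
        return out[0]["generated_text"]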
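
Finally, the prompt is built as a raw "User: ... AI:" transcript, whereas Mistral-7B-Instruct is trained on the [INST] ... [/INST] chat format. One option, sketched below under the assumption that the committed pipeline is kept, is to render the request with the tokenizer's chat template; build_prompt is a hypothetical helper, not part of the commit:

    def build_prompt(llm, context, message):
        # Render the request with the model's own chat template so
        # Mistral-7B-Instruct sees its expected [INST] ... [/INST] markup.
        messages = [{"role": "user", "content": f"{context}\n{message}"}]
        return llm.tokenizer.apply_chat_template(
            messages, tokenize=False, add_generation_prompt=True
        )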