rdune71's picture
Update system to use remote Ollama host with graceful fallback handling and sync with remote changes
ef600c3
raw
history blame
1.86 kB
from flask import Flask, render_template, request, jsonify
import os
import sys
from datetime import datetime
# Add src to Python path to import models
# NOTE(review): this appends ./src itself, yet the import below is
# `from src.models...`, which resolves from src's PARENT dir — confirm
# which form the deployment actually relies on.
sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))
# Flask application object; also the WSGI entry point on Hugging Face Spaces.
app = Flask(__name__)
# Try to import dotenv, if not available, create a simple fallback
try:
    from dotenv import load_dotenv
    load_dotenv()  # load environment variables from a local .env file
except ImportError:
    # python-dotenv is not installed: expose a no-op stand-in so any
    # later caller of load_dotenv() still works. Note the real
    # load_dotenv() is only invoked in the success branch above.
    def load_dotenv():
        pass
# Path to your Obsidian vault (synced via OneDrive)
OBSIDIAN_PATH = os.path.expanduser("~/OneDrive/ObsidianVault")
def get_todays_journal():
    """Retrieve today's journal entry from the Obsidian vault.

    Returns:
        str: Contents of ``Journal/<YYYY-MM-DD>.md`` for today, or a
        friendly fallback message when no entry exists yet.
    """
    today = datetime.now().strftime("%Y-%m-%d")
    journal_path = os.path.join(OBSIDIAN_PATH, "Journal", f"{today}.md")
    # EAFP: open directly instead of exists()+open, avoiding a
    # check-then-use race with the OneDrive sync process.
    try:
        with open(journal_path, "r", encoding="utf-8") as f:
            return f.read()
    except FileNotFoundError:
        return f"No journal entry found for {today}. Create one in your Obsidian vault!"
# Import and initialize the AI model
from src.models.model_factory import get_model
# Single shared model instance for the whole process; per the commit
# message this presumably targets a remote Ollama host with graceful
# fallback — confirm in src/models/model_factory.py.
ai_model = get_model()
def ai_coach(prompt):
    """Generate a coaching reply for *prompt* using the model factory.

    Today's journal entry is passed to the model as context. Any model
    failure is reported as a plain string rather than raised, so HTTP
    handlers can always return a 200 with an error text.

    Args:
        prompt: The user's question or request.

    Returns:
        str: The model's response, or an error message on failure.
    """
    context = get_todays_journal()
    try:
        return ai_model.generate_response(prompt, context)
    except Exception as e:
        # Broad catch is deliberate: this is the top-level model
        # boundary and the caller expects a string, never an exception.
        return f"Error generating response: {str(e)}"
@app.route('/')
def index():
    """Serve the single-page chat UI."""
    page = 'index.html'
    return render_template(page)
@app.route('/coach', methods=['POST'])
def coach():
    """Handle a coaching request.

    Expects a JSON body ``{"prompt": "..."}`` and returns
    ``{"response": "..."}``, or ``{"error": "..."}`` with HTTP 400
    when the prompt is missing.
    """
    # silent=True makes get_json() return None (instead of raising a
    # 400/415) on a missing or malformed JSON body; fall back to an
    # empty dict so the validation below yields a clean 400 rather
    # than an AttributeError-driven 500.
    data = request.get_json(silent=True) or {}
    prompt = data.get('prompt', '')
    if not prompt:
        return jsonify({'error': 'No prompt provided'}), 400
    response = ai_coach(prompt)
    return jsonify({'response': response})
# Required for Hugging Face Spaces
if __name__ == '__main__':
    # Spaces injects PORT; fall back to 7860 (the Spaces convention).
    port = int(os.environ.get('PORT', 7860))
    app.run(debug=False, host='0.0.0.0', port=port)