File size: 16,002 Bytes
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2bffb3f
 
 
 
 
 
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ebbf48e
 
 
 
 
 
 
 
 
 
 
0fbe5c9
 
 
 
 
 
dd16530
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2c84f68
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0fbe5c9
2c84f68
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0fbe5c9
2c84f68
0fbe5c9
2c84f68
 
 
dd16530
2c84f68
dd16530
2c84f68
dd16530
0fbe5c9
2c84f68
dd16530
 
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ebbf48e
 
0fbe5c9
 
 
 
 
 
 
ebbf48e
 
 
0fbe5c9
 
 
 
 
 
 
 
 
 
 
ebbf48e
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ebbf48e
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2bffb3f
 
 
 
 
 
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
2bffb3f
 
 
 
 
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
144f6c7
d9e47b5
144f6c7
0fbe5c9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
import gradio as gr
import requests
import json
import asyncio
import logging
from typing import Dict, List, Any, Optional
import anthropic
import openai
from datetime import datetime
import os
# from dotenv import load_dotenv

# load_dotenv()
def get_api_keys():
    """Resolve the OpenAI and Dolibarr API keys.

    Checks the process environment first (e.g. Hugging Face Spaces
    secrets); if either key is missing, falls back to loading a local
    ``.env`` file via python-dotenv for local development.

    Returns:
        tuple[str, str]: ``(openai_key, dolibarr_key)``.

    Raises:
        ValueError: if either key cannot be found anywhere.
    """
    openai_key = os.getenv("OPENAI_API_KEY")
    dolibarr_key = os.getenv("DOLIBARR_API_KEY")

    # Fall back to a .env file only when the environment did not supply
    # both keys (local-development convenience).
    if not openai_key or not dolibarr_key:
        try:
            from dotenv import load_dotenv
            load_dotenv()
        except ImportError:
            # python-dotenv is optional (e.g. not installed on the
            # deployment host); rely purely on the environment then.
            pass
        openai_key = os.getenv("OPENAI_API_KEY")
        dolibarr_key = os.getenv("DOLIBARR_API_KEY")

    # Specific error messages so the operator knows which key is missing.
    if not openai_key:
        raise ValueError("OPENAI_API_KEY not found in environment variables or .env file")
    if not dolibarr_key:
        raise ValueError("DOLIBARR_API_KEY not found in environment variables or .env file")

    return openai_key, dolibarr_key


# Set up logging
# Module-level logger at INFO so the API request/response flow is visible.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

class DolibarrAPI:
    """Thin wrapper around the Dolibarr REST API.

    Authenticates every request with the ``DOLAPIKEY`` header and returns
    decoded JSON — or an ``{"error": ...}`` dict on failure — instead of
    raising, so callers can serialize responses directly.
    """

    # REST entry point of the hosted Dolibarr instance.
    base_url = "https://valiant-trust-production.up.railway.app/api/index.php"

    # Seconds before an HTTP request is abandoned; without this, a dead
    # server would hang the caller indefinitely.
    REQUEST_TIMEOUT = 30

    def __init__(self, api_key: str):
        self.api_key = api_key
        self.headers = {
            'DOLAPIKEY': api_key,
            'Content-Type': 'application/json',
            'Accept': 'application/json'
        }

    def _request(self, method: str, endpoint: str, data: Optional[dict] = None, params: Optional[dict] = None) -> Any:
        """Issue an HTTP request and decode the JSON response.

        Network errors, HTTP error statuses and malformed JSON are
        logged and returned as ``{"error": ...}`` dicts rather than raised.
        """
        # Reuse the class-level base_url instead of re-hardcoding the URL
        # here (the original duplicated the literal in a local variable).
        url = f"{self.base_url}{endpoint}"

        try:
            response = requests.request(
                method, url, headers=self.headers, json=data,
                params=params, timeout=self.REQUEST_TIMEOUT
            )
            response.raise_for_status()
            return response.json()
        except requests.exceptions.RequestException as e:
            logger.error(f"API request failed: {e}")
            return {"error": f"API request failed: {str(e)}"}
        except json.JSONDecodeError as e:
            logger.error(f"JSON decode error: {e}")
            return {"error": f"Invalid JSON response: {str(e)}"}

    def get_req(self, endpoint: str, params: Optional[dict] = None):
        """GET request; ``params`` become the query string."""
        return self._request('GET', endpoint, params=params)

    def post_req(self, endpoint: str, params: dict):
        """POST request; ``params`` are sent as the JSON body."""
        return self._request("POST", endpoint, data=params)

    def put_req(self, endpoint: str, params: dict):
        """PUT request; ``params`` are sent as the JSON body."""
        return self._request("PUT", endpoint, data=params)

    def del_req(self, endpoint: str, params: Optional[dict] = None):
        """DELETE request; ``params`` become the query string."""
        return self._request("DELETE", endpoint, params=params)

def dolibarr_interface(method: str, endpoint: str, api_key: Optional[str] = None, payload_str: str = "") -> str:
    """Dispatch a Dolibarr API call and return the result as a JSON string.

    Args:
        method: HTTP verb, case-insensitive: GET, POST, PUT or DELETE.
        endpoint: Dolibarr REST endpoint, e.g. ``/thirdparties``.
        api_key: Dolibarr API key. When None, ``DOLIBARR_API_KEY`` is read
            from the environment *at call time* (the original froze the
            value at import time via a default-argument ``os.getenv``,
            which missed keys set after the module loaded).
        payload_str: optional JSON document used as query parameters
            (GET/DELETE) or as the request body (POST/PUT).

    Returns:
        A pretty-printed JSON string: either the API response or an
        ``{"error": ...}`` object describing what went wrong.
    """
    try:
        method = method.upper()
        # Validate the inputs before constructing the client so user
        # errors are reported without any side effects.
        if method not in ('GET', 'POST', 'PUT', 'DELETE'):
            return json.dumps({"error": f"Invalid HTTP method '{method}' selected."}, indent=2)

        payload = None
        if payload_str and payload_str.strip():
            try:
                payload = json.loads(payload_str)
            except json.JSONDecodeError as e:
                return json.dumps({"error": f"Invalid JSON payload: {str(e)}"}, indent=2)

        # Write operations need a body (message text matches the original).
        if method in ('POST', 'PUT') and not payload:
            return json.dumps({"error": f"{method} requests require a payload"}, indent=2)

        # Resolve the key lazily so late-set environment variables work.
        if api_key is None:
            api_key = os.getenv("DOLIBARR_API_KEY")
        api = DolibarrAPI(api_key)

        if method == 'GET':
            result = api.get_req(endpoint, payload)
        elif method == 'POST':
            result = api.post_req(endpoint, payload)
        elif method == 'PUT':
            result = api.put_req(endpoint, payload)
        else:  # DELETE — the only remaining validated method
            result = api.del_req(endpoint, payload)

        return json.dumps(result, indent=2)

    except Exception as e:
        # Top-level boundary: never raise into the UI layer.
        logger.error(f"Unexpected error in dolibarr_interface: {e}")
        return json.dumps({"error": f"Unexpected error: {str(e)}"}, indent=2)

def format_api_response(api_result, max_items=10):
    """Truncate long JSON list responses to keep LLM prompts small.

    If ``api_result`` parses to a list longer than ``max_items``, only
    the first ``max_items`` entries are kept, plus a trailing info
    marker. Short lists, dicts and non-JSON text pass through unchanged.
    """
    try:
        parsed = json.loads(api_result)
    except Exception:
        # Not JSON (or otherwise unparseable) — leave untouched.
        return api_result

    if not (isinstance(parsed, list) and len(parsed) > max_items):
        return api_result

    shortened = parsed[:max_items]
    shortened.append({"info": f"Showing first {max_items} results. Ask for more if needed."})
    return json.dumps(shortened, indent=2)

class OpenAIDolibarrAgent:
    """LLM-driven assistant that answers ERP questions by calling Dolibarr.

    Per user message: the chat history plus a function schema is sent to
    the model; if the model requests the ``dolibarr_api`` function, the
    agent executes it against Dolibarr, appends the result to the
    conversation, and asks the model for a final natural-language answer.
    """

    def __init__(self, openai_api_key: str, dolibarr_api_key: str, base_url: str = None):
        """Build the OpenAI client, system prompt and function schema.

        Args:
            openai_api_key: key for the OpenAI-compatible endpoint.
            dolibarr_api_key: key forwarded to the Dolibarr REST API.
            base_url: optional alternate endpoint (e.g. Nebius); None
                uses the default OpenAI endpoint.
        """
        self.client = openai.OpenAI(api_key=openai_api_key, base_url=base_url)
        self.dolibarr_api_key = dolibarr_api_key
        
        # System prompt with Dolibarr context
        self.system_prompt = """### CRITICAL RULES

1. Show ALL data from API calls - never truncate unless asked

2. Display every record in structured tables

3. Never fabricate data

4. Be proactive - call correct API immediately

5. Confirm success for create/update operations

6. Show full details for specific IDs

7. Explain errors clearly



### API ENDPOINTS

- /thirdparties (Customers/Suppliers)

- /invoices

- /products

- /contacts

- /orders, /proposals, /bills, /stocks, /projects, /users



All endpoints support: GET (list/fetch), POST (create), PUT (update), DELETE



### REQUIRED FIELDS



**Create Thirdparty**

```json

{

  "name": "John Doe",

  "address": "123 Main St",

  "zip": "12345",

  "town": "Sample City",

  "country_id": 1,

  "email": "[email protected]",

  "phone": "+123456789",

  "type": 1,

  "status": 1

}

```



**Create Invoice**

```json

{

  "socid": 10,

  "date": "2025-06-01",

  "duedate": "2025-06-15",

  "lines": [

    {

      "desc": "Service",

      "subprice": 500,

      "qty": 1,

      "total_ht": 500,

      "vat": 18,

      "total_ttc": 590

    }

  ]

}

```



**Create Product**

```json

{

  "label": "Smartphone",

  "price": 499.99,

  "stock": 100,

  "description": "Latest model",

  "socid": 10

}

```



**Create Contact**

```json

{

  "thirdparty_id": 1,

  "firstname": "Jane",

  "lastname": "Doe",

  "email": "[email protected]",

  "phone": "+123456789",

  "position": "Sales Manager",

  "address": "123 Street"

}

```



### 🧾 RESPONSE FORMAT

- For **lists**: display `ID`, `Name/Label`, `Status`, and any other key fields in **tables**.

- For **individual records**: show all fields in **structured format**.



- Prefix counts: e.g., **"Found 32 customers:"**

 

- On errors: explain clearly what failed, and why.

 ---



### βš™οΈ GENERAL RULE

When user mentions something like "get invoice", immediately call the respective endpoint (`GET /invoices`) and show **complete** results.  

NEVER truncate unless user asks for filtered or paginated results.



Current date: """ + datetime.now().strftime("%Y-%m-%d")

        # Function definition for OpenAI format
        # NOTE(review): this is the legacy "functions"/"function_call"
        # schema; current OpenAI SDKs prefer "tools"/"tool_calls". It
        # still works, but consider migrating.
        self.functions = [
            {
                "name": "dolibarr_api",
                "description": "Execute API calls to the Dolibarr ERP system",
                "parameters": {
                    "type": "object",
                    "properties": {
                        "method": {
                            "type": "string",
                            "enum": ["GET", "POST", "PUT", "DELETE"],
                            "description": "HTTP method for the API call"
                        },
                        "endpoint": {
                            "type": "string", 
                            "description": "API endpoint (e.g., /thirdparties, /invoices)"
                        },
                        "payload": {
                            "type": "string",
                            "description": "JSON payload for POST/PUT requests (leave empty for GET)"
                        }
                    },
                    "required": ["method", "endpoint"]
                }
            }
        ]

    def execute_dolibarr_call(self, method: str, endpoint: str, payload: str = "") -> str:
        """Execute the actual Dolibarr API call"""
        # format_api_response truncates long list responses so they do
        # not blow the LLM context window.
        raw_result = dolibarr_interface(method, endpoint, self.dolibarr_api_key, payload)
        return format_api_response(raw_result)

    def chat(self, message: str, history: List[List[str]]) -> str:
        """Main chat function that processes user messages

        Args:
            message: the user's latest input.
            history: Gradio-style history, a list of
                [user_message, assistant_message] pairs.

        Returns:
            The assistant's reply text, or a user-facing error string —
            this method never raises.
        """
        try:
            # Convert Gradio history to OpenAI format
            messages = [{"role": "system", "content": self.system_prompt}]
            
            # Only keep the last 6 messages (3 user/assistant pairs)
            # to bound prompt size.
            max_history = 6
            for human_msg, assistant_msg in history[-max_history:]:
                if human_msg:
                    messages.append({"role": "user", "content": human_msg})
                if assistant_msg:
                    messages.append({"role": "assistant", "content": assistant_msg})
            
            # Add current message
            messages.append({"role": "user", "content": message})

            # Call OpenAI API with functions
            logger.info("Sending request to Nebius API...")
            response = self.client.chat.completions.create(
                model="gpt-4.1-mini",#"gpt-3.5-turbo",  # or gpt-4 "Qwen/Qwen3-235B-A22B",
                messages=messages,
                functions=self.functions,
                function_call="auto",
                max_tokens=1500
            )

            # Process the response
            # NOTE(review): rebinding `message` shadows the user's input
            # parameter; safe only because the original text was already
            # appended to `messages` above.
            message = response.choices[0].message
            logger.info(f"Received response from Nebius: {message}")
            
            if message.function_call:
                # Execute the Dolibarr API call
                function_name = message.function_call.name
                function_args = json.loads(message.function_call.arguments)
                logger.info(f"Function call: {function_name} with args: {function_args}")
                
                if function_name == "dolibarr_api":
                    api_result = self.execute_dolibarr_call(
                        method=function_args.get("method", "GET"),
                        endpoint=function_args.get("endpoint", ""),
                        payload=function_args.get("payload", "")
                    )
                    logger.info(f"Dolibarr API result: {api_result}")
                    # Send function result back to OpenAI: first echo the
                    # assistant's function_call turn, then the function
                    # result, per the legacy function-calling protocol.
                    messages.append({
                        "role": "assistant",
                        "content": None,
                        "function_call": message.function_call
                    })
                    messages.append({
                        "role": "function",
                        "name": function_name,
                        "content": api_result
                    })
                    
                    # Get final response
                    logger.info("Getting final response from Nebius...")
                    final_response = self.client.chat.completions.create(
                        model="gpt-4.1-mini",#"gpt-3.5-turbo",#"Qwen/Qwen3-235B-A22B",
                        messages=messages,
                        max_tokens=1500
                    )
                    logger.info(f"Final response: {final_response.choices[0].message}")
                    
                    # Clean up the response content
                    content = final_response.choices[0].message.content
                    # Remove the <think> sections (emitted by some
                    # reasoning models, e.g. Qwen) before displaying.
                    content = content.split('</think>')[-1].strip() if '</think>' in content else content
                    return content
            
            # Clean up the response content for non-function calls too.
            # NOTE(review): an unrecognized function_call name also falls
            # through to here, returning the fallback string since
            # content is None in that case.
            content = message.content
            content = content.split('</think>')[-1].strip() if '</think>' in content else content
            return content if content else "I couldn't process that request."
            
        except openai.APIConnectionError as e:
            logger.error(f"OpenAI API Connection Error: {e}")
            return "Sorry, I'm having trouble connecting to OpenAI. Please check if the API key is valid and the service is available."
        except openai.AuthenticationError as e:
            logger.error(f"OpenAI API Authentication Error: {e}")
            return "Sorry, there's an authentication error with the OpenAI API. Please check if the API key is correct."
        except Exception as e:
            # Last-resort boundary: surface the error as chat text.
            logger.error(f"Error in chat: {e}")
            return f"Sorry, I encountered an error: {str(e)}"

def create_openai_agent_interface():
    """Create the Gradio interface for the OpenAI-powered Dolibarr agent.

    Returns:
        gr.ChatInterface wired to a freshly constructed agent.

    Raises:
        ValueError: propagated from get_api_keys() when a key is missing.
    """
    # get_api_keys() raises ValueError if either key is absent, so no
    # extra None-check is needed afterwards.
    OPENAI_API_KEY, DOLIBARR_API_KEY = get_api_keys()

    # Log key *lengths* only — never the secrets themselves.
    logger.info("API Keys loaded successfully")
    logger.info(f"OpenAI API Key length: {len(OPENAI_API_KEY)}")
    logger.info(f"Dolibarr API Key length: {len(DOLIBARR_API_KEY)}")

    # Initialize the agent exactly once (the original constructed two
    # identical agents and discarded the first).
    agent = OpenAIDolibarrAgent(OPENAI_API_KEY, DOLIBARR_API_KEY)
    # For Nebius-hosted models instead:
    # agent = OpenAIDolibarrAgent(os.getenv("NEBIUS_API_KEY"), DOLIBARR_API_KEY, "https://api.studio.nebius.ai/v1")

    # Create Gradio ChatInterface
    demo = gr.ChatInterface(
        fn=agent.chat,
        title="πŸ€– ERP Assistant",
        description="""

πŸ€– AI-Powered Dolibarr ERP Assistant - Your intelligent business management companion. I can help you manage customers, invoices, products, orders, and financial operations through natural conversation. Simply type your request (e.g., "Show me all customers" or "Create a new invoice") and get instant results. Try it with our demo instance at https://valiant-trust-production.up.railway.app/ (username: admin, password: admin123).

  - Check this out: https://youtu.be/oYAxRSNC8hc

""",
        examples=[
            "Show me all customers",
            "List all invoices", 
            "What products do we have?",
            "Get details for customer ID 1",
            "Show me recent proposals"
        ],
        cache_examples=False,
        theme=gr.themes.Soft()
    )

    return demo

# Main execution
if __name__ == '__main__':
    # Script entry point: build the Gradio app and serve it locally.
    try:
        print("πŸš€ Starting OpenAI-Powered Dolibarr Agent...")
        
        # Create and launch the interface
        demo = create_openai_agent_interface()
        # Bind to localhost only; debug/show_error surface tracebacks in
        # the Gradio UI during development.
        demo.launch(
            server_name="127.0.0.1",
            server_port=7862,
            share=False,
            debug=True,
            show_error=True
        )
        
    except Exception as e:
        # Top-level catch-all: log and print so startup failures
        # (missing keys, port in use) are visible in both channels.
        logger.error(f"Failed to start application: {e}")
        print(f"❌ Error starting application: {e}")

# Example queries you can try:
"""

- "Show me all customers"

- "List all invoices"

- "Get me customer details for ID 1"  

- "What products do we have?"

- "Show me recent proposals"

- "Create a new customer named Test Corp"

- "Find all unpaid invoices"

"""