kawre committed on
Commit e66ba96 · verified · 1 Parent(s): 2e2c018

Update app.py

Files changed (1)
  app.py +5 -9
app.py CHANGED
@@ -1,11 +1,11 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 
-def respond(message, history, system_message, max_tokens, temperature, top_p, hf_token: gr.OAuthToken):
+def respond(message, history, system_message, max_tokens, temperature, top_p, hf_token):
     # Initialize the 3 clients
-    client_main = InferenceClient(token=hf_token.token, model="meta-llama/Llama-3.1-8B-Instruct")
-    client_aux1 = InferenceClient(token=hf_token.token, model="google/flan-t5-large")
-    client_aux2 = InferenceClient(token=hf_token.token, model="facebook/bart-large-cnn")
+    client_main = InferenceClient(token=hf_token, model="meta-llama/Llama-3.1-8B-Instruct")
+    client_aux1 = InferenceClient(token=hf_token, model="google/flan-t5-large")
+    client_aux2 = InferenceClient(token=hf_token, model="facebook/bart-large-cnn")
 
     # History and system message
     messages = [{"role": "system", "content": system_message}]
@@ -21,12 +21,11 @@ def respond(message, history, system_message, max_tokens, temperature, top_p, hf
     # Step 3: Aux2
     response_aux2 = client_aux2.text_generation(inputs=response_aux1, max_new_tokens=max_tokens)
 
-    # Return the final response
     return response_aux2
 
 # Gradio interface
 chatbot = gr.ChatInterface(
-    respond,
+    fn=respond,
     additional_inputs=[
         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
         gr.Slider(1, 2048, 512, label="Max new tokens"),
@@ -35,10 +34,7 @@ chatbot = gr.ChatInterface(
     ],
 )
 
-
 with gr.Blocks() as demo:
-    with gr.Sidebar():
-        gr.LoginButton()
     chatbot.render()
 
 if __name__ == "__main__":
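For reference, a minimal sketch of what app.py roughly looks like after this commit. The hunks above omit the middle of respond (old lines 12–20), the last two additional_inputs sliders, and the body of the `__main__` guard, so the Step 1/Step 2 chaining, the temperature/top_p sliders, and demo.launch() below are assumptions inferred from the surrounding context, not part of the diff. Likewise, with gr.OAuthToken and the LoginButton removed, respond no longer receives a token from Gradio's OAuth flow; reading HF_TOKEN from the environment is one hypothetical way to supply it.

```python
import os

import gradio as gr
from huggingface_hub import InferenceClient


# Assumption: with gr.OAuthToken / LoginButton gone, the token must come from
# elsewhere; an HF_TOKEN environment variable is used here as a placeholder.
def respond(message, history, system_message, max_tokens, temperature, top_p,
            hf_token=os.getenv("HF_TOKEN")):
    # Initialize the 3 clients (as in the diff)
    client_main = InferenceClient(token=hf_token, model="meta-llama/Llama-3.1-8B-Instruct")
    client_aux1 = InferenceClient(token=hf_token, model="google/flan-t5-large")
    client_aux2 = InferenceClient(token=hf_token, model="facebook/bart-large-cnn")

    # History and system message (as in the diff)
    messages = [{"role": "system", "content": system_message}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Step 1 (assumed, not shown in the diff): main model drafts a chat reply
    response_main = client_main.chat_completion(
        messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p
    ).choices[0].message.content

    # Step 2 (assumed, not shown in the diff): Aux1 refines the draft
    response_aux1 = client_aux1.text_generation(response_main, max_new_tokens=max_tokens)

    # Step 3: Aux2 (as in the diff; note text_generation takes the prompt positionally)
    response_aux2 = client_aux2.text_generation(response_aux1, max_new_tokens=max_tokens)

    return response_aux2


# Gradio interface; the last two sliders are assumed, the hunk cuts off before them
chatbot = gr.ChatInterface(
    fn=respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
        gr.Slider(1, 2048, 512, label="Max new tokens"),
        gr.Slider(0.1, 4.0, 0.7, label="Temperature"),
        gr.Slider(0.1, 1.0, 0.95, label="Top-p"),
    ],
)

with gr.Blocks() as demo:
    chatbot.render()

if __name__ == "__main__":
    demo.launch()  # assumed; the line after the guard is not shown in the diff
```

Note that after this change respond's hf_token parameter is no longer filled by Gradio, since the OAuth type hint and the LoginButton were both dropped; the env-var default above is only one way to keep the signature working and is not part of the commit.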