import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

MODEL = "roberta-base-openai-detector"

# Load the detector's tokenizer and classifier once at startup
tokenizer = AutoTokenizer.from_pretrained(MODEL)
model = AutoModelForSequenceClassification.from_pretrained(MODEL)
def detect_text(text):
    # Tokenize the input; truncation keeps it within the model's maximum length
    inputs = tokenizer(text, return_tensors="pt", truncation=True)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = torch.softmax(outputs.logits, dim=1)
    # For this checkpoint, class 0 is "Fake" (AI-generated) and class 1 is "Real"
    # (human-written), so map the indices to the UI labels accordingly.
    return {"Human": probs[0][1].item(), "AI": probs[0][0].item()}
demo = gr.Interface(
    fn=detect_text,
    inputs=gr.Textbox(lines=5, placeholder="Paste text here..."),
    outputs=gr.Label(),  # shows the classification with confidence scores
    title="AI Text Detector Chatbot 🤖",
    description="Detect whether text is human-written or AI-generated.",
)
demo.launch(share=True)
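
Before trusting the Human/AI mapping, it is worth checking which class index this checkpoint assigns to each label. A minimal sketch of such a check, assuming the model downloads successfully; the sample sentence is arbitrary and the printed scores are only illustrative:

from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

MODEL = "roberta-base-openai-detector"
tokenizer = AutoTokenizer.from_pretrained(MODEL)
model = AutoModelForSequenceClassification.from_pretrained(MODEL)

sample = "Paste any paragraph here to see how the detector scores it."
inputs = tokenizer(sample, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits
probs = torch.softmax(logits, dim=1)[0]

# id2label comes from the checkpoint's config; for this detector it is expected to map
# 0 -> "Fake" (AI-generated) and 1 -> "Real" (human-written).
print({model.config.id2label[i]: round(p.item(), 3) for i, p in enumerate(probs)})

When deploying this as a Hugging Face Space, the extra dependencies (transformers and torch) need to be listed in requirements.txt; share=True only matters for local runs, since a Space already serves the app at a public URL.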