Update llm.py
llm.py CHANGED

@@ -10,7 +10,7 @@ text_generator = pipeline(
     model="google/gemma-3n-e4b-it",
     device="cuda" if torch.cuda.is_available() else "cpu",
     torch_dtype=torch.bfloat16,
-    max_new_tokens=
+    max_new_tokens=500  # Limit output length
 )

 # Wrap pipeline for LangChain compatibility
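For context, a minimal sketch of what llm.py might look like after this commit. Only the pipeline arguments visible in the diff come from the file itself; the import lines, the "text-generation" task argument, and the HuggingFacePipeline wrapper are assumptions about the rest of the file:

# Sketch of llm.py after this change (assumed structure; only the pipeline
# arguments shown in the diff are taken from the actual file).
import torch
from transformers import pipeline
from langchain_huggingface import HuggingFacePipeline  # assumed wrapper module

# Build the Transformers pipeline; the "text-generation" task is assumed.
text_generator = pipeline(
    "text-generation",
    model="google/gemma-3n-e4b-it",
    device="cuda" if torch.cuda.is_available() else "cpu",
    torch_dtype=torch.bfloat16,
    max_new_tokens=500  # Limit output length
)

# Wrap pipeline for LangChain compatibility
llm = HuggingFacePipeline(pipeline=text_generator)

Setting max_new_tokens at pipeline construction caps every generation from this pipeline at 500 new tokens, which is what the fixed line accomplishes compared with the previously incomplete `max_new_tokens=`.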