Inserito ollama fra le possibilità, usando il modello gpt-oss:latest. Nota: la risposta di ollama è analizzata (parsing) in modo errato — include anche il "thinking" del modello invece del solo testo finale.

This commit is contained in:
Simone Garau
2025-09-19 11:55:14 +02:00
parent 6729a66d50
commit 25b93985e5
2 changed files with 2 additions and 2 deletions

View File

@@ -14,7 +14,7 @@ with gr.Blocks() as demo:
     with gr.Row():
         provider = gr.Dropdown(
-            choices=["mock", "openai", "anthropic", "google", "deepseek"],
+            choices=["mock", "openai", "anthropic", "google", "deepseek", "ollama"],
             value="mock",
             label="Modello da usare"
         )

View File

@@ -14,7 +14,7 @@ class PredictorAgent:
     def __init__(self):
         # Ollama via HTTP locale
         self.providers = {
-            "ollama": {"type": "ollama", "host": os.getenv("OLLAMA_HOST", "http://localhost:11434"), "model": "llama3"}
+            "ollama": {"type": "ollama", "host": os.getenv("OLLAMA_HOST", "http://localhost:11434"), "model": "gpt-oss:latest"}
        }
# OpenAI # OpenAI