@@ -203,13 +203,58 @@ def _generate_impl(self, prompt: str | list[dict[str, str]], response_format: ty
203203 )
204204
205205
class OllamaProvider(LLMProvider):
    """Ollama provider for structured text generation.

    Talks to a locally running Ollama server, so no API key is required.
    """

    # No API key needed for local Ollama.
    DEFAULT_MODEL = "llama3:latest"

    @property
    def name(self) -> str:
        """Provider identifier used for registry lookup."""
        return "ollama"

    def _get_api_key(self) -> str:
        """Override _get_api_key since Ollama doesn't need an API key."""
        return "not_needed"  # Return a dummy value

    def _initialize_client(self):
        """Import and return the ``ollama`` module as the client.

        Raises:
            ImportError: If the ``ollama`` package is not installed.
            ValueError: For any other failure while initializing the client.
        """
        try:
            import ollama
            return ollama
        except ImportError as e:
            # Chain the original exception so the install hint keeps context.
            raise ImportError(
                f"Ollama package not installed. Install it with 'pip install ollama': {str(e)}"
            ) from e
        except Exception as e:
            raise ValueError(f"Error initializing Ollama client: {str(e)}") from e

    def _generate_impl(
        self, prompt: str | list[dict[str, str]], response_format: type[BaseModel]
    ) -> BaseModel:
        """Generate a structured response from the Ollama chat API.

        Args:
            prompt: Either a raw prompt string or a pre-built list of chat
                messages (``{"role": ..., "content": ...}`` dicts).
            response_format: Pydantic model class describing the expected
                structured output.

        Returns:
            An instance of ``response_format`` validated from the model's
            JSON output.
        """
        # Convert prompt to messages format if it's a string.
        messages = get_messages(prompt) if isinstance(prompt, str) else prompt

        # Constrain generation to the Pydantic model's JSON schema via
        # Ollama's structured-output `format` parameter.
        schema = response_format.model_json_schema()

        response = self.client.chat(
            messages=messages,
            model=self.model_id,
            format=schema,
        )

        # Unlike other providers that use instructor and return the parsed
        # model directly, we must manually parse the JSON response here.
        return response_format.model_validate_json(response.message.content)
250+
206251def create_provider (
207252 provider : str , model_id : str | None = None , ** kwargs
208253) -> LLMProvider :
209254 """Create an LLM provider for structured text generation.
210255
211256 Args:
212- provider: Provider name ('anthropic', 'google', or 'openai')
 257+       provider: Provider name ('anthropic', 'google', 'openai', or 'ollama')
213258 model_id: Optional model identifier. If not provided, uses provider's default
214259 **kwargs: Additional provider-specific arguments
215260
@@ -220,6 +265,7 @@ def create_provider(
220265 "anthropic" : AnthropicProvider ,
221266 "google" : GoogleProvider ,
222267 "openai" : OpenAIProvider ,
268+ "ollama" : OllamaProvider ,
223269 }
224270
225271 provider_class = provider_map .get (provider .lower ())
0 commit comments