 import io.github.ollama4j.models.chat.OllamaChatRequest;
 import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
 import io.github.ollama4j.models.chat.OllamaChatResult;
+import io.github.ollama4j.models.response.LibraryModelTag;
 import io.github.ollama4j.models.response.Model;
 import io.github.ollama4j.types.OllamaModelType;
 import io.twentysixty.ollama.hologram.chatbot.jms.OllamaProducer;
@@ -95,7 +96,7 @@ public String getChatResponse(List<OllamaChatMessage> messages) throws OllamaBas
 		OllamaAPI ollamaAPI = new OllamaAPI(serviceUrl);
 		ollamaAPI.setRequestTimeoutSeconds(timeout);
 
-		OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance(OllamaModelType.LLAMA2);
+		OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama3.2:1b");
 		OllamaChatRequest requestModel = builder.withMessages(messages).build();
 		OllamaChatResult chatResult = ollamaAPI.chat(requestModel);
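
For context, a minimal self-contained sketch of how the updated builder call sits in the chat path. The `serviceUrl` and `timeout` values and the `getResponse()` accessor on `OllamaChatResult` are assumptions for illustration, not taken from this commit; only the calls visible in the hunk above are from the source.

```java
import java.util.List;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.chat.OllamaChatRequest;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatResult;

public class ChatSketch {

	// Hypothetical values; the real class reads these from configuration.
	private final String serviceUrl = "http://localhost:11434";
	private final long timeout = 120;

	public String getChatResponse(List<OllamaChatMessage> messages) throws Exception {
		OllamaAPI ollamaAPI = new OllamaAPI(serviceUrl);
		ollamaAPI.setRequestTimeoutSeconds(timeout);

		// The model is now addressed by its Ollama tag instead of the OllamaModelType constant.
		OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("llama3.2:1b");
		OllamaChatRequest requestModel = builder.withMessages(messages).build();
		OllamaChatResult chatResult = ollamaAPI.chat(requestModel);

		// getResponse() is assumed here; use whichever accessor your ollama4j version exposes.
		return chatResult.getResponse();
	}
}
```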
@@ -129,7 +130,7 @@ private void checkModels() {
 		try {
 			models = ollamaAPI.listModels();
 			for (Model m : models) {
-				if (m.getName().startsWith("llama2:")) {
+				if (m.getName().startsWith("llama3.2:1b")) {
 					modelLoaded = true;
 				}
 				logger.info(m.getName());
@@ -142,7 +143,7 @@ private void checkModels() {
 		if (!modelLoaded) {
 			logger.info("loading model...");
 			try {
-				ollamaAPI.pullModel(OllamaModelType.LLAMA2);
+				ollamaAPI.pullModel("llama3.2:1b");
 			} catch (Exception e) {
 				logger.error("", e);
 			}
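
Likewise, a rough standalone sketch of the availability check the last two hunks touch: list the local models and pull the tag when it is missing. The SLF4J logger, the hard-coded server URL, and the surrounding class are assumptions; only the ollama4j calls shown in the diff (`listModels()`, `getName()`, `pullModel()`) are taken from the source.

```java
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.models.response.Model;

public class ModelCheckSketch {

	private static final Logger logger = LoggerFactory.getLogger(ModelCheckSketch.class);
	private static final String MODEL_TAG = "llama3.2:1b";

	// Hypothetical server URL; the real class builds the client from its configured serviceUrl.
	private final OllamaAPI ollamaAPI = new OllamaAPI("http://localhost:11434");

	private void checkModels() {
		boolean modelLoaded = false;
		try {
			// listModels() returns the models already present on the Ollama server.
			List<Model> models = ollamaAPI.listModels();
			for (Model m : models) {
				if (m.getName().startsWith(MODEL_TAG)) {
					modelLoaded = true;
				}
				logger.info(m.getName());
			}
		} catch (Exception e) {
			logger.error("", e);
		}

		if (!modelLoaded) {
			logger.info("loading model...");
			try {
				// pullModel() downloads the tag; the first run can take a while.
				ollamaAPI.pullModel(MODEL_TAG);
			} catch (Exception e) {
				logger.error("", e);
			}
		}
	}
}
```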