初始化
This commit is contained in:
34
easy-agents-chat/easy-agents-chat-ollama/pom.xml
Normal file
34
easy-agents-chat/easy-agents-chat-ollama/pom.xml
Normal file
@@ -0,0 +1,34 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>com.easyagents</groupId>
        <artifactId>easy-agents-chat</artifactId>
        <version>${revision}</version>
    </parent>

    <!-- FIX: conventional POM element order is artifactId before name. -->
    <artifactId>easy-agents-chat-ollama</artifactId>
    <name>easy-agents-chat-ollama</name>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>com.easyagents</groupId>
            <artifactId>easy-agents-core</artifactId>
            <!-- "compile" is the default scope; kept explicit for symmetry with the
                 junit test scope below.
                 NOTE(review): no <version> here or on junit — presumably both are
                 managed by the parent's dependencyManagement; confirm. -->
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
</project>
|
||||
@@ -0,0 +1,32 @@
|
||||
/*
|
||||
* Copyright (c) 2023-2026, Easy-Agents (fuhai999@gmail.com).
|
||||
* <p>
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* <p>
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* <p>
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.easyagents.llm.ollama;
|
||||
|
||||
import com.easyagents.core.model.chat.ChatConfig;
|
||||
|
||||
public class OllamaChatConfig extends ChatConfig {
|
||||
|
||||
private static final String DEFAULT_PROVIDER = "ollama";
|
||||
private static final String DEFAULT_ENDPOINT = "https://localhost:11434";
|
||||
private static final String DEFAULT_REQUEST_PATH = "/v1/chat/completions";
|
||||
|
||||
public OllamaChatConfig() {
|
||||
setProvider(DEFAULT_PROVIDER);
|
||||
setEndpoint(DEFAULT_ENDPOINT);
|
||||
setRequestPath(DEFAULT_REQUEST_PATH);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright (c) 2023-2026, Easy-Agents (fuhai999@gmail.com).
|
||||
* <p>
|
||||
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||
* you may not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
* <p>
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* <p>
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package com.easyagents.llm.ollama;
|
||||
|
||||
import com.easyagents.core.model.chat.OpenAICompatibleChatModel;
|
||||
import com.easyagents.core.model.chat.ChatInterceptor;
|
||||
import com.easyagents.core.model.chat.GlobalChatInterceptors;
|
||||
import com.easyagents.core.model.client.ChatRequestSpecBuilder;
|
||||
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class OllamaChatModel extends OpenAICompatibleChatModel<OllamaChatConfig> {
|
||||
|
||||
/**
|
||||
* 构造一个聊天模型实例,不使用实例级拦截器。
|
||||
*
|
||||
* @param config 聊天模型配置
|
||||
*/
|
||||
public OllamaChatModel(OllamaChatConfig config) {
|
||||
super(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* 构造一个聊天模型实例,并指定实例级拦截器。
|
||||
* <p>
|
||||
* 实例级拦截器会与全局拦截器(通过 {@link GlobalChatInterceptors} 注册)合并,
|
||||
* 执行顺序为:可观测性拦截器 → 全局拦截器 → 实例拦截器。
|
||||
*
|
||||
* @param config 聊天模型配置
|
||||
* @param userInterceptors 实例级拦截器列表
|
||||
*/
|
||||
public OllamaChatModel(OllamaChatConfig config, List<ChatInterceptor> userInterceptors) {
|
||||
super(config, userInterceptors);
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public ChatRequestSpecBuilder getChatRequestSpecBuilder() {
|
||||
return new OllamaRequestSpecBuilder();
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,21 @@
|
||||
package com.easyagents.llm.ollama;
|
||||
|
||||
import com.easyagents.core.model.chat.ChatConfig;
|
||||
import com.easyagents.core.model.chat.ChatOptions;
|
||||
import com.easyagents.core.model.client.OpenAIChatRequestSpecBuilder;
|
||||
import com.easyagents.core.prompt.Prompt;
|
||||
import com.easyagents.core.util.Maps;
|
||||
|
||||
public class OllamaRequestSpecBuilder extends OpenAIChatRequestSpecBuilder {
|
||||
protected Maps buildBaseParamsOfRequestBody(Prompt prompt, ChatOptions options, ChatConfig config) {
|
||||
Maps params = super.buildBaseParamsOfRequestBody(prompt, options, config);
|
||||
params.setIf(!options.isStreaming(), "stream", false);
|
||||
|
||||
// 支持思考
|
||||
if (config.isSupportThinking()) {
|
||||
params.setIf(options.getThinkingEnabled() != null, "thinking", options.getThinkingEnabled());
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
package com.easyagents.llm.ollama;
|
||||
|
||||
import com.easyagents.core.message.AiMessage;
|
||||
import com.easyagents.core.model.chat.ChatModel;
|
||||
import com.easyagents.core.model.chat.response.AiMessageResponse;
|
||||
import com.easyagents.core.model.exception.ModelException;
|
||||
import com.easyagents.core.prompt.SimplePrompt;
|
||||
import org.junit.Test;
|
||||
|
||||
public class OllamaChatModelTest {
|
||||
|
||||
@Test(expected = ModelException.class)
|
||||
public void testChat() {
|
||||
OllamaChatConfig config = new OllamaChatConfig();
|
||||
config.setEndpoint("http://localhost:11434");
|
||||
config.setModel("llama3");
|
||||
config.setLogEnabled(true);
|
||||
|
||||
ChatModel chatModel = new OllamaChatModel(config);
|
||||
String chat = chatModel.chat("Why is the sky blue?");
|
||||
System.out.println(">>>" + chat);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testChatStream() throws InterruptedException {
|
||||
OllamaChatConfig config = new OllamaChatConfig();
|
||||
config.setEndpoint("http://localhost:11434");
|
||||
config.setModel("llama3");
|
||||
config.setLogEnabled(true);
|
||||
|
||||
ChatModel chatModel = new OllamaChatModel(config);
|
||||
chatModel.chatStream("Why is the sky blue?", (context, response) -> System.out.println(response.getMessage().getContent()));
|
||||
|
||||
Thread.sleep(2000);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testFunctionCall1() throws InterruptedException {
|
||||
OllamaChatConfig config = new OllamaChatConfig();
|
||||
config.setEndpoint("http://localhost:11434");
|
||||
config.setModel("llama3.1");
|
||||
config.setLogEnabled(true);
|
||||
|
||||
ChatModel chatModel = new OllamaChatModel(config);
|
||||
|
||||
SimplePrompt prompt = new SimplePrompt("What's the weather like in Beijing?");
|
||||
prompt.addToolsFromClass(WeatherFunctions.class);
|
||||
AiMessageResponse response = chatModel.chat(prompt);
|
||||
|
||||
System.out.println(response.executeToolCallsAndGetResults());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testFunctionCall2() throws InterruptedException {
|
||||
OllamaChatConfig config = new OllamaChatConfig();
|
||||
config.setEndpoint("http://localhost:11434");
|
||||
config.setModel("llama3.1");
|
||||
config.setLogEnabled(true);
|
||||
|
||||
ChatModel chatModel = new OllamaChatModel(config);
|
||||
|
||||
SimplePrompt prompt = new SimplePrompt("What's the weather like in Beijing?");
|
||||
prompt.addToolsFromClass(WeatherFunctions.class);
|
||||
AiMessageResponse response = chatModel.chat(prompt);
|
||||
|
||||
if (response.hasToolCalls()) {
|
||||
prompt.setToolMessages(response.executeToolCallsAndGetToolMessages());
|
||||
AiMessageResponse response1 = chatModel.chat(prompt);
|
||||
System.out.println(response1.getMessage().getContent());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testVisionModel() {
|
||||
OllamaChatConfig config = new OllamaChatConfig();
|
||||
config.setEndpoint("http://localhost:11434");
|
||||
config.setModel("llava");
|
||||
config.setLogEnabled(true);
|
||||
|
||||
ChatModel chatModel = new OllamaChatModel(config);
|
||||
|
||||
SimplePrompt imagePrompt = new SimplePrompt("What's in the picture?");
|
||||
imagePrompt.addImageUrl("https://agentsflex.com/assets/images/logo.png");
|
||||
|
||||
AiMessageResponse response = chatModel.chat(imagePrompt);
|
||||
AiMessage message = response == null ? null : response.getMessage();
|
||||
System.out.println(message);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
package com.easyagents.llm.ollama;
|
||||
|
||||
import com.easyagents.core.model.chat.tool.annotation.ToolDef;
|
||||
import com.easyagents.core.model.chat.tool.annotation.ToolParam;
|
||||
|
||||
public class WeatherFunctions {
|
||||
|
||||
@ToolDef(name = "get_the_weather_info", description = "get the weather info")
|
||||
public static String getWeatherInfo(
|
||||
@ToolParam(name = "city", description = "the city name") String name
|
||||
) {
|
||||
return "Snowy days";
|
||||
}
|
||||
|
||||
|
||||
@ToolDef(name = "get_the_temperature", description = "get the temperature")
|
||||
public static String getTemperature(
|
||||
@ToolParam(name = "city", description = "the city name") String name
|
||||
) {
|
||||
return "The temperature in " + name + " is 15°C";
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user