diff --git a/pom.xml b/pom.xml
index e1a762f954910ff0b3223cca75d1c6a4af56cf53..2ae1a38baec0da0965c1ee649652ea4ffe44b798 100644
--- a/pom.xml
+++ b/pom.xml
@@ -48,6 +48,7 @@
         <module>tinyflow-support-springai</module>
         <module>tinyflow-support-langchain4j</module>
         <module>tinyflow-support-agentsflex</module>
+        <module>tinyflow-support-solonai</module>
diff --git a/tinyflow-support-solonai/pom.xml b/tinyflow-support-solonai/pom.xml
new file mode 100644
index 0000000000000000000000000000000000000000..b3657e6434b7c191425c4458dcfc47ecaa97fb09
--- /dev/null
+++ b/tinyflow-support-solonai/pom.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>dev.tinyflow</groupId>
+        <artifactId>tinyflow-java</artifactId>
+        <version>2.0.0-beta.1</version>
+    </parent>
+
+    <artifactId>tinyflow-support-solonai</artifactId>
+    <name>tinyflow-support-solonai</name>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>dev.tinyflow</groupId>
+            <artifactId>tinyflow-core</artifactId>
+            <version>2.0.0-beta.1</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.noear</groupId>
+            <artifactId>solon-ai</artifactId>
+            <version>3.7.2</version>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/tinyflow-support-solonai/src/main/java/dev/tinyflow/solon/provider/SolonAiLlm.java b/tinyflow-support-solonai/src/main/java/dev/tinyflow/solon/provider/SolonAiLlm.java
new file mode 100644
index 0000000000000000000000000000000000000000..e4b5b5acf693396f1c1fae5975abb71e90e4a04b
--- /dev/null
+++ b/tinyflow-support-solonai/src/main/java/dev/tinyflow/solon/provider/SolonAiLlm.java
@@ -0,0 +1,97 @@
+package dev.tinyflow.solon.provider;
+
+import dev.tinyflow.core.chain.Chain;
+import dev.tinyflow.core.llm.Llm;
+import dev.tinyflow.core.node.LlmNode;
+import dev.tinyflow.core.util.StringUtil;
+import org.noear.solon.ai.chat.ChatModel;
+import org.noear.solon.ai.chat.ChatResponse;
+import org.noear.solon.ai.chat.message.ChatMessage;
+import org.noear.solon.ai.chat.prompt.Prompt;
+import org.noear.solon.ai.media.Image;
+import org.noear.solon.core.util.Assert;
+
+import java.util.List;
+
+/**
+ * {@link Llm} adapter backed by a Solon AI {@link ChatModel}.
+ *
+ * @author noear 2025/11/27 created
+ */
+public class SolonAiLlm implements Llm {
+    private ChatModel chatModel;
+
+    public ChatModel getChatModel() {
+        return chatModel;
+    }
+
+    public void setChatModel(ChatModel chatModel) {
+        this.chatModel = chatModel;
+    }
+
+    /**
+     * Builds a prompt from {@code llmNode}'s system prompt and {@code messageInfo}'s
+     * message/images, sends it to the configured {@link ChatModel}, and returns the reply text.
+     *
+     * @param messageInfo user message plus optional image URLs
+     * @param options     sampling options (temperature, top-k/p, max tokens, seed, stop)
+     * @param llmNode     workflow node carrying the system prompt
+     * @param chain       executing chain (required by the {@link Llm} contract; unused here)
+     * @return the model's text content
+     * @throws RuntimeException if the call fails, returns an error, or yields no content
+     */
+    @Override
+    public String chat(MessageInfo messageInfo, ChatOptions options, LlmNode llmNode, Chain chain) {
+        Prompt prompt = new Prompt();
+
+        // System prompt
+        if (StringUtil.hasText(llmNode.getSystemPrompt())) {
+            prompt.addMessage(ChatMessage.ofSystem(llmNode.getSystemPrompt()));
+        }
+
+        // User prompt
+        if (StringUtil.hasText(messageInfo.getMessage())) {
+            prompt.addMessage(ChatMessage.ofUser(messageInfo.getMessage()));
+        }
+
+        // User images (each URL becomes its own user message)
+        List<String> imageUrls = messageInfo.getImages();
+        if (Assert.isNotEmpty(imageUrls)) {
+            for (String url : imageUrls) {
+                prompt.addMessage(ChatMessage.ofUser(Image.ofUrl(url)));
+            }
+        }
+
+        ChatResponse response = null;
+
+        try {
+            response = chatModel.prompt(prompt)
+                    .options(o -> {
+                        o.temperature(options.getTemperature());
+                        o.top_k(options.getTopK());
+                        o.top_p(options.getTopP());
+                        o.max_tokens(options.getMaxTokens());
+                        o.optionAdd("seed", options.getSeed());
+                        o.optionAdd("stop", options.getStop());
+                    })
+                    .call();
+
+        } catch (Exception ex) {
+            throw new RuntimeException("SolonAiLlm error: " + ex.getMessage(), ex);
+        }
+
+        if (response == null) {
+            throw new RuntimeException("SolonAiLlm can not get response!");
+        }
+
+        if (response.getError() != null) {
+            throw new RuntimeException("SolonAiLlm error: " + response.getError().getMessage(), response.getError());
+        }
+
+        if (response.hasContent()) {
+            return response.getContent();
+        }
+
+        throw new RuntimeException("SolonAiLlm can not get aiMessage!");
+    }
+}
\ No newline at end of file