升级langchain4j到1.3.0,解决很多模型不支持问题和MCP支持

This commit is contained in:
JEECG
2025-10-13 11:27:09 +08:00
parent 107e13c8af
commit b9f6f6dc53
6 changed files with 46 additions and 33 deletions

View File

@@ -31,10 +31,28 @@
</repositories> </repositories>
<properties> <properties>
<langchain4j.version>0.35.0</langchain4j.version>
<apache-tika.version>2.9.1</apache-tika.version> <apache-tika.version>2.9.1</apache-tika.version>
</properties> </properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-bom</artifactId>
<version>1.3.0</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-community-bom</artifactId>
<version>1.3.0-beta9</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies> <dependencies>
<!-- system单体 api--> <!-- system单体 api-->
<dependency> <dependency>
@@ -55,7 +73,7 @@
<dependency> <dependency>
<groupId>org.jeecgframework.boot3</groupId> <groupId>org.jeecgframework.boot3</groupId>
<artifactId>jeecg-aiflow</artifactId> <artifactId>jeecg-aiflow</artifactId>
<version>1.2.0</version> <version>3.8.3.1</version>
</dependency> </dependency>
<!-- begin 这两个依赖太多每个包50M左右如果你发布需要使用请把<scope>provided</scope>删掉 --> <!-- begin 这两个依赖太多每个包50M左右如果你发布需要使用请把<scope>provided</scope>删掉 -->
@@ -72,7 +90,6 @@
<scope>provided</scope> <scope>provided</scope>
</dependency> </dependency>
<!-- end 这两个依赖太多每个包50M左右如果你发布需要使用请把<scope>provided</scope>删掉 --> <!-- end 这两个依赖太多每个包50M左右如果你发布需要使用请把<scope>provided</scope>删掉 -->
<!-- aiflow 脚本依赖 --> <!-- aiflow 脚本依赖 -->
<dependency> <dependency>
<groupId>com.yomahub</groupId> <groupId>com.yomahub</groupId>
@@ -109,13 +126,15 @@
<!-- langChain4j model support --> <!-- langChain4j model support -->
<dependency> <dependency>
<groupId>dev.langchain4j</groupId> <groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-ollama</artifactId> <artifactId>langchain4j-open-ai</artifactId>
<version>${langchain4j.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>dev.langchain4j</groupId> <groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-zhipu-ai</artifactId> <artifactId>langchain4j-ollama</artifactId>
<version>${langchain4j.version}</version> </dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-community-zhipu-ai</artifactId>
<exclusions> <exclusions>
<exclusion> <exclusion>
<artifactId>checker-qual</artifactId> <artifactId>checker-qual</artifactId>
@@ -129,13 +148,11 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>dev.langchain4j</groupId> <groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-qianfan</artifactId> <artifactId>langchain4j-community-qianfan</artifactId>
<version>${langchain4j.version}</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>dev.langchain4j</groupId> <groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-dashscope</artifactId> <artifactId>langchain4j-community-dashscope</artifactId>
<version>${langchain4j.version}</version>
<exclusions> <exclusions>
<exclusion> <exclusion>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>
@@ -151,7 +168,7 @@
<dependency> <dependency>
<groupId>org.jeecgframework</groupId> <groupId>org.jeecgframework</groupId>
<artifactId>langchain4j-pgvector</artifactId> <artifactId>langchain4j-pgvector</artifactId>
<version>${langchain4j.version}</version> <version>1.3.0-beta9</version>
</dependency> </dependency>
<!-- langChain4j Document Parser --> <!-- langChain4j Document Parser -->
<dependency> <dependency>

View File

@@ -71,7 +71,7 @@ public class AiragAppServiceImpl extends ServiceImpl<AiragAppMapper, AiragApp> i
AtomicBoolean isThinking = new AtomicBoolean(false); AtomicBoolean isThinking = new AtomicBoolean(false);
String requestId = UUIDGenerator.generate(); String requestId = UUIDGenerator.generate();
// ai聊天响应逻辑 // ai聊天响应逻辑
tokenStream.onNext((String resMessage) -> { tokenStream.onPartialResponse((String resMessage) -> {
// 兼容推理模型 // 兼容推理模型
if ("<think>".equals(resMessage)) { if ("<think>".equals(resMessage)) {
isThinking.set(true); isThinking.set(true);
@@ -99,9 +99,9 @@ public class AiragAppServiceImpl extends ServiceImpl<AiragAppMapper, AiragApp> i
throw new RuntimeException(e); throw new RuntimeException(e);
} }
}) })
.onComplete((responseMessage) -> { .onCompleteResponse((responseMessage) -> {
// 记录ai的回复 // 记录ai的回复
AiMessage aiMessage = responseMessage.content(); AiMessage aiMessage = responseMessage.aiMessage();
FinishReason finishReason = responseMessage.finishReason(); FinishReason finishReason = responseMessage.finishReason();
String respText = aiMessage.text(); String respText = aiMessage.text();
if (FinishReason.STOP.equals(finishReason) || null == finishReason) { if (FinishReason.STOP.equals(finishReason) || null == finishReason) {
@@ -114,9 +114,6 @@ public class AiragAppServiceImpl extends ServiceImpl<AiragAppMapper, AiragApp> i
throw new RuntimeException(e); throw new RuntimeException(e);
} }
closeSSE(emitter, eventData); closeSSE(emitter, eventData);
} else if (FinishReason.TOOL_EXECUTION.equals(finishReason)) {
// 需要执行工具
// TODO author: chenrui for: date:2025/3/7
} else { } else {
// 异常结束 // 异常结束
log.error("调用模型异常:" + respText); log.error("调用模型异常:" + respText);

View File

@@ -860,7 +860,7 @@ public class AiragChatServiceImpl implements IAiragChatService {
*/ */
AtomicBoolean isThinking = new AtomicBoolean(false); AtomicBoolean isThinking = new AtomicBoolean(false);
// ai聊天响应逻辑 // ai聊天响应逻辑
chatStream.onNext((String resMessage) -> { chatStream.onPartialResponse((String resMessage) -> {
// 兼容推理模型 // 兼容推理模型
if ("<think>".equals(resMessage)) { if ("<think>".equals(resMessage)) {
isThinking.set(true); isThinking.set(true);
@@ -886,12 +886,12 @@ public class AiragChatServiceImpl implements IAiragChatService {
return; return;
} }
sendMessage2Client(emitter, eventData); sendMessage2Client(emitter, eventData);
}).onComplete((responseMessage) -> { }).onCompleteResponse((responseMessage) -> {
// 打印流程耗时日志 // 打印流程耗时日志
printChatDuration(requestId, "LLM输出消息完成"); printChatDuration(requestId, "LLM输出消息完成");
AiragLocalCache.remove(AiragConsts.CACHE_TYPE_SSE_SEND_TIME, requestId); AiragLocalCache.remove(AiragConsts.CACHE_TYPE_SSE_SEND_TIME, requestId);
// 记录ai的回复 // 记录ai的回复
AiMessage aiMessage = responseMessage.content(); AiMessage aiMessage = responseMessage.aiMessage();
FinishReason finishReason = responseMessage.finishReason(); FinishReason finishReason = responseMessage.finishReason();
String respText = aiMessage.text(); String respText = aiMessage.text();
// sse // sse

View File

@@ -105,14 +105,14 @@ public class AIChatHandler implements IAIChatHandler {
// langchain4j 异常友好提示 // langchain4j 异常友好提示
String errMsg = "调用大模型接口失败,详情请查看后台日志。"; String errMsg = "调用大模型接口失败,详情请查看后台日志。";
if (oConvertUtils.isNotEmpty(e.getMessage())) { if (oConvertUtils.isNotEmpty(e.getMessage())) {
// // 根据常见异常关键字做细致翻译 // 根据常见异常关键字做细致翻译
// for (Map.Entry<String, String> entry : MODEL_ERROR_MAP.entrySet()) { for (Map.Entry<String, String> entry : MODEL_ERROR_MAP.entrySet()) {
// String key = entry.getKey(); String key = entry.getKey();
// String value = entry.getValue(); String value = entry.getValue();
// if (errMsg.contains(key)) { if (errMsg.contains(key)) {
// errMsg = value; errMsg = value;
// } }
// } }
} }
log.error("AI模型调用异常: {}", errMsg, e); log.error("AI模型调用异常: {}", errMsg, e);
throw new JeecgBootException(errMsg); throw new JeecgBootException(errMsg);

View File

@@ -9,7 +9,6 @@ import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.router.DefaultQueryRouter; import dev.langchain4j.rag.query.router.DefaultQueryRouter;
@@ -167,7 +166,7 @@ public class EmbeddingHandler implements IEmbeddingHandler {
// 删除旧数据 // 删除旧数据
embeddingStore.removeAll(metadataKey(EMBED_STORE_METADATA_DOCID).isEqualTo(doc.getId())); embeddingStore.removeAll(metadataKey(EMBED_STORE_METADATA_DOCID).isEqualTo(doc.getId()));
// 分段器 // 分段器
DocumentSplitter splitter = DocumentSplitters.recursive(DEFAULT_SEGMENT_SIZE, DEFAULT_OVERLAP_SIZE, new OpenAiTokenizer()); DocumentSplitter splitter = DocumentSplitters.recursive(DEFAULT_SEGMENT_SIZE, DEFAULT_OVERLAP_SIZE);
// 分段并存储 // 分段并存储
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(splitter) .documentSplitter(splitter)

View File

@@ -524,7 +524,7 @@
<dependency> <dependency>
<groupId>org.jeecgframework.boot3</groupId> <groupId>org.jeecgframework.boot3</groupId>
<artifactId>jeecg-boot-starter-chatgpt</artifactId> <artifactId>jeecg-boot-starter-chatgpt</artifactId>
<version>${jeecgboot.version}</version> <version>3.8.3.1</version>
</dependency> </dependency>
<!--flyway 支持 mysql5.7+、MariaDB10.3.16--> <!--flyway 支持 mysql5.7+、MariaDB10.3.16-->
<!--mysql5.6需要把版本号改成5.2.1--> <!--mysql5.6需要把版本号改成5.2.1-->