chatglm-sdk-java was built to help developers integrate the Zhipu AI (智谱AI) large models faster and more easily. You are welcome 👏🏻 to contribute features you need on top of the Zhipu API.
The SDK is designed around a Session model, with a factory 🏭 for creating sessions. The code is clear, easy to extend, and easy to maintain. Your PR/ISSUE contributions 💐 will make AI shine even brighter. Thanks to the Zhipu AI team.
Author: 小傅哥 - personal blog bugstack.cn, architect at a major internet company, author of the books 《重学Java设计模式》 and 《手写MyBatis:渐进式源码实践》.
Search Baidu for: 小傅哥bugstack
Usage: integrate via code in your application
Testing: generate a Token and call the AI service directly over HTTP
Published to the Maven repository
<dependency>
    <groupId>cn.bugstack</groupId>
    <artifactId>chatglm-sdk-java</artifactId>
    <version>2.2</version>
</dependency>
private OpenAiSession openAiSession;

@Before
public void test_OpenAiSessionFactory() {
    // 1. Configuration
    Configuration configuration = new Configuration();
    configuration.setApiHost("https://open.bigmodel.cn/");
    configuration.setApiSecretKey("62ddec38b1d0b9a7b0fddaf271e6ed90.HpD0SUBUlvqd05ey");
    configuration.setLevel(HttpLoggingInterceptor.Level.BODY);
    // 2. Session factory
    OpenAiSessionFactory factory = new DefaultOpenAiSessionFactory(configuration);
    // 3. Open a session
    this.openAiSession = factory.openSession();
}
/**
 * Streaming chat;
 * 1. By default isCompatible = true, which keeps the response compatible with both the old and new data formats
 * 2. GLM_3_5_TURBO and GLM_4 support plugins such as web search
 */
@Test
public void test_completions() throws Exception {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    // Request parameters: model and prompt
    ChatCompletionRequest request = new ChatCompletionRequest();
    request.setModel(Model.GLM_3_5_TURBO); // chatGLM_6b_SSE, chatglm_lite, chatglm_lite_32k, chatglm_std, chatglm_pro
    request.setIncremental(false);
    // Whether to keep the response compatible with the old format. The GLM_3_5_TURBO and GLM_4 models released in January 2024 return a different structure than earlier models; set true for compatibility.
    request.setIsCompatible(true);
    // glm-3-turbo and glm-4 (released in January 2024) support function calling, knowledge base, and web search
    request.setTools(new ArrayList<ChatCompletionRequest.Tool>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Tool.builder()
                    .type(ChatCompletionRequest.Tool.Type.web_search)
                    .webSearch(ChatCompletionRequest.Tool.WebSearch.builder().enable(true).searchQuery("小傅哥").build())
                    .build());
        }
    });
    request.setPrompt(new ArrayList<ChatCompletionRequest.Prompt>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content("小傅哥的是谁")
                    .build());
        }
    });
    // Send the request
    openAiSession.completions(request, new EventSourceListener() {
        @Override
        public void onEvent(EventSource eventSource, @Nullable String id, @Nullable String type, String data) {
            ChatCompletionResponse response = JSON.parseObject(data, ChatCompletionResponse.class);
            log.info("onEvent result: {}", response.getData());
            // type is the message type: add = incremental, finish = done, error = error, interrupted = interrupted
            if (EventType.finish.getCode().equals(type)) {
                ChatCompletionResponse.Meta meta = JSON.parseObject(response.getMeta(), ChatCompletionResponse.Meta.class);
                log.info("[Output finished] Tokens {}", JSON.toJSONString(meta));
            }
        }

        @Override
        public void onClosed(EventSource eventSource) {
            log.info("Chat completed");
            countDownLatch.countDown();
        }

        @Override
        public void onFailure(EventSource eventSource, @Nullable Throwable t, @Nullable Response response) {
            log.info("Chat error");
            countDownLatch.countDown();
        }
    });
    // Wait for the stream to finish
    countDownLatch.await();
}
/**
 * Streaming chat;
 * 1. Compared with test_completions, the only difference is isCompatible = false, which uses the new data structure; onEvent then handles the received data differently
 * 2. Without the old-format compatibility, only GLM_3_5_TURBO and GLM_4 are supported; other models will fail to parse
 */
@Test
public void test_completions_new() throws Exception {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    // Request parameters: model and prompt
    ChatCompletionRequest request = new ChatCompletionRequest();
    request.setModel(Model.GLM_3_5_TURBO); // GLM_3_5_TURBO, GLM_4
    request.setIsCompatible(false);
    // glm-3-turbo and glm-4 (released in January 2024) support function calling, knowledge base, and web search
    request.setTools(new ArrayList<ChatCompletionRequest.Tool>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Tool.builder()
                    .type(ChatCompletionRequest.Tool.Type.web_search)
                    .webSearch(ChatCompletionRequest.Tool.WebSearch.builder().enable(true).searchQuery("小傅哥").build())
                    .build());
        }
    });
    request.setMessages(new ArrayList<ChatCompletionRequest.Prompt>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content("小傅哥的是谁")
                    .build());
        }
    });
    // Send the request
    openAiSession.completions(request, new EventSourceListener() {
        @Override
        public void onEvent(EventSource eventSource, @Nullable String id, @Nullable String type, String data) {
            if ("[DONE]".equals(data)) {
                log.info("[Output finished] Tokens {}", JSON.toJSONString(data));
                return;
            }
            ChatCompletionResponse response = JSON.parseObject(data, ChatCompletionResponse.class);
            log.info("Result: {}", JSON.toJSONString(response));
        }

        @Override
        public void onClosed(EventSource eventSource) {
            log.info("Chat completed");
            countDownLatch.countDown();
        }

        @Override
        public void onFailure(EventSource eventSource, @Nullable Throwable t, @Nullable Response response) {
            log.error("Chat failed", t);
            countDownLatch.countDown();
        }
    });
    // Wait for the stream to finish
    countDownLatch.await();
}
@Test
public void test_completions_4v() throws Exception {
    CountDownLatch countDownLatch = new CountDownLatch(1);
    // Request parameters: model and prompt
    ChatCompletionRequest request = new ChatCompletionRequest();
    request.setModel(Model.GLM_4V);
    request.setStream(true);
    request.setMessages(new ArrayList<ChatCompletionRequest.Prompt>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            // content as a plain string
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content("这个图片写了什么")
                    .build());
            // content as an object
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content(ChatCompletionRequest.Prompt.Content.builder()
                            .type(ChatCompletionRequest.Prompt.Content.Type.text.getCode())
                            .text("这是什么图片")
                            .build())
                    .build());
            // content as an object, uploading an image; images can be passed as url or base64
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content(ChatCompletionRequest.Prompt.Content.builder()
                            .type(ChatCompletionRequest.Prompt.Content.Type.image_url.getCode())
                            .imageUrl(ChatCompletionRequest.Prompt.Content.ImageUrl.builder().url("https://bugstack.cn/images/article/project/chatgpt/chatgpt-extra-231011-01.png").build())
                            .build())
                    .build());
        }
    });
    openAiSession.completions(request, new EventSourceListener() {
        @Override
        public void onEvent(EventSource eventSource, @Nullable String id, @Nullable String type, String data) {
            if ("[DONE]".equals(data)) {
                log.info("[Output finished] Tokens {}", JSON.toJSONString(data));
                return;
            }
            ChatCompletionResponse response = JSON.parseObject(data, ChatCompletionResponse.class);
            log.info("Result: {}", JSON.toJSONString(response));
        }

        @Override
        public void onClosed(EventSource eventSource) {
            log.info("Chat completed");
            countDownLatch.countDown();
        }

        @Override
        public void onFailure(EventSource eventSource, @Nullable Throwable t, @Nullable Response response) {
            log.error("Chat failed", t);
            countDownLatch.countDown();
        }
    });
    // Wait for the stream to finish
    countDownLatch.await();
}
@Test
public void test_completions_future() throws Exception {
    // Request parameters: model and prompt
    ChatCompletionRequest request = new ChatCompletionRequest();
    request.setModel(Model.CHATGLM_TURBO); // chatGLM_6b_SSE, chatglm_lite, chatglm_lite_32k, chatglm_std, chatglm_pro
    request.setPrompt(new ArrayList<ChatCompletionRequest.Prompt>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content("1+1")
                    .build());
        }
    });
    CompletableFuture<String> future = openAiSession.completions(request);
    String response = future.get();
    log.info("Result: {}", response);
}
@Test
public void test_completions_sync() throws Exception {
    // Request parameters: model and prompt
    ChatCompletionRequest request = new ChatCompletionRequest();
    request.setModel(Model.GLM_4V);
    request.setPrompt(new ArrayList<ChatCompletionRequest.Prompt>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content("小傅哥是谁")
                    .build());
        }
    });
    // glm-3-turbo and glm-4 (released in January 2024) support function calling, knowledge base, and web search
    request.setTools(new ArrayList<ChatCompletionRequest.Tool>() {
        private static final long serialVersionUID = -7988151926241837899L;

        {
            add(ChatCompletionRequest.Tool.builder()
                    .type(ChatCompletionRequest.Tool.Type.web_search)
                    .webSearch(ChatCompletionRequest.Tool.WebSearch.builder().enable(true).searchQuery("小傅哥").build())
                    .build());
        }
    });
    ChatCompletionSyncResponse response = openAiSession.completionsSync(request);
    log.info("Result: {}", JSON.toJSONString(response));
}
@Test
public void test_genImages() throws Exception {
    ImageCompletionRequest request = new ImageCompletionRequest();
    request.setModel(Model.COGVIEW_3);
    request.setPrompt("画个小狗");
    ImageCompletionResponse response = openAiSession.genImages(request);
    log.info("Result: {}", JSON.toJSONString(response));
}
@Test
public void test_curl() {
    // 1. Configuration
    Configuration configuration = new Configuration();
    configuration.setApiHost("https://open.bigmodel.cn/");
    configuration.setApiSecretKey("4e087e4135306ef4a676f0cce3cee560.sgP2D****");
    // 2. Get a Token
    String token = BearerTokenUtils.getToken(configuration.getApiKey(), configuration.getApiSecret());
    log.info("1. Apply for an ApiSecretKey on the Zhipu AI website and configure it in this test class by replacing the setApiSecretKey value: https://open.bigmodel.cn/usercenter/apikeys");
    log.info("2. Run test_curl to get a token: {}", token);
    log.info("3. Copy the obtained token into curl.sh, after Authorization: Bearer");
    log.info("4. After step 3, run curl.sh directly, or copy its contents into a console/terminal/ApiPost");
}
curl -X POST \
  -H "Authorization: Bearer <paste the token here and remove the angle brackets>" \
  -H "Content-Type: application/json" \
  -H "User-Agent: Mozilla/4.0 (compatible; MSIE 5.0; Windows NT; DigExt)" \
  -H "Accept: text/event-stream" \
  -d '{
    "top_p": 0.7,
    "sseFormat": "data",
    "temperature": 0.9,
    "incremental": true,
    "request_id": "xfg-1696992276607",
    "prompt": [
      {
        "role": "user",
        "content": "写个java冒泡排序"
      }
    ]
  }' \
  https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_lite/sse-invoke
Spring Boot configuration class
@Configuration
@EnableConfigurationProperties(ChatGLMSDKConfigProperties.class)
public class ChatGLMSDKConfig {

    @Bean
    @ConditionalOnProperty(value = "chatglm.sdk.config.enable", havingValue = "true", matchIfMissing = false)
    public OpenAiSession openAiSession(ChatGLMSDKConfigProperties properties) {
        // 1. Configuration
        cn.bugstack.chatglm.session.Configuration configuration = new cn.bugstack.chatglm.session.Configuration();
        configuration.setApiHost(properties.getApiHost());
        configuration.setApiSecretKey(properties.getApiSecretKey());
        // 2. Session factory
        OpenAiSessionFactory factory = new DefaultOpenAiSessionFactory(configuration);
        // 3. Open a session
        return factory.openSession();
    }

}
@Data
@ConfigurationProperties(prefix = "chatglm.sdk.config", ignoreInvalidFields = true)
public class ChatGLMSDKConfigProperties {

    /** Switch: true = enabled, false = disabled */
    private boolean enable;
    /** API host */
    private String apiHost;
    /** ApiSecretKey you applied for, e.g. sk-*** */
    private String apiSecretKey;

}
@Autowired(required = false)
private OpenAiSession openAiSession;
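With the session injected, a minimal sketch of calling the model from a Spring component could look like the following (the ChatService class and its ask method are illustrative, not part of the SDK; it reuses the synchronous completionsSync call shown in the tests above):
@Service
public class ChatService {

    @Autowired(required = false)
    private OpenAiSession openAiSession;

    public String ask(String question) throws Exception {
        // Build a simple single-turn request
        ChatCompletionRequest request = new ChatCompletionRequest();
        request.setModel(Model.GLM_3_5_TURBO);
        request.setPrompt(new ArrayList<ChatCompletionRequest.Prompt>() {{
            add(ChatCompletionRequest.Prompt.builder()
                    .role(Role.user.getCode())
                    .content(question)
                    .build());
        }});
        // Synchronous call; the raw response is returned as JSON here
        ChatCompletionSyncResponse response = openAiSession.completionsSync(request);
        return JSON.toJSONString(response);
    }
}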
yml configuration
# ChatGLM SDK Config
chatglm:
  sdk:
    config:
      # Switch: true = enabled, false = disabled
      enable: false
      # Official API host
      api-host: https://open.bigmodel.cn/
      # Apply for a key at https://open.bigmodel.cn/usercenter/apikeys
      api-secret-key: 4e087e4135306ef4a676f0cce3cee560.sgP2DUs*****
Integration example: https://bugstack.cn/md/road-map/mock.html