背景:
最近在调用千帆大模型,调用了它的接口,本文只是做一个记录.
正文:
1、描述
封装了千帆大模型的接口,采用流式响应的方式,减少用户的等待时间。
备注:以前没有使用过http的流式响应,也是感觉比较新奇,很好玩的事情,特意分享给大家。
2、代码:
/**
 * Service that calls the Qianfan (ERNIE Bot 8K) chat endpoint using a
 * streaming HTTP response, logging and collecting the result line by line.
 *
 * @author twilight
 * @since V1.0
 */
@Slf4j
@Service
public class HttpStreamServiceImpl {

    /**
     * Builds the chat request body and sends it to the model endpoint.
     *
     * @param accessToken OAuth access token appended to the request URL
     */
    public void chat(String accessToken) {
        try {
            // Assemble the JSON request body.
            RequestBody funcCallReqBody = getRequestBody();
            // Fire the request and consume the streamed response.
            sendRequest(funcCallReqBody, accessToken);
        } catch (Exception e) {
            log.error("chat执行失败", e);
        }
    }

    /**
     * Builds the JSON {@link RequestBody} for the chat call.
     * {@code stream=true} asks the server for a chunked (line-by-line) response.
     *
     * @return the assembled JSON request body
     */
    protected RequestBody getRequestBody() {
        List<Message> messages = new ArrayList<>();
        Message chatMsg = new Message("RoleName", "content");
        messages.add(chatMsg);
        // Assemble the full request payload.
        ReqChatMessage reqMessage = new ReqChatMessage();
        reqMessage.setMessages(messages);
        reqMessage.setStream(true);
        reqMessage.setSystem("systemInfo");
        reqMessage.setUser_id("userID");
        String content = JSONObject.toJSONString(reqMessage);
        MediaType mediaType = MediaType.parse("application/json");
        return RequestBody.create(mediaType, content);
    }

    /**
     * Sends the request and reads the streamed response line by line.
     * Each line is logged as it arrives and accumulated for the caller.
     *
     * @param body        JSON request body
     * @param accessToken OAuth access token appended to the URL
     * @return the concatenated response lines, one per line; empty string on failure
     */
    protected String sendRequest(RequestBody body, String accessToken) {
        StringBuilder respResult = new StringBuilder();
        Request request = new Request.Builder()
                .url("https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_bot_8k?access_token=" +
                        accessToken)
                .method("POST", body)
                .addHeader("Content-Type", "application/json")
                .build();
        try (Response response = OkHttpUtil.HTTP_CLIENT.newCall(request).execute()) {
            if (!response.isSuccessful()) {
                throw new IOException("Unexpected code " + response);
            }
            if (response.body() == null) {
                // Guard against an NPE on a success status with no body.
                throw new IOException("Empty response body: " + response);
            }
            // response.body().source() is already a BufferedSource; no extra Okio.buffer needed.
            BufferedSource source = response.body().source();
            String line;
            while ((line = source.readUtf8Line()) != null) {
                // Stream each line as it arrives and keep it for the caller
                // (previously the lines were printed and the return value was always "").
                log.info("{}", line);
                respResult.append(line).append('\n');
            }
        } catch (Exception e) {
            log.error("sendRequest请求执行失败", e);
        }
        return respResult.toString();
    }
}