I'm integrating with Baidu's ERNIE Bot (文心一言). The frontend sends a fetch request, and the backend uses AsyncTcpConnection to call the API and stream the data back, but the frontend request never completes.
/**
 * Streaming conversation
 * Requires: use Workerman\Connection\AsyncTcpConnection; use Workerman\Protocols\Http\Chunk;
 */
public function conversation()
{
    // Streaming conversation API endpoint
    $conversationApi = "https://agentapi.baidu.com/assistant/conversation?appId={$this->client_id}&secretKey={$this->client_secret}";
    $user_connection = request()->connection;
    $post = request()->post();
    $adminUser = request()->adminUser;
    try {
        $parse_url = parse_url($conversationApi);
        // AsyncTcpConnection needs an explicit port: 443 for https/wss, otherwise 80
        $port = $parse_url['port'] ?? (in_array($parse_url['scheme'], ['https', 'wss']) ? 443 : 80);
        $connection = new AsyncTcpConnection("tcp://{$parse_url['host']}:{$port}", ['ssl' => [
            'verify_peer' => false,
        ]]);
        $connection->transport = in_array($parse_url['scheme'], ['wss', 'https']) ? 'ssl' : 'tcp';
        // Send the request payload once the connection is established
        $connection->onConnect = function ($connection) use ($parse_url, $post, $adminUser) {
            // Prepare the POST body
            $postData = json_encode([
                'message' => [
                    'content' => [
                        'type' => 'text',
                        'value' => [
                            'showText' => $post['content'],
                        ],
                    ],
                ],
                'source' => $this->client_id,
                'from' => 'openapi',
                'openId' => $adminUser['tel'] . '_' . $adminUser['id'],
            ]);
            // Build the raw HTTP POST request
            $httpRequest = "POST {$parse_url['path']}?{$parse_url['query']} HTTP/1.1\r\n" .
                "Host: agentapi.baidu.com\r\n" .
                "Content-Type: application/json\r\n" .
                "Content-Length: " . strlen($postData) . "\r\n" .
                "Connection: close\r\n\r\n" .
                $postData;
            // Send the HTTP POST request
            $connection->send($httpRequest);
        };
        // When data is received from the upstream connection
        $connection->onMessage = function ($connection, $http_buffer) use ($user_connection) {
            // Only frames containing event:message carry message payloads
            if (strpos($http_buffer, 'event:message') !== false) {
                // Split on the event marker
                $buffer = explode('event:message', $http_buffer);
                // Take the last segment
                $buffer = end($buffer);
                // Trim the trailing newline
                $buffer = rtrim($buffer);
                // Forward the chunk to the browser
                $user_connection->send(new Chunk($buffer));
            }
        };
        // When the upstream connection closes, send an empty chunk to end the client response
        $connection->onClose = function ($connection) use ($user_connection) {
            $user_connection->send(new Chunk(''));
        };
        // On connection error, also terminate the client response
        $connection->onError = function ($connection, $code, $msg) use ($user_connection) {
            $user_connection->send(new Chunk(''));
        };
        // Establish the connection asynchronously
        $connection->connect();
    } catch (\Exception $e) {
        abort($e->getMessage());
    }
    // Return a chunked response immediately; data is pushed to the client asynchronously
    return response()->withHeaders([
        'Content-Type' => 'application/octet-stream',
        'Transfer-Encoding' => 'chunked',
    ]);
}
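Note that TCP delivery does not line up with SSE event boundaries: a single onMessage call may contain half an event or several events, so splitting on event:message alone can drop or merge data. Below is a minimal, more defensive sketch of the onMessage handler; the sseBuffer property and the "\n\n" separator are assumptions based on standard SSE framing, not documented behaviour of the Baidu agent API.

// Accumulate bytes per connection and only forward complete SSE events
$connection->onMessage = function ($connection, $http_buffer) use ($user_connection) {
    $connection->sseBuffer = ($connection->sseBuffer ?? '') . $http_buffer;
    // A complete SSE event is terminated by a blank line
    while (($pos = strpos($connection->sseBuffer, "\n\n")) !== false) {
        $event = substr($connection->sseBuffer, 0, $pos);
        $connection->sseBuffer = substr($connection->sseBuffer, $pos + 2);
        if (strpos($event, 'event:message') === false) {
            continue; // ignore other event types (pings, status, etc.)
        }
        foreach (explode("\n", $event) as $line) {
            if (strncmp($line, 'data:', 5) === 0) {
                // Forward only the data payload to the browser
                $user_connection->send(new Chunk(trim(substr($line, 5)) . "\n"));
            }
        }
    }
};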
Just use webman/openai directly.
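For reference, a minimal sketch of that approach, roughly following the webman/openai README (Webman\Openai\Chat with stream/complete callbacks). The API endpoint, model, key and request fields below are placeholders, and the exact option names may differ in the current package version, so treat this as an outline rather than copy-paste code:

use support\Request;
use Webman\Openai\Chat;
use Workerman\Protocols\Http\Chunk;

class ChatController
{
    public function completions(Request $request)
    {
        $connection = $request->connection;
        // Placeholder endpoint/key; point this at an OpenAI-compatible API
        $chat = new Chat(['apikey' => 'sk-xxx', 'api' => 'https://api.openai.com']);
        $chat->completions(
            [
                'model' => 'gpt-3.5-turbo',
                'stream' => true,
                'messages' => [['role' => 'user', 'content' => $request->post('content', 'hello')]],
            ],
            [
                // Called for each streamed delta; push it to the browser as a chunk
                'stream' => function ($data) use ($connection) {
                    $connection->send(new Chunk(json_encode($data, JSON_UNESCAPED_UNICODE) . "\n"));
                },
                // Called when the upstream finishes; an empty chunk ends the client response
                'complete' => function ($result, $response) use ($connection) {
                    $connection->send(new Chunk(''));
                },
            ]
        );
        // Return the chunked response immediately; data is streamed asynchronously
        return response()->withHeaders([
            'Transfer-Encoding' => 'chunked',
        ]);
    }
}

The moving parts are the same as in the hand-rolled version: the controller returns a chunked response, each streamed piece is sent with new Chunk(...), and an empty Chunk('') terminates the response so the browser's fetch actually completes.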