我想从 node.js 中的 gpt api 流式传输响应。我可以使用以下代码从 OpenAI GPT api 流式传输响应:
import OpenAI from "openai";

// Create the client; prefer reading the key from the environment rather than hard-coding it.
const openai = new OpenAI({
  apiKey: 'my_api_key',
});

// With `stream: true` the SDK returns an async iterable of chunks instead of
// a single completion object.
const response = await openai.chat.completions.create({
  model: "gpt-3.5-turbo",
  messages: [
    {
      role: "user",
      content: "Generate Lorem Ipsum text.",
    },
  ],
  temperature: 0,
  stream: true,
});

// The stream is directly async-iterable — no need for
// response[Symbol.asyncIterator](). The first/last chunks carry a delta with
// no `content`, so guard with optional chaining instead of an empty `catch`
// that would also swallow real network/API errors.
for await (const chunk of response) {
  const text = chunk.choices[0]?.delta?.content;
  if (text !== undefined) {
    process.stdout.write(text);
  }
}
如何使用 azure 重写此代码?我尝试过类似的方法,但不起作用:
const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");

const endpoint = 'my_endpoint';
const azureApiKey = 'my_azure_gpt_api_key';

const messages = [
  {
    role: "user",
    content: "Generate Lorem Ipsum text.",
  },
];

/**
 * Streams a chat completion from an Azure OpenAI deployment and writes each
 * token to stdout.
 *
 * Note: `getChatCompletions` in @azure/openai does NOT honor `stream: true`
 * (which is why the original attempt did not work). Use `listChatCompletions`
 * instead — it returns an async iterable of partial completion events.
 */
async function main() {
  const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
  const deploymentId = "gpt35-turbo-deploy";
  const events = await client.listChatCompletions(deploymentId, messages, {
    temperature: 0,
  });
  for await (const event of events) {
    for (const choice of event.choices) {
      // Role-only and final events carry no delta content; skip those.
      if (choice.delta?.content !== undefined) {
        process.stdout.write(choice.delta.content);
      }
    }
  }
}

main().catch((err) => {
  console.error("The sample encountered an error:", err);
});

module.exports = { main };
我应该使用上面提到的库吗?或者我可以使用不同的东西吗?
最佳答案
Azure OpenAI SDK 目前似乎不支持直接流式响应。
您可以按照官方文档实现流式聊天补全:参考其中的 readableStream.js 示例,它演示了 listChatCompletions 方法和 maxTokens 参数的用法。
下面是示例代码片段:
const { OpenAIClient, AzureKeyCredential } = require("@azure/openai");
require("dotenv").config();

const endpoint = process.env["ENDPOINT"] || "<endpoint>";
const azureApiKey = process.env["AZURE_API_KEY"] || "<api key>";

const messages = [
  { role: "system", content: "You are a helpful assistant. " },
  { role: "user", content: "Can you help me?" },
  { role: "user", content: "Generate Lorem Ipsum text." },
];

/**
 * Streams a chat completion from an Azure OpenAI deployment, logging each
 * partial delta as it arrives.
 */
async function main() {
  console.log("== Streaming Chat Completions Sample ==");
  const client = new OpenAIClient(endpoint, new AzureKeyCredential(azureApiKey));
  const deploymentId = "<Deployment Name>";
  // listChatCompletions already returns an async iterable of partial
  // completion events — iterate it directly. Wrapping it in a hand-built
  // ReadableStream and reading it back out with getReader() (as the original
  // sample did) adds an extra buffering layer with no benefit.
  const events = await client.listChatCompletions(deploymentId, messages, { maxTokens: 128 });
  for await (const event of events) {
    for (const choice of event.choices) {
      if (choice.delta?.content !== undefined) {
        console.log(choice.delta.content);
      }
    }
  }
}

main().catch((err) => {
  console.error("The sample encountered an error:", err);
});

module.exports = { main };
注意:这是示例输出,您可以根据需要修改和重新配置。
关于javascript - azure gpt api流响应,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/77065143/