import { NextRequest, NextResponse } from "next/server";
import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
import { createClient } from "@supabase/supabase-js";
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
import { PromptTemplate } from "@langchain/core/prompts";
import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
import { Document } from "@langchain/core/documents";
import { RunnableSequence } from "@langchain/core/runnables";
import {
  BytesOutputParser,
  StringOutputParser,
} from "@langchain/core/output_parsers";
export const runtime = "edge";
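
// Joins the page content of the retrieved documents into one
// newline-separated string, used as the {context} prompt variable.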
const combineDocumentsFn = (docs: Document[]) => {
  const serializedDocs = docs.map((doc) => doc.pageContent);
  return serializedDocs.join("\n\n");
};
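
// Serializes Vercel AI SDK chat messages into a plain-text
// "Human:/Assistant:" transcript for the {chat_history} prompt variable.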
const formatVercelMessages = (chatHistory: VercelChatMessage[]) => {
  const formattedDialogueTurns = chatHistory.map((message) => {
    if (message.role === "user") {
      return `Human: ${message.content}`;
    } else if (message.role === "assistant") {
      return `Assistant: ${message.content}`;
    } else {
      return `${message.role}: ${message.content}`;
    }
  });
  return formattedDialogueTurns.join("\n");
};
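
// Rephrases the latest user message into a standalone question so that
// retrieval still works when the question refers back to earlier turns.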
const CONDENSE_QUESTION_TEMPLATE = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
<chat_history>
{chat_history}
</chat_history>
Follow Up Input: {question}
Standalone question:`;
const condenseQuestionPrompt = PromptTemplate.fromTemplate(
  CONDENSE_QUESTION_TEMPLATE,
);
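
// Answers the standalone question from retrieved context and chat history,
// in a fixed persona.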
const ANSWER_TEMPLATE = `You are an energetic talking puppy named Dana, and must answer all questions like a happy, talking dog would.
Use lots of puns!
Answer the question based only on the following context and chat history:
<context>
{context}
</context>
<chat_history>
{chat_history}
</chat_history>
Question: {question}
`;
const answerPrompt = PromptTemplate.fromTemplate(ANSWER_TEMPLATE);
/**
 * This handler initializes and calls a retrieval chain. It composes the chain
 * using LangChain Expression Language. See the docs for more information:
 *
 * https://js.langchain.com/docs/guides/expression_language/cookbook#conversational-retrieval-chain
 */
export async function POST(req: NextRequest) {
  try {
    const body = await req.json();
    const messages = body.messages ?? [];
    const previousMessages = messages.slice(0, -1);
    const currentMessageContent = messages[messages.length - 1].content;
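
    // One chat model instance serves both the condensing step and the
    // final answer generation.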
    const model = new ChatOpenAI({
      modelName: "gpt-3.5-turbo-1106",
      temperature: 0.2,
    });
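
    // The Supabase-backed vector store used for retrieval. The table and
    // query names match LangChain's standard Supabase vector store setup.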
    const client = createClient(
      process.env.SUPABASE_URL!,
      process.env.SUPABASE_PRIVATE_KEY!,
    );
    const vectorstore = new SupabaseVectorStore(new OpenAIEmbeddings(), {
      client,
      tableName: "documents",
      queryName: "match_documents",
    });
    /**
     * We use LangChain Expression Language to compose two chains.
     * To learn more, see the guide here:
     *
     * https://js.langchain.com/docs/guides/expression_language/cookbook
     *
     * You can also use the "createRetrievalChain" method with a
     * "historyAwareRetriever" to get something prebaked.
     */
    const standaloneQuestionChain = RunnableSequence.from([
      condenseQuestionPrompt,
      model,
      new StringOutputParser(),
    ]);
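
    // The retriever only surfaces its documents through callbacks, so we
    // capture them in a promise that resolves when retrieval finishes.
    // This lets us attach source metadata to the response headers below.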
    let resolveWithDocuments: (value: Document[]) => void;
    const documentPromise = new Promise<Document[]>((resolve) => {
      resolveWithDocuments = resolve;
    });
    const retriever = vectorstore.asRetriever({
      callbacks: [
        {
          handleRetrieverEnd(documents) {
            resolveWithDocuments(documents);
          },
        },
      ],
    });
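
    // Fetches context for the (condensed) question, then fills the answer
    // prompt and calls the model.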
    const retrievalChain = retriever.pipe(combineDocumentsFn);

    const answerChain = RunnableSequence.from([
      {
        context: RunnableSequence.from([
          (input) => input.question,
          retrievalChain,
        ]),
        chat_history: (input) => input.chat_history,
        question: (input) => input.question,
      },
      answerPrompt,
      model,
    ]);
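
    // The full chain: condense the follow-up into a standalone question,
    // answer it with retrieved context, and emit bytes that
    // StreamingTextResponse can forward to the client.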
    const conversationalRetrievalQAChain = RunnableSequence.from([
      {
        question: standaloneQuestionChain,
        chat_history: (input) => input.chat_history,
      },
      answerChain,
      new BytesOutputParser(),
    ]);
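
    // Start the run as a stream so tokens are forwarded as they arrive.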
    const stream = await conversationalRetrievalQAChain.stream({
      question: currentMessageContent,
      chat_history: formatVercelMessages(previousMessages),
    });
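
    // Retrieval happens early in the chain run, so this promise resolves
    // while the answer is still streaming. Page content is truncated and the
    // payload base64-encoded so the sources fit safely in a response header.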
    const documents = await documentPromise;
    const serializedSources = Buffer.from(
      JSON.stringify(
        documents.map((doc) => {
          return {
            pageContent: doc.pageContent.slice(0, 50) + "...",
            metadata: doc.metadata,
          };
        }),
      ),
    ).toString("base64");
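
    // Stream the answer back, with headers the client can use to show
    // sources alongside the corresponding message.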
    return new StreamingTextResponse(stream, {
      headers: {
        "x-message-index": (previousMessages.length + 1).toString(),
        "x-sources": serializedSources,
      },
    });
  } catch (e: any) {
    return NextResponse.json({ error: e.message }, { status: e.status ?? 500 });
  }
}
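
/**
 * Example client call (a sketch; it assumes this file lives at
 * `app/api/chat/retrieval/route.ts`, so the route is `/api/chat/retrieval`):
 *
 *   const response = await fetch("/api/chat/retrieval", {
 *     method: "POST",
 *     headers: { "Content-Type": "application/json" },
 *     body: JSON.stringify({
 *       messages: [{ role: "user", content: "What do dogs dream about?" }],
 *     }),
 *   });
 *   // Decode the source documents attached by the handler above.
 *   const sources = JSON.parse(
 *     atob(response.headers.get("x-sources") ?? "") || "[]",
 *   );
 */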