1 Star 0 Fork 0

43038038/langchain-nextjs

Create your Gitee Account
Explore and code with more than 13.5 million developers. Free private repositories!
Sign up
文件
Clone or Download
route.ts 5.06 KB
Copy Edit Raw Blame History
jacoblee93 authored 2024-01-20 02:47 +08:00 . Error handling improvements, version bump
import { NextRequest, NextResponse } from "next/server";
import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
import { createClient } from "@supabase/supabase-js";
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
import { PromptTemplate } from "@langchain/core/prompts";
import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase";
import { Document } from "@langchain/core/documents";
import { RunnableSequence } from "@langchain/core/runnables";
import {
BytesOutputParser,
StringOutputParser,
} from "@langchain/core/output_parsers";
// Next.js route segment config: run this handler on the Edge runtime.
export const runtime = "edge";
// Flatten retrieved documents into a single context string, separating each
// document's page content with a blank line.
const combineDocumentsFn = (docs: Document[]) => {
  return docs.map(({ pageContent }) => pageContent).join("\n\n");
};
// Render Vercel AI chat messages as a plain-text dialogue transcript, one turn
// per line. "user" and "assistant" roles get friendly labels; any other role is
// passed through verbatim as the label.
const formatVercelMessages = (chatHistory: VercelChatMessage[]) => {
  const roleLabels: Record<string, string> = {
    user: "Human",
    assistant: "Assistant",
  };
  return chatHistory
    .map(
      (message) =>
        `${roleLabels[message.role] ?? message.role}: ${message.content}`,
    )
    .join("\n");
};
// Prompt used by the first chain stage: rewrites the latest user message into a
// standalone question so it can be used for retrieval without the chat history.
// Template variables: {chat_history}, {question}.
const CONDENSE_QUESTION_TEMPLATE = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language.
<chat_history>
{chat_history}
</chat_history>
Follow Up Input: {question}
Standalone question:`;
const condenseQuestionPrompt = PromptTemplate.fromTemplate(
  CONDENSE_QUESTION_TEMPLATE,
);
// Prompt used by the second chain stage: answers the (condensed) question from
// the retrieved context and chat history, in a playful "talking puppy" persona.
// Template variables: {context}, {chat_history}, {question}.
const ANSWER_TEMPLATE = `You are an energetic talking puppy named Dana, and must answer all questions like a happy, talking dog would.
Use lots of puns!
Answer the question based only on the following context and chat history:
<context>
{context}
</context>
<chat_history>
{chat_history}
</chat_history>
Question: {question}
`;
const answerPrompt = PromptTemplate.fromTemplate(ANSWER_TEMPLATE);
/**
* This handler initializes and calls a retrieval chain. It composes the chain using
* LangChain Expression Language. See the docs for more information:
*
* https://js.langchain.com/docs/guides/expression_language/cookbook#conversational-retrieval-chain
*/
export async function POST(req: NextRequest) {
try {
const body = await req.json();
const messages = body.messages ?? [];
const previousMessages = messages.slice(0, -1);
const currentMessageContent = messages[messages.length - 1].content;
const model = new ChatOpenAI({
modelName: "gpt-3.5-turbo-1106",
temperature: 0.2,
});
const client = createClient(
process.env.SUPABASE_URL!,
process.env.SUPABASE_PRIVATE_KEY!,
);
const vectorstore = new SupabaseVectorStore(new OpenAIEmbeddings(), {
client,
tableName: "documents",
queryName: "match_documents",
});
/**
* We use LangChain Expression Language to compose two chains.
* To learn more, see the guide here:
*
* https://js.langchain.com/docs/guides/expression_language/cookbook
*
* You can also use the "createRetrievalChain" method with a
* "historyAwareRetriever" to get something prebaked.
*/
const standaloneQuestionChain = RunnableSequence.from([
condenseQuestionPrompt,
model,
new StringOutputParser(),
]);
let resolveWithDocuments: (value: Document[]) => void;
const documentPromise = new Promise<Document[]>((resolve) => {
resolveWithDocuments = resolve;
});
const retriever = vectorstore.asRetriever({
callbacks: [
{
handleRetrieverEnd(documents) {
resolveWithDocuments(documents);
},
},
],
});
const retrievalChain = retriever.pipe(combineDocumentsFn);
const answerChain = RunnableSequence.from([
{
context: RunnableSequence.from([
(input) => input.question,
retrievalChain,
]),
chat_history: (input) => input.chat_history,
question: (input) => input.question,
},
answerPrompt,
model,
]);
const conversationalRetrievalQAChain = RunnableSequence.from([
{
question: standaloneQuestionChain,
chat_history: (input) => input.chat_history,
},
answerChain,
new BytesOutputParser(),
]);
const stream = await conversationalRetrievalQAChain.stream({
question: currentMessageContent,
chat_history: formatVercelMessages(previousMessages),
});
const documents = await documentPromise;
const serializedSources = Buffer.from(
JSON.stringify(
documents.map((doc) => {
return {
pageContent: doc.pageContent.slice(0, 50) + "...",
metadata: doc.metadata,
};
}),
),
).toString("base64");
return new StreamingTextResponse(stream, {
headers: {
"x-message-index": (previousMessages.length + 1).toString(),
"x-sources": serializedSources,
},
});
} catch (e: any) {
return NextResponse.json({ error: e.message }, { status: e.status ?? 500 });
}
}
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
Python
1
https://gitee.com/haishang001/langchain-nextjs.git
git@gitee.com:haishang001/langchain-nextjs.git
haishang001
langchain-nextjs
langchain-nextjs
main

Search