1 Star 0 Fork 0

43038038/langchain-nextjs

Create your Gitee Account
Explore and code with more than 13.5 million developers — free private repositories!
Sign up
文件
Clone or Download
route.ts 2.31 KB
Copy Edit Raw Blame History
jacoblee93 authored 2024-01-20 02:47 +08:00 . Error handling improvements, version bump
import { NextRequest, NextResponse } from "next/server";
import { Message as VercelChatMessage, StreamingTextResponse } from "ai";
import { ChatOpenAI } from "@langchain/openai";
import { PromptTemplate } from "@langchain/core/prompts";
import { HttpResponseOutputParser } from "langchain/output_parsers";
// Opt this route into the Edge runtime for low-latency streaming responses.
export const runtime = "edge";
/**
 * Serializes a Vercel AI SDK chat message into a single
 * "role: content" line for inclusion in the prompt's chat history.
 */
const formatMessage = (message: VercelChatMessage) => {
  const { role, content } = message;
  return role + ": " + content;
};
// Prompt template sent to the model. {chat_history} and {input} are
// LangChain placeholders filled in at chain invocation time.
const TEMPLATE = `You are a pirate named Patchy. All responses must be extremely verbose and in pirate dialect.
Current conversation:
{chat_history}
User: {input}
AI:`;
/**
 * This handler initializes and calls a simple chain with a prompt,
 * chat model, and output parser. See the docs for more information:
 *
 * https://js.langchain.com/docs/guides/expression_language/cookbook#prompttemplate--llm--outputparser
 *
 * @param req - Incoming request whose JSON body contains a `messages` array
 *   of Vercel AI SDK chat messages.
 * @returns A streaming text response on success, or a JSON error payload
 *   (400 for an empty conversation, otherwise the error's status or 500).
 */
export async function POST(req: NextRequest) {
  try {
    const body = await req.json();
    const messages = body.messages ?? [];

    // Guard against an empty conversation: indexing the last element below
    // would otherwise read `.content` of `undefined` and throw a TypeError.
    if (messages.length === 0) {
      return NextResponse.json(
        { error: "At least one message is required." },
        { status: 400 },
      );
    }

    // Everything except the last message becomes the serialized history;
    // the last message is the user's current input.
    const formattedPreviousMessages = messages.slice(0, -1).map(formatMessage);
    const currentMessageContent = messages[messages.length - 1].content;

    const prompt = PromptTemplate.fromTemplate(TEMPLATE);

    /**
     * You can also try e.g.:
     *
     * import { ChatAnthropic } from "langchain/chat_models/anthropic";
     * const model = new ChatAnthropic({});
     *
     * See a full list of supported models at:
     * https://js.langchain.com/docs/modules/model_io/models/
     */
    const model = new ChatOpenAI({
      temperature: 0.8,
      modelName: "gpt-3.5-turbo-1106",
    });

    /**
     * Chat models stream message chunks rather than bytes, so this
     * output parser handles serialization and byte-encoding.
     */
    const outputParser = new HttpResponseOutputParser();

    /**
     * Can also initialize as:
     *
     * import { RunnableSequence } from "langchain/schema/runnable";
     * const chain = RunnableSequence.from([prompt, model, outputParser]);
     */
    const chain = prompt.pipe(model).pipe(outputParser);

    const stream = await chain.stream({
      chat_history: formattedPreviousMessages.join("\n"),
      input: currentMessageContent,
    });

    return new StreamingTextResponse(stream);
  } catch (e: unknown) {
    // Narrow the unknown error instead of trusting `any`: report the
    // message only when it's a real Error, and honor an HTTP-style
    // numeric `status` property when present (e.g. OpenAI API errors).
    const message = e instanceof Error ? e.message : "Internal server error";
    const status =
      typeof (e as { status?: unknown })?.status === "number"
        ? (e as { status: number }).status
        : 500;
    return NextResponse.json({ error: message }, { status });
  }
}
Loading...
马建仓 AI 助手
尝试更多
代码解读
代码找茬
代码优化
Python
1
https://gitee.com/haishang001/langchain-nextjs.git
git@gitee.com:haishang001/langchain-nextjs.git
haishang001
langchain-nextjs
langchain-nextjs
main

Search