// pages/api/chat.ts — streaming chat endpoint (Server-Sent Events)
import type { NextApiRequest, NextApiResponse } from 'next';

import { OpenAIEmbeddings } from 'langchain/embeddings';
import { SupabaseVectorStore } from 'langchain/vectorstores';

import { makeChain } from '@/utils/makechain';
import { openai } from '@/utils/openai-client';
import { supabaseClient } from '@/utils/supabase-client';
  7. export default async function handler(
  8. req: NextApiRequest,
  9. res: NextApiResponse,
  10. ) {
  11. const { question, history } = req.body;
  12. if (!question) {
  13. return res.status(400).json({ message: 'No question in the request' });
  14. }
  15. // OpenAI recommends replacing newlines with spaces for best results
  16. const sanitizedQuestion = question.trim().replaceAll('\n', ' ');
  17. /* create vectorstore*/
  18. const vectorStore = await SupabaseVectorStore.fromExistingIndex(
  19. supabaseClient,
  20. new OpenAIEmbeddings(),
  21. );
  22. res.writeHead(200, {
  23. 'Content-Type': 'text/event-stream',
  24. 'Cache-Control': 'no-cache, no-transform',
  25. Connection: 'keep-alive',
  26. });
  27. const sendData = (data: string) => {
  28. res.write(`data: ${data}\n\n`);
  29. };
  30. sendData(JSON.stringify({ data: '' }));
  31. const model = openai;
  32. // create the chain
  33. const chain = makeChain(vectorStore, (token: string) => {
  34. sendData(JSON.stringify({ data: token }));
  35. });
  36. try {
  37. //Ask a question
  38. const response = await chain.call({
  39. question: sanitizedQuestion,
  40. chat_history: history || [],
  41. });
  42. console.log('response', response);
  43. } catch (error) {
  44. console.log('error', error);
  45. } finally {
  46. sendData('[DONE]');
  47. res.end();
  48. }
  49. }