diff --git a/src/pages/api/llm.ts b/src/pages/api/llm.ts
index 8858f08..4885a2f 100644
--- a/src/pages/api/llm.ts
+++ b/src/pages/api/llm.ts
@@ -1,4 +1,3 @@
-// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
 import type { NextApiRequest, NextApiResponse } from 'next';
 import OpenAI from 'openai';
 import { LLMRequest, LLMResponse } from '../../types';
diff --git a/src/pages/forms/fill/[id].tsx b/src/pages/forms/fill/[id].tsx
index c763e35..bdb438a 100644
--- a/src/pages/forms/fill/[id].tsx
+++ b/src/pages/forms/fill/[id].tsx
@@ -99,7 +99,7 @@ export function InnerChat(props: {
     setMessages(messagesToSend);
     setInputValue('');
     setIsWaiting(true);
-    const assistantResponse = await callLLM(PROMPT_FILL(form), messagesToSend);
+    const assistantResponse = await callLLM(PROMPT_FILL(form), messagesToSend, 'llm');
     if (assistantResponse instanceof Error) {
       setError(assistantResponse);
       return;
diff --git a/src/utils.ts b/src/utils.ts
index 0b5d4c3..216f517 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -10,8 +10,9 @@ import { Session, SupabaseClient } from '@supabase/auth-helpers-nextjs';
 import { v4 } from 'uuid';
 import { Database, Json } from '../../types/supabase';
 export const callLLM = async (
   systemPrompt: string,
-  messages: ChatMessage[]
+  messages: ChatMessage[],
+  endpoint = 'llm'
 ) => {
   const data: LLMRequest = {
     completion_create: {
@@ -21,7 +22,7 @@ export const callLLM = async (
       messages: [{ role: 'system', content: systemPrompt }, ...messages],
     },
   };
-  const response = await fetch('/api/llm', {
+  const response = await fetch(`/api/${endpoint}`, {
     method: 'POST',
     body: JSON.stringify(data),
     headers: {