Skip to content

Commit

Permalink
rest
Browse files Browse the repository at this point in the history
  • Loading branch information
nsbradford committed Aug 29, 2023
1 parent fb5a1af commit 9a348f3
Show file tree
Hide file tree
Showing 4 changed files with 42 additions and 21 deletions.
2 changes: 1 addition & 1 deletion next.config.js
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
reactStrictMode: false, // creates problems for chat
}

module.exports = nextConfig
58 changes: 38 additions & 20 deletions src/pages/forms/fill.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import React, { useState } from 'react';
import React, { useEffect, useRef, useState } from 'react';
import { ChatMessage, LLMRequest, LLMResponse } from '@/types';
import { FAKE_SCHEMA, PROMPT_FILL } from '@/prompts';
import { MessageUI } from '@/components/chat';
Expand All @@ -9,34 +9,50 @@ export default function CreateForm() {
const schema = FAKE_SCHEMA; // TODO hydrate from route after page loads
const systemPrompt = PROMPT_FILL(schema);
const [messages, setMessages] = useState<ChatMessage[]>([
{
role: "assistant",
content: "What kind of form can I help you with?"
}
// {
// role: "assistant",
// content: "What kind of form can I help you with?"
// }
]);
const [inputValue, setInputValue] = useState('');
const [isWaiting, setIsWaiting] = useState(false);
const inputRef = useRef<HTMLInputElement>(null); // Initialize the ref

const handleSubmit = async () => {
if (inputValue.trim()) {
const userMessage = {
role: "user" as const,
content: inputValue.trim()
};
setMessages([...messages, userMessage]);
setInputValue('');
setIsWaiting(true);
const assistantResponse = await callLLM(systemPrompt, [...messages, userMessage]);
setMessages(prev => [...prev, assistantResponse]);
setIsWaiting(false);
}
/**
 * Send the conversation to the LLM, optionally appending a new user message.
 *
 * Called three ways: Submit button / Enter key (with the input's text) and
 * once on mount with no argument to fetch the assistant's opening question.
 *
 * @param userMessage - Raw input text; trimmed before use. Omitted on mount.
 */
const handleSubmit = async (userMessage?: string) => {
  const trimmed = userMessage?.trim();
  // Guard: submitting a blank input used to re-send the unchanged message
  // list to the LLM, producing a spurious duplicate assistant reply (the old
  // pre-refactor code checked inputValue.trim() before doing anything).
  // An empty send is only legitimate when the conversation is empty — that
  // is the on-mount call that fetches the initial greeting.
  if (!trimmed && messages.length > 0) {
    return;
  }
  const messagesToSend = trimmed
    ? [...messages, { role: "user" as const, content: trimmed }]
    : messages;
  setMessages(messagesToSend);
  setInputValue('');
  setIsWaiting(true);
  // Use the local messagesToSend (not the state variable) so the request
  // includes the just-added user message without waiting for a re-render.
  const assistantResponse = await callLLM(systemPrompt, messagesToSend);
  setMessages(prev => [...prev, assistantResponse]);
  setIsWaiting(false);
};
// Clear the waiting flag so the input re-enables; note this does not abort
// the in-flight request — its response is still appended when it resolves.
const handleCancel = () => setIsWaiting(false);
// Submit the current input on Enter, unless a request is already in flight.
const handleKeyPress = (e: React.KeyboardEvent<HTMLInputElement>) => {
  const shouldSubmit = e.key === 'Enter' && !isWaiting;
  if (shouldSubmit) handleSubmit(inputValue);
};
// Return keyboard focus to the text input each time a request finishes
// (isWaiting flips back to false), so the user can keep typing without
// clicking the field again after it was disabled.
useEffect(() => {
if (!isWaiting && inputRef.current) {
// Ensure the input gets focus when isWaiting transitions to false
inputRef.current.focus();
}
}, [isWaiting]); // Track changes to the isWaiting state

// On first mount, when the conversation starts empty, call the LLM with no
// user message so the assistant produces the opening question itself
// (the hard-coded greeting was removed from the initial state above).
// NOTE(review): handleSubmit is deliberately left out of the dependency
// array so this fires exactly once; confirm the exhaustive-deps lint
// suppression is intentional.
useEffect(() => {
if (messages.length === 0) {
handleSubmit();
}
}, []); // The empty array ensures this effect runs only once on mount

return (
<div className="h-screen flex flex-col justify-center items-center bg-gray-100">
<div className="flex flex-col items-center bg-gray-100 py-20">
<h1 className="text-4xl font-extrabold mb-6">Fill a form</h1>
<div className="w-4/5 md:w-1/2 lg:w-1/3 bg-white shadow-md p-6 rounded-lg">
{messages.map((message, index) => (
Expand All @@ -49,10 +65,12 @@ export default function CreateForm() {
value={inputValue}
onChange={e => setInputValue(e.target.value)}
disabled={isWaiting}
onKeyPress={handleKeyPress}
ref={inputRef}
/>
<button
className="ml-2 py-2 px-4 bg-green-500 text-white rounded-lg"
onClick={handleSubmit}
onClick={() => handleSubmit(inputValue)}
disabled={isWaiting}
>
Submit
Expand Down
2 changes: 2 additions & 0 deletions src/prompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ You must respond with a JSON blob with the following format:
\`\`\`
RULES YOU MUST FOLLOW:
- You must ONLY keep the conversation to the topic of the form. STICK WITH THE PROGRAM. If the user tries to ask you about anything else, politely redirect them back to the form and repeat your previous question.
- Users might sometimes be uncertain about some fields; you can press a little, but you must ultimately respect their decision and fill in "[User not sure]".
YOUR SCHEMA:
Expand All @@ -27,6 +28,7 @@ ${schema}
export const FAKE_SCHEMA = `
{
"rsvpForm": {
"meta": "RSVP for Sarah's bday dinner",
"fields": [
{
"label": "Full Name",
Expand Down
1 change: 1 addition & 0 deletions src/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ export const callLLM = async (systemPrompt: string, messages: ChatMessage[]) =>
const data: LLMRequest = {
completion_create: {
model: "gpt-3.5-turbo",
temperature: 0,
messages: [{ role: "system", content: systemPrompt }, ...messages],
},
};
Expand Down

0 comments on commit 9a348f3

Please sign in to comment.