chore: remove reasoning model
This commit is contained in:
parent 7a457c626f
commit 7cff01440d
@@ -164,14 +164,6 @@ const HomeContent = () => {
     const fileInputRef = useRef<HTMLInputElement>(null);
     const inputRef = useRef<HTMLInputElement>(null);

-    const [isInitialQueryProcessed, setIsInitialQueryProcessed] = useState(false);
-
-    const [o1Conversation, setO1Conversation] = useState<Message[]>([]);
-    const [o1Input, setO1Input] = useState<string>('');
-    const [isO1Loading, setIsO1Loading] = useState(false);
-    const [remainingRequests, setRemainingRequests] = useState<number | null>(null);
-    const [resetTime, setResetTime] = useState<number | null>(null);
-
     const [openChangelog, setOpenChangelog] = useState(false);

     const { isLoading, input, messages, setInput, handleInputChange, append, handleSubmit, setMessages, reload } = useChat({
@@ -199,93 +191,6 @@ const HomeContent = () => {
         },
     });

-    const handleO1InputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
-        setO1Input(e.target.value);
-    };
-
-    const handleO1Submit = useCallback(async () => {
-        if (o1Input.trim()) {
-            setIsO1Loading(true);
-            const newUserMessage = { role: 'user' as const, content: o1Input };
-            setLastSubmittedQuery(o1Input);
-            setO1Input('');
-            setO1Conversation(prev => [...prev, newUserMessage]);
-
-            try {
-                const { messages: newMessages, remaining, reset } = await continueConversation([...o1Conversation, newUserMessage]);
-                setO1Conversation(newMessages);
-                // make suggestion questions
-                const { questions } = await suggestQuestions(newMessages);
-                setSuggestedQuestions(questions);
-                setRemainingRequests(remaining);
-                setResetTime(reset);
-                if (remaining !== null && remaining <= 3) {
-                    toast.warning(`You have ${remaining} requests remaining for the next 4 hours.`);
-                }
-            } catch (error) {
-                console.error("Error in O1 conversation:", error);
-                toast.error(error instanceof Error ? error.message : "An error occurred while processing your request.");
-            } finally {
-                setIsO1Loading(false);
-            }
-        }
-    }, [o1Input, o1Conversation]);
-
-    interface RateLimitInfoProps {
-        remainingRequests: number;
-        resetTime: number;
-    }
-
-    const RateLimitInfo: React.FC<RateLimitInfoProps> = ({ remainingRequests, resetTime }) => {
-        const formatResetTime = (resetTimestamp: number) => {
-            const resetDate = new Date(resetTimestamp);
-            return resetDate.toLocaleTimeString([], { hour: '2-digit', minute: '2-digit' });
-        };
-
-        const getBatteryColor = (remaining: number) => {
-            if (remaining <= 2) return "text-red-500";
-            if (remaining <= 5) return "text-yellow-500";
-            return "text-green-500";
-        };
-
-        return (
-            <TooltipProvider>
-                <Tooltip>
-                    <TooltipTrigger asChild>
-                        <div className="flex items-center space-x-2 bg-gray-100 dark:bg-gray-800 px-3 py-1 rounded-full text-sm">
-                            <Battery className={`w-4 h-4 ${getBatteryColor(remainingRequests)}`} />
-                            <span className="font-medium">{remainingRequests}</span>
-                            <Clock className="w-4 h-4 text-gray-500" />
-                        </div>
-                    </TooltipTrigger>
-                    <TooltipContent>
-                        <p>4-hour limit: {remainingRequests} requests remaining</p>
-                        <p>Resets at: {formatResetTime(resetTime)}</p>
-                    </TooltipContent>
-                </Tooltip>
-            </TooltipProvider>
-        );
-    };
-
-    const processInitialQuery = useCallback(async () => {
-        if (initialQuery && !isInitialQueryProcessed) {
-            setHasSubmitted(true);
-            setIsInitialQueryProcessed(true);
-            track('search with url params', { query: initialQuery });
-
-            if (selectedModel === 'openai/o1-mini') {
-                setO1Input(initialQuery);
-                handleO1Submit();
-            } else {
-                await append({ content: initialQuery, role: 'user' });
-            }
-        }
-    }, [initialQuery, isInitialQueryProcessed, selectedModel, handleO1Submit, append]);
-
-    if (!isInitialQueryProcessed) {
-        processInitialQuery();
-    }
-
     const CopyButton = ({ text }: { text: string }) => {
         const [isCopied, setIsCopied] = useState(false);
@@ -1802,63 +1707,26 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
         setHasSubmitted(true);
         setSuggestedQuestions([]);

-        if (selectedModel === 'openai/o1-mini') {
-            setO1Input(exampleText.trim());
-            setIsO1Loading(true);
-            const newUserMessage = { role: 'user' as const, content: exampleText.trim() };
-            setO1Conversation(prev => [...prev, newUserMessage]);
-            setO1Input("");
-            try {
-                const { messages: newMessages, remaining, reset } = await continueConversation([...o1Conversation, newUserMessage]);
-                setO1Conversation(newMessages);
-                // make suggestions for the next user message
-                const { questions } = await suggestQuestions(newMessages);
-                setSuggestedQuestions(questions);
-                setRemainingRequests(remaining);
-                setResetTime(reset);
-            } catch (error) {
-                console.error("Error in O1 conversation:", error);
-                toast.error(error instanceof Error ? error.message : "An error occurred while processing your request.");
-            } finally {
-                setIsO1Loading(false);
-            }
-        } else {
-            await append({
-                content: exampleText.trim(),
-                role: 'user',
-            });
-        }
-    }, [append, setLastSubmittedQuery, setHasSubmitted, setSuggestedQuestions, selectedModel, setO1Input, o1Conversation]);
+        await append({
+            content: exampleText.trim(),
+            role: 'user',
+        });
+    }, [append, setLastSubmittedQuery, setHasSubmitted, setSuggestedQuestions, selectedModel]);

     const handleSuggestedQuestionClick = useCallback(async (question: string) => {
         setHasSubmitted(true);
         setSuggestedQuestions([]);

-        if (selectedModel === 'openai/o1-mini') {
-            setO1Input(question.trim());
-            setIsO1Loading(true);
-            const newUserMessage = { role: 'user' as const, content: question.trim() };
-            setO1Conversation(prev => [...prev, newUserMessage]);
-            setO1Input("");
-            try {
-                const { messages: newMessages, remaining, reset } = await continueConversation([...o1Conversation, newUserMessage]);
-                setO1Conversation(newMessages);
-                setRemainingRequests(remaining);
-                setResetTime(reset);
-            } catch (error) {
-                console.error("Error in O1 conversation:", error);
-                toast.error(error instanceof Error ? error.message : "An error occurred while processing your request.");
-            } finally {
-                setIsO1Loading(false);
-            }
-        } else {
-            setInput(question.trim());
-            await append({
-                content: question.trim(),
-                role: 'user'
-            });
-        }
-    }, [setInput, append, selectedModel, setO1Input, o1Conversation]);
+        setInput(question.trim());
+        await append({
+            content: question.trim(),
+            role: 'user'
+        });
+    }, [setInput, append]);

     const handleMessageEdit = useCallback((index: number) => {
         setIsEditingMessage(true);
@@ -1901,13 +1769,9 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
        },
    ];

-    interface NavbarProps {
-        selectedModel: string;
-        remainingRequests: number | null;
-        resetTime: number | null;
-    }
+    interface NavbarProps { }

-    const Navbar: React.FC<NavbarProps> = ({ selectedModel, remainingRequests, resetTime }) => {
+    const Navbar: React.FC<NavbarProps> = () => {
        return (
            <div className="fixed top-0 left-0 right-0 z-50 flex justify-between items-center p-4 bg-background">
                <Link href="/new">
@@ -1923,12 +1787,6 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
                    </Button>
                </Link>
                <div className='flex items-center space-x-4'>
-                    {selectedModel === 'openai/o1-mini' && remainingRequests !== null && resetTime !== null && (
-                        <RateLimitInfo
-                            remainingRequests={remainingRequests}
-                            resetTime={resetTime}
-                        />
-                    )}
                    <Button
                        variant="secondary"
                        size="sm"
@@ -2164,16 +2022,14 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
        event.preventDefault();
        event.stopPropagation();

-        if ((selectedModel === 'openai/o1-mini' ? o1Input : input).trim() || (selectedModel !== 'openai/o1-mini' && attachments.length > 0)) {
-            track("search enter", { query: (selectedModel === 'openai/o1-mini' ? o1Input : input).trim() });
+        if (input.trim() || (selectedModel !== 'openai/o1-mini' && attachments.length > 0)) {
+            track("search enter", { query: input.trim() });
            setHasSubmitted(true);
-            if (selectedModel === 'openai/o1-mini') {
-                handleO1Submit();
-            } else {
-                handleSubmit(event, {
-                    experimental_attachments: attachments,
-                });
-            }
+            handleSubmit(event, {
+                experimental_attachments: attachments,
+            });
            setAttachments([]);
            setUploadingAttachments([]);
            setSuggestedQuestions([]);
@@ -2246,9 +2102,9 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
                        ref={inputRef}
                        name="search"
                        placeholder={hasSubmitted ? "Ask a new question..." : "Ask a question..."}
-                        value={selectedModel === 'openai/o1-mini' ? o1Input : input}
-                        onChange={selectedModel === 'openai/o1-mini' ? handleO1InputChange : handleInputChange}
-                        disabled={isLoading || isO1Loading}
+                        value={input}
+                        onChange={handleInputChange}
+                        disabled={isLoading}
                        className={cn(
                            "w-full h-12 pr-12 bg-muted",
                            "ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2",
@@ -2263,30 +2119,29 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
                            }
                        }}
                    />
-                    {selectedModel !== 'openai/o1-mini' && (
-                        <label
-                            htmlFor={hasSubmitted ? "file-upload-bottom" : "file-upload-top"}
-                            className={`absolute left-3 cursor-pointer ${attachments.length + uploadingAttachments.length >= MAX_IMAGES ? 'opacity-50 cursor-not-allowed' : ''}`}
-                        >
-                            <Paperclip className="h-5 w-5 text-muted-foreground" />
-                            <input
-                                id={hasSubmitted ? "file-upload-bottom" : "file-upload-top"}
-                                type="file"
-                                accept="image/*"
-                                multiple
-                                onChange={handleFileChange}
-                                className="hidden"
-                                disabled={attachments.length + uploadingAttachments.length >= MAX_IMAGES}
-                                ref={fileInputRef}
-                            />
-                        </label>
-                    )}
+                    <label
+                        htmlFor={hasSubmitted ? "file-upload-bottom" : "file-upload-top"}
+                        className={`absolute left-3 cursor-pointer ${attachments.length + uploadingAttachments.length >= MAX_IMAGES ? 'opacity-50 cursor-not-allowed' : ''}`}
+                    >
+                        <Paperclip className="h-5 w-5 text-muted-foreground" />
+                        <input
+                            id={hasSubmitted ? "file-upload-bottom" : "file-upload-top"}
+                            type="file"
+                            accept="image/*"
+                            multiple
+                            onChange={handleFileChange}
+                            className="hidden"
+                            disabled={attachments.length + uploadingAttachments.length >= MAX_IMAGES}
+                            ref={fileInputRef}
+                        />
+                    </label>
                    <Button
                        type="submit"
                        size="icon"
                        variant="ghost"
                        className="absolute right-2"
-                        disabled={(selectedModel === 'openai/o1-mini' ? o1Input : input).trim().length === 0 || isLoading || isO1Loading || uploadingAttachments.length > 0}
+                        disabled={input.trim().length === 0 || isLoading || uploadingAttachments.length > 0}
                    >
                        <ArrowRight size={20} />
                    </Button>
|
|||||||
const handleModelChange = useCallback((newModel: string) => {
|
const handleModelChange = useCallback((newModel: string) => {
|
||||||
setSelectedModel(newModel);
|
setSelectedModel(newModel);
|
||||||
setSuggestedQuestions([]);
|
setSuggestedQuestions([]);
|
||||||
if (newModel === 'openai/o1-mini') {
|
reload({ body: { model: newModel } });
|
||||||
setO1Conversation([]);
|
}, [reload]);
|
||||||
} else if (messages.length > 0) {
|
|
||||||
reload({ body: { model: newModel } });
|
|
||||||
}
|
|
||||||
}, [messages, reload]);
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex flex-col font-sans items-center justify-center p-2 sm:p-4 bg-background text-foreground transition-all duration-500">
|
<div className="flex flex-col font-sans items-center justify-center p-2 sm:p-4 bg-background text-foreground transition-all duration-500">
|
||||||
<Navbar
|
<Navbar />
|
||||||
selectedModel={selectedModel}
|
|
||||||
remainingRequests={remainingRequests}
|
|
||||||
resetTime={resetTime}
|
|
||||||
/>
|
|
||||||
|
|
||||||
<div className={`w-full max-w-[90%] sm:max-w-2xl space-y-6 p-0 ${hasSubmitted ? 'mt-16 sm:mt-20' : 'mt-[20vh] sm:mt-[30vh]'}`}>
|
<div className={`w-full max-w-[90%] sm:max-w-2xl space-y-6 p-0 ${hasSubmitted ? 'mt-16 sm:mt-20' : 'mt-[20vh] sm:mt-[30vh]'}`}>
|
||||||
{!hasSubmitted && (
|
{!hasSubmitted && (
|
||||||
@ -2466,14 +2313,14 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
|
|||||||
transition={{ duration: 0.5 }}
|
transition={{ duration: 0.5 }}
|
||||||
>
|
>
|
||||||
<FormComponent
|
<FormComponent
|
||||||
input={selectedModel === 'openai/o1-mini' ? o1Input : input}
|
input={input}
|
||||||
setInput={selectedModel === 'openai/o1-mini' ? setO1Input : setInput}
|
setInput={setInput}
|
||||||
attachments={attachments}
|
attachments={attachments}
|
||||||
setAttachments={setAttachments}
|
setAttachments={setAttachments}
|
||||||
hasSubmitted={hasSubmitted}
|
hasSubmitted={hasSubmitted}
|
||||||
setHasSubmitted={setHasSubmitted}
|
setHasSubmitted={setHasSubmitted}
|
||||||
handleSubmit={selectedModel === 'openai/o1-mini' ? handleO1Submit : handleSubmit}
|
handleSubmit={handleSubmit}
|
||||||
isLoading={selectedModel === 'openai/o1-mini' ? isO1Loading : isLoading}
|
isLoading={isLoading}
|
||||||
fileInputRef={fileInputRef}
|
fileInputRef={fileInputRef}
|
||||||
inputRef={inputRef}
|
inputRef={inputRef}
|
||||||
/>
|
/>
|
||||||
@@ -2484,169 +2331,113 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current


        <div className="space-y-4 sm:space-y-6 mb-32">
-            {selectedModel === 'openai/o1-mini' ? (
-                <>
-                    {o1Conversation.map((message, index) => (
-                        <div key={index}>
-                            {message.role === 'user' && (
-                                <motion.div
-                                    initial={{ opacity: 0, y: 20 }}
-                                    animate={{ opacity: 1, y: 0 }}
-                                    transition={{ duration: 0.5 }}
-                                    className="flex items-start space-x-2 mb-4"
-                                >
-                                    <User2 className="size-5 sm:size-6 text-primary flex-shrink-0 mt-1" />
-                                    <div className="flex-grow min-w-0">
-                                        <p className="text-xl sm:text-2xl font-medium font-serif break-words">
-                                            {message.content}
-                                        </p>
-                                    </div>
-                                </motion.div>
-                            )}
-                            {message.role === 'assistant' && (
-                                <div>
-                                    <div className='flex items-center justify-between mb-2'>
-                                        <div className='flex items-center gap-2'>
-                                            <Sparkles className="size-5 text-primary" />
-                                            <h2 className="text-base font-semibold">Answer</h2>
-                                        </div>
-                                        <CopyButton text={message.content} />
-                                    </div>
-                                    <div>
-                                        <MarkdownRenderer content={message.content} />
-                                    </div>
-                                </div>
-                            )}
-                        </div>
-                    ))}
-                    {isO1Loading && (
+            {messages.map((message, index) => (
+                <div key={index}>
+                    {message.role === 'user' && (
                        <motion.div
                            initial={{ opacity: 0, y: 20 }}
                            animate={{ opacity: 1, y: 0 }}
                            transition={{ duration: 0.5 }}
                            className="flex items-start space-x-2 mb-4"
                        >
-                            <Sparkles className="size-5 sm:size-6 text-primary flex-shrink-0 mt-1" />
+                            <User2 className="size-5 sm:size-6 text-primary flex-shrink-0 mt-1" />
                            <div className="flex-grow min-w-0">
-                                <div className="flex items-center space-x-2">
-                                    <Loader2 className="h-5 w-5 animate-spin text-primary" />
-                                    <p className="text-lg font-medium">Thinking...</p>
-                                </div>
-                                <div className="mt-2 bg-muted rounded-md p-4 animate-pulse">
-                                    <div className="h-4 bg-muted-foreground/20 rounded w-3/4 mb-2"></div>
-                                    <div className="h-4 bg-muted-foreground/20 rounded w-1/2"></div>
-                                </div>
-                            </div>
-                        </motion.div>
-                    )}
-                </>
-            ) : (
-                messages.map((message, index) => (
-                    <div key={index}>
-                        {message.role === 'user' && (
-                            <motion.div
-                                initial={{ opacity: 0, y: 20 }}
-                                animate={{ opacity: 1, y: 0 }}
-                                transition={{ duration: 0.5 }}
-                                className="flex items-start space-x-2 mb-4"
-                            >
-                                <User2 className="size-5 sm:size-6 text-primary flex-shrink-0 mt-1" />
-                                <div className="flex-grow min-w-0">
-                                    {isEditingMessage && editingMessageIndex === index ? (
-                                        <form onSubmit={handleMessageUpdate} className="flex items-center space-x-2">
-                                            <Input
-                                                value={input}
-                                                onChange={(e) => setInput(e.target.value)}
-                                                className="flex-grow"
-                                            />
-                                            <Button
-                                                variant="secondary"
-                                                size="sm"
-                                                type="button"
-                                                onClick={() => {
-                                                    setIsEditingMessage(false)
-                                                    setEditingMessageIndex(-1)
-                                                    setInput('')
-                                                }}
-                                                disabled={isLoading}
-                                            >
-                                                <X size={16} />
-                                            </Button>
-                                            <Button type="submit" size="sm">
-                                                <ArrowRight size={16} />
-                                            </Button>
-                                        </form>
-                                    ) : (
-                                        <div>
-                                            <p className="text-xl sm:text-2xl font-medium font-serif break-words">
-                                                {message.content}
-                                            </p>
-                                            <div
-                                                className='flex flex-row gap-2'
-                                            >
-                                                {message.experimental_attachments?.map((attachment, attachmentIndex) => (
-                                                    <div key={attachmentIndex} className="mt-2">
-                                                        {attachment.contentType!.startsWith('image/') && (
-                                                            <img
-                                                                src={attachment.url}
-                                                                alt={attachment.name || `Attachment ${attachmentIndex + 1}`}
-                                                                className="max-w-full h-32 object-fill rounded-lg"
-                                                            />
-                                                        )}
-                                                    </div>
-                                                ))}
-                                            </div>
-                                        </div>
-                                    )}
-                                </div>
-
-                                {!isEditingMessage && index === lastUserMessageIndex && (
-                                    <div
-                                        className="flex items-center space-x-2"
-                                    >
+                                {isEditingMessage && editingMessageIndex === index ? (
+                                    <form onSubmit={handleMessageUpdate} className="flex items-center space-x-2">
+                                        <Input
+                                            value={input}
+                                            onChange={(e) => setInput(e.target.value)}
+                                            className="flex-grow"
+                                        />
                                        <Button
-                                            variant="ghost"
+                                            variant="secondary"
                                            size="sm"
-                                            onClick={() => handleMessageEdit(index)}
-                                            className="ml-2"
+                                            type="button"
+                                            onClick={() => {
+                                                setIsEditingMessage(false)
+                                                setEditingMessageIndex(-1)
+                                                setInput('')
+                                            }}
                                            disabled={isLoading}
                                        >
-                                            <Edit2 size={16} />
+                                            <X size={16} />
                                        </Button>
+                                        <Button type="submit" size="sm">
+                                            <ArrowRight size={16} />
+                                        </Button>
+                                    </form>
+                                ) : (
+                                    <div>
+                                        <p className="text-xl sm:text-2xl font-medium font-serif break-words">
+                                            {message.content}
+                                        </p>
+                                        <div
+                                            className='flex flex-row gap-2'
+                                        >
+                                            {message.experimental_attachments?.map((attachment, attachmentIndex) => (
+                                                <div key={attachmentIndex} className="mt-2">
+                                                    {attachment.contentType!.startsWith('image/') && (
+                                                        <img
+                                                            src={attachment.url}
+                                                            alt={attachment.name || `Attachment ${attachmentIndex + 1}`}
+                                                            className="max-w-full h-32 object-fill rounded-lg"
+                                                        />
+                                                    )}
+                                                </div>
+                                            ))}
+                                        </div>
                                    </div>
                                )}
-                            </motion.div>
-                        )}
-                        {message.role === 'assistant' && message.content && (
-                            <div>
-                                <div className='flex items-center justify-between mb-2'>
-                                    <div className='flex items-center gap-2'>
-                                        <Sparkles className="size-5 text-primary" />
-                                        <h2 className="text-base font-semibold">Answer</h2>
-                                    </div>
-                                    <div
-                                        className='flex items-center gap-2'
-                                    >
-                                        <ModelSwitcher
-                                            selectedModel={selectedModel}
-                                            setSelectedModel={handleModelChange}
-                                            className="!px-4 rounded-full"
-                                        />
-                                        <CopyButton text={message.content} />
-                                    </div>
+                            </div>
+                            {!isEditingMessage && index === lastUserMessageIndex && (
+                                <div
+                                    className="flex items-center space-x-2"
+                                >
+                                    <Button
+                                        variant="ghost"
+                                        size="sm"
+                                        onClick={() => handleMessageEdit(index)}
+                                        className="ml-2"
+                                        disabled={isLoading}
+                                    >
+                                        <Edit2 size={16} />
+                                    </Button>
                                </div>
-                            <div>
-                                <MarkdownRenderer content={message.content} />
+                            )}
+                        </motion.div>
+                    )}
+                    {message.role === 'assistant' && message.content && (
+                        <div>
+                            <div className='flex items-center justify-between mb-2'>
+                                <div className='flex items-center gap-2'>
+                                    <Sparkles className="size-5 text-primary" />
+                                    <h2 className="text-base font-semibold">Answer</h2>
+                                </div>
+                                <div
+                                    className='flex items-center gap-2'
+                                >
+                                    <ModelSwitcher
+                                        selectedModel={selectedModel}
+                                        setSelectedModel={handleModelChange}
+                                        className="!px-4 rounded-full"
+                                    />
+                                    <CopyButton text={message.content} />
+                                </div>
                            </div>
                            </div>
-                        )}
-                        {message.toolInvocations?.map((toolInvocation: ToolInvocation, toolIndex: number) => (
-                            <div key={`tool-${toolIndex}`}>
-                                {renderToolInvocation(toolInvocation, toolIndex)}
+                            <div>
+                                <MarkdownRenderer content={message.content} />
                            </div>
-                        ))}
-                    </div>
-                )))}
+                        </div>
+                    )}
+                    {message.toolInvocations?.map((toolInvocation: ToolInvocation, toolIndex: number) => (
+                        <div key={`tool-${toolIndex}`}>
+                            {renderToolInvocation(toolInvocation, toolIndex)}
+                        </div>
+                    ))}
+                </div>
+            ))}

            {suggestedQuestions.length > 0 && (
                <motion.div
                    initial={{ opacity: 0, y: 20 }}
@@ -2680,14 +2471,14 @@ The o1-mini is a new OpenAI model that is optimized for reasoning tasks. Current
                <AnimatePresence>
                    {hasSubmitted && (
                        <FormComponent
-                            input={selectedModel === 'openai/o1-mini' ? o1Input : input}
-                            setInput={selectedModel === 'openai/o1-mini' ? setO1Input : setInput}
+                            input={input}
+                            setInput={setInput}
                            attachments={attachments}
                            setAttachments={setAttachments}
                            hasSubmitted={hasSubmitted}
                            setHasSubmitted={setHasSubmitted}
-                            handleSubmit={selectedModel === 'openai/o1-mini' ? handleO1Submit : handleSubmit}
-                            isLoading={selectedModel === 'openai/o1-mini' ? isO1Loading : isLoading}
+                            handleSubmit={handleSubmit}
+                            isLoading={isLoading}
                            fileInputRef={fileInputRef}
                            inputRef={inputRef}
                        />
@@ -10,7 +10,7 @@ dependencies:
      version: 0.0.31(zod@3.23.8)
  '@ai-sdk/cohere':
    specifier: latest
-    version: 0.0.24(zod@3.23.8)
+    version: 0.0.25(zod@3.23.8)
  '@ai-sdk/google':
    specifier: ^0.0.46
    version: 0.0.46(zod@3.23.8)
@@ -88,7 +88,7 @@ dependencies:
    version: 1.4.0
  ai:
    specifier: latest
-    version: 3.3.41(openai@4.56.0)(react@18.3.1)(svelte@4.2.18)(vue@3.4.35)(zod@3.23.8)
+    version: 3.4.9(openai@4.56.0)(react@18.3.1)(svelte@4.2.18)(vue@3.4.35)(zod@3.23.8)
  anthropic-vertex-ai:
    specifier: ^1.0.0
    version: 1.0.0(zod@3.23.8)
@@ -232,14 +232,14 @@ packages:
      zod: 3.23.8
    dev: false

-  /@ai-sdk/cohere@0.0.24(zod@3.23.8):
-    resolution: {integrity: sha512-2BDe6hSp3N6lRW9qS6/knjZVAUk0oo/oGzANar0XrENrFeMiMGh0tr081otATyZLxVeFJjksI029hW9QwWZNeg==}
+  /@ai-sdk/cohere@0.0.25(zod@3.23.8):
+    resolution: {integrity: sha512-i7wraTa9/Qmozhw/E5OX+TNIYcPYE7higmCiYvknYyfGTc2XjnVaB2zyIfmbw0TgL+2m3PbVk75vEL5zG1ickQ==}
    engines: {node: '>=18'}
    peerDependencies:
      zod: ^3.0.0
    dependencies:
-      '@ai-sdk/provider': 0.0.23
-      '@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
      zod: 3.23.8
    dev: false

@@ -352,6 +352,22 @@ packages:
      zod: 3.23.8
    dev: false

+  /@ai-sdk/provider-utils@1.0.20(zod@3.23.8):
+    resolution: {integrity: sha512-ngg/RGpnA00eNOWEtXHenpX1MsM2QshQh4QJFjUfwcqHpM5kTfG7je7Rc3HcEDP+OkRVv2GF+X4fC1Vfcnl8Ow==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.0.0
+    peerDependenciesMeta:
+      zod:
+        optional: true
+    dependencies:
+      '@ai-sdk/provider': 0.0.24
+      eventsource-parser: 1.1.2
+      nanoid: 3.3.6
+      secure-json-parse: 2.7.0
+      zod: 3.23.8
+    dev: false
+
  /@ai-sdk/provider@0.0.21:
    resolution: {integrity: sha512-9j95uaPRxwYkzQdkl4XO/MmWWW5c5vcVSXtqvALpD9SMB9fzH46dO3UN4VbOJR2J3Z84CZAqgZu5tNlkptT9qQ==}
    engines: {node: '>=18'}
@@ -373,8 +389,15 @@ packages:
      json-schema: 0.4.0
    dev: false

-  /@ai-sdk/react@0.0.59(react@18.3.1)(zod@3.23.8):
-    resolution: {integrity: sha512-1WbgO3J2/OoheMuNMxy5itJ3NVqOpqpAQxFNp7AoXgnDv4wDF4kTif61rTlKh7dCPvBHj2HXLmob+TrVFaWhYw==}
+  /@ai-sdk/provider@0.0.24:
+    resolution: {integrity: sha512-XMsNGJdGO+L0cxhhegtqZ8+T6nn4EoShS819OvCgI2kLbYTIvk0GWFGD0AXJmxkxs3DrpsJxKAFukFR7bvTkgQ==}
+    engines: {node: '>=18'}
+    dependencies:
+      json-schema: 0.4.0
+    dev: false
+
+  /@ai-sdk/react@0.0.62(react@18.3.1)(zod@3.23.8):
+    resolution: {integrity: sha512-1asDpxgmeHWL0/EZPCLENxfOHT+0jce0z/zasRhascodm2S6f6/KZn5doLG9jdmarcb+GjMjFmmwyOVXz3W1xg==}
    engines: {node: '>=18'}
    peerDependencies:
      react: ^18 || ^19
@@ -385,15 +408,15 @@ packages:
      zod:
        optional: true
    dependencies:
-      '@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
-      '@ai-sdk/ui-utils': 0.0.44(zod@3.23.8)
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8)
      react: 18.3.1
      swr: 2.2.5(react@18.3.1)
      zod: 3.23.8
    dev: false

-  /@ai-sdk/solid@0.0.47(zod@3.23.8):
-    resolution: {integrity: sha512-lVMxIxtuNqoo/TObSFGflEP2dUeJv7bfPQbS4jHTZGBNlyhgBRY2Xc19yNjA3QKRfvQNDVoQusqxn+18MiHJJQ==}
+  /@ai-sdk/solid@0.0.49(zod@3.23.8):
+    resolution: {integrity: sha512-KnfWTt640cS1hM2fFIba8KHSPLpOIWXtEm28pNCHTvqasVKlh2y/zMQANTwE18pF2nuXL9P9F5/dKWaPsaEzQw==}
    engines: {node: '>=18'}
    peerDependencies:
      solid-js: ^1.7.7
|
|||||||
solid-js:
|
solid-js:
|
||||||
optional: true
|
optional: true
|
||||||
dependencies:
|
dependencies:
|
||||||
'@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
|
'@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
|
||||||
'@ai-sdk/ui-utils': 0.0.44(zod@3.23.8)
|
'@ai-sdk/ui-utils': 0.0.46(zod@3.23.8)
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- zod
|
- zod
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
/@ai-sdk/svelte@0.0.49(svelte@4.2.18)(zod@3.23.8):
|
/@ai-sdk/svelte@0.0.51(svelte@4.2.18)(zod@3.23.8):
|
||||||
resolution: {integrity: sha512-gV0MhaWxkatjf7uJrCAHO3bWrihokNUwGhuMCgyG+y53lwJKAYhR0zCoDRM2HnTJ89fdnx/PVe3R9fOWEVY5qA==}
|
resolution: {integrity: sha512-aIZJaIds+KpCt19yUDCRDWebzF/17GCY7gN9KkcA2QM6IKRO5UmMcqEYja0ZmwFQPm1kBZkF2njhr8VXis2mAw==}
|
||||||
engines: {node: '>=18'}
|
engines: {node: '>=18'}
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
svelte: ^3.0.0 || ^4.0.0
|
svelte: ^3.0.0 || ^4.0.0
|
||||||
@ -416,16 +439,16 @@ packages:
|
|||||||
svelte:
|
svelte:
|
||||||
optional: true
|
optional: true
|
||||||
dependencies:
|
dependencies:
|
||||||
'@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
|
'@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
|
||||||
'@ai-sdk/ui-utils': 0.0.44(zod@3.23.8)
|
'@ai-sdk/ui-utils': 0.0.46(zod@3.23.8)
|
||||||
sswr: 2.1.0(svelte@4.2.18)
|
sswr: 2.1.0(svelte@4.2.18)
|
||||||
svelte: 4.2.18
|
svelte: 4.2.18
|
||||||
transitivePeerDependencies:
|
transitivePeerDependencies:
|
||||||
- zod
|
- zod
|
||||||
dev: false
|
dev: false
|
||||||
|
|
||||||
/@ai-sdk/ui-utils@0.0.44(zod@3.23.8):
|
/@ai-sdk/ui-utils@0.0.46(zod@3.23.8):
|
||||||
resolution: {integrity: sha512-0qiyun/n5zqJzQs/WfQT86dZE5DiDhSHJc7b7ZGLYvNMztHkRQmak2zUCZP4IyGVZEicyEPQK6NEEpBgkmd3Dg==}
|
resolution: {integrity: sha512-ZG/wneyJG+6w5Nm/hy1AKMuRgjPQToAxBsTk61c9sVPUTaxo+NNjM2MhXQMtmsja2N5evs8NmHie+ExEgpL3cA==}
|
||||||
engines: {node: '>=18'}
|
engines: {node: '>=18'}
|
||||||
peerDependencies:
|
peerDependencies:
|
||||||
zod: ^3.0.0
|
zod: ^3.0.0
|
||||||
@@ -433,16 +456,16 @@ packages:
      zod:
        optional: true
    dependencies:
-      '@ai-sdk/provider': 0.0.23
-      '@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
      json-schema: 0.4.0
      secure-json-parse: 2.7.0
      zod: 3.23.8
      zod-to-json-schema: 3.23.2(zod@3.23.8)
    dev: false

-  /@ai-sdk/vue@0.0.50(vue@3.4.35)(zod@3.23.8):
-    resolution: {integrity: sha512-eIWfxqpKwRdL3rxJMg1HDJcjfugFJGg4P934Tl69S7UCot2/U4BPZoESVJQFroS1elbKHaMRgv0ZJt1ddWQPjQ==}
+  /@ai-sdk/vue@0.0.54(vue@3.4.35)(zod@3.23.8):
+    resolution: {integrity: sha512-Ltu6gbuii8Qlp3gg7zdwdnHdS4M8nqKDij2VVO1223VOtIFwORFJzKqpfx44U11FW8z2TPVBYN+FjkyVIcN2hg==}
    engines: {node: '>=18'}
    peerDependencies:
      vue: ^3.3.4
@@ -450,8 +473,8 @@ packages:
      vue:
        optional: true
    dependencies:
-      '@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
-      '@ai-sdk/ui-utils': 0.0.44(zod@3.23.8)
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8)
      swrv: 1.0.4(vue@3.4.35)
      vue: 3.4.35(typescript@5.5.4)
    transitivePeerDependencies:
@@ -1938,8 +1961,8 @@ packages:
      humanize-ms: 1.2.1
    dev: false

-  /ai@3.3.41(openai@4.56.0)(react@18.3.1)(svelte@4.2.18)(vue@3.4.35)(zod@3.23.8):
-    resolution: {integrity: sha512-unWUqw0hnZo0irhdedTv8ef7IEiySBCO3zjPxx1/k0kI1G0whKYq8l83k/LzqShLekc2Qg3gyyhdEO+39ptegw==}
+  /ai@3.4.9(openai@4.56.0)(react@18.3.1)(svelte@4.2.18)(vue@3.4.35)(zod@3.23.8):
+    resolution: {integrity: sha512-wmVzpIHNGjCEjIJ/3945a/DIkz+gwObjC767ZRgO8AmtIZMO5KqvqNr7n2KF+gQrCPCMC8fM1ICQFXSvBZnBlA==}
    engines: {node: '>=18'}
    peerDependencies:
      openai: ^4.42.0
@@ -1959,13 +1982,13 @@ packages:
      zod:
        optional: true
    dependencies:
-      '@ai-sdk/provider': 0.0.23
-      '@ai-sdk/provider-utils': 1.0.19(zod@3.23.8)
-      '@ai-sdk/react': 0.0.59(react@18.3.1)(zod@3.23.8)
-      '@ai-sdk/solid': 0.0.47(zod@3.23.8)
-      '@ai-sdk/svelte': 0.0.49(svelte@4.2.18)(zod@3.23.8)
-      '@ai-sdk/ui-utils': 0.0.44(zod@3.23.8)
-      '@ai-sdk/vue': 0.0.50(vue@3.4.35)(zod@3.23.8)
+      '@ai-sdk/provider': 0.0.24
+      '@ai-sdk/provider-utils': 1.0.20(zod@3.23.8)
+      '@ai-sdk/react': 0.0.62(react@18.3.1)(zod@3.23.8)
+      '@ai-sdk/solid': 0.0.49(zod@3.23.8)
+      '@ai-sdk/svelte': 0.0.51(svelte@4.2.18)(zod@3.23.8)
+      '@ai-sdk/ui-utils': 0.0.46(zod@3.23.8)
+      '@ai-sdk/vue': 0.0.54(vue@3.4.35)(zod@3.23.8)
      '@opentelemetry/api': 1.9.0
      eventsource-parser: 1.1.2
      json-schema: 0.4.0