diff --git a/app.dockerfile b/app.dockerfile
index c3c0fd040..057435146 100644
--- a/app.dockerfile
+++ b/app.dockerfile
@@ -31,5 +31,6 @@ COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
 RUN mkdir /home/perplexica/uploads
 
 COPY entrypoint.sh ./entrypoint.sh
+RUN sed -i 's/\r$//' ./entrypoint.sh
 RUN chmod +x ./entrypoint.sh
-CMD ["./entrypoint.sh"]
\ No newline at end of file
+CMD ["/bin/sh","/home/perplexica/entrypoint.sh"]
\ No newline at end of file
diff --git a/src/app/api/chat/route.ts b/src/app/api/chat/route.ts
index 2d53b753c..9726f6c7c 100644
--- a/src/app/api/chat/route.ts
+++ b/src/app/api/chat/route.ts
@@ -61,10 +61,22 @@ const handleEmitterEvents = async (
 ) => {
   let recievedMessage = '';
   let sources: any[] = [];
+  let sentGeneratingStatus = false;
 
-  stream.on('data', (data) => {
+  stream.on('data', (data: string) => {
     const parsedData = JSON.parse(data);
     if (parsedData.type === 'response') {
+      if (!sentGeneratingStatus) {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'status',
+              data: 'Generating answer...',
+            }) + '\n',
+          ),
+        );
+        sentGeneratingStatus = true;
+      }
       writer.write(
         encoder.encode(
           JSON.stringify({
@@ -77,6 +89,17 @@ const handleEmitterEvents = async (
       recievedMessage += parsedData.data;
     } else if (parsedData.type === 'sources') {
+      if (!sentGeneratingStatus) {
+        writer.write(
+          encoder.encode(
+            JSON.stringify({
+              type: 'status',
+              data: 'Generating answer...',
+            }) + '\n',
+          ),
+        );
+        sentGeneratingStatus = true;
+      }
       writer.write(
         encoder.encode(
           JSON.stringify({
@@ -114,8 +137,16 @@
         })
         .execute();
   });
-  stream.on('error', (data) => {
+  stream.on('error', (data: string) => {
     const parsedData = JSON.parse(data);
+    writer.write(
+      encoder.encode(
+        JSON.stringify({
+          type: 'status',
+          data: 'Chat completion failed.',
+        }) + '\n',
+      ),
+    );
     writer.write(
       encoder.encode(
         JSON.stringify({
@@ -218,6 +249,28 @@ export const POST = async (req: Request) => {
        body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
      ];
 
+    const selectedChatProviderKey =
+      body.chatModel?.provider || Object.keys(chatModelProviders)[0];
+    const selectedChatModelKey =
+      body.chatModel?.name || Object.keys(chatModelProvider)[0];
+    const selectedEmbeddingProviderKey =
+      body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0];
+    const selectedEmbeddingModelKey =
+      body.embeddingModel?.name || Object.keys(embeddingProvider)[0];
+
+    console.log('[Models] Chat request', {
+      chatProvider: selectedChatProviderKey,
+      chatModel: selectedChatModelKey,
+      embeddingProvider: selectedEmbeddingProviderKey,
+      embeddingModel: selectedEmbeddingModelKey,
+      ...(selectedChatProviderKey === 'custom_openai'
+        ? { chatBaseURL: getCustomOpenaiApiUrl() }
+        : {}),
+      ...(selectedEmbeddingProviderKey === 'custom_openai'
+        ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+        : {}),
+    });
+
     let llm: BaseChatModel | undefined;
     let embedding = embeddingModel.model;
@@ -272,11 +325,54 @@ export const POST = async (req: Request) => {
       );
     }
 
+    const llmProxy = new Proxy(llm as any, {
+      get(target, prop, receiver) {
+        if (
+          prop === 'invoke' ||
+          prop === 'stream' ||
+          prop === 'streamEvents' ||
+          prop === 'generate'
+        ) {
+          return (...args: any[]) => {
+            console.log('[Models] Chat model call', {
+              provider: selectedChatProviderKey,
+              model: selectedChatModelKey,
+              method: String(prop),
+            });
+            return (target as any)[prop](...args);
+          };
+        }
+        return Reflect.get(target, prop, receiver);
+      },
+    });
+
+    const embeddingProxy = new Proxy(embedding as any, {
+      get(target, prop, receiver) {
+        if (prop === 'embedQuery' || prop === 'embedDocuments') {
+          return (...args: any[]) => {
+            console.log('[Models] Embedding model call', {
+              provider: selectedEmbeddingProviderKey,
+              model: selectedEmbeddingModelKey,
+              method: String(prop),
+              size:
+                prop === 'embedDocuments'
+                  ? Array.isArray(args[0])
+                    ? args[0].length
+                    : undefined
+                  : undefined,
+            });
+            return (target as any)[prop](...args);
+          };
+        }
+        return Reflect.get(target, prop, receiver);
+      },
+    });
+
     const stream = await handler.searchAndAnswer(
       message.content,
       history,
-      llm,
-      embedding,
+      llmProxy as any,
+      embeddingProxy as any,
       body.optimizationMode,
       body.files,
       body.systemInstructions,
@@ -286,6 +382,18 @@ export const POST = async (req: Request) => {
     const writer = responseStream.writable.getWriter();
     const encoder = new TextEncoder();
 
+    writer.write(
+      encoder.encode(
+        JSON.stringify({
+          type: 'status',
+          data:
+            body.focusMode === 'writingAssistant'
+              ? 'Waiting for chat completion...'
+              : 'Searching web...',
+        }) + '\n',
+      ),
+    );
+
     handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId);
     handleHistorySave(message, humanMessageId, body.focusMode, body.files);
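For reference, this route streams newline-delimited JSON, with the new `status` events interleaved ahead of the existing ones. A rough event union, inferred only from the handlers visible in this diff (payloads are abbreviated; the real route attaches extra fields such as message ids that are not shown here):

```ts
// Sketch of the wire format after this change: one JSON object per line.
// Event names come from the handlers above; payload types are inferred
// and deliberately incomplete.
type StreamEvent =
  | { type: 'status'; data: string } // new: 'Searching web...', 'Generating answer...', ...
  | { type: 'sources'; data: unknown[] }
  | { type: 'message'; data: string }
  | { type: 'messageEnd' }
  | { type: 'error'; data: string };
```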
diff --git a/src/app/api/search/route.ts b/src/app/api/search/route.ts
index 5f752ec74..008f36407 100644
--- a/src/app/api/search/route.ts
+++ b/src/app/api/search/route.ts
@@ -75,6 +75,19 @@ export const POST = async (req: Request) => {
       body.embeddingModel?.name ||
       Object.keys(embeddingModelProviders[embeddingModelProvider])[0];
 
+    console.log('[Models] Search request', {
+      chatProvider: chatModelProvider,
+      chatModel,
+      embeddingProvider: embeddingModelProvider,
+      embeddingModel,
+      ...(chatModelProvider === 'custom_openai'
+        ? { chatBaseURL: getCustomOpenaiApiUrl() }
+        : {}),
+      ...(embeddingModelProvider === 'custom_openai'
+        ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+        : {}),
+    });
+
     let llm: BaseChatModel | undefined;
     let embeddings: Embeddings | undefined;
@@ -118,11 +131,54 @@ export const POST = async (req: Request) => {
       return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
     }
 
+    const llmProxy = new Proxy(llm as any, {
+      get(target, prop, receiver) {
+        if (
+          prop === 'invoke' ||
+          prop === 'stream' ||
+          prop === 'streamEvents' ||
+          prop === 'generate'
+        ) {
+          return (...args: any[]) => {
+            console.log('[Models] Chat model call', {
+              provider: chatModelProvider,
+              model: chatModel,
+              method: String(prop),
+            });
+            return (target as any)[prop](...args);
+          };
+        }
+        return Reflect.get(target, prop, receiver);
+      },
+    });
+
+    const embeddingProxy = new Proxy(embeddings as any, {
+      get(target, prop, receiver) {
+        if (prop === 'embedQuery' || prop === 'embedDocuments') {
+          return (...args: any[]) => {
+            console.log('[Models] Embedding model call', {
+              provider: embeddingModelProvider,
+              model: embeddingModel,
+              method: String(prop),
+              size:
+                prop === 'embedDocuments'
+                  ? Array.isArray(args[0])
+                    ? args[0].length
+                    : undefined
+                  : undefined,
+            });
+            return (target as any)[prop](...args);
+          };
+        }
+        return Reflect.get(target, prop, receiver);
+      },
+    });
+
     const emitter = await searchHandler.searchAndAnswer(
       body.query,
       history,
-      llm,
-      embeddings,
+      llmProxy as any,
+      embeddingProxy as any,
       body.optimizationMode,
       [],
       body.systemInstructions || '',
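A quick way to exercise the new `[Models] Search request` log line. The field names mirror the destructuring visible in this diff; the remaining body fields and the `optimizationMode` value are assumptions about the route's full schema:

```ts
// Hypothetical probe, not part of this PR: POST a minimal query and watch the
// server console for the '[Models] Search request' entry added above.
async function probeSearchLogging(): Promise<void> {
  const res = await fetch('/api/search', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      query: 'test query',
      history: [],                  // shape assumed
      optimizationMode: 'balanced', // value assumed
    }),
  });
  console.log(res.status, await res.json());
}
```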
diff --git a/src/app/api/uploads/route.ts b/src/app/api/uploads/route.ts
index 9fbaf2d48..fd8d943bc 100644
--- a/src/app/api/uploads/route.ts
+++ b/src/app/api/uploads/route.ts
@@ -3,6 +3,7 @@ import fs from 'fs';
 import path from 'path';
 import crypto from 'crypto';
 import { getAvailableEmbeddingModelProviders } from '@/lib/providers';
+import { getCustomOpenaiApiUrl } from '@/lib/config';
 import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
 import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
 import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
@@ -46,6 +47,14 @@ export async function POST(req: Request) {
     const embeddingModel =
       embedding_model ?? Object.keys(embeddingModels[provider as string])[0];
 
+    console.log('[Models] Upload embeddings request', {
+      embeddingProvider: provider,
+      embeddingModel,
+      ...(provider === 'custom_openai'
+        ? { embeddingBaseURL: getCustomOpenaiApiUrl() }
+        : {}),
+    });
+
     let embeddingsModel =
       embeddingModels[provider as string]?.[embeddingModel as string]?.model;
     if (!embeddingsModel) {
@@ -55,6 +64,28 @@
       );
     }
 
+    const loggedEmbeddings = new Proxy(embeddingsModel as any, {
+      get(target, prop, receiver) {
+        if (prop === 'embedQuery' || prop === 'embedDocuments') {
+          return (...args: any[]) => {
+            console.log('[Models] Upload embedding model call', {
+              provider,
+              model: embeddingModel,
+              method: String(prop),
+              size:
+                prop === 'embedDocuments'
+                  ? Array.isArray(args[0])
+                    ? args[0].length
+                    : undefined
+                  : undefined,
+            });
+            return (target as any)[prop](...args);
+          };
+        }
+        return Reflect.get(target, prop, receiver);
+      },
+    });
+
     const processedFiles: FileRes[] = [];
 
     await Promise.all(
@@ -98,7 +129,7 @@
         }),
       );
 
-      const embeddings = await embeddingsModel.embedDocuments(
+      const embeddings = await loggedEmbeddings.embedDocuments(
         splitted.map((doc) => doc.pageContent),
       );
       const embeddingsDataPath = filePath.replace(
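At this point the logging `Proxy` is repeated almost verbatim in the chat, search, and uploads routes. A possible follow-up is a shared helper along these lines (`withCallLogging` is a hypothetical name and not part of this PR):

```ts
// Hypothetical shared helper: wraps the listed methods of any object with a
// console.log call, mirroring the three inline proxies in this diff.
export const withCallLogging = <T extends object>(
  target: T,
  label: string,
  methods: string[],
  meta: Record<string, unknown>,
): T =>
  new Proxy(target, {
    get(obj, prop, receiver) {
      if (typeof prop === 'string' && methods.includes(prop)) {
        return (...args: unknown[]) => {
          console.log(label, { ...meta, method: prop });
          return (obj as any)[prop](...args);
        };
      }
      return Reflect.get(obj, prop, receiver);
    },
  });

// Usage matching the chat route:
// const llmProxy = withCallLogging(llm as any, '[Models] Chat model call',
//   ['invoke', 'stream', 'streamEvents', 'generate'],
//   { provider: selectedChatProviderKey, model: selectedChatModelKey });
```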
diff --git a/src/components/Chat.tsx b/src/components/Chat.tsx
index 0cf125b81..dba87283a 100644
--- a/src/components/Chat.tsx
+++ b/src/components/Chat.tsx
@@ -16,6 +16,7 @@ const Chat = ({
   setFileIds,
   files,
   setFiles,
+  statusText,
 }: {
   messages: Message[];
   sendMessage: (message: string) => void;
@@ -26,6 +27,7 @@
   setFileIds: (fileIds: string[]) => void;
   files: File[];
   setFiles: (files: File[]) => void;
+  statusText?: string;
 }) => {
   const [dividerWidth, setDividerWidth] = useState(0);
   const dividerRef = useRef(null);
@@ -78,6 +80,7 @@
                 isLast={isLast}
                 rewrite={rewrite}
                 sendMessage={sendMessage}
+                statusText={statusText}
               />
               {!isLast && msg.role === 'assistant' && (
@@ -85,7 +88,9 @@ const Chat = ({
           );
         })}
-      {loading && !messageAppeared && <MessageBoxLoading />}
+      {loading && !messageAppeared && (
+        <MessageBoxLoading statusText={statusText} />
+      )}
       {dividerWidth > 0 && (
diff --git a/src/components/ChatWindow.tsx b/src/components/ChatWindow.tsx
--- a/src/components/ChatWindow.tsx
+++ b/src/components/ChatWindow.tsx
   const [isMessagesLoaded, setIsMessagesLoaded] = useState(false);
   const [notFound, setNotFound] = useState(false);
+  const [statusText, setStatusText] = useState<string | undefined>(undefined);
 
   useEffect(() => {
     if (
@@ -367,6 +368,11 @@
     setLoading(true);
     setMessageAppeared(false);
+    setStatusText(
+      focusMode === 'writingAssistant'
+        ? 'Waiting for chat completion...'
+        : 'Searching web...',
+    );
 
     let sources: Document[] | undefined = undefined;
     let recievedMessage = '';
@@ -386,13 +392,19 @@
     ]);
 
     const messageHandler = async (data: any) => {
+      if (data.type === 'status') {
+        if (typeof data.data === 'string') setStatusText(data.data);
+        return;
+      }
+
       if (data.type === 'error') {
         toast.error(data.data);
+        setStatusText('Chat completion failed.');
         setLoading(false);
         return;
       }
 
       if (data.type === 'sources') {
+        setStatusText('Generating answer...');
         sources = data.data;
         if (!added) {
           setMessages((prevMessages) => [
@@ -412,6 +424,7 @@
       }
 
       if (data.type === 'message') {
+        setStatusText('Generating answer...');
         if (!added) {
           setMessages((prevMessages) => [
             ...prevMessages,
@@ -442,6 +455,7 @@
       }
 
       if (data.type === 'messageEnd') {
+        setStatusText(undefined);
         setChatHistory((prevHistory) => [
           ...prevHistory,
           ['human', message],
@@ -519,31 +533,61 @@
         }),
       });
 
-      if (!res.body) throw new Error('No response body');
+      if (!res.ok) {
+        const text = await res.text();
+        try {
+          const json = JSON.parse(text);
+          toast.error(
+            json.message || `Request failed: ${res.status} ${res.statusText}`,
+          );
+        } catch {
+          toast.error(`Request failed: ${res.status} ${res.statusText}`);
+        }
+        setStatusText('Chat completion failed.');
+        setLoading(false);
+        return;
+      }
+
+      if (!res.body) {
+        toast.error('No response body');
+        setStatusText('Chat completion failed.');
+        setLoading(false);
+        return;
+      }
 
       const reader = res.body?.getReader();
       const decoder = new TextDecoder('utf-8');
 
       let partialChunk = '';
 
-      while (true) {
-        const { value, done } = await reader.read();
-        if (done) break;
-
-        partialChunk += decoder.decode(value, { stream: true });
-
-        try {
-          const messages = partialChunk.split('\n');
-          for (const msg of messages) {
-            if (!msg.trim()) continue;
-            const json = JSON.parse(msg);
-            messageHandler(json);
-          }
-          partialChunk = '';
-        } catch (error) {
-          console.warn('Incomplete JSON, waiting for next chunk...');
-        }
-      }
+      try {
+        while (true) {
+          const { value, done } = await reader.read();
+          if (done) break;
+
+          partialChunk += decoder.decode(value, { stream: true });
+
+          try {
+            const messages = partialChunk.split('\n');
+            for (const msg of messages) {
+              if (!msg.trim()) continue;
+              const json = JSON.parse(msg);
+              messageHandler(json);
+            }
+            partialChunk = '';
+          } catch (error) {
+            console.warn('Incomplete JSON, waiting for next chunk...');
+          }
+        }
+      } catch (e) {
+        console.error('Streaming error', e);
+        toast.error('Chat streaming failed.');
+        setStatusText('Chat completion failed.');
+        setLoading(false);
+        return;
+      }
+
+      // Fallback: if the stream ended without 'messageEnd' or explicit error,
+      // ensure the UI doesn't stay in a loading state indefinitely.
+      setStatusText(undefined);
+      setLoading(false);
     };
 
     const rewrite = (messageId: string) => {
@@ -605,6 +649,7 @@ const ChatWindow = ({ id }: { id?: string }) => {
           setFileIds={setFileIds}
           files={files}
           setFiles={setFiles}
+          statusText={statusText}
         />
       ) : (
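One subtlety the retained parsing loop carries over: when the trailing line of a chunk is incomplete, `JSON.parse` throws and `partialChunk` is kept whole, so the complete lines already dispatched in that pass are parsed and dispatched again when the next chunk arrives. A sketch of a splitter that buffers only the trailing fragment (not part of this PR):

```ts
// Sketch: buffer only the possibly-incomplete trailing fragment so
// already-dispatched events are never re-parsed. Assumes every complete
// line is valid JSON, as the route's output format guarantees.
function dispatchLines(
  buffer: string,
  chunk: string,
  handle: (event: unknown) => void,
): string {
  const lines = (buffer + chunk).split('\n');
  const rest = lines.pop() ?? ''; // trailing fragment, kept for next call
  for (const line of lines) {
    if (line.trim()) handle(JSON.parse(line));
  }
  return rest; // caller stores this as the next buffer
}
```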
diff --git a/src/components/MessageActions/Copy.tsx b/src/components/MessageActions/Copy.tsx
index cb07b3ec6..fdda91e84 100644
--- a/src/components/MessageActions/Copy.tsx
+++ b/src/components/MessageActions/Copy.tsx
@@ -1,3 +1,5 @@
+'use client';
+
 import { Check, ClipboardList } from 'lucide-react';
 import { Message } from '../ChatWindow';
 import { useState } from 'react';
@@ -13,11 +15,37 @@ const Copy = ({
   return (
diff --git a/src/components/MessageBoxLoading.tsx b/src/components/MessageBoxLoading.tsx
index 3c53d9eaf..26cc640c5 100644
--- a/src/components/MessageBoxLoading.tsx
+++ b/src/components/MessageBoxLoading.tsx
@@ -1,9 +1,14 @@
-const MessageBoxLoading = () => {
+const MessageBoxLoading = ({ statusText }: { statusText?: string }) => {
   return (
+      {statusText && (
+          {statusText}
+      )}
   );
 };
diff --git a/src/components/theme/Provider.tsx b/src/components/theme/Provider.tsx
index 43e2714cb..2a1705c9c 100644
--- a/src/components/theme/Provider.tsx
+++ b/src/components/theme/Provider.tsx
@@ -1,13 +1,10 @@
 'use client';
 import { ThemeProvider } from 'next-themes';
+import type { ReactNode } from 'react';
 
-const ThemeProviderComponent = ({
-  children,
-}: {
-  children: React.ReactNode;
-}) => {
+const ThemeProviderComponent = ({ children }: { children: ReactNode }) => {
   return (
-
+
       {children}
   );
diff --git a/src/components/theme/Switcher.tsx b/src/components/theme/Switcher.tsx
index b1e737150..791f7debb 100644
--- a/src/components/theme/Switcher.tsx
+++ b/src/components/theme/Switcher.tsx
@@ -1,44 +1,19 @@
 'use client';
 
 import { useTheme } from 'next-themes';
-import { useCallback, useEffect, useState } from 'react';
+import { useEffect, useState } from 'react';
+import type { ChangeEvent } from 'react';
 import Select from '../ui/Select';
 
 type Theme = 'dark' | 'light' | 'system';
 
 const ThemeSwitcher = ({ className }: { className?: string }) => {
   const [mounted, setMounted] = useState(false);
   const { theme, setTheme } = useTheme();
 
-  const isTheme = useCallback((t: Theme) => t === theme, [theme]);
-
-  const handleThemeSwitch = (theme: Theme) => {
-    setTheme(theme);
-  };
-
   useEffect(() => {
     setMounted(true);
   }, []);
 
-  useEffect(() => {
-    if (isTheme('system')) {
-      const preferDarkScheme = window.matchMedia(
-        '(prefers-color-scheme: dark)',
-      );
-
-      const detectThemeChange = (event: MediaQueryListEvent) => {
-        const theme: Theme = event.matches ? 'dark' : 'light';
-        setTheme(theme);
-      };
-
-      preferDarkScheme.addEventListener('change', detectThemeChange);
-
-      return () => {
-        preferDarkScheme.removeEventListener('change', detectThemeChange);
-      };
-    }
-  }, [isTheme, setTheme, theme]);
-
   // Avoid Hydration Mismatch
   if (!mounted) {
     return null;
@@ -48,8 +23,9 @@ const ThemeSwitcher = ({ className }: { className?: string }) => {
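The removed `matchMedia` effect is covered by next-themes itself: once the theme is set to `'system'`, the library tracks the OS color-scheme preference, so the switcher only needs to forward the selected value. The final `Select` JSX is not captured above; given the new `ChangeEvent` import, the handler plausibly looks like this (an assumption, not the PR's literal code):

```ts
// Probable onChange wiring for the simplified switcher, inside the component
// scope where setTheme (from useTheme) and the Theme union are available.
const onThemeChange = (e: ChangeEvent<HTMLSelectElement>) =>
  setTheme(e.target.value as Theme); // 'light' | 'dark' | 'system'
```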