app.dockerfile (3 changes: 2 additions & 1 deletion)
@@ -31,5 +31,6 @@ COPY --from=builder /home/perplexica/migrator/index.js ./migrate.js
 RUN mkdir /home/perplexica/uploads

 COPY entrypoint.sh ./entrypoint.sh
+RUN sed -i 's/\r$//' ./entrypoint.sh
+RUN chmod +x ./entrypoint.sh
-CMD ["./entrypoint.sh"]
+CMD ["/bin/sh","/home/perplexica/entrypoint.sh"]
src/app/api/chat/route.ts (116 changes: 112 additions & 4 deletions)
@@ -61,10 +61,22 @@ const handleEmitterEvents = async (
) => {
let recievedMessage = '';
let sources: any[] = [];
let sentGeneratingStatus = false;

- stream.on('data', (data) => {
+ stream.on('data', (data: string) => {
const parsedData = JSON.parse(data);
if (parsedData.type === 'response') {
if (!sentGeneratingStatus) {
writer.write(
encoder.encode(
JSON.stringify({
type: 'status',
data: 'Generating answer...',
}) + '\n',
),
);
sentGeneratingStatus = true;
}
writer.write(
encoder.encode(
JSON.stringify({
@@ -77,6 +89,17 @@ const handleEmitterEvents = async (

recievedMessage += parsedData.data;
} else if (parsedData.type === 'sources') {
if (!sentGeneratingStatus) {
writer.write(
encoder.encode(
JSON.stringify({
type: 'status',
data: 'Generating answer...',
}) + '\n',
),
);
sentGeneratingStatus = true;
}
writer.write(
encoder.encode(
JSON.stringify({
@@ -114,8 +137,16 @@ const handleEmitterEvents = async (
})
.execute();
});
- stream.on('error', (data) => {
+ stream.on('error', (data: string) => {
const parsedData = JSON.parse(data);
writer.write(
encoder.encode(
JSON.stringify({
type: 'status',
data: 'Chat completion failed.',
}) + '\n',
),
);
writer.write(
encoder.encode(
JSON.stringify({
@@ -218,6 +249,28 @@ export const POST = async (req: Request) => {
body.embeddingModel?.name || Object.keys(embeddingProvider)[0]
];

const selectedChatProviderKey =
body.chatModel?.provider || Object.keys(chatModelProviders)[0];
const selectedChatModelKey =
body.chatModel?.name || Object.keys(chatModelProvider)[0];
const selectedEmbeddingProviderKey =
body.embeddingModel?.provider || Object.keys(embeddingModelProviders)[0];
const selectedEmbeddingModelKey =
body.embeddingModel?.name || Object.keys(embeddingProvider)[0];

console.log('[Models] Chat request', {
chatProvider: selectedChatProviderKey,
chatModel: selectedChatModelKey,
embeddingProvider: selectedEmbeddingProviderKey,
embeddingModel: selectedEmbeddingModelKey,
...(selectedChatProviderKey === 'custom_openai'
? { chatBaseURL: getCustomOpenaiApiUrl() }
: {}),
...(selectedEmbeddingProviderKey === 'custom_openai'
? { embeddingBaseURL: getCustomOpenaiApiUrl() }
: {}),
});

let llm: BaseChatModel | undefined;
let embedding = embeddingModel.model;

@@ -272,11 +325,54 @@ export const POST = async (req: Request) => {
);
}

const llmProxy = new Proxy(llm as any, {
get(target, prop, receiver) {
if (
prop === 'invoke' ||
prop === 'stream' ||
prop === 'streamEvents' ||
prop === 'generate'
) {
return (...args: any[]) => {
console.log('[Models] Chat model call', {
provider: selectedChatProviderKey,
model: selectedChatModelKey,
method: String(prop),
});
return (target as any)[prop](...args);
};
}
return Reflect.get(target, prop, receiver);
},
});

const embeddingProxy = new Proxy(embedding as any, {
get(target, prop, receiver) {
if (prop === 'embedQuery' || prop === 'embedDocuments') {
return (...args: any[]) => {
console.log('[Models] Embedding model call', {
provider: selectedEmbeddingProviderKey,
model: selectedEmbeddingModelKey,
method: String(prop),
size:
prop === 'embedDocuments'
? Array.isArray(args[0])
? args[0].length
: undefined
: undefined,
});
return (target as any)[prop](...args);
};
}
return Reflect.get(target, prop, receiver);
},
});

const stream = await handler.searchAndAnswer(
message.content,
history,
- llm,
- embedding,
+ llmProxy as any,
+ embeddingProxy as any,
body.optimizationMode,
body.files,
body.systemInstructions,
@@ -286,6 +382,18 @@ export const POST = async (req: Request) => {
const writer = responseStream.writable.getWriter();
const encoder = new TextEncoder();

writer.write(
encoder.encode(
JSON.stringify({
type: 'status',
data:
body.focusMode === 'writingAssistant'
? 'Waiting for chat completion...'
: 'Searching web...',
}) + '\n',
),
);

handleEmitterEvents(stream, writer, encoder, aiMessageId, message.chatId);
handleHistorySave(message, humanMessageId, body.focusMode, body.files);

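With this change the chat route emits newline-delimited JSON events, interleaving the new `status` events (`Searching web...`, `Generating answer...`, `Chat completion failed.`) with the existing `response` and `sources` events. A minimal client-side sketch of consuming that stream, assuming the `/api/chat` endpoint path and the `{ type, data }` event shape shown in the diff:

```ts
// Minimal sketch (not part of this PR): read the NDJSON event stream that
// the chat route writes. Each line is a JSON object of shape
// { type: 'status' | 'response' | 'sources', data }.
async function streamChat(
  body: unknown,
  onEvent: (event: { type: string; data: unknown }) => void,
) {
  const res = await fetch('/api/chat', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  });
  const reader = res.body!.getReader();
  const decoder = new TextDecoder();
  let buffer = '';
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    // Events are '\n'-separated; keep any trailing partial line buffered.
    const lines = buffer.split('\n');
    buffer = lines.pop() ?? '';
    for (const line of lines) {
      if (line.trim()) onEvent(JSON.parse(line));
    }
  }
}
```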
src/app/api/search/route.ts (60 changes: 58 additions & 2 deletions)
@@ -75,6 +75,19 @@ export const POST = async (req: Request) => {
body.embeddingModel?.name ||
Object.keys(embeddingModelProviders[embeddingModelProvider])[0];

console.log('[Models] Search request', {
chatProvider: chatModelProvider,
chatModel,
embeddingProvider: embeddingModelProvider,
embeddingModel,
...(chatModelProvider === 'custom_openai'
? { chatBaseURL: getCustomOpenaiApiUrl() }
: {}),
...(embeddingModelProvider === 'custom_openai'
? { embeddingBaseURL: getCustomOpenaiApiUrl() }
: {}),
});

let llm: BaseChatModel | undefined;
let embeddings: Embeddings | undefined;

@@ -118,11 +131,54 @@ export const POST = async (req: Request) => {
return Response.json({ message: 'Invalid focus mode' }, { status: 400 });
}

const llmProxy = new Proxy(llm as any, {
get(target, prop, receiver) {
if (
prop === 'invoke' ||
prop === 'stream' ||
prop === 'streamEvents' ||
prop === 'generate'
) {
return (...args: any[]) => {
console.log('[Models] Chat model call', {
provider: chatModelProvider,
model: chatModel,
method: String(prop),
});
return (target as any)[prop](...args);
};
}
return Reflect.get(target, prop, receiver);
},
});

const embeddingProxy = new Proxy(embeddings as any, {
get(target, prop, receiver) {
if (prop === 'embedQuery' || prop === 'embedDocuments') {
return (...args: any[]) => {
console.log('[Models] Embedding model call', {
provider: embeddingModelProvider,
model: embeddingModel,
method: String(prop),
size:
prop === 'embedDocuments'
? Array.isArray(args[0])
? args[0].length
: undefined
: undefined,
});
return (target as any)[prop](...args);
};
}
return Reflect.get(target, prop, receiver);
},
});

const emitter = await searchHandler.searchAndAnswer(
body.query,
history,
- llm,
- embeddings,
+ llmProxy as any,
+ embeddingProxy as any,
body.optimizationMode,
[],
body.systemInstructions || '',
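The same logging `Proxy` now appears nearly verbatim in the chat, search, and upload routes. A hedged sketch of a shared helper that could factor out the pattern — `withCallLogging` is a hypothetical name, not part of this PR:

```ts
// Hypothetical helper (not in this PR): wrap the named methods of a model
// instance so each call is logged with its context before being forwarded.
function withCallLogging<T extends object>(
  target: T,
  methods: readonly string[],
  context: Record<string, unknown>,
): T {
  return new Proxy(target, {
    get(obj, prop, receiver) {
      if (typeof prop === 'string' && methods.includes(prop)) {
        return (...args: any[]) => {
          console.log('[Models] call', { ...context, method: prop });
          // Forward to the real method on the unwrapped target.
          return (obj as any)[prop](...args);
        };
      }
      return Reflect.get(obj, prop, receiver);
    },
  });
}
```

As in the inline versions above, only the listed methods are intercepted; every other property read falls through to the target via `Reflect.get`.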
src/app/api/uploads/route.ts (33 changes: 32 additions & 1 deletion)
@@ -3,6 +3,7 @@ import fs from 'fs';
import path from 'path';
import crypto from 'crypto';
import { getAvailableEmbeddingModelProviders } from '@/lib/providers';
import { getCustomOpenaiApiUrl } from '@/lib/config';
import { PDFLoader } from '@langchain/community/document_loaders/fs/pdf';
import { DocxLoader } from '@langchain/community/document_loaders/fs/docx';
import { RecursiveCharacterTextSplitter } from '@langchain/textsplitters';
@@ -46,6 +47,14 @@ export async function POST(req: Request) {
const embeddingModel =
embedding_model ?? Object.keys(embeddingModels[provider as string])[0];

console.log('[Models] Upload embeddings request', {
embeddingProvider: provider,
embeddingModel,
...(provider === 'custom_openai'
? { embeddingBaseURL: getCustomOpenaiApiUrl() }
: {}),
});

let embeddingsModel =
embeddingModels[provider as string]?.[embeddingModel as string]?.model;
if (!embeddingsModel) {
@@ -55,6 +64,28 @@
);
}

const loggedEmbeddings = new Proxy(embeddingsModel as any, {
get(target, prop, receiver) {
if (prop === 'embedQuery' || prop === 'embedDocuments') {
return (...args: any[]) => {
console.log('[Models] Upload embedding model call', {
provider,
model: embeddingModel,
method: String(prop),
size:
prop === 'embedDocuments'
? Array.isArray(args[0])
? args[0].length
: undefined
: undefined,
});
return (target as any)[prop](...args);
};
}
return Reflect.get(target, prop, receiver);
},
});

const processedFiles: FileRes[] = [];

await Promise.all(
@@ -98,7 +129,7 @@
}),
);

- const embeddings = await embeddingsModel.embedDocuments(
+ const embeddings = await loggedEmbeddings.embedDocuments(
splitted.map((doc) => doc.pageContent),
);
const embeddingsDataPath = filePath.replace(
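With a helper like the `withCallLogging` sketch in the search-route section above, this route's inline proxy would reduce to a single call (again hypothetical, and omitting the per-call `size` field the inline version logs):

```ts
// Hypothetical usage of the withCallLogging sketch shown earlier;
// behavior otherwise matches the inline proxy in this diff.
const loggedEmbeddings = withCallLogging(
  embeddingsModel,
  ['embedQuery', 'embedDocuments'],
  { provider, model: embeddingModel },
);
```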
src/components/Chat.tsx (7 changes: 6 additions & 1 deletion)
@@ -16,6 +16,7 @@ const Chat = ({
setFileIds,
files,
setFiles,
statusText,
}: {
messages: Message[];
sendMessage: (message: string) => void;
@@ -26,6 +27,7 @@
setFileIds: (fileIds: string[]) => void;
files: File[];
setFiles: (files: File[]) => void;
statusText?: string;
}) => {
const [dividerWidth, setDividerWidth] = useState(0);
const dividerRef = useRef<HTMLDivElement | null>(null);
@@ -78,14 +80,17 @@
isLast={isLast}
rewrite={rewrite}
sendMessage={sendMessage}
statusText={statusText}
/>
{!isLast && msg.role === 'assistant' && (
<div className="h-px w-full bg-light-secondary dark:bg-dark-secondary" />
)}
</Fragment>
);
})}
- {loading && !messageAppeared && <MessageBoxLoading />}
+ {loading && !messageAppeared && (
+   <MessageBoxLoading statusText={statusText} />
+ )}
<div ref={messageEnd} className="h-0" />
{dividerWidth > 0 && (
<div
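For reference, `MessageBoxLoading` must accept the new optional `statusText` prop. Its actual markup is not part of this diff, so the sketch below is illustrative only:

```tsx
// Illustrative only — MessageBoxLoading's real markup is not in this diff.
// It shows the shape implied by the new optional statusText prop.
const MessageBoxLoading = ({ statusText }: { statusText?: string }) => (
  <div className="flex flex-col space-y-2 w-full animate-pulse">
    <div className="h-2 rounded-full w-full bg-light-secondary dark:bg-dark-secondary" />
    {statusText && (
      <p className="text-sm text-black/70 dark:text-white/70">{statusText}</p>
    )}
  </div>
);
```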