Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Custom prompts #266

Draft
wants to merge 1 commit into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
36 changes: 12 additions & 24 deletions electron/preload/index.ts
Original file line number Diff line number Diff line change
@@ -1,25 +1,25 @@
import { ChatHistory } from "@/components/Chat/Chat";
import { ChatHistoryMetadata } from "@/components/Chat/hooks/use-chat-history";
import { IpcRendererEvent, contextBridge, ipcRenderer } from "electron";
import {
EmbeddingModelConfig,
EmbeddingModelWithLocalPath,
EmbeddingModelWithRepo,
HardwareConfig,
LLMGenerationParameters,
LLMConfig,
} from "electron/main/Store/storeConfig";
import {
AugmentPromptWithFileProps,
FileInfoNode,
FileInfoTree,
RenameFileProps,
WriteFileProps,
} from "electron/main/Files/Types";
import { DBEntry, DBQueryResult } from "electron/main/database/Schema";
import { PromptWithContextLimit } from "electron/main/Prompts/Prompts";
import { PromptWithRagResults } from "electron/main/database/dbSessionHandlers";
import {
EmbeddingModelConfig,
EmbeddingModelWithLocalPath,
EmbeddingModelWithRepo,
HardwareConfig,
LLMConfig,
LLMGenerationParameters,
} from "electron/main/Store/storeConfig";
import { DBEntry, DBQueryResult } from "electron/main/database/Schema";
import { BasePromptRequirements } from "electron/main/database/dbSessionHandlerTypes";
import { ChatHistory } from "@/components/Chat/Chat";
import { ChatHistoryMetadata } from "@/components/Chat/hooks/use-chat-history";
import { PromptWithRagResults } from "electron/main/database/dbSessionHandlers";
// Generic listener signature for IPC events forwarded from the main process.
// Payload shapes vary per channel, so the args are deliberately untyped.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export type ReceiveCallback = (...args: any[]) => void;

Expand Down Expand Up @@ -178,18 +178,6 @@ contextBridge.exposeInMainWorld("database", {
indexFilesInDirectory: async () => {
return ipcRenderer.invoke("index-files-in-directory");
},
augmentPromptWithRAG: async (
prompt: string,
llmName: string,
filter?: string
): Promise<PromptWithRagResults> => {
return ipcRenderer.invoke(
"augment-prompt-with-rag",
prompt,
llmName,
filter
);
},
augmentPromptWithTemporalAgent: async ({
query,
llmName,
Expand Down
33 changes: 9 additions & 24 deletions src/components/Chat/Chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@ import { PromptSuggestion } from "./Chat-Prompts";
import ChatInput from "./ChatInput";
import {
formatOpenAIMessageContentIntoString,
resolveRAGContext,
generateOverallChatHistory,
getChatContextFromChatHistory,
ragPromptTemplate,
} from "./chatUtils";

// convert ask options to enum
Expand Down Expand Up @@ -92,7 +94,7 @@ const ChatWithLLM: React.FC<ChatWithLLMProps> = ({
useState<boolean>(false);

useEffect(() => {
const context = getChatHistoryContext(currentChatHistory);
const context = getChatContextFromChatHistory(currentChatHistory);
setCurrentContext(context);
}, [currentChatHistory]);

Expand Down Expand Up @@ -141,17 +143,12 @@ const ChatWithLLM: React.FC<ChatWithLLMProps> = ({
}
if (chatHistory.displayableChatHistory.length === 0) {
if (chatFilters) {
// chatHistory.displayableChatHistory.push({
// role: "system",
// content:
// "You are an advanced question answer agent answering questions based on provided context. You will respond to queries in second person: saying things like 'you'. The context provided was written by the same user who is asking the question.",
// messageType: "success",

// context: [],
// });
chatHistory.displayableChatHistory.push(
await resolveRAGContext(userTextFieldInput, chatFilters)
const newChatHistory = await generateOverallChatHistory(
userTextFieldInput,
chatFilters,
ragPromptTemplate
);
chatHistory.displayableChatHistory.push(...newChatHistory);
}
} else {
chatHistory.displayableChatHistory.push({
Expand Down Expand Up @@ -397,16 +394,4 @@ const ChatWithLLM: React.FC<ChatWithLLMProps> = ({
);
};

const getChatHistoryContext = (
chatHistory: ChatHistory | undefined
): DBQueryResult[] => {
if (!chatHistory) return [];
const contextForChat = chatHistory.displayableChatHistory
.map((message) => {
return message.context;
})
.flat();
return contextForChat as DBQueryResult[];
};

export default ChatWithLLM;
148 changes: 94 additions & 54 deletions src/components/Chat/chatUtils.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
import { DBEntry } from "electron/main/database/Schema";
import { DBEntry, DBQueryResult } from "electron/main/database/Schema";
import {
ChatCompletionContentPart,
ChatCompletionMessageParam,
} from "openai/resources/chat/completions";
import { ChatFilters, ChatMessageToDisplay } from "./Chat";
import { ChatFilters, ChatHistory, ChatMessageToDisplay } from "./Chat";

export function formatOpenAIMessageContentIntoString(
content: string | ChatCompletionContentPart[] | null | undefined
Expand All @@ -20,75 +20,115 @@ export function formatOpenAIMessageContentIntoString(
}

// Substitution values for prompt-template placeholders (e.g. {context},
// {query}); consumed by replaceContentInMessages.
interface ChatProperties {
  context: string;
  query: string;
  [key: string]: string; // Values must be strings
}

export type ChatTemplate = {
messageHistory: ChatCompletionMessageParam[];
properties: ChatProperties;
/**
 * Collects every retrieved-context entry attached to the messages of a chat
 * history into a single flat list.
 *
 * @param chatHistory - history whose messages may each carry a `context`
 *   array of DB search results; may be undefined before a chat exists.
 * @returns all message contexts flattened in message order, or `[]` when
 *   there is no history.
 */
export const getChatContextFromChatHistory = (
  chatHistory: ChatHistory | undefined
): DBQueryResult[] => {
  if (!chatHistory) return [];
  // flatMap is the idiomatic single-pass equivalent of .map(...).flat().
  return chatHistory.displayableChatHistory.flatMap(
    (message) => message.context
  ) as DBQueryResult[];
};

// function replaceContentInMessages(
// messages: ChatMessageToDisplay[],
// context: ChatProperties
// ): ChatMessageToDisplay[] {
// return messages.map((message) => {
// if ("content" in message) {
// if (typeof message.content === "string") {
// message.content = message.content.replace(
// /\{(\w+)\}/g,
// (match, key) => {
// return key in context ? context[key] : match;
// }
// );
// }
// }
// return message;
// });
// }
/**
 * Substitutes `{key}` placeholders in each message's string content with the
 * matching value from `chatProperties`. Placeholders with no matching key are
 * left untouched, and non-string content is passed through unchanged.
 * The input messages are never mutated; shallow copies are returned.
 */
function replaceContentInMessages(
  messages: ChatCompletionMessageParam[],
  chatProperties: ChatProperties
): ChatCompletionMessageParam[] {
  return messages.map((message) => {
    if (!("content" in message) || typeof message.content !== "string") {
      return { ...message };
    }
    const content = message.content.replace(/\{(\w+)\}/g, (match, key) =>
      key in chatProperties ? chatProperties[key] : match
    );
    return { ...message, content };
  });
}

/**
 * Default RAG prompt template. The `{context}` and `{query}` placeholders are
 * filled in by replaceContentInMessages before the prompt is sent.
 */
export const ragPromptTemplate: ChatCompletionMessageParam[] = [
  {
    content:
      // NOTE(review): "Respond in Spanish *ONLY*" looks like a debugging
      // leftover — confirm before shipping.
      "You are an advanced question-answer agent answering questions based on provided context. The context is a set of notes from a note-taking app. Respond in Spanish *ONLY*.",
    role: "system",
  },
  {
    // Bug fix: the Context section previously had no {context} placeholder,
    // so the retrieved context was computed but never injected into the prompt.
    content: `
Context:
{context}

Query:
{query}`,
    role: "user",
  },
];

// const ragPromptTemplate: ChatCompletionMessageParam[] = [
// {
// content:
// "You are an advanced question answer agent answering questions based on provided context.",
// role: "system",
// },
// {
// content: `
// Context:
// {context}
/**
 * Wraps a raw OpenAI chat message into the UI's displayable message shape,
 * attaching the retrieved context and the user-visible query text.
 *
 * @param message - the prompt message being sent to the model.
 * @param query - the user's original question; shown in the UI in place of
 *   the full templated prompt.
 * @param context - DB entries retrieved for this query.
 */
const transformChatMessageParamIntoChatMessageToDisplay = (
  message: ChatCompletionMessageParam,
  query: string,
  context: DBEntry[]
): ChatMessageToDisplay => {
  return {
    ...message,
    messageType: "success",
    context,
    visibleContent: query,
  };
};

export const resolveRAGContext = async (
query: string,
chatFilters: ChatFilters
): Promise<ChatMessageToDisplay> => {
// I mean like the only real places to get context from are like particular files or semantic search or full text search.
// and like it could be like that if a file is here

let results: DBEntry[] = [];
): Promise<DBEntry[]> => {
let contextResults: DBEntry[] = [];
if (chatFilters.files.length > 0) {
console.log("chatFilters.files", chatFilters.files);
results = await window.files.getFilesystemPathsAsDBItems(chatFilters.files);
contextResults = await window.files.getFilesystemPathsAsDBItems(
chatFilters.files
);
} else {
results = await window.database.search(
contextResults = await window.database.search(
query,
chatFilters.numberOfChunksToFetch
);
}
console.log("RESULTS", results);
return {
messageType: "success",
role: "user",
context: results,
content: `Based on the following context answer the question down below. \n\n\nContext: \n${results
.map((dbItem) => dbItem.content)
.join("\n\n")}\n\n\nQuery:\n${query}`,
visibleContent: query,
return contextResults;
};

/**
 * Builds the initial message list for a new RAG conversation: retrieves the
 * context for the query, substitutes it (and the query itself) into the
 * prompt template, and converts each templated message into a displayable
 * chat message carrying that context.
 */
export const generateOverallChatHistory = async (
  query: string,
  chatFilters: ChatFilters,
  promptTemplate: ChatCompletionMessageParam[]
): Promise<ChatMessageToDisplay[]> => {
  const retrievedContext = await resolveRAGContext(query, chatFilters);
  const substitutions: ChatProperties = {
    context: concatenateDBItems(retrievedContext),
    query,
  };

  const filledMessages = replaceContentInMessages(
    promptTemplate,
    substitutions
  );
  const displayableMessages: ChatMessageToDisplay[] = [];
  for (const filledMessage of filledMessages) {
    displayableMessages.push(
      transformChatMessageParamIntoChatMessageToDisplay(
        filledMessage,
        query,
        retrievedContext
      )
    );
  }
  return displayableMessages;
};

// Joins the text content of all DB entries into one string, separating
// entries with a blank line.
export const concatenateDBItems = (dbItems: DBEntry[]): string => {
  const chunks: string[] = [];
  for (const item of dbItems) {
    chunks.push(item.content);
  }
  return chunks.join("\n\n");
};
3 changes: 2 additions & 1 deletion src/components/Chat/hooks/use-chat-history.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import { useEffect, useState } from "react";
import { ChatHistory, formatOpenAIMessageContentIntoString } from "../Chat";
import { ChatHistory } from "../Chat";
import { formatOpenAIMessageContentIntoString } from "../chatUtils";

export interface ChatHistoryMetadata {
id: string;
Expand Down
Loading