Skip to content

Commit

Permalink
feat: support OncoKB api agent and auto agent selector (#70)
Browse files Browse the repository at this point in the history
* support description in customization yaml file

* add blast api and oncokb api to customization yaml file

* add auto checkbox and oncokb checkbox

* feat: support OncoKB api agent and auto agent selector

* remove vectorstore description

* update biochatter-server

* Capitalisation of checkbox labels

* fix the bug that ChatSession.useOncoKBSession may be 'undefined'

* fix the bug in displaying vectorstore name

* support global vectorstore

* fix a bug

* disable auto-agent and make agent select button be radio button

* add loading text

* add useReflexion setting in KG settings

* support example list items

* update server in docker compose

* remove config

* restructure welcome text: 2 bodies, 2 lists

---------

Co-authored-by: fengsh <shaohong.feng.78@gmail.com>
Co-authored-by: slobentanzer <sebastian.lobentanzer@gmail.com>
  • Loading branch information
3 people authored Aug 19, 2024
1 parent 80315c9 commit 6ff977c
Show file tree
Hide file tree
Showing 21 changed files with 378 additions and 151 deletions.
13 changes: 13 additions & 0 deletions app/client/api.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import { getClientConfig } from "../config/client";
import { ACCESS_CODE_PREFIX, Azure, ServiceProvider } from "../constant";
import { ChatMessage, ModelType, useAccessStore } from "../store";
import { KGConfig } from "../store/kg";
import { RAGConfig } from "../store/rag";
import { ChatGPTApi } from "./platforms/openai";

export const ROLES = ["system", "user", "assistant"] as const;
Expand All @@ -23,9 +25,20 @@ export interface LLMConfig {
frequency_penalty?: number;
}

/**
 * Per-request agent selection and configuration sent with a chat request.
 * The flags are mutually exclusive in the UI (selecting one agent clears the
 * others); the server decides how to honor them.
 */
export interface AgentInfo {
// Route the query through the vector-store RAG agent.
useRAG: boolean;
// Route the query through the knowledge-graph (KG) RAG agent.
useKG: boolean;
// Route the query through the OncoKB API agent.
useOncoKB: boolean;
// Let the server pick the agent automatically (disabled in the UI for now).
useAutoAgent: boolean;
// Connection/query settings for the KG agent; only relevant when useKG is set.
kgConfig?: KGConfig;
// Vector-store settings for the RAG agent; only relevant when useRAG is set.
ragConfig?: RAGConfig;
// OncoKB agent settings; shape is defined server-side — TODO confirm schema.
oncokbConfig?: Record<string, any>;
}

export interface ChatOptions {
messages: RequestMessage[];
config: LLMConfig;
agentInfo?: AgentInfo;

onUpdate?: (message: string, chunk: string) => void;
onFinish: (message: string, context?: any[]) => void;
Expand Down
2 changes: 1 addition & 1 deletion app/client/datarequest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ export const requestKGConnectionStatus = async (

export const requestAllVSDocuments = async (
connectionArgs: DbConnectionArgs,
docIds: string[]
docIds?: string[]
) => {
const RAG_URL = ApiPath.RAG;
let fetchUrl = RAG_URL as string;
Expand Down
17 changes: 8 additions & 9 deletions app/client/platforms/openai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ import { getClientConfig } from "@/app/config/client";
import { makeAzurePath } from "@/app/azure";
import { useRAGStore } from "@/app/store/rag";
import { useKGStore } from "@/app/store/kg";
import { getKnowledgeGraphInfo, getOncoKBInfo, getVectorStoreInfo } from "@/app/utils/prodinfo";

export interface OpenAIListModelResponse {
object: string;
Expand Down Expand Up @@ -81,11 +82,7 @@ export class ChatGPTApi implements LLMApi {
model: options.config.model,
},
};
const ragConfig = useRAGStore.getState().currentRAGConfig();
const useRAG = useChatStore.getState().currentSession().useRAGSession;
const useKG = useChatStore.getState().currentSession().useKGSession;
const kgConfig = useKGStore.getState().config;


const requestPayload = {
messages,
stream: options.config.stream,
Expand All @@ -97,10 +94,12 @@ export class ChatGPTApi implements LLMApi {
// max_tokens: Math.max(modelConfig.max_tokens, 1024),
// Please do not ask me why not send max_tokens, no reason, this param is just shit, I dont want to explain anymore.
session_id: useChatStore.getState().currentSession().id,
useRAG,
useKG,
ragConfig,
kgConfig,
useRAG: options.agentInfo?.useRAG??false,
useKG: options.agentInfo?.useKG??false,
ragConfig: options.agentInfo?.ragConfig,
kgConfig: options.agentInfo?.kgConfig,
oncokbConfig: options.agentInfo?.oncokbConfig,
useAutoAgent: options.agentInfo?.useAutoAgent??false,
};

console.log("[Request] openai payload: ", requestPayload);
Expand Down
4 changes: 4 additions & 0 deletions app/components/chat.module.scss
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@
flex-direction: row;
margin-right: 10px;

.agent-checkbox:disabled {
cursor: not-allowed;
}

&:last-child {
margin-right: 20px;
}
Expand Down
92 changes: 76 additions & 16 deletions app/components/chat.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -28,13 +28,9 @@ import EditIcon from "../icons/rename.svg";
import ConfirmIcon from "../icons/confirm.svg";
import CancelIcon from "../icons/cancel.svg";

import LightIcon from "../icons/light.svg";
import DarkIcon from "../icons/dark.svg";
import AutoIcon from "../icons/auto.svg";
import BottomIcon from "../icons/bottom.svg";
import StopIcon from "../icons/pause.svg";
import RobotIcon from "../icons/robot.svg";
import RagIcon from "../icons/rag.svg";

import {
ChatMessage,
Expand Down Expand Up @@ -94,6 +90,7 @@ import { useAllModels } from "../utils/hooks";
import { useRAGStore } from "../store/rag";
import { useKGStore } from "../store/kg";
import { DbConfiguration } from "../utils/datatypes";
import { getOncoKBInfo } from "../utils/prodinfo";

const Markdown = dynamic(async () => (await import("./markdown")).Markdown, {
loading: () => <LoadingIcon />,
Expand Down Expand Up @@ -174,7 +171,7 @@ function PromptToast(props: {
const chatStore = useChatStore();
const session = chatStore.currentSession();
const context = session.mask.context;

return (
<div className={styles["prompt-toast"]} key="prompt-toast">
{props.showToast && (
Expand All @@ -197,7 +194,7 @@ function PromptToast(props: {
}

function RagPromptToast(
{showModal, setShowModal}: {showModal: boolean, setShowModal: (_: boolean) => void}
{ showModal, setShowModal }: { showModal: boolean, setShowModal: (_: boolean) => void }
) {
const chatStore = useChatStore();
const session = chatStore.currentSession();
Expand Down Expand Up @@ -441,14 +438,18 @@ export function ChatActions(props: {
const chatStore = useChatStore();
const accessStore = useAccessStore();
const prodInfo = accessStore.productionInfo === "undefined" ? undefined : JSON.parse(accessStore.productionInfo);
const kgProdInfo = (prodInfo?.KnowledgeGraph ?? {servers: [], enabled: true}) as DbConfiguration;
const ragProdInfo = (prodInfo?.VectorStore ?? {servers: [], enabled: true}) as DbConfiguration;
const kgProdInfo = (prodInfo?.KnowledgeGraph ?? { servers: [], enabled: true }) as DbConfiguration;
const ragProdInfo = (prodInfo?.VectorStore ?? { servers: [], enabled: true }) as DbConfiguration;
const oncokbInfo = getOncoKBInfo(prodInfo);
const agentEnableFlags = [kgProdInfo.enabled, ragProdInfo.enabled, oncokbInfo.enabled]
let enabledAgentsNum = 0;
agentEnableFlags.forEach((flag) => (enabledAgentsNum += flag ? 1 : 0));
const session = chatStore.currentSession();
const contexts = session.contextualPrompts;
const total_rag_prompts_num = contexts.reduce((prev, cur) => (prev + cur.context.length), 0);
const rag_prompts_text = (total_rag_prompts_num === 0) ?
const rag_prompts_text = (total_rag_prompts_num === 0) ?
(Locale.RagContext.Toast("0", "")) :
(contexts.map((ctx) => (ctx.context.length > 0 ?
(contexts.map((ctx) => (ctx.context.length > 0 ?
Locale.RagContext.Toast(ctx.context.length, ctx.mode) :
""
))).join(" ");
Expand Down Expand Up @@ -566,31 +567,67 @@ export function ChatActions(props: {
)}
</div>
<div className={styles["chat-toggle-group"]}>
{oncokbInfo.enabled && (
<div className={styles["chat-toggle"]}>
<label>OncoKB</label>
<input
type="checkbox"
className={styles["agent-checkbox"]}
disabled={chatStore.currentSession().useAutoAgentSession}
checked={chatStore.currentSession().useOncoKBSession}
onChange={(e) => (
chatStore.updateCurrentSession(
(session) => {
session.useOncoKBSession = e.currentTarget.checked;
if (session.useOncoKBSession) {
session.useKGSession = false;
session.useRAGSession = false;
}
}
)
)}
/>
</div>
)}
{ragProdInfo.enabled && (
<div className={styles["chat-toggle"]}>
<label>rag</label>
<label>RAG</label>
<input
disabled={!ragProdInfo.enabled}
disabled={chatStore.currentSession().useAutoAgentSession}
type="checkbox"
className={styles["agent-checkbox"]}
checked={chatStore.currentSession().useRAGSession}
onChange={(e) => (
chatStore.updateCurrentSession(
(session) => (session.useRAGSession = e.currentTarget.checked)
(session) => {
session.useRAGSession = e.currentTarget.checked;
if (session.useRAGSession) {
session.useKGSession = false;
session.useOncoKBSession = false;
}
}
)
)}
/>
</div>
)}
{kgProdInfo.enabled && (
<div className={styles["chat-toggle"]}>
<label aria-disabled={!kgProdInfo.enabled}>kg</label>
<label aria-disabled={!kgProdInfo.enabled}>KG RAG</label>
<input
disabled={!kgProdInfo.enabled}
disabled={chatStore.currentSession().useAutoAgentSession}
type="checkbox"
className={styles["agent-checkbox"]}
checked={chatStore.currentSession().useKGSession}
onChange={(e) => (
chatStore.updateCurrentSession(
(session) => (session.useKGSession = e.currentTarget.checked)
(session) => {
session.useKGSession = e.currentTarget.checked;
if (session.useKGSession) {
session.useRAGSession = false;
session.useOncoKBSession = false;
}
}
)
)}
/>
Expand Down Expand Up @@ -923,6 +960,22 @@ function _Chat() {
});
};

const getLoadingText = (
useOncoKB: boolean,
useRAG: boolean,
useKG: boolean,
): string | undefined => {
// Pick the loading label for the active agent; precedence mirrors the
// order the flags are checked (OncoKB, then RAG, then KG). No agent
// active means no label — the plain loading spinner is shown instead.
if (useOncoKB) return Locale.Chat.Loading.OncoKB;
if (useRAG) return Locale.Chat.Loading.RAG;
if (useKG) return Locale.Chat.Loading.KG;
return undefined;
}

const context: RenderMessage[] = useMemo(() => {
return session.mask.hideContext ? [] : session.mask.context.slice();
}, [session.mask.context, session.mask.hideContext]);
Expand Down Expand Up @@ -1298,6 +1351,13 @@ function _Chat() {
message.content.length === 0 &&
!isUser
}
loadingText={
((message.preview || message.streaming) &&
message.content.length === 0 &&
!isUser) ?
(getLoadingText(session.useOncoKBSession, session.useRAGSession, session.useKGSession)) :
undefined
}
onContextMenu={(e) => onRightClick(e, message)}
onDoubleClickCapture={() => {
if (!isMobileScreen) return;
Expand Down
23 changes: 20 additions & 3 deletions app/components/kg.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ import { List, ListItem, SelectInput } from "./ui-lib";

import { InputRange } from "./input-range";
import { DbConnectionArgs } from "../utils/datatypes";
import { getConnectionArgsToConnect, getConnectionArgsToDisplay } from "../utils/rag";
import { getKGConnectionArgsToConnect, getKGConnectionArgsToDisplay } from "../utils/rag";

const DEFAULT_PORT = "7687";
const DEFAULT_HOST = "";
Expand All @@ -36,7 +36,7 @@ export function KGPage() {
let kgProdInfo = (prodInfo?.KnowledgeGraph ?? {servers: []}) as DbConfiguration;
const kgConfig = kgStore.config;
const [connectionArgs, setConnectionArgs]
= useState(getConnectionArgsToDisplay(kgConfig.connectionArgs, kgProdInfo.servers ?? []));
= useState(getKGConnectionArgsToDisplay(kgConfig.connectionArgs, kgProdInfo.servers ?? []));
const [uploading, setUploading] = useState(false);
const [document, setDocument] = useState<string | undefined>();
const [connected, setConnected] = useState(false);
Expand Down Expand Up @@ -65,7 +65,7 @@ export function KGPage() {
const updateConnectionStatus = useDebouncedCallback(async () => {
setIsReconnecting(true);
try {
const conn = getConnectionArgsToConnect(connectionArgs, kgProdInfo.servers??[]);
const conn = getKGConnectionArgsToConnect(connectionArgs, kgProdInfo.servers??[]);
const res = await requestKGConnectionStatus(conn);
const value = await res.json();
if(value?.code === ERROR_BIOSERVER_OK && value.status) {
Expand Down Expand Up @@ -205,6 +205,9 @@ export function KGPage() {
if (kg.number_of_results !== undefined) {
config.resultNum = kg.number_of_results;
}
if (kg.description !== undefined) {
config.description = kg.description;
}
}
)
break;
Expand Down Expand Up @@ -283,6 +286,20 @@ export function KGPage() {
}}
></InputRange>
</ListItem>
<ListItem
title={Locale.KG.Settings.useReflexion.Label}
>
<input
type="checkbox"
disabled={!kgStore.useKG}
checked={kgConfig.useReflexion??false}
onChange={(e) => {
kgStore.updateConfig(
(config) => (config.useReflexion = e.currentTarget?.checked??false)
)
}}
></input>
</ListItem>
</List>
</div>
</div>
Expand Down
6 changes: 5 additions & 1 deletion app/components/markdown.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -157,6 +157,7 @@ export function Markdown(
props: {
content: string;
loading?: boolean;
loadingText?: string;
fontSize?: number;
parentRef?: RefObject<HTMLDivElement>;
defaultShow?: boolean;
Expand All @@ -176,7 +177,10 @@ export function Markdown(
dir="auto"
>
{props.loading ? (
<LoadingIcon />
props.loadingText ? (
<div style={{display: "flex", flexDirection: "row", alignItems: "center" }}>
<div style={{marginRight: 5, maxHeight: 25}}><p>{props.loadingText} </p></div> <LoadingIcon />
</div>) : (<LoadingIcon />)
) : (
<MarkdownContent content={props.content} />
)}
Expand Down
27 changes: 18 additions & 9 deletions app/components/rag-contextual-prompts-modal.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ export function RagContextualPromptsModal(
contexts: Array<RagContext>,
}
) {

return (
<div className="modal-mask">
<Modal
Expand All @@ -20,19 +21,27 @@ export function RagContextualPromptsModal(
{props.contexts.map((ctx, ix) => (
ctx.context.length === 0 ? (<></>) : (
<div key={`context-item-${ix}`} className={styles["rag-mode"]}>{`${Locale.RagContext.ModeType}: ${ctx.mode}`}
{ctx.context.map(ctxItem => (
(ctx.mode === "kg") ? (
<>
{ctx.context.map(ctxItem => {
if (ctx.mode === "kg") {
return (<>
<div className={styles["rag-context-kg-item"]}>{ctxItem[0]}</div>
<div className={styles["rag-context-kg-meta"]}>{`cypher_query: ${ctxItem[1].cypher_query}`}</div>
</>
) : (
<>
</>);
} else if (ctx.mode === "vectorstore") {
return (<>
<div className={styles["rag-context-vs-item"]}>{ctxItem[0]}</div>
<div className={styles["rag-context-vs-meta"]}>{` source: ${ctxItem[1].source}`}</div>
</>
)
))}
</>);
} else if (ctx.mode === "api_oncokb") {
const source = ctxItem[1] === "error" ? "error information" : "source: OncoKB";
return (<>
<div className={styles["rag-context-vs-item"]}>{ctxItem[0]}</div>
<div className={styles["rag-context-vs-meta"]}>{` ${source}`}</div>
</>);
} else {
return (<></>);
}
})}
</div>
)
))}
Expand Down
Loading

0 comments on commit 6ff977c

Please sign in to comment.