Fix stream updates.
cjcenizal committed Jun 6, 2024
1 parent 251da4e commit 3607435
Showing 8 changed files with 184 additions and 190 deletions.
47 changes: 29 additions & 18 deletions docs/src/index.tsx
@@ -7,13 +7,19 @@ import {
ApiV2,
} from "@vectara/stream-query-client";

const CUSTOMER_ID = "1526022105";
const API_KEY = "zqt_WvU_2ewh7ZGRwq8LdL2SV8B9RJmVGyUm1VAuOw";
const CORPUS_NAME = "ofer-bm-moma-docs";
const CORPUS_ID = "232";
// TODO: Switch back to prod values before merging
const CUSTOMER_ID = "3099635174";
const API_KEY = "zqt_uMCt5uGR7CXARu7QHg7GDYNG5Q5v58HOpvQO0A";
const CORPUS_NAME = "markdown";
const CORPUS_ID = "203";

// const CUSTOMER_ID = "1526022105";
// const API_KEY = "zqt_WvU_2ewh7ZGRwq8LdL2SV8B9RJmVGyUm1VAuOw";
// const CORPUS_NAME = "ofer-bm-moma-docs";
// const CORPUS_ID = "232";

const App = () => {
const [questionV1, setQuestionV1] = useState("");
const [questionV1, setQuestionV1] = useState("What is Vectara?");
const [answerV1, setAnswerV1] = useState<string>();
const [conversationIdV1, setConversationIdV1] = useState<string>();

@@ -49,24 +55,25 @@ const App = () => {
streamQueryV1(configurationOptions, onStreamUpdate);
};

const [questionV2, setQuestionV2] = useState("");
const [questionV2, setQuestionV2] = useState("markdown");
const [answerV2, setAnswerV2] = useState<string>();
const [conversationIdV2, setConversationIdV2] = useState<string>();

const sendQueryV2 = async () => {
const configurationOptions: ApiV2.StreamQueryConfig = {
customerId: CUSTOMER_ID,
apiKey: API_KEY,
query: questionV1,
query: questionV2,
corpusKey: `${CORPUS_NAME}_${CORPUS_ID}`,
search: {
offset: 0,
corpora: [
{
corpusKey: `${CORPUS_NAME}_${CORPUS_ID}`,
metadataFilter: "",
},
],
limit: 5,
metadataFilter: "",
limit: 10,
lexicalInterpolation: 0,
contextConfiguration: {
sentencesBefore: 2,
sentencesAfter: 2,
},
},
generation: {
maxUsedSearchResults: 5,
@@ -78,15 +85,19 @@ const App = () => {
store: true,
conversationId: conversationIdV2,
},
stream_response: true,
};

const onStreamUpdate = (update: ApiV2.StreamUpdate) => {
console.log(update);
const { updatedText, details } = update;
if (details?.chat) {
setConversationIdV2(details.chat.conversationId);
const { updatedText, chatId } = update;
if (chatId) {
setConversationIdV2(chatId);
}

if (updatedText) {
setAnswerV2(updatedText);
}
setAnswerV2(updatedText);
};

streamQueryV2(configurationOptions, onStreamUpdate);
6 changes: 3 additions & 3 deletions src/apiV1/client.ts
@@ -6,10 +6,10 @@ import {
StreamUpdateHandler,
} from "./types";
import { deserializeSearchResponse } from "./deserializeSearchResponse";
import { DEFAULT_DOMAIN } from "../common/constants";
import { processStreamChunk } from "./processStreamChunk";
import { SNIPPET_START_TAG, SNIPPET_END_TAG } from "./constants";
import { generateStream } from "common/generateStream";
import { processStreamChunk } from "common/processStreamPart";
import { DEFAULT_DOMAIN } from "../common/constants";
import { generateStream } from "../common/generateStream";

export const streamQueryV1 = async (
config: StreamQueryConfig,
File renamed without changes.
17 changes: 15 additions & 2 deletions src/apiV2/apiTypes.ts
@@ -11,14 +11,14 @@ export type MmrReranker = {
};

export type SearchConfiguration = {
offset: number;
corpora: {
corpus_key: string;
metadata_filter: string;
metadata_filter?: string;
lexical_interpolation?: number;
custom_dimensions?: Record<string, number>;
semantics?: "default" | "query" | "response";
}[];
offset: number;
limit?: number;
context_configuration?: {
characters_before?: number;
@@ -83,3 +83,16 @@ export type QueryBody = {
generation?: GenerationConfiguration;
chat?: ChatConfiguration;
};

export type SearchResult = {
document_id: string;
text: string;
score: number;
part_metadata: {
lang: string;
section: number;
offset: number;
len: number;
};
document_metadata: Record<string, any>;
};
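
For context, the reshaped SearchConfiguration above keeps offset and limit at the search level and makes each corpus's metadata_filter optional. Below is a minimal sketch of a request body under the new shape; the corpus key "docs_123" is hypothetical and the relative import path assumes a file sitting alongside src/apiV2/apiTypes.ts:

import { QueryBody } from "./apiTypes";

// Hypothetical values for illustration only.
const body: QueryBody = {
  query: "What is Vectara?",
  search: {
    corpora: [
      {
        corpus_key: "docs_123",
        // metadata_filter is now optional and can be omitted entirely.
        lexical_interpolation: 0,
      },
    ],
    offset: 0,
    limit: 10,
  },
};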
215 changes: 99 additions & 116 deletions src/apiV2/client.ts
@@ -1,15 +1,12 @@
import {
Chat,
GenerationConfig,
ParsedResult,
StreamQueryConfig,
StreamUpdate,
StreamUpdateHandler,
} from "./types";
import { QueryBody } from "./apiTypes";
import { processStreamChunk } from "./processStreamChunk";
import { DEFAULT_DOMAIN } from "../common/constants";
import { generateStream } from "common/generateStream";
import { processStreamChunk } from "common/processStreamPart";
import { generateStream } from "../common/generateStream";

const convertReranker = (
reranker?: StreamQueryConfig["search"]["reranker"]
@@ -57,8 +54,18 @@ export const streamQueryV2 = async (
customerId,
apiKey,
endpoint,
corpusKey,
query,
search: { offset, corpora, limit, contextConfiguration, reranker },
search: {
metadataFilter,
lexicalInterpolation,
customDimensions,
semantics,
offset,
limit,
contextConfiguration,
reranker,
},
generation: {
promptName,
maxUsedSearchResults,
@@ -71,28 +78,19 @@
chat,
} = config;

const body: QueryBody = {
let body: QueryBody = {
query,
search: {
offset,
corpora: corpora.map(
({
corpusKey,
metadataFilter,
lexicalInterpolation,
customDimensions,
semantics,
}) => ({
corpora: [
{
corpus_key: corpusKey,
metadata_filter: metadataFilter,
lexical_interpolation: lexicalInterpolation,
custom_dimensions: customDimensions?.reduce(
(acc, { name, weight }) => ({ ...acc, [name]: weight }),
{} as Record<string, number>
),
custom_dimensions: customDimensions,
semantics,
})
),
},
],
offset,
limit,
context_configuration: {
characters_before: contextConfiguration?.charactersBefore,
@@ -121,109 +119,94 @@
chat: chat && {
store: chat.store,
},
stream_response: true,
};

let path;

if (!chat) {
path = `/v2/query`;
} else {
if (chat.conversationId) {
path = `/v2/chats/${chat.conversationId}/turns`;
} else {
path = "/v2/chats";
}
}

const headers = {
"x-api-key": apiKey,
"customer-id": customerId,
"Content-Type": "application/json",
};

const path = !chat
? "/v2/query"
: chat.conversationId
? `/v2/chats/${chat.conversationId}/turns`
: "/v2/chats";

const url = `${endpoint ?? DEFAULT_DOMAIN}${path}`;

const stream = await generateStream(headers, JSON.stringify(body), url);

let previousAnswerText = "";

for await (const chunk of stream) {
try {
processStreamChunk(chunk, (part: string) => {
const dataObj = JSON.parse(part);

if (!dataObj.result) return;

const details: StreamUpdate["details"] = {};

// TODO: Add back once debug has been added back to API v2.
// const summaryDetail = getSummaryDetail(config, dataObj.result);
// if (summaryDetail) {
// details.summary = summaryDetail;
// }

const chatDetail = getChatDetail(dataObj.result);
if (chatDetail) {
details.chat = chatDetail;
}

const fcsDetail = getFactualConsistencyDetail(dataObj.result);
if (fcsDetail) {
details.factualConsistency = fcsDetail;
}

const streamUpdate: StreamUpdate = {
responseSet: dataObj.result.responseSet ?? undefined,
details,
updatedText: getUpdatedText(dataObj.result, previousAnswerText),
isDone: dataObj.result.summary?.done ?? false,
};

previousAnswerText = streamUpdate.updatedText ?? "";

onStreamUpdate(streamUpdate);
});
} catch (error) {}
try {
const stream = await generateStream(headers, JSON.stringify(body), url);

let updatedText = "";

for await (const chunk of stream) {
try {
processStreamChunk(chunk, (part: string) => {
// Trim the "data:" prefix to get the JSON.
const data = part.slice(5, part.length);
const dataObj = JSON.parse(data);

const {
type,
search_results,
chat_id,
turn_id,
factual_consistency_score,
generation_chunk,
} = dataObj;

switch (type) {
case "search_results":
onStreamUpdate({
type: "searchResults",
searchResults: search_results,
});
break;

case "chat_info":
onStreamUpdate({
type: "chatInfo",
chatId: chat_id,
turnId: turn_id,
});
break;

case "generation_chunk":
updatedText += generation_chunk;
onStreamUpdate({
type: "generationChunk",
updatedText,
generationChunk: generation_chunk,
});
break;

case "factual_consistency_score":
onStreamUpdate({
type: "factualConsistencyScore",
factualConsistencyScore: factual_consistency_score,
});
break;

case "end":
onStreamUpdate({
type: "end",
});
break;
}
});
} catch (error) {
console.log("error", error);
}
}
} catch (error) {
console.log("error", error);
}
};

// TODO: Add back once debug has been added back to API v2.
// const getSummaryDetail = (
// config: StreamQueryConfig,
// parsedResult: ParsedResult
// ) => {
// if (!parsedResult.summary) return;

// if (config.debug && parsedResult.summary.prompt) {
// return {
// prompt: parsedResult.summary.prompt,
// };
// }
// };

const getChatDetail = (
// config: StreamQueryConfig,
parsedResult: ParsedResult
) => {
if (!parsedResult.summary?.chat) return;

const chatDetail: Chat = {
conversationId: parsedResult.summary.chat.conversationId,
turnId: parsedResult.summary.chat.turnId,
};

// TODO: Add back once debug has been added back to API v2.
// if (config.debug && parsedResult.summary.chat.rephrasedQuery) {
// chatDetail.rephrasedQuery = parsedResult.summary.chat.rephrasedQuery;
// }

return chatDetail;
};

const getFactualConsistencyDetail = (parsedResult: ParsedResult) => {
if (!parsedResult.summary || !parsedResult.summary.factualConsistency) return;

return {
score: parsedResult.summary.factualConsistency.score,
};
};

const getUpdatedText = (parsedResult: ParsedResult, previousText: string) => {
if (!parsedResult.summary) return;

return `${previousText}${parsedResult.summary.text}`;
};
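
The new streamQueryV2 in this file forwards each parsed event as a typed update. A sketch of a consumer follows, assuming ApiV2.StreamUpdate exposes the fields used in the onStreamUpdate calls above (the actual type lives in src/apiV2/types.ts, which is not shown in this diff):

import { ApiV2 } from "@vectara/stream-query-client";

// Sketch only: field names mirror the onStreamUpdate calls above.
// Pass this handler to streamQueryV2 as in docs/src/index.tsx.
const onStreamUpdate = (update: ApiV2.StreamUpdate) => {
  switch (update.type) {
    case "searchResults":
      console.log("search results", update.searchResults);
      break;
    case "chatInfo":
      console.log("chat", update.chatId, update.turnId);
      break;
    case "generationChunk":
      // updatedText accumulates the answer so far; generationChunk is the latest delta.
      console.log(update.updatedText);
      break;
    case "factualConsistencyScore":
      console.log("FCS", update.factualConsistencyScore);
      break;
    case "end":
      console.log("stream complete");
      break;
  }
};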