Skip to content

Commit

Permalink
fix: type
Browse files Browse the repository at this point in the history
  • Loading branch information
himself65 committed Apr 10, 2024
1 parent 6277441 commit 71cc4cf
Show file tree
Hide file tree
Showing 3 changed files with 24 additions and 9 deletions.
10 changes: 7 additions & 3 deletions examples/toolsStream.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,8 @@
import { ChatResponseChunk, OpenAI } from "llamaindex";
import {
ChatResponseChunk,
OpenAI,
OpenAIAdditionalChatOptions,
} from "llamaindex";

async function main() {
const llm = new OpenAI({ model: "gpt-4-turbo" });
Expand Down Expand Up @@ -34,11 +38,11 @@ async function main() {
};

const stream = await llm.chat({ ...args, stream: true });
let chunk: ChatResponseChunk | null = null;
let chunk: ChatResponseChunk<OpenAIAdditionalChatOptions>;
for await (chunk of stream) {
process.stdout.write(chunk.delta);
}
console.log(chunk?.additionalKwargs?.toolCalls[0]);
console.log(chunk.options.toolCalls[0]);

Check failure on line 45 in examples/toolsStream.ts

View workflow job for this annotation

GitHub Actions / typecheck

Variable 'chunk' is used before being assigned.

Check failure on line 45 in examples/toolsStream.ts

View workflow job for this annotation

GitHub Actions / typecheck

'chunk.options' is possibly 'undefined'.

Check failure on line 45 in examples/toolsStream.ts

View workflow job for this annotation

GitHub Actions / typecheck

Property 'toolCalls' does not exist on type 'OpenAIAdditionalChatOptions'.
}

(async function () {
Expand Down
4 changes: 2 additions & 2 deletions packages/core/src/llm/open_ai.ts
Original file line number Diff line number Diff line change
Expand Up @@ -377,7 +377,7 @@ export class OpenAI extends BaseLLM<
@wrapEventCaller
protected async *streamChat(
baseRequestParams: OpenAILLM.Chat.ChatCompletionCreateParams,
): AsyncIterable<ChatResponseChunk> {
): AsyncIterable<ChatResponseChunk<OpenAIAdditionalMessageOptions>> {
const stream: AsyncIterable<OpenAILLM.Chat.ChatCompletionChunk> =
await this.session.openai.chat.completions.create({
...baseRequestParams,
Expand All @@ -403,7 +403,7 @@ export class OpenAI extends BaseLLM<

yield {
// add tool calls to final chunk
options: toolCalls.length > 0 ? { toolCalls: toolCalls } : undefined,
options: toolCalls.length > 0 ? { toolCalls: toolCalls } : {},
delta: choice.delta.content ?? "",
};
}
Expand Down
19 changes: 15 additions & 4 deletions packages/core/src/llm/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -130,10 +130,21 @@ export interface ChatResponse<
raw: object;
}

export interface ChatResponseChunk {
delta: string;
options?: Record<string, any>;
}
/**
 * A single chunk of a streaming chat response.
 *
 * `delta` carries the incremental text for this chunk; `options`
 * optionally carries provider-specific extras (e.g. accumulated tool
 * calls on the final chunk) typed by `AdditionalMessageOptions`.
 *
 * NOTE: the previous conditional form
 * `AdditionalMessageOptions extends Record<string, unknown> ? {...options?} : {...options}`
 * was degenerate — the type parameter is already constrained to
 * `Record<string, unknown>`, so every instantiation took the true
 * branch and the `options`-required branch was unreachable dead code.
 * This is the simplified, behaviorally identical type: `options` is
 * always optional, so consumers must narrow before reading it.
 */
export type ChatResponseChunk<
  AdditionalMessageOptions extends Record<string, unknown> = Record<
    string,
    unknown
  >,
> = {
  /** Incremental text content of this chunk. */
  delta: string;
  /** Provider-specific extras; may be absent on intermediate chunks. */
  options?: AdditionalMessageOptions;
};

export interface CompletionResponse {
text: string;
Expand Down

0 comments on commit 71cc4cf

Please sign in to comment.