Skip to main content

Class: Gemini

ToolCallLLM implementation for Google Gemini models.

Extends

ToolCallLLM

Constructors

new Gemini()

new Gemini(init?): Gemini

Parameters

init?: GeminiConfig

Returns

Gemini

Overrides

ToolCallLLM.constructor

Source

packages/llamaindex/src/llm/gemini/base.ts:160

Properties

maxTokens?

optional maxTokens: number

Source

packages/llamaindex/src/llm/gemini/base.ts:157


model

model: GEMINI_MODEL

Source

packages/llamaindex/src/llm/gemini/base.ts:154


session

session: IGeminiSession

Source

packages/llamaindex/src/llm/gemini/base.ts:158


temperature

temperature: number

Source

packages/llamaindex/src/llm/gemini/base.ts:155


topP

topP: number

Source

packages/llamaindex/src/llm/gemini/base.ts:156

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Source

packages/llamaindex/src/llm/gemini/base.ts:173


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Source

packages/llamaindex/src/llm/gemini/base.ts:169

Methods

chat()

chat(params)

chat(params): Promise<GeminiChatStreamResponse>

Parameters

params: GeminiChatParamsStreaming

Returns

Promise<GeminiChatStreamResponse>

Overrides

ToolCallLLM.chat

Source

packages/llamaindex/src/llm/gemini/base.ts:218

chat(params)

chat(params): Promise<GeminiChatNonStreamResponse>

Parameters

params: GeminiChatParamsNonStreaming

Returns

Promise<GeminiChatNonStreamResponse>

Overrides

ToolCallLLM.chat

Source

packages/llamaindex/src/llm/gemini/base.ts:219


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Overrides

ToolCallLLM.complete

Source

packages/llamaindex/src/llm/gemini/base.ts:230

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Overrides

ToolCallLLM.complete

Source

packages/llamaindex/src/llm/gemini/base.ts:233


nonStreamChat()

protected nonStreamChat(params): Promise<GeminiChatNonStreamResponse>

Parameters

params: GeminiChatParamsNonStreaming

Returns

Promise<GeminiChatNonStreamResponse>

Source

packages/llamaindex/src/llm/gemini/base.ts:184


streamChat()

protected streamChat(params): GeminiChatStreamResponse

Parameters

params: GeminiChatParamsStreaming

Returns

GeminiChatStreamResponse

Source

packages/llamaindex/src/llm/gemini/base.ts:206