Skip to main content

Class: FireworksLLM

Extends

Constructors

new FireworksLLM()

new FireworksLLM(init?): FireworksLLM

Parameters

init?: Partial<OpenAI>

Returns

FireworksLLM

Overrides

OpenAI.constructor

Source

packages/core/src/llm/fireworks.ts:5

Properties

additionalChatOptions?

optional additionalChatOptions: OpenAIAdditionalChatOptions

Inherited from

OpenAI.additionalChatOptions

Source

packages/core/src/llm/openai.ts:161


additionalSessionOptions?

optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Inherited from

OpenAI.additionalSessionOptions

Source

packages/core/src/llm/openai.ts:168


apiKey?

optional apiKey: string = undefined

Inherited from

OpenAI.apiKey

Source

packages/core/src/llm/openai.ts:164


maxRetries

maxRetries: number

Inherited from

OpenAI.maxRetries

Source

packages/core/src/llm/openai.ts:165


maxTokens?

optional maxTokens: number

Inherited from

OpenAI.maxTokens

Source

packages/core/src/llm/openai.ts:160


model

model: string

Inherited from

OpenAI.model

Source

packages/core/src/llm/openai.ts:157


session

session: OpenAISession

Inherited from

OpenAI.session

Source

packages/core/src/llm/openai.ts:167


temperature

temperature: number

Inherited from

OpenAI.temperature

Source

packages/core/src/llm/openai.ts:158


timeout?

optional timeout: number

Inherited from

OpenAI.timeout

Source

packages/core/src/llm/openai.ts:166


topP

topP: number

Inherited from

OpenAI.topP

Source

packages/core/src/llm/openai.ts:159

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Source

packages/core/src/llm/openai.ts:224


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Source

packages/core/src/llm/openai.ts:220

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Parameters

params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>>

Inherited from

OpenAI.chat

Source

packages/core/src/llm/openai.ts:301

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Parameters

params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Inherited from

OpenAI.chat

Source

packages/core/src/llm/openai.ts:307


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

OpenAI.complete

Source

packages/core/src/llm/base.ts:22

complete(params)

complete(params): Promise<CompletionResponse>

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

OpenAI.complete

Source

packages/core/src/llm/base.ts:25


streamChat()

protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Parameters

baseRequestParams: ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>>

Inherited from

OpenAI.streamChat

Source

packages/core/src/llm/openai.ts:383


toOpenAIMessage()

static toOpenAIMessage(messages): ChatCompletionMessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ChatCompletionMessageParam[]

Inherited from

OpenAI.toOpenAIMessage

Source

packages/core/src/llm/openai.ts:252


toOpenAIRole()

static toOpenAIRole(messageType): ChatCompletionRole

Parameters

messageType: MessageType

Returns

ChatCompletionRole

Inherited from

OpenAI.toOpenAIRole

Source

packages/core/src/llm/openai.ts:239


toTool()

static toTool(tool): ChatCompletionTool

Parameters

tool: BaseTool<any>

Returns

ChatCompletionTool

Inherited from

OpenAI.toTool

Source

packages/core/src/llm/openai.ts:461