Skip to main content

Class: MistralAI

MistralAI LLM implementation

Hierarchy

  • BaseLLM

    MistralAI

Constructors

constructor

new MistralAI(init?): MistralAI

Parameters

Name / Type

init?: Partial<MistralAI>

Returns

MistralAI

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/mistral.ts:58

Properties

apiKey

Optional apiKey: string

Defined in

packages/core/src/llm/mistral.ts:52


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/mistral.ts:51


model

model: "mistral-tiny" | "mistral-small" | "mistral-medium"

Defined in

packages/core/src/llm/mistral.ts:48


randomSeed

Optional randomSeed: number

Defined in

packages/core/src/llm/mistral.ts:54


safeMode

safeMode: boolean

Defined in

packages/core/src/llm/mistral.ts:53


session

Private session: MistralAISession

Defined in

packages/core/src/llm/mistral.ts:56


temperature

temperature: number

Defined in

packages/core/src/llm/mistral.ts:49


topP

topP: number

Defined in

packages/core/src/llm/mistral.ts:50

Accessors

metadata

get metadata(): Object

Returns

Object

Name / Type

contextWindow: number
maxTokens: undefined | number
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
temperature: number
tokenizer: undefined
topP: number

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/mistral.ts:69

Methods

buildParams

buildParams(messages): any

Parameters

Name / Type

messages: { content: MessageContent ; options?: Record<string, unknown> ; role: MessageType }[]

Returns

any

Defined in

packages/core/src/llm/mistral.ts:80


chat

chat(params): Promise<AsyncIterable<{ delta: string ; options?: Record<string, unknown> }>>

Parameters

Name / Type

params: LLMChatParamsStreaming<Record<string, unknown>>

Returns

Promise<AsyncIterable<{ delta: string ; options?: Record<string, unknown> }>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:92

chat(params): Promise<ChatResponse<Record<string, unknown>>>

Parameters

Name / Type

params: LLMChatParamsNonStreaming<Record<string, unknown>>

Returns

Promise<ChatResponse<Record<string, unknown>>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:95


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name / Type

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:27

complete(params): Promise<CompletionResponse>

Parameters

Name / Type

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:30


streamChat

streamChat(«destructured»): AsyncIterable<{ delta: string ; options?: Record<string, unknown> }>

Parameters

Name / Type

«destructured»: LLMChatParamsStreaming<Record<string, unknown>>

Returns

AsyncIterable<{ delta: string ; options?: Record<string, unknown> }>

Defined in

packages/core/src/llm/mistral.ts:114