Skip to main content

Class: HuggingFaceInferenceAPI

Wrapper around Hugging Face's Inference API. API docs: https://huggingface.co/docs/huggingface.js/inference/README. List of tasks with models: https://huggingface.co/api/tasks

Note that the Conversational API is not yet supported by the Inference API; Hugging Face recommends using the text generation API instead. See: https://github.com/huggingface/huggingface.js/issues/586#issuecomment-2024059308

Hierarchy

  • BaseLLM

    HuggingFaceInferenceAPI

Constructors

constructor

new HuggingFaceInferenceAPI(init): HuggingFaceInferenceAPI

Parameters

| Name | Type |
| ---- | ---- |
| init | HFConfig |

Returns

HuggingFaceInferenceAPI

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/huggingface.ts:47

Properties

contextWindow

contextWindow: number

Defined in

packages/core/src/llm/huggingface.ts:44


hf

hf: HfInference

Defined in

packages/core/src/llm/huggingface.ts:45


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/huggingface.ts:43


model

model: string

Defined in

packages/core/src/llm/huggingface.ts:40


temperature

temperature: number

Defined in

packages/core/src/llm/huggingface.ts:41


topP

topP: number

Defined in

packages/core/src/llm/huggingface.ts:42

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/huggingface.ts:68

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMChatParamsStreaming<object, object> |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/huggingface.ts:79

chat(params): Promise<ChatResponse<object>>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMChatParamsNonStreaming<object, object> |

Returns

Promise<ChatResponse<object>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/huggingface.ts:82


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMCompletionParamsStreaming |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:22

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMCompletionParamsNonStreaming |

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:25


messagesToPrompt

messagesToPrompt(messages): string

Parameters

| Name | Type |
| ---- | ---- |
| messages | ChatMessage<ToolCallLLMMessageOptions>[] |

Returns

string

Defined in

packages/core/src/llm/huggingface.ts:91


nonStreamChat

nonStreamChat(params): Promise<ChatResponse<object>>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMChatParamsNonStreaming<object, object> |

Returns

Promise<ChatResponse<object>>

Defined in

packages/core/src/llm/huggingface.ts:111


streamChat

streamChat(params): AsyncIterable<ChatResponseChunk>

Parameters

| Name | Type |
| ---- | ---- |
| params | LLMChatParamsStreaming<object, object> |

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/huggingface.ts:128