Skip to content

Type Alias: LLamaChatPromptCompletionEngineOptions

```ts
type LLamaChatPromptCompletionEngineOptions = {
  maxPreloadTokens?: number;
  onGeneration?: (prompt: string, completion: string) => void;
  maxCachedCompletions?: number;
  temperature?: LLamaChatCompletePromptOptions["temperature"];
  minP?: LLamaChatCompletePromptOptions["minP"];
  topK?: LLamaChatCompletePromptOptions["topK"];
  topP?: LLamaChatCompletePromptOptions["topP"];
  seed?: LLamaChatCompletePromptOptions["seed"];
  trimWhitespaceSuffix?: LLamaChatCompletePromptOptions["trimWhitespaceSuffix"];
  evaluationPriority?: LLamaChatCompletePromptOptions["evaluationPriority"];
  repeatPenalty?: LLamaChatCompletePromptOptions["repeatPenalty"];
  tokenBias?: LLamaChatCompletePromptOptions["tokenBias"];
  customStopTriggers?: LLamaChatCompletePromptOptions["customStopTriggers"];
  grammar?: LLamaChatCompletePromptOptions["grammar"];
  functions?: LLamaChatCompletePromptOptions["functions"];
  documentFunctionParams?: LLamaChatCompletePromptOptions["documentFunctionParams"];
  completeAsModel?: LLamaChatCompletePromptOptions["completeAsModel"];
};
```

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:8

Properties

maxPreloadTokens?

ts
optional maxPreloadTokens: number;

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:14

Max tokens to allow for preloading a prompt and generating a completion for it.

Defaults to 256 or half of the context size, whichever is smaller.


maxCachedCompletions?

ts
optional maxCachedCompletions: number;

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:22

Max number of completions to cache.

Defaults to 100.


temperature?

ts
optional temperature: LLamaChatCompletePromptOptions["temperature"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:24


minP?

ts
optional minP: LLamaChatCompletePromptOptions["minP"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:25


topK?

ts
optional topK: LLamaChatCompletePromptOptions["topK"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:26


topP?

ts
optional topP: LLamaChatCompletePromptOptions["topP"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:27


seed?

ts
optional seed: LLamaChatCompletePromptOptions["seed"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:28


trimWhitespaceSuffix?

ts
optional trimWhitespaceSuffix: LLamaChatCompletePromptOptions["trimWhitespaceSuffix"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:29


evaluationPriority?

ts
optional evaluationPriority: LLamaChatCompletePromptOptions["evaluationPriority"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:30


repeatPenalty?

ts
optional repeatPenalty: LLamaChatCompletePromptOptions["repeatPenalty"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:31


tokenBias?

ts
optional tokenBias: LLamaChatCompletePromptOptions["tokenBias"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:32


customStopTriggers?

ts
optional customStopTriggers: LLamaChatCompletePromptOptions["customStopTriggers"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:33


grammar?

ts
optional grammar: LLamaChatCompletePromptOptions["grammar"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:34


functions?

ts
optional functions: LLamaChatCompletePromptOptions["functions"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:35


documentFunctionParams?

ts
optional documentFunctionParams: LLamaChatCompletePromptOptions["documentFunctionParams"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:36


completeAsModel?

ts
optional completeAsModel: LLamaChatCompletePromptOptions["completeAsModel"];

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:37

Methods

onGeneration()?

ts
optional onGeneration(prompt: string, completion: string): void;

Defined in: evaluator/LlamaChatSession/utils/LlamaChatSessionPromptCompletionEngine.ts:15

Parameters

| Parameter | Type |
| --- | --- |
| `prompt` | `string` |
| `completion` | `string` |

Returns

void