/**
 * Event map for the chat API: each key is an event name and its value is
 * the payload type delivered to listeners of that event.
 */
interface ChatApiEvents {
    /** Emitted when a chat completion finishes. */
    completed: ChatCompletionResult;
    /** Emitted when a chat completion fails. */
    error: {
        // NOTE(review): 'error' vs 'message' distinction is not documented here —
        // presumably 'error' is a short identifier and 'message' the human-readable
        // description; confirm against the emitter.
        error: string;
        /** ID of the job whose completion failed. */
        jobID: string;
        message: string;
        /** Present only if a worker had been assigned to the job. */
        workerName?: string;
    };
    /** Emitted when the job state changes (queued, assigned to worker, started, etc.). */
    jobState: ChatJobStateEvent;
    /** Emitted when the available LLM models list is updated from the network. */
    modelsUpdated: Record<string, LLMModelInfo>;
    /** Emitted for each token chunk received during streaming. */
    token: ChatCompletionChunk;
}

Properties

completed: ChatCompletionResult

Emitted when a chat completion finishes

error: { error: string; jobID: string; message: string; workerName?: string }

Emitted when a chat completion fails

jobState: ChatJobStateEvent

Emitted when the job state changes (queued, assigned to worker, started, etc.)

modelsUpdated: Record<string, LLMModelInfo>

Emitted when the available LLM models list is updated from the network

token: ChatCompletionChunk

Emitted for each token chunk received during streaming