Class LemurClient

Namespace: AssemblyAI.Lemur
Assembly: AssemblyAI.dll

The client to interact with the AssemblyAI LeMUR API.

public class LemurClient
Inheritance
object → LemurClient
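
Examples

The LeMUR client is reached through the top-level SDK client rather than constructed directly. A minimal setup sketch, assuming the AssemblyAIClient entry point takes an API key string:

using AssemblyAI;

// The top-level client exposes the LeMUR methods below via client.Lemur.
var client = new AssemblyAIClient("YOUR_API_KEY");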

Methods

ActionItemsAsync(LemurActionItemsParams, RequestOptions?, CancellationToken)

Use LeMUR to generate a list of action items from a transcript.

public Task<LemurActionItemsResponse> ActionItemsAsync(LemurActionItemsParams request, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

request LemurActionItemsParams
options RequestOptions
cancellationToken CancellationToken

Returns

Task<LemurActionItemsResponse>

Examples

await client.Lemur.ActionItemsAsync(
    new LemurActionItemsParams
    {
        TranscriptIds = new List<string>() { "64nygnr62k-405c-4ae8-8a6b-d90b40ff3cce" },
        Context = "This is an interview about wildfires.",
        FinalModel = LemurModel.AnthropicClaude35Sonnet,
        MaxOutputSize = 3000,
        Temperature = 0f,
        AnswerFormat = "Bullet Points",
    }
);

GetResponseAsync(string, RequestOptions?, CancellationToken)

Retrieve a LeMUR response that was previously generated.

public Task<OneOf<LemurStringResponse, LemurQuestionAnswerResponse>> GetResponseAsync(string requestId, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

requestId string
options RequestOptions
cancellationToken CancellationToken

Returns

Task<OneOf<LemurStringResponse, LemurQuestionAnswerResponse>>

Examples

await client.Lemur.GetResponseAsync("request_id");
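
Because the stored response may come from either a text-producing endpoint (task, summary, action items) or the question-and-answer endpoint, the result is a OneOf union of the two response types. A handling sketch, assuming the OneOf library's Switch method and that LemurStringResponse carries its text in Response while LemurQuestionAnswerResponse carries a collection of question/answer pairs in Response:

var response = await client.Lemur.GetResponseAsync("request_id");

// Handle whichever of the two response shapes was stored for this request.
response.Switch(
    stringResponse => Console.WriteLine(stringResponse.Response),
    qaResponse =>
    {
        foreach (var qa in qaResponse.Response)
        {
            Console.WriteLine($"{qa.Question}: {qa.Answer}");
        }
    }
);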

PurgeRequestDataAsync(string, RequestOptions?, CancellationToken)

Delete the data for a previously submitted LeMUR request. The LLM response data, as well as any context provided in the original request, will be removed.

public Task<PurgeLemurRequestDataResponse> PurgeRequestDataAsync(string requestId, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

requestId string
options RequestOptions
cancellationToken CancellationToken

Returns

Task<PurgeLemurRequestDataResponse>

Examples

await client.Lemur.PurgeRequestDataAsync("request_id");
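
The purge call returns a confirmation payload. A brief follow-up sketch, assuming PurgeLemurRequestDataResponse mirrors the API's JSON fields with RequestIdToPurge and a Deleted flag:

var purgeResponse = await client.Lemur.PurgeRequestDataAsync("request_id");

// Confirm which request was purged and whether the deletion succeeded.
Console.WriteLine($"Purged {purgeResponse.RequestIdToPurge}, deleted: {purgeResponse.Deleted}");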

QuestionAnswerAsync(LemurQuestionAnswerParams, RequestOptions?, CancellationToken)

public Task<LemurQuestionAnswerResponse> QuestionAnswerAsync(LemurQuestionAnswerParams request, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

request LemurQuestionAnswerParams
options RequestOptions
cancellationToken CancellationToken

Returns

Task<LemurQuestionAnswerResponse>
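
Examples

This endpoint asks free-form questions about one or more transcripts. A usage sketch, following the pattern of the other methods and assuming LemurQuestionAnswerParams takes a Questions collection of LemurQuestion items, each with a Question text and an optional AnswerFormat:

await client.Lemur.QuestionAnswerAsync(
    new LemurQuestionAnswerParams
    {
        TranscriptIds = new List<string>() { "64nygnr62k-405c-4ae8-8a6b-d90b40ff3cce" },
        Questions = new List<LemurQuestion>()
        {
            new LemurQuestion
            {
                Question = "Where were the wildfires burning?",
                AnswerFormat = "One sentence",
            },
        },
        Context = "This is an interview about wildfires.",
        FinalModel = LemurModel.AnthropicClaude35Sonnet,
        MaxOutputSize = 3000,
        Temperature = 0f,
    }
);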

SummaryAsync(LemurSummaryParams, RequestOptions?, CancellationToken)

Custom Summary allows you to distill a piece of audio into a few impactful sentences. You can give the model context to obtain more targeted results, and you can describe the desired output format in plain language.

public Task<LemurSummaryResponse> SummaryAsync(LemurSummaryParams request, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

request LemurSummaryParams
options RequestOptions
cancellationToken CancellationToken

Returns

Task<LemurSummaryResponse>

Examples

await client.Lemur.SummaryAsync(
    new LemurSummaryParams
    {
        TranscriptIds = new List<string>() { "47b95ba5-8889-44d8-bc80-5de38306e582" },
        Context = "This is an interview about wildfires.",
        FinalModel = LemurModel.AnthropicClaude35Sonnet,
        MaxOutputSize = 3000,
        Temperature = 0f,
    }
);

TaskAsync(LemurTaskParams, RequestOptions?, CancellationToken)

Use the LeMUR task endpoint to submit your own LLM prompt.

public Task<LemurTaskResponse> TaskAsync(LemurTaskParams request, RequestOptions? options = null, CancellationToken cancellationToken = default)

Parameters

request LemurTaskParams
options RequestOptions
cancellationToken CancellationToken

Returns

Task<LemurTaskResponse>

Examples

await client.Lemur.TaskAsync(
    new LemurTaskParams
    {
        TranscriptIds = new List<string>() { "64nygnr62k-405c-4ae8-8a6b-d90b40ff3cce" },
        Context = "This is an interview about wildfires.",
        FinalModel = LemurModel.AnthropicClaude35Sonnet,
        MaxOutputSize = 3000,
        Temperature = 0f,
        Prompt = "List all the locations affected by wildfires.",
    }
);
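
The task response carries the model's output as plain text. A brief follow-up sketch, assuming LemurTaskResponse exposes the generated text via a Response property:

var taskResponse = await client.Lemur.TaskAsync(
    new LemurTaskParams
    {
        TranscriptIds = new List<string>() { "64nygnr62k-405c-4ae8-8a6b-d90b40ff3cce" },
        Prompt = "List all the locations affected by wildfires.",
    }
);

// The model's answer is returned as plain text.
Console.WriteLine(taskResponse.Response);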