@@ -28,21 +28,8 @@ public class LemurTaskParamsJsonConverter : global::System.Text.Json.Serializati
{
}

readerCopy = reader;
global::AssemblyAI.LemurBaseParams? value2 = default;
try
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
value2 = global::System.Text.Json.JsonSerializer.Deserialize(ref readerCopy, typeInfo);
}
catch (global::System.Text.Json.JsonException)
{
}

var result = new global::AssemblyAI.LemurTaskParams(
value1,
value2
value1
);

if (value1 != null)
@@ -51,12 +38,6 @@ public class LemurTaskParamsJsonConverter : global::System.Text.Json.Serializati
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurTaskParamsVariant1).Name}");
_ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
}
else if (value2 != null)
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
_ = global::System.Text.Json.JsonSerializer.Deserialize(ref reader, typeInfo);
}

return result;
}
@@ -76,12 +57,6 @@ public override void Write(
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurTaskParamsVariant1).Name}");
global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1, typeInfo);
}
else if (value.IsValue2)
{
var typeInfo = typeInfoResolver.GetTypeInfo(typeof(global::AssemblyAI.LemurBaseParams), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo<global::AssemblyAI.LemurBaseParams?> ??
throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(global::AssemblyAI.LemurBaseParams).Name}");
global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2, typeInfo);
}
}
}
}
@@ -44,83 +44,33 @@ public LemurTaskParams(global::AssemblyAI.LemurTaskParamsVariant1? value)
Value1 = value;
}

/// <summary>
///
/// </summary>
#if NET6_0_OR_GREATER
public global::AssemblyAI.LemurBaseParams? Value2 { get; init; }
#else
public global::AssemblyAI.LemurBaseParams? Value2 { get; }
#endif

/// <summary>
///
/// </summary>
#if NET6_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.MemberNotNullWhen(true, nameof(Value2))]
#endif
public bool IsValue2 => Value2 != null;

/// <summary>
///
/// </summary>
public static implicit operator LemurTaskParams(global::AssemblyAI.LemurBaseParams value) => new LemurTaskParams((global::AssemblyAI.LemurBaseParams?)value);

/// <summary>
///
/// </summary>
public static implicit operator global::AssemblyAI.LemurBaseParams?(LemurTaskParams @this) => @this.Value2;

/// <summary>
///
/// </summary>
public LemurTaskParams(global::AssemblyAI.LemurBaseParams? value)
{
Value2 = value;
}

/// <summary>
///
/// </summary>
public LemurTaskParams(
global::AssemblyAI.LemurTaskParamsVariant1? value1,
global::AssemblyAI.LemurBaseParams? value2
)
{
Value1 = value1;
Value2 = value2;
}

/// <summary>
///
/// </summary>
public object? Object =>
Value2 as object ??
Value1 as object
;

/// <summary>
///
/// </summary>
public override string? ToString() =>
Value1?.ToString() ??
Value2?.ToString()
Value1?.ToString()
;

/// <summary>
///
/// </summary>
public bool Validate()
{
return IsValue1 && IsValue2;
return IsValue1;
}

/// <summary>
///
/// </summary>
public TResult? Match<TResult>(
global::System.Func<global::AssemblyAI.LemurTaskParamsVariant1?, TResult>? value1 = null,
global::System.Func<global::AssemblyAI.LemurBaseParams?, TResult>? value2 = null,
bool validate = true)
{
if (validate)
@@ -132,10 +82,6 @@ public bool Validate()
{
return value1(Value1!);
}
else if (IsValue2 && value2 != null)
{
return value2(Value2!);
}

return default(TResult);
}
@@ -145,7 +91,6 @@ public bool Validate()
/// </summary>
public void Match(
global::System.Action<global::AssemblyAI.LemurTaskParamsVariant1?>? value1 = null,
global::System.Action<global::AssemblyAI.LemurBaseParams?>? value2 = null,
bool validate = true)
{
if (validate)
@@ -157,10 +102,6 @@ public void Match(
{
value1?.Invoke(Value1!);
}
else if (IsValue2)
{
value2?.Invoke(Value2!);
}
}

/// <summary>
@@ -172,8 +113,6 @@ public override int GetHashCode()
{
Value1,
typeof(global::AssemblyAI.LemurTaskParamsVariant1),
Value2,
typeof(global::AssemblyAI.LemurBaseParams),
};
const int offset = unchecked((int)2166136261);
const int prime = 16777619;
@@ -190,8 +129,7 @@ static int HashCodeAggregator(int hashCode, object? value) => value == null
public bool Equals(LemurTaskParams other)
{
return
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurTaskParamsVariant1?>.Default.Equals(Value1, other.Value1) &&
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurBaseParams?>.Default.Equals(Value2, other.Value2)
global::System.Collections.Generic.EqualityComparer<global::AssemblyAI.LemurTaskParamsVariant1?>.Default.Equals(Value1, other.Value1)
;
}

@@ -8,13 +8,52 @@ namespace AssemblyAI
/// </summary>
public sealed partial class LemurTaskParamsVariant1
{
/// <summary>
/// The model that is used for the final prompt after compression is performed.<br/>
/// Default Value: default
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("final_model")]
[global::System.Text.Json.Serialization.JsonConverter(typeof(global::AssemblyAI.JsonConverters.LemurModelJsonConverter))]
[global::System.Text.Json.Serialization.JsonRequired]
public required global::AssemblyAI.LemurModel FinalModel { get; set; }

/// <summary>
/// Custom formatted transcript data. Maximum size is the context limit of the selected model.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("input_text")]
public string? InputText { get; set; }

/// <summary>
/// Max output size in tokens, up to 4000<br/>
/// Default Value: 2000
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("max_output_size")]
public int? MaxOutputSize { get; set; }

/// <summary>
/// Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("prompt")]
[global::System.Text.Json.Serialization.JsonRequired]
public required string Prompt { get; set; }

/// <summary>
/// The temperature to use for the model.<br/>
/// Higher values result in answers that are more creative, lower values are more conservative.<br/>
/// Can be any value between 0.0 and 1.0 inclusive.<br/>
/// Default Value: 0F
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("temperature")]
public float? Temperature { get; set; }

/// <summary>
/// A list of completed transcripts with text. Up to a maximum of 100 hours of audio.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </summary>
[global::System.Text.Json.Serialization.JsonPropertyName("transcript_ids")]
public global::System.Collections.Generic.IList<global::System.Guid>? TranscriptIds { get; set; }

/// <summary>
/// Additional properties that are not explicitly defined in the schema
/// </summary>
@@ -24,16 +63,48 @@ public sealed partial class LemurTaskParamsVariant1
/// <summary>
/// Initializes a new instance of the <see cref="LemurTaskParamsVariant1" /> class.
/// </summary>
/// <param name="finalModel">
/// The model that is used for the final prompt after compression is performed.<br/>
/// Default Value: default
/// </param>
/// <param name="inputText">
/// Custom formatted transcript data. Maximum size is the context limit of the selected model.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </param>
/// <param name="maxOutputSize">
/// Max output size in tokens, up to 4000<br/>
/// Default Value: 2000
/// </param>
/// <param name="prompt">
/// Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
/// </param>
/// <param name="temperature">
/// The temperature to use for the model.<br/>
/// Higher values result in answers that are more creative, lower values are more conservative.<br/>
/// Can be any value between 0.0 and 1.0 inclusive.<br/>
/// Default Value: 0F
/// </param>
/// <param name="transcriptIds">
/// A list of completed transcripts with text. Up to a maximum of 100 hours of audio.<br/>
/// Use either transcript_ids or input_text as input into LeMUR.
/// </param>
#if NET7_0_OR_GREATER
[global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers]
#endif
public LemurTaskParamsVariant1(
string prompt)
global::AssemblyAI.LemurModel finalModel,
string prompt,
string? inputText,
int? maxOutputSize,
float? temperature,
global::System.Collections.Generic.IList<global::System.Guid>? transcriptIds)
{
this.FinalModel = finalModel;
this.Prompt = prompt ?? throw new global::System.ArgumentNullException(nameof(prompt));
this.InputText = inputText;
this.MaxOutputSize = maxOutputSize;
this.Temperature = temperature;
this.TranscriptIds = transcriptIds;
}

/// <summary>
43 changes: 40 additions & 3 deletions src/libs/AssemblyAI/openapi.yaml
@@ -5644,13 +5644,50 @@ components:
x-label: Prompt
description: Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
type: string
required: [prompt]
- $ref: "#/components/schemas/LemurBaseParams"
transcript_ids:
x-label: Transcript IDs
description: |
A list of completed transcripts with text. Up to a maximum of 100 hours of audio.
Use either transcript_ids or input_text as input into LeMUR.
type: array
items:
x-label: Transcript ID
type: string
format: uuid
input_text:
x-label: Input text
description: |
Custom formatted transcript data. Maximum size is the context limit of the selected model.
Use either transcript_ids or input_text as input into LeMUR.
type: string
final_model:
Comment on lines +5657 to +5663

⚠️ Potential issue

input_text duplicated & still optional – and the mutual-exclusivity rule has vanished

input_text duplicates the base definition and, more importantly, the prose rule “Use either transcript_ids or input_text” is no longer enforceable once inheritance is removed.
Consider re-using LemurBaseParams or adding a oneOf composition to keep the mutual-exclusivity rule machine-readable.

🤖 Prompt for AI Agents
In src/libs/AssemblyAI/openapi.yaml around lines 5657 to 5663, the input_text
property is duplicated and the mutual exclusivity rule between transcript_ids
and input_text is lost. To fix this, refactor the schema to reuse
LemurBaseParams where input_text is defined or add a oneOf composition that
enforces either transcript_ids or input_text is provided, ensuring the mutual
exclusivity is machine-readable and not just in the description.
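
A minimal sketch of the suggested oneOf composition (illustrative only, not part of this diff; unrelated property bodies elided). With two required branches, a request must carry exactly one of transcript_ids or input_text, so the either/or rule from the description becomes machine-readable:

LemurTaskParams:
  type: object
  properties:
    prompt:
      type: string
    transcript_ids:
      type: array
      items:
        type: string
        format: uuid
    input_text:
      type: string
    # final_model, max_output_size, temperature as defined in this diff
  required: [prompt, final_model]
  oneOf:
    - required: [transcript_ids]   # branch 1: transcript IDs supplied
    - required: [input_text]       # branch 2: raw input text supplied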

x-label: Final model
description: |
The model that is used for the final prompt after compression is performed.
default: "default"
oneOf:
- $ref: "#/components/schemas/LemurModel"
max_output_size:
x-label: Maximum output size
description: Max output size in tokens, up to 4000
type: integer
default: 2000
temperature:
Comment on lines +5671 to +5675

⚠️ Potential issue

max_output_size duplicated with no bounds check

The duplicated field omits minimum / maximum guards that still exist in LemurBaseParams. This widens the accepted range for the task endpoint only, creating inconsistent server validation.

🤖 Prompt for AI Agents
In src/libs/AssemblyAI/openapi.yaml around lines 5671 to 5675, the
max_output_size field is duplicated without minimum and maximum bounds, unlike
in LemurBaseParams. To fix this, add the same minimum and maximum constraints
(e.g., minimum: 1, maximum: 4000) to this max_output_size definition to ensure
consistent validation across endpoints.
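
A hedged sketch of what the bounded field could look like, reusing the 1/4000 values from the comment's own example (the actual bounds in LemurBaseParams are not visible in this diff):

max_output_size:
  x-label: Maximum output size
  description: Max output size in tokens, up to 4000
  type: integer
  default: 2000
  minimum: 1      # assumed lower bound, per the review suggestion
  maximum: 4000   # matches the "up to 4000" wording in the description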

x-label: Temperature
description: |
The temperature to use for the model.
Higher values result in answers that are more creative, lower values are more conservative.
Can be any value between 0.0 and 1.0 inclusive.
type: number
format: float
default: 0
minimum: 0
maximum: 1
required: [prompt, final_model]

💡 Verification agent

🧩 Analysis chain

Breaking change: context field silently removed

Existing clients may rely on context when calling /lemur/v3/generate/task. Removing it without a new version or deprecation period is a breaking API change.

If intentional, increment the API major version and document migration steps; otherwise restore the field via inheritance.


🏁 Script executed:

#!/bin/bash
# Locate the /lemur/v3/generate/task endpoint and inspect its requestBody schema (showing 20 lines of context)
rg -n "/lemur/v3/generate/task" -C20 src/libs/AssemblyAI/openapi.yaml

Length of output: 1665


🏁 Script executed:

#!/bin/bash
# Inspect the LemurTaskParams schema to verify if 'context' was removed
rg -n "LemurTaskParams:" -C50 src/libs/AssemblyAI/openapi.yaml

Length of output: 4448


Breaking change: context property removed from LeMUR task schema

The components/schemas/LemurTaskParams definition no longer includes a context field: it now allows only prompt, transcript_ids, input_text, final_model, max_output_size, and temperature (with additionalProperties: false), and its required array contains only [prompt, final_model]. Any client that sends context will now fail schema validation.

• File: src/libs/AssemblyAI/openapi.yaml
Location: components/schemas/LemurTaskParams (around line 5635)

Actions:

  • If removal was intentional, bump the API’s major version and document migration steps for clients.
  • Otherwise, restore the context property (e.g., via schema inheritance) to maintain backward compatibility.
🤖 Prompt for AI Agents
In src/libs/AssemblyAI/openapi.yaml around line 5635, the LemurTaskParams schema
has removed the 'context' property, causing breaking changes for clients using
it. To fix this, either restore the 'context' property in the schema to maintain
backward compatibility or, if the removal is intentional, increment the API's
major version and add clear migration instructions for clients to handle the
change.
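
If the removal was unintentional, one way to restore context without re-duplicating fields is to reinstate the allOf reference this diff deletes; a rough sketch, assuming context and the other shared LeMUR fields are still defined on LemurBaseParams:

LemurTaskParams:
  allOf:
    - $ref: "#/components/schemas/LemurBaseParams"   # re-inherits context and the shared LeMUR parameters
    - type: object
      properties:
        prompt:
          x-label: Prompt
          description: Your text to prompt the model to produce a desired output, including any context you want to pass into the model.
          type: string
      required: [prompt]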

example:
{
transcript_ids: ["64nygnr62k-405c-4ae8-8a6b-d90b40ff3cce"],
prompt: "List all the locations affected by wildfires.",
context: "This is an interview about wildfires.",
final_model: "anthropic/claude-sonnet-4-20250514",
temperature: 0,
max_output_size: 3000,