Regenerate client using the latest spec (#8245) (#8246)
Co-authored-by: Florian Bernd <[email protected]>
github-actions[bot] and flobernd authored Jun 20, 2024
1 parent cbb7606 commit a65b06b
Showing 95 changed files with 7,382 additions and 4,774 deletions.
@@ -34,7 +34,7 @@ public sealed partial class AsyncSearchStatusRequestParameters : RequestParamete
}

/// <summary>
- /// <para>Retreives the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
+ /// <para>Get async search status<br/>Retrieves the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
/// </summary>
public sealed partial class AsyncSearchStatusRequest : PlainRequest<AsyncSearchStatusRequestParameters>
{
@@ -52,7 +52,7 @@ public AsyncSearchStatusRequest(Elastic.Clients.Elasticsearch.Serverless.Id id)
}

/// <summary>
- /// <para>Retreives the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
+ /// <para>Get async search status<br/>Retrieves the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
/// </summary>
public sealed partial class AsyncSearchStatusRequestDescriptor<TDocument> : RequestDescriptor<AsyncSearchStatusRequestDescriptor<TDocument>, AsyncSearchStatusRequestParameters>
{
@@ -82,7 +82,7 @@ protected override void Serialize(Utf8JsonWriter writer, JsonSerializerOptions o
}

/// <summary>
- /// <para>Retreives the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
+ /// <para>Get async search status<br/>Retrieves the status of a previously submitted async search request given its identifier, without retrieving search results.<br/>If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role.</para>
/// </summary>
public sealed partial class AsyncSearchStatusRequestDescriptor : RequestDescriptor<AsyncSearchStatusRequestDescriptor, AsyncSearchStatusRequestParameters>
{
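The change above is documentation-only: the regenerated `<summary>` now leads with the spec's short description ("Get async search status") and fixes the "Retreives" typo. For orientation, a minimal usage sketch follows; it is not part of this commit and assumes an `ElasticsearchClient` configured elsewhere, a `StatusAsync` overload on the generated `AsyncSearch` namespace client that accepts the request object, and the client's implicit string-to-`Id` conversion. The search id is a placeholder taken from a prior submit response.

```csharp
using System;
using System.Threading.Tasks;
using Elastic.Clients.Elasticsearch.Serverless;
using Elastic.Clients.Elasticsearch.Serverless.AsyncSearch;

public static class AsyncSearchStatusExample
{
    // Checks the status of a previously submitted async search without fetching hits.
    public static async Task PrintStatusAsync(ElasticsearchClient client, string searchId)
    {
        var request = new AsyncSearchStatusRequest(searchId); // id from the submit response
        var response = await client.AsyncSearch.StatusAsync(request);

        Console.WriteLine(response.IsValidResponse
            ? "Async search status retrieved."
            : $"Status call failed: {response.DebugInformation}");
    }
}
```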
@@ -34,7 +34,7 @@ public sealed partial class CloseJobRequestParameters : RequestParameters
}

/// <summary>
- /// <para>Closes one or more anomaly detection jobs.<br/>A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.<br/>When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data.<br/>If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request.<br/>When a datafeed that has a specified end date stops, it automatically closes its associated job.</para>
+ /// <para>Close anomaly detection jobs<br/>A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.<br/>When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data.<br/>If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request.<br/>When a datafeed that has a specified end date stops, it automatically closes its associated job.</para>
/// </summary>
public sealed partial class CloseJobRequest : PlainRequest<CloseJobRequestParameters>
{
@@ -70,7 +70,7 @@ public CloseJobRequest(Elastic.Clients.Elasticsearch.Serverless.Id jobId) : base
}

/// <summary>
- /// <para>Closes one or more anomaly detection jobs.<br/>A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.<br/>When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data.<br/>If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request.<br/>When a datafeed that has a specified end date stops, it automatically closes its associated job.</para>
+ /// <para>Close anomaly detection jobs<br/>A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.<br/>When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data.<br/>If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request.<br/>When a datafeed that has a specified end date stops, it automatically closes its associated job.</para>
/// </summary>
public sealed partial class CloseJobRequestDescriptor : RequestDescriptor<CloseJobRequestDescriptor, CloseJobRequestParameters>
{
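Again a documentation-only change: the summary now opens with "Close anomaly detection jobs", while the guidance about housekeeping, datafeed shutdown, and timeout/force handling is unchanged. Continuing with the client assumed above, a minimal sketch of issuing the request (not part of this commit; `CloseJobAsync` on the `MachineLearning` namespace client and the implicit string-to-`Id` conversion are assumptions, and the job id is a placeholder):

```csharp
// Not part of this commit; "my-anomaly-job" is a placeholder job id.
var closeRequest = new CloseJobRequest("my-anomaly-job");
var closeResponse = await client.MachineLearning.CloseJobAsync(closeRequest);

if (closeResponse.IsValidResponse)
{
    // A closed job keeps its results but no longer accepts data or analysis work.
    Console.WriteLine("Job closed.");
}
```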
@@ -66,7 +66,7 @@ public sealed partial class GetTrainedModelsRequestParameters : RequestParameter
/// <summary>
/// <para>A comma delimited string of tags. A trained model can have many tags, or<br/>none. When supplied, only trained models that contain all the supplied<br/>tags are returned.</para>
/// </summary>
- public string? Tags { get => Q<string?>("tags"); set => Q("tags", value); }
+ public ICollection<string>? Tags { get => Q<ICollection<string>?>("tags"); set => Q("tags", value); }
}

/// <summary>
@@ -78,7 +78,7 @@ public GetTrainedModelsRequest()
{
}

- public GetTrainedModelsRequest(Elastic.Clients.Elasticsearch.Serverless.Id? modelId) : base(r => r.Optional("model_id", modelId))
+ public GetTrainedModelsRequest(Elastic.Clients.Elasticsearch.Serverless.Ids? modelId) : base(r => r.Optional("model_id", modelId))
{
}

@@ -132,7 +132,7 @@ public GetTrainedModelsRequest(Elastic.Clients.Elasticsearch.Serverless.Id? mode
/// <para>A comma delimited string of tags. A trained model can have many tags, or<br/>none. When supplied, only trained models that contain all the supplied<br/>tags are returned.</para>
/// </summary>
[JsonIgnore]
- public string? Tags { get => Q<string?>("tags"); set => Q("tags", value); }
+ public ICollection<string>? Tags { get => Q<ICollection<string>?>("tags"); set => Q("tags", value); }
}

/// <summary>
@@ -142,7 +142,7 @@ public sealed partial class GetTrainedModelsRequestDescriptor : RequestDescripto
{
internal GetTrainedModelsRequestDescriptor(Action<GetTrainedModelsRequestDescriptor> configure) => configure.Invoke(this);

- public GetTrainedModelsRequestDescriptor(Elastic.Clients.Elasticsearch.Serverless.Id? modelId) : base(r => r.Optional("model_id", modelId))
+ public GetTrainedModelsRequestDescriptor(Elastic.Clients.Elasticsearch.Serverless.Ids? modelId) : base(r => r.Optional("model_id", modelId))
{
}

@@ -164,9 +164,9 @@ public GetTrainedModelsRequestDescriptor()
public GetTrainedModelsRequestDescriptor From(int? from) => Qs("from", from);
public GetTrainedModelsRequestDescriptor Include(Elastic.Clients.Elasticsearch.Serverless.MachineLearning.Include? include) => Qs("include", include);
public GetTrainedModelsRequestDescriptor Size(int? size) => Qs("size", size);
- public GetTrainedModelsRequestDescriptor Tags(string? tags) => Qs("tags", tags);
+ public GetTrainedModelsRequestDescriptor Tags(ICollection<string>? tags) => Qs("tags", tags);

- public GetTrainedModelsRequestDescriptor ModelId(Elastic.Clients.Elasticsearch.Serverless.Id? modelId)
+ public GetTrainedModelsRequestDescriptor ModelId(Elastic.Clients.Elasticsearch.Serverless.Ids? modelId)
{
RouteValues.Optional("model_id", modelId);
return Self;
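This file changes signatures, not just docs: `tags` becomes `ICollection<string>?`, so callers pass a collection and the client takes care of joining it into the comma-delimited query string, and the `model_id` route value widens from `Id?` to `Ids?` to match the REST API's acceptance of multiple ids. A minimal sketch of the new fluent shape (not part of this commit; the `GetTrainedModelsAsync` lambda overload and the implicit conversion from a comma-separated string to `Ids` are assumptions, and the model ids and tags are placeholders):

```csharp
// Not part of this commit; placeholder model ids and tags.
var modelsResponse = await client.MachineLearning.GetTrainedModelsAsync(d => d
    .ModelId("model-one,model-two")                // Ids: several models in one request
    .Tags(new[] { "regression", "production" })    // was a single comma-delimited string
    .Size(10));
```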
@@ -38,7 +38,7 @@ public sealed partial class PutCalendarJobRequestParameters : RequestParameters
/// </summary>
public sealed partial class PutCalendarJobRequest : PlainRequest<PutCalendarJobRequestParameters>
{
- public PutCalendarJobRequest(Elastic.Clients.Elasticsearch.Serverless.Id calendarId, Elastic.Clients.Elasticsearch.Serverless.Id jobId) : base(r => r.Required("calendar_id", calendarId).Required("job_id", jobId))
+ public PutCalendarJobRequest(Elastic.Clients.Elasticsearch.Serverless.Id calendarId, Elastic.Clients.Elasticsearch.Serverless.Ids jobId) : base(r => r.Required("calendar_id", calendarId).Required("job_id", jobId))
{
}

@@ -58,7 +58,7 @@ public sealed partial class PutCalendarJobRequestDescriptor : RequestDescriptor<
{
internal PutCalendarJobRequestDescriptor(Action<PutCalendarJobRequestDescriptor> configure) => configure.Invoke(this);

- public PutCalendarJobRequestDescriptor(Elastic.Clients.Elasticsearch.Serverless.Id calendarId, Elastic.Clients.Elasticsearch.Serverless.Id jobId) : base(r => r.Required("calendar_id", calendarId).Required("job_id", jobId))
+ public PutCalendarJobRequestDescriptor(Elastic.Clients.Elasticsearch.Serverless.Id calendarId, Elastic.Clients.Elasticsearch.Serverless.Ids jobId) : base(r => r.Required("calendar_id", calendarId).Required("job_id", jobId))
{
}

@@ -76,7 +76,7 @@ public PutCalendarJobRequestDescriptor CalendarId(Elastic.Clients.Elasticsearch.
return Self;
}

- public PutCalendarJobRequestDescriptor JobId(Elastic.Clients.Elasticsearch.Serverless.Id jobId)
+ public PutCalendarJobRequestDescriptor JobId(Elastic.Clients.Elasticsearch.Serverless.Ids jobId)
{
RouteValues.Required("job_id", jobId);
return Self;
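The `job_id` route value widens from `Id` to `Ids` in the same way, so one call can attach several anomaly detection jobs (or a job group) to a calendar. A minimal sketch (not part of this commit; `PutCalendarJobAsync` on the `MachineLearning` namespace client and the implicit string conversions to `Id`/`Ids` are assumptions, and the calendar and job names are placeholders):

```csharp
// Not part of this commit; placeholder calendar and job ids.
var calendarResponse = await client.MachineLearning.PutCalendarJobAsync(
    new PutCalendarJobRequest("planned-outages", "job-one,job-two"));
```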