Skip to content

Commit

Permalink
🌿 Fern Regeneration -- July 10, 2024 (#41)
Browse files Browse the repository at this point in the history
  • Loading branch information
fern-api[bot] authored Jul 10, 2024
1 parent 64410b4 commit d3c4ba9
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 12 deletions.
8 changes: 0 additions & 8 deletions lib/assemblyai/lemur/client.rb
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ def initialize(request_client:)
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -84,7 +83,6 @@ def task(prompt:, transcript_ids: nil, input_text: nil, context: nil, final_mode
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -136,7 +134,6 @@ def summary(transcript_ids: nil, input_text: nil, context: nil, final_model: nil
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -187,7 +184,6 @@ def question_answer(questions:, transcript_ids: nil, input_text: nil, context: n
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -294,7 +290,6 @@ def initialize(request_client:)
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -347,7 +342,6 @@ def task(prompt:, transcript_ids: nil, input_text: nil, context: nil, final_mode
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -401,7 +395,6 @@ def summary(transcript_ids: nil, input_text: nil, context: nil, final_model: nil
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down Expand Up @@ -454,7 +447,6 @@ def question_answer(questions:, transcript_ids: nil, input_text: nil, context: n
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [String, Hash{String => Object}] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down
2 changes: 0 additions & 2 deletions lib/assemblyai/lemur/types/lemur_base_params.rb
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ class LemurBaseParams
# @return [AssemblyAI::Lemur::LemurBaseParamsContext] Context to provide the model. This can be a string or a free-form JSON value.
attr_reader :context
# @return [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
attr_reader :final_model
# @return [Integer] Max output size in tokens, up to 4000
attr_reader :max_output_size
Expand All @@ -44,7 +43,6 @@ class LemurBaseParams
# Use either transcript_ids or input_text as input into LeMUR.
# @param context [AssemblyAI::Lemur::LemurBaseParamsContext] Context to provide the model. This can be a string or a free-form JSON value.
# @param final_model [AssemblyAI::Lemur::LemurModel] The model that is used for the final prompt after compression is performed.
# Defaults to "default".
# @param max_output_size [Integer] Max output size in tokens, up to 4000
# @param temperature [Float] The temperature to use for the model.
# Higher values result in answers that are more creative, lower values are more
Expand Down
8 changes: 7 additions & 1 deletion lib/assemblyai/lemur/types/lemur_model.rb
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,16 @@ module AssemblyAI
class Lemur
# The model that is used for the final prompt after compression is performed.
class LemurModel
  # Identifiers accepted by the LeMUR API's `final_model` parameter.
  # Each constant maps to the string value sent over the wire.
  ANTHROPIC_CLAUDE3_5_SONNET = "anthropic/claude-3-5-sonnet"
  ANTHROPIC_CLAUDE3_OPUS = "anthropic/claude-3-opus"
  ANTHROPIC_CLAUDE3_HAIKU = "anthropic/claude-3-haiku"
  ANTHROPIC_CLAUDE3_SONNET = "anthropic/claude-3-sonnet"
  ANTHROPIC_CLAUDE2_1 = "anthropic/claude-2-1"
  ANTHROPIC_CLAUDE2 = "anthropic/claude-2"
  # Server-side default model selection.
  DEFAULT = "default"
  ANTHROPIC_CLAUDE_INSTANT1_2 = "anthropic/claude-instant-1-2"
  BASIC = "basic"
  ASSEMBLYAI_MISTRAL7B = "assemblyai/mistral-7b"
end
end
end
2 changes: 1 addition & 1 deletion lib/gemconfig.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

module AssemblyAI
module Gemconfig
VERSION = "1.0.0-beta.14"
VERSION = "1.0.0-beta.15"
AUTHORS = [""].freeze
EMAIL = "[email protected]"
SUMMARY = "AssemblyAI Ruby SDK"
Expand Down

0 comments on commit d3c4ba9

Please sign in to comment.