chore: Support new error code (#7311)
* chore: fetch model list

* chore: support new error code
appflowy authored Feb 3, 2025
1 parent 25a27df commit aacd09d
Showing 14 changed files with 114 additions and 35 deletions.
@@ -148,6 +148,15 @@ class CompletionStream {
);
}

if (event.startsWith("AI_MAX_REQUIRED:")) {
final msg = event.substring(16);
onError(
AIError(
message: msg,
),
);
}

if (event.startsWith("start:")) {
await onStart();
}
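For context (an editor's sketch, not part of this diff): the completion stream is a plain string protocol dispatched by prefix, and the magic number in `substring(16)` is just the length of `"AI_MAX_REQUIRED:"`. Deriving the offset from the constant makes that explicit:

```dart
// Sketch of the sentinel-prefix convention used by these stream events.
// 'AI_MAX_REQUIRED:'.length == 16, which is where substring(16) comes from.
const aiMaxRequiredPrefix = 'AI_MAX_REQUIRED:';

void dispatchEvent(String event, void Function(String message) onAIMaxRequired) {
  if (event.startsWith(aiMaxRequiredPrefix)) {
    onAIMaxRequired(event.substring(aiMaxRequiredPrefix.length));
  }
}
```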
1 change: 1 addition & 0 deletions frontend/appflowy_flutter/lib/env/cloud_env.dart
@@ -247,6 +247,7 @@ Future<AppFlowyCloudConfiguration> configurationFromUri(
// In development mode, the app is configured to access the AppFlowy cloud server directly through specific ports.
// This setup bypasses the need for Nginx, meaning that the AppFlowy cloud should be running without an Nginx server
// in the development environment.
// If you modify the following code, please update the corresponding documentation for AppFlowy billing.
if (authenticatorType == AuthenticatorType.appflowyCloudDevelop) {
return AppFlowyCloudConfiguration(
base_url: "$baseUrl:8000",
@@ -105,6 +105,13 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
),
);
},
onAIMaxRequired: (message) {
emit(
state.copyWith(
messageState: MessageState.onAIMaxRequired(message),
),
);
},
receiveMetadata: (metadata) {
Log.debug("AI Steps: ${metadata.progress?.step}");
emit(
@@ -146,6 +153,12 @@
add(ChatAIMessageEvent.receiveMetadata(metadata));
}
},
onAIMaxRequired: (message) {
if (!isClosed) {
Log.info(message);
add(ChatAIMessageEvent.onAIMaxRequired(message));
}
},
);
}
}
@@ -159,6 +172,8 @@ class ChatAIMessageEvent with _$ChatAIMessageEvent {
const factory ChatAIMessageEvent.onAIResponseLimit() = _OnAIResponseLimit;
const factory ChatAIMessageEvent.onAIImageResponseLimit() =
_OnAIImageResponseLimit;
const factory ChatAIMessageEvent.onAIMaxRequired(String message) =
_OnAIMaxRequired;
const factory ChatAIMessageEvent.receiveMetadata(
MetadataCollection metadata,
) = _ReceiveMetadata;
@@ -193,6 +208,7 @@ class MessageState with _$MessageState {
const factory MessageState.onError(String error) = _Error;
const factory MessageState.onAIResponseLimit() = _AIResponseLimit;
const factory MessageState.onAIImageResponseLimit() = _AIImageResponseLimit;
const factory MessageState.onAIMaxRequired(String message) = _AIMaxRequired;
const factory MessageState.ready() = _Ready;
const factory MessageState.loading() = _Loading;
}
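As a usage sketch (assuming the freezed-generated matcher, consistent with how ChatAIMessageWidget consumes these states in the hunk below), a consumer branches on the new variant like this:

```dart
// Hypothetical consumer of the new MessageState variant; maybeWhen is
// the matcher freezed generates for the union declared above.
String describeState(MessageState messageState) {
  return messageState.maybeWhen(
    onAIMaxRequired: (message) => message,
    onError: (error) => 'Error: $error',
    orElse: () => '',
  );
}
```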
@@ -28,6 +28,14 @@ class AnswerStream {
} else if (event == "AI_IMAGE_RESPONSE_LIMIT") {
_aiImageLimitReached = true;
_onAIImageResponseLimit?.call();
} else if (event.startsWith("AI_MAX_REQUIRED:")) {
final msg = event.substring(16);
// Deliver the event immediately if the callback is registered; otherwise buffer it.
if (_onAIMaxRequired != null) {
_onAIMaxRequired!(msg);
} else {
_pendingAIMaxRequiredEvents.add(msg);
}
}
},
onDone: () {
@@ -56,8 +64,12 @@
void Function(String error)? _onError;
void Function()? _onAIResponseLimit;
void Function()? _onAIImageResponseLimit;
void Function(String message)? _onAIMaxRequired;
void Function(MetadataCollection metadataCollection)? _onMetadata;

// Buffer for events that occur before listen() is called.
final List<String> _pendingAIMaxRequiredEvents = [];

int get nativePort => _port.sendPort.nativePort;
bool get hasStarted => _hasStarted;
bool get aiLimitReached => _aiLimitReached;
@@ -78,6 +90,7 @@
void Function(String error)? onError,
void Function()? onAIResponseLimit,
void Function()? onAIImageResponseLimit,
void Function(String message)? onAIMaxRequired,
void Function(MetadataCollection metadata)? onMetadata,
}) {
_onData = onData;
@@ -87,6 +100,15 @@
_onAIResponseLimit = onAIResponseLimit;
_onAIImageResponseLimit = onAIImageResponseLimit;
_onMetadata = onMetadata;
_onAIMaxRequired = onAIMaxRequired;

// Flush any buffered AI_MAX_REQUIRED events.
if (_onAIMaxRequired != null && _pendingAIMaxRequiredEvents.isNotEmpty) {
for (final msg in _pendingAIMaxRequiredEvents) {
_onAIMaxRequired!(msg);
}
_pendingAIMaxRequiredEvents.clear();
}

_onStart?.call();
}
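The buffer above exists because the native side can emit `AI_MAX_REQUIRED` before the UI attaches callbacks via `listen()`. A hedged usage sketch (`showUpgradeDialog` stands in for whatever the UI layer actually does with the message):

```dart
// Hypothetical wiring; AnswerStream is the class from the diff above.
void wireUpAnswerStream() {
  final stream = AnswerStream();
  // The Rust side may already be writing to stream.nativePort here; any
  // AI_MAX_REQUIRED events land in _pendingAIMaxRequiredEvents until listen().
  stream.listen(
    onData: (text) => print(text),
    onAIMaxRequired: (message) => showUpgradeDialog(message),
  );
}
```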
@@ -137,6 +137,11 @@ class ChatAIMessageWidget extends StatelessWidget {
errorMessage: LocaleKeys.sideBar_purchaseAIMax.tr(),
);
},
onAIMaxRequired: (message) {
return ChatErrorMessageWidget(
errorMessage: message,
);
},
),
),
);
@@ -22,6 +22,7 @@ class SettingsAIBloc extends Bloc<SettingsAIEvent, SettingsAIState> {
_userService = UserBackendService(userId: userProfile.id),
super(
SettingsAIState(
selectedAIModel: userProfile.aiModel,
userProfile: userProfile,
currentWorkspaceMemberRole: currentWorkspaceMemberRole,
),
@@ -98,7 +99,25 @@ class SettingsAIBloc extends Bloc<SettingsAIEvent, SettingsAIState> {
Log.info("Available models: $decodedJson");
if (decodedJson is Map<String, dynamic>) {
final models = ModelList.fromJson(decodedJson).models;
-emit(state.copyWith(availableModels: models));
if (models.isEmpty) {
// If the available model list is empty, show only "Default".
emit(state.copyWith(availableModels: ["Default"]));
return;
}

if (!models.contains(state.selectedAIModel)) {
// Fall back to the first model when the currently selected model is unavailable.
emit(
state.copyWith(
availableModels: models,
selectedAIModel: models[0],
),
);
} else {
emit(state.copyWith(availableModels: models));
}
}
},
refreshMember: (member) {
@@ -185,8 +204,9 @@ class SettingsAIState with _$SettingsAIState {
const factory SettingsAIState({
required UserProfilePB userProfile,
UseAISettingPB? aiSettings,
@Default("Default") String selectedAIModel,
AFRolePB? currentWorkspaceMemberRole,
@Default(["default"]) List<String> availableModels,
@Default(["Default"]) List<String> availableModels,
@Default(true) bool enableSearchIndexing,
}) = _SettingsAIState;
}
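Condensed, the fallback rule the bloc implements above is: empty list → show only "Default"; selection not in the list → first available model. A pure-function sketch of that rule (illustration only, not code from this commit):

```dart
// Returns the (availableModels, selectedAIModel) pair to emit.
(List<String>, String) resolveModelSelection(
  List<String> models,
  String selected,
) {
  if (models.isEmpty) return (['Default'], 'Default');
  if (!models.contains(selected)) return (models, models.first);
  return (models, selected);
}
```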
@@ -33,7 +33,7 @@ class AIModelSelection extends StatelessWidget {
onChanged: (model) => context
.read<SettingsAIBloc>()
.add(SettingsAIEvent.selectModel(model)),
-selectedOption: state.userProfile.aiModel,
selectedOption: state.selectedAIModel,
options: state.availableModels
.map(
(model) => buildDropdownMenuEntry<String>(
42 changes: 14 additions & 28 deletions frontend/rust-lib/Cargo.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions frontend/rust-lib/Cargo.toml
@@ -103,8 +103,8 @@ dashmap = "6.0.1"
# Run the script.add_workspace_members:
# scripts/tool/update_client_api_rev.sh new_rev_id
# ⚠️⚠️⚠️️
-client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "82409199f8ffa0166f2f5d9403ccd55831890549" }
-client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "82409199f8ffa0166f2f5d9403ccd55831890549" }
client-api = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a" }
client-api-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Cloud", rev = "4ed5b367eac5ae9ffd603812e2fea26b3ed3da7a" }

[profile.dev]
opt-level = 0
4 changes: 4 additions & 0 deletions frontend/rust-lib/flowy-ai/src/chat.rs
@@ -259,6 +259,10 @@ impl Chat {
let _ = answer_sink
.send("AI_IMAGE_RESPONSE_LIMIT".to_string())
.await;
} else if err.is_ai_max_required() {
let _ = answer_sink
.send(format!("AI_MAX_REQUIRED:{}", err.msg))
.await;
} else {
let _ = answer_sink.send(format!("error:{}", err)).await;
}
10 changes: 9 additions & 1 deletion frontend/rust-lib/flowy-ai/src/event_handler.rs
@@ -13,6 +13,7 @@ use flowy_ai_pub::cloud::{ChatMessageMetadata, ChatMessageType, ChatRAGData, Con
use flowy_error::{ErrorCode, FlowyError, FlowyResult};
use lib_dispatch::prelude::{data_result_ok, AFPluginData, AFPluginState, DataResult};
use lib_infra::isolate_stream::IsolateSink;
use serde_json::json;
use std::sync::{Arc, Weak};
use tracing::trace;
use validator::Validate;
@@ -112,7 +113,14 @@ pub(crate) async fn get_available_model_list_handler(
ai_manager: AFPluginState<Weak<AIManager>>,
) -> DataResult<ModelConfigPB, FlowyError> {
let ai_manager = upgrade_ai_manager(ai_manager)?;
-let models = serde_json::to_string(&ai_manager.get_available_models().await?)?;
let available_models = ai_manager.get_available_models().await?;
let models = available_models
.models
.into_iter()
.map(|m| m.name)
.collect::<Vec<String>>();

let models = serde_json::to_string(&json!({"models": models}))?;
data_result_ok(ModelConfigPB { models })
}
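The handler now sends just the model names as `{"models": [...]}`, which is presumably the shape `ModelList.fromJson` expects on the Dart side (see the SettingsAIBloc hunk above). A minimal decoding sketch with hypothetical model names:

```dart
import 'dart:convert';

void main() {
  // Shape produced by get_available_model_list_handler above.
  const payload = '{"models": ["model-a", "model-b"]}';
  final decoded = jsonDecode(payload);
  if (decoded is Map<String, dynamic>) {
    final models = (decoded['models'] as List).cast<String>();
    print(models); // [model-a, model-b]
  }
}
```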

3 changes: 3 additions & 0 deletions frontend/rust-lib/flowy-error/src/code.rs
@@ -362,6 +362,9 @@ pub enum ErrorCode {

#[error("AI Image Response limit exceeded")]
AIImageResponseLimitExceeded = 124,

#[error("AI Max Required")]
AIMaxRequired = 125,
}

impl ErrorCode {