feat: AI chat UI improvements (#6760)
* chore: fix typo

* chore: remove redundant check for cloud and use guard clause

* feat: improve ai prompt input ui

* feat: improve ai chat welcome page

* feat: improve loading status indicator

* feat: improve chat bubble avatars

* chore: clean up chat theme

* feat: improve sizing, padding and text styles

* feat: improve suggested follow up question style

* feat: improve error message style

* feat: improve mobile chat ui

* feat: add superman

* fix: cannot send new questions after 2000 character limit reached (see the sketch after this list)

* feat: ai response action bottom sheet

* chore: code cleanup

* feat: improve look of side panel

* chore: BIG ANNOUNCEMENT CITY

* fix: test session issues

* fix: launch review issues
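
The 2000-character fix in the list above implies the prompt field must keep accepting sends even once the cap is hit. A minimal Flutter sketch of one way to do that — the widget, its names, and the enforcement choice are illustrative assumptions, not the commit's actual code:

```dart
import 'package:flutter/material.dart';
import 'package:flutter/services.dart' show MaxLengthEnforcement;

/// Illustrative sketch: cap prompt length so the field can never get
/// stuck in an unsendable state once the limit is reached.
class PromptField extends StatefulWidget {
  const PromptField({super.key, required this.onSend});

  final ValueChanged<String> onSend;

  @override
  State<PromptField> createState() => _PromptFieldState();
}

class _PromptFieldState extends State<PromptField> {
  static const int maxLength = 2000;
  final controller = TextEditingController();

  @override
  void dispose() {
    controller.dispose();
    super.dispose();
  }

  void _send() {
    final text = controller.text.trim();
    if (text.isEmpty) return;
    widget.onSend(text);
    controller.clear(); // reset so the next question starts fresh
  }

  @override
  Widget build(BuildContext context) {
    return TextField(
      controller: controller,
      maxLength: maxLength,
      // Truncate over-long input instead of rejecting it, so hitting
      // the cap does not block subsequent sends.
      maxLengthEnforcement: MaxLengthEnforcement.enforced,
      onSubmitted: (_) => _send(),
    );
  }
}
```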
richardshiue authored Nov 14, 2024
1 parent 8120656 commit bced932
Showing 93 changed files with 3,107 additions and 2,660 deletions.
@@ -16,15 +16,18 @@ class FloatingAIEntry extends StatelessWidget {
scaleFactor: 0.99,
onTapUp: () => mobileCreateNewAIChatNotifier.value =
mobileCreateNewAIChatNotifier.value + 1,
child: DecoratedBox(
decoration: _buildShadowDecoration(context),
child: Container(
decoration: _buildWrapperDecoration(context),
height: 48,
alignment: Alignment.centerLeft,
child: Padding(
padding: const EdgeInsets.only(left: 18),
child: _buildHintText(context),
child: Hero(
tag: "ai_chat_prompt",
child: DecoratedBox(
decoration: _buildShadowDecoration(context),
child: Container(
decoration: _buildWrapperDecoration(context),
height: 48,
alignment: Alignment.centerLeft,
child: Padding(
padding: const EdgeInsets.only(left: 18),
child: _buildHintText(context),
),
),
),
),
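
The hunk above wraps the floating entry in a `Hero` tagged `"ai_chat_prompt"`, which only animates if the destination route renders a `Hero` with the same tag. A hedged sketch of what that counterpart might look like — the page widget and hint text are assumptions:

```dart
import 'package:flutter/material.dart';

/// Hypothetical destination screen: a Hero with the same tag on the
/// pushed route lets Flutter fly the prompt between the two pages.
class AIChatPromptPage extends StatelessWidget {
  const AIChatPromptPage({super.key});

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: Hero(
        tag: "ai_chat_prompt", // must match the tag in FloatingAIEntry
        // Material keeps the TextField rendering correctly while the
        // Hero subtree is in flight between routes.
        child: Material(
          child: TextField(
            autofocus: true,
            decoration: const InputDecoration(hintText: 'Ask AI anything'),
          ),
        ),
      ),
    );
  }
}
```

During the route transition, Flutter matches the two identical tags and animates the shared subtree from the entry's position to the page's position.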
@@ -5,58 +5,52 @@ import 'package:appflowy/workspace/application/settings/ai/local_llm_listener.dart';
import 'package:appflowy_backend/dispatch/dispatch.dart';
import 'package:appflowy_backend/log.dart';
import 'package:appflowy_backend/protobuf/flowy-ai/entities.pb.dart';
import 'package:appflowy_result/appflowy_result.dart';
import 'package:flutter_bloc/flutter_bloc.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

import 'chat_input_bloc.dart';
part 'ai_prompt_input_bloc.freezed.dart';

part 'chat_file_bloc.freezed.dart';
class AIPromptInputBloc extends Bloc<AIPromptInputEvent, AIPromptInputState> {
AIPromptInputBloc()
: _listener = LocalLLMListener(),
super(AIPromptInputState.initial()) {
_dispatch();
_startListening();
_init();
}

class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
ChatFileBloc()
: listener = LocalLLMListener(),
super(const ChatFileState()) {
listener.start(
stateCallback: (pluginState) {
if (!isClosed) {
add(ChatFileEvent.updatePluginState(pluginState));
}
},
chatStateCallback: (chatState) {
if (!isClosed) {
add(ChatFileEvent.updateChatState(chatState));
}
},
);
ChatInputFileMetadata consumeMetadata() {
final metadata = {
for (final file in state.uploadFiles) file.filePath: file,
};

if (metadata.isNotEmpty) {
add(const AIPromptInputEvent.clear());
}

return metadata;
}

final LocalLLMListener _listener;

@override
Future<void> close() async {
await _listener.stop();
return super.close();
}

void _dispatch() {
on<AIPromptInputEvent>(
(event, emit) {
event.when(
newFile: (String filePath, String fileName) {
final files = [...state.uploadFiles];

on<ChatFileEvent>(
(event, emit) async {
await event.when(
initial: () async {
final result = await AIEventGetLocalAIChatState().send();
result.fold(
(chatState) {
if (!isClosed) {
add(
ChatFileEvent.updateChatState(chatState),
);
}
},
(err) {
Log.error(err.toString());
},
);
},
newFile: (String filePath, String fileName) async {
final files = List<ChatFile>.from(state.uploadFiles);
final newFile = ChatFile.fromFilePath(filePath);
if (newFile != null) {
files.add(newFile);
emit(
state.copyWith(
uploadFiles: files,
),
);
emit(state.copyWith(uploadFiles: files));
}
},
updateChatState: (LocalAIChatPB chatState) {
@@ -76,8 +70,8 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
fileEnabled && chatState.state == RunningStatePB.Running;

final aiType = chatState.state == RunningStatePB.Running
? const AIType.localAI()
: const AIType.appflowyAI();
? AIType.localAI
: AIType.appflowyAI;

emit(
state.copyWith(
@@ -107,48 +101,66 @@ class ChatFileBloc extends Bloc<ChatFileEvent, ChatFileState> {
);
}

ChatInputFileMetadata consumeMetaData() {
final metadata = state.uploadFiles.fold(
<String, ChatFile>{},
(map, file) => map..putIfAbsent(file.filePath, () => file),
void _startListening() {
_listener.start(
stateCallback: (pluginState) {
if (!isClosed) {
add(AIPromptInputEvent.updatePluginState(pluginState));
}
},
chatStateCallback: (chatState) {
if (!isClosed) {
add(AIPromptInputEvent.updateChatState(chatState));
}
},
);

if (metadata.isNotEmpty) {
add(const ChatFileEvent.clear());
}

return metadata;
}

final LocalLLMListener listener;

@override
Future<void> close() async {
await listener.stop();
return super.close();
void _init() {
AIEventGetLocalAIChatState().send().fold(
(chatState) {
if (!isClosed) {
add(AIPromptInputEvent.updateChatState(chatState));
}
},
Log.error,
);
}
}

@freezed
class ChatFileEvent with _$ChatFileEvent {
const factory ChatFileEvent.initial() = Initial;
const factory ChatFileEvent.newFile(String filePath, String fileName) =
class AIPromptInputEvent with _$AIPromptInputEvent {
const factory AIPromptInputEvent.newFile(String filePath, String fileName) =
_NewFile;
const factory ChatFileEvent.deleteFile(ChatFile file) = _DeleteFile;
const factory ChatFileEvent.clear() = _ClearFile;
const factory ChatFileEvent.updateChatState(LocalAIChatPB chatState) =
_UpdateChatState;
const factory ChatFileEvent.updatePluginState(
const factory AIPromptInputEvent.deleteFile(ChatFile file) = _DeleteFile;
const factory AIPromptInputEvent.clear() = _ClearFile;
const factory AIPromptInputEvent.updateChatState(
LocalAIChatPB chatState,
) = _UpdateChatState;
const factory AIPromptInputEvent.updatePluginState(
LocalAIPluginStatePB chatState,
) = _UpdatePluginState;
}

@freezed
class ChatFileState with _$ChatFileState {
const factory ChatFileState({
@Default(false) bool supportChatWithFile,
class AIPromptInputState with _$AIPromptInputState {
const factory AIPromptInputState({
required bool supportChatWithFile,
LocalAIChatPB? chatState,
@Default([]) List<ChatFile> uploadFiles,
@Default(AIType.appflowyAI()) AIType aiType,
}) = _ChatFileState;
required List<ChatFile> uploadFiles,
required AIType aiType,
}) = _AIPromptInputState;

factory AIPromptInputState.initial() => const AIPromptInputState(
supportChatWithFile: false,
uploadFiles: [],
aiType: AIType.appflowyAI,
);
}

enum AIType {
appflowyAI,
localAI;

bool get isLocalAI => this == localAI;
}
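
With `AIType` now a plain enum and the events renamed, call sites can read state straight off the bloc. A speculative sketch of a consumer widget — the widget itself and the file path are invented; only `AIPromptInputBloc`, its state fields, and `AIPromptInputEvent.newFile` come from the diff above:

```dart
import 'package:flutter/material.dart';
import 'package:flutter_bloc/flutter_bloc.dart';

// Assumes the bloc file shown in the diff above is importable, e.g.:
// import 'ai_prompt_input_bloc.dart'; // illustrative path

/// Hypothetical attachment button gated on the new enum-style AIType.
class PromptAttachmentButton extends StatelessWidget {
  const PromptAttachmentButton({super.key});

  @override
  Widget build(BuildContext context) {
    return BlocBuilder<AIPromptInputBloc, AIPromptInputState>(
      builder: (context, state) {
        // File upload only applies when the local AI plugin is running.
        if (!state.aiType.isLocalAI || !state.supportChatWithFile) {
          return const SizedBox.shrink();
        }
        return IconButton(
          icon: const Icon(Icons.attach_file),
          onPressed: () => context.read<AIPromptInputBloc>().add(
                const AIPromptInputEvent.newFile(
                  '/tmp/notes.md', // illustrative path
                  'notes.md',
                ),
              ),
        );
      },
    );
  }
}
```

The `consumeMetadata` rewrite in the same file makes an analogous simplification in place, replacing a `fold` over `uploadFiles` with a collection-for map literal.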
@@ -59,9 +59,8 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {
}

on<ChatAIMessageEvent>(
(event, emit) async {
await event.when(
initial: () async {},
(event, emit) {
event.when(
updateText: (newText) {
emit(
state.copyWith(
@@ -135,7 +134,6 @@ class ChatAIMessageBloc extends Bloc<ChatAIMessageEvent, ChatAIMessageState> {

@freezed
class ChatAIMessageEvent with _$ChatAIMessageEvent {
const factory ChatAIMessageEvent.initial() = Initial;
const factory ChatAIMessageEvent.updateText(String text) = _UpdateText;
const factory ChatAIMessageEvent.receiveError(String error) = _ReceiveError;
const factory ChatAIMessageEvent.retry() = _Retry;
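
Both blocs in this commit move initialization out of an `initial` event: `AIPromptInputBloc` calls `_init()` and `_startListening()` from its constructor, and `ChatAIMessageEvent.initial` is deleted outright. A minimal sketch of that pattern, using a made-up bloc in place of the real ones:

```dart
import 'package:flutter_bloc/flutter_bloc.dart';

// Generic stand-in illustrating the commit's init pattern: setup runs
// from the constructor, and async callbacks re-check `isClosed` before
// adding events, instead of callers having to fire an `initial` event.
abstract class ExampleEvent {}

class _Loaded extends ExampleEvent {
  _Loaded(this.value);
  final int value;
}

class ExampleBloc extends Bloc<ExampleEvent, int> {
  ExampleBloc() : super(0) {
    on<_Loaded>((event, emit) => emit(event.value));
    _init(); // kick off async setup; no `initial` event required
  }

  Future<void> _init() async {
    final value = await _loadFromBackend();
    if (!isClosed) {
      add(_Loaded(value)); // guard: the bloc may have closed meanwhile
    }
  }

  // Hypothetical async call standing in for AIEventGetLocalAIChatState.
  Future<int> _loadFromBackend() async => 42;
}
```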
