Commit

Merge pull request #489 from aws-samples/lvn
feat: support deployment module
NingLu authored Dec 20, 2024
2 parents 8131a5c + 28037c1 commit 297d9d8
Showing 7 changed files with 29 additions and 2 deletions.
README.md (2 changes: 2 additions & 0 deletions)
@@ -316,6 +316,8 @@ Sample config.json:
},
"chat": {
"enabled": true,
"bedrockRegion": "us-east-1",
"useOpenSourceLLM": true,
"amazonConnect": {
"enabled": true
}
README_zh-cn.md (2 changes: 2 additions & 0 deletions)
@@ -309,6 +309,8 @@ cd Intelli-Agent/source/infrastructure
},
"chat": {
"enabled": true,
"bedrockRegion": "us-east-1",
"useOpenSourceLLM": true,
"amazonConnect": {
"enabled": true
}
source/infrastructure/bin/config.ts (1 change: 1 addition & 0 deletions)
@@ -38,6 +38,7 @@ export function getConfig(): SystemConfig {
bedrockRegion: "us-east-1",
bedrockAk: "",
bedrockSk: "",
+ useOpenSourceLLM: true,
amazonConnect: {
enabled: true
}
source/infrastructure/cli/magic-config.ts (13 changes: 12 additions & 1 deletion)
@@ -113,6 +113,7 @@ async function getAwsAccountAndRegion() {
options.enableChat = config.chat.enabled;
options.bedrockRegion = config.chat.bedrockRegion;
options.enableConnect = config.chat.amazonConnect.enabled;
+ options.useOpenSourceLLM = config.chat.useOpenSourceLLM;
options.defaultEmbedding = config.model.embeddingsModels && config.model.embeddingsModels.length > 0
? config.model.embeddingsModels[0].name
: embeddingModels[0].name;
@@ -192,7 +193,7 @@ async function processCreateOptions(options: any): Promise<void> {
type: "confirm",
name: "enableKnowledgeBase",
message: "Do you want to use knowledge base in this solution?",
- initial: options.enableKnowledgeBase ?? false,
+ initial: options.enableKnowledgeBase ?? true,
},
{
type: "select",
@@ -335,6 +336,15 @@ async function processCreateOptions(options: any): Promise<void> {
return (!(this as any).state.answers.enableChat);
},
},
+ {
+ type: "confirm",
+ name: "useOpenSourceLLM",
+ message: "Do you want to use an open-source LLM (e.g. Qwen, ChatGLM, InternLM)?",
+ initial: options.useOpenSourceLLM ?? true,
+ skip(): boolean {
+ return (!(this as any).state.answers.enableChat);
+ },
+ },
{
type: "confirm",
name: "enableConnect",
@@ -474,6 +484,7 @@ async function processCreateOptions(options: any): Promise<void> {
chat: {
enabled: answers.enableChat,
bedrockRegion: answers.bedrockRegion,
+ useOpenSourceLLM: answers.useOpenSourceLLM,
amazonConnect: {
enabled: answers.enableConnect,
},
source/infrastructure/lib/shared/types.ts (1 change: 1 addition & 0 deletions)
@@ -30,6 +30,7 @@ export interface SystemConfig {
bedrockRegion: string;
bedrockAk?: string;
bedrockSk?: string;
+ useOpenSourceLLM: boolean;
amazonConnect: {
enabled: boolean;
}
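For orientation, here is a hedged sketch of how the chat block of SystemConfig plausibly reads after this change. The field names and types come from the diff context above; the nesting under chat (mirroring the sample config.json) and the omitted sections are assumptions, not part of the commit.

export interface SystemConfig {
  chat: {
    enabled: boolean;
    bedrockRegion: string;
    bedrockAk?: string;
    bedrockSk?: string;
    useOpenSourceLLM: boolean; // added in this commit
    amazonConnect: {
      enabled: boolean;
    };
  };
  // knowledgeBase, model, ui, and other sections omitted from this sketch
}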
source/portal/src/utils/const.ts (1 change: 0 additions & 1 deletion)
@@ -35,7 +35,6 @@ export const LLM_BOT_COMMON_MODEL_LIST = [
'mistral.mistral-large-2407-v1:0',
'cohere.command-r-plus-v1:0',
'us.amazon.nova-pro-v1:0',
- 'us.anthropic.claude-3-sonnet-20240229-v1:0',
"us.amazon.nova-lite-v1:0",
"us.amazon.nova-micro-v1:0",
"us.anthropic.claude-3-sonnet-20240229-v1:0",
source/script/build.sh (11 changes: 11 additions & 0 deletions)
@@ -13,6 +13,7 @@ opensearch_enabled=$(jq -r '.knowledgeBase.knowledgeBaseType.intelliAgentKb.vect
embedding_model_provider=$(jq -r '.model.embeddingsModels[0].provider' $config_file)
model_assets_bucket=$(jq -r '.model.modelConfig.modelAssetsBucket' $config_file)
ui_enabled=$(jq -r '.ui.enabled' $config_file)
+ use_open_source_llm=$(jq -r '.chat.useOpenSourceLLM' $config_file)
# fi

echo "Knowledge Base Enabled: $knowledge_base_enabled"
@@ -21,6 +22,7 @@ echo "Knowledge Base Models Enabled: $knowledge_base_models_enabled"
echo "ECR Repository: $ecr_repository"
echo "ECR Image Tag: $ecr_image_tag"
echo "OpenSearch Enabled: $opensearch_enabled"
echo "Use Open Source Model: $use_open_source_llm"
echo "Model Assets Bucket: $model_assets_bucket"
echo "UI Enabled: $ui_enabled"

@@ -48,6 +50,10 @@ build_frontend() {
cd - > /dev/null
}

+ build_deployment_module() {
+ echo "Building Model Deployment Module"
+ }

modules_prepared=""
cd ..

@@ -56,6 +62,11 @@ if $ui_enabled; then
modules_prepared="${modules_prepared}Frontend, "
fi

+ if $use_open_source_llm; then
+ build_deployment_module
+ modules_prepared="${modules_prepared}Model Deployment, "
+ fi

if $knowledge_base_enabled && $knowledge_base_intelliagent_enabled && $knowledge_base_models_enabled; then
prepare_etl_model
modules_prepared="${modules_prepared}ETL Model, "
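As shown above, build.sh gates the new model deployment module on chat.useOpenSourceLLM read via jq. For illustration only, a minimal TypeScript sketch of the same check follows; the commit itself implements this in Bash, and reading config.json from the current directory is an assumption.

// Sketch only: mirrors build.sh's use_open_source_llm check in TypeScript.
// build.sh: use_open_source_llm=$(jq -r '.chat.useOpenSourceLLM' $config_file)
import { readFileSync } from "node:fs";

const config = JSON.parse(readFileSync("config.json", "utf8"));
const useOpenSourceLLM: boolean = config?.chat?.useOpenSourceLLM === true;

if (useOpenSourceLLM) {
  // build.sh calls build_deployment_module here, which currently only logs
  // that the model deployment module is being built.
  console.log("Building Model Deployment Module");
}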
