Commit dbfa34c
adjust logic tests
dantengsky committed May 28, 2024
1 parent aaf51e9 commit dbfa34c
Showing 5 changed files with 12 additions and 5 deletions.
2 changes: 1 addition & 1 deletion scripts/ci/deploy/config/databend-query-node-1.toml
@@ -73,7 +73,7 @@ join_spilling_memory_ratio = 60
 [log]
 
 [log.file]
-level = "INFO"
+level = "DEBUG"
 format = "text"
 dir = "./.databend/logs_1"
 prefix_filter = ""

@@ -25,6 +25,9 @@ impl PipelineBuilder
         PhysicalPlan::TableScan(scan) => match scan.table_index {
             None | Some(databend_common_sql::DUMMY_TABLE_INDEX) => (false, false),
             Some(table_index) => match need_reserve_block_info(self.ctx.clone(), table_index) {
+                // due to issue https://github.com/datafuselabs/databend/issues/15643,
+                // target build optimization of merge-into is disabled
+
                 //(true, is_distributed) => (true, is_distributed),
                 (true, is_distributed) => (false, is_distributed),
                 _ => (false, false),
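
The two booleans produced by this match are the pair (target-build-optimization enabled, is_distributed). A minimal sketch of the changed arm's behavior, using a hypothetical standalone function resolve_flags rather than the actual PipelineBuilder code:

    // Minimal sketch, assuming the tuple models
    // (target_build_optimization_enabled, is_distributed); resolve_flags is
    // a hypothetical stand-in for the match arm changed in the diff above.
    fn resolve_flags(probe: (bool, bool)) -> (bool, bool) {
        match probe {
            // This arm used to return (true, is_distributed); it now pins
            // the optimization flag to false and passes is_distributed through.
            (true, is_distributed) => (false, is_distributed),
            _ => (false, false),
        }
    }

    fn main() {
        assert_eq!(resolve_flags((true, true)), (false, true));   // distributed flag preserved
        assert_eq!(resolve_flags((true, false)), (false, false));
        assert_eq!(resolve_flags((false, true)), (false, false)); // optimization never enabled
    }
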
4 changes: 4 additions & 0 deletions src/query/sql/src/planner/optimizer/optimizer.rs
@@ -494,6 +494,10 @@ async fn optimize_merge_into(opt_ctx: OptimizerContext, plan: Box<MergeInto>) ->
             == 0
             && flag
         {
+            // due to issue https://github.com/datafuselabs/databend/issues/15643,
+            // target build optimization of merge-into is disabled: here row_id column should be kept
+
+            // new_columns_set.remove(&plan.row_id_index);
             opt_ctx.table_ctx.set_merge_into_join(MergeIntoJoin {
                 merge_into_join_type: MergeIntoJoinType::Left,
                 is_distributed: false,
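
With the removal left commented out, the row_id column remains among the columns the plan carries. A minimal sketch of that effect, using a plain HashSet<usize> and hypothetical column indexes in place of new_columns_set and plan.row_id_index:

    use std::collections::HashSet;

    fn main() {
        // Hypothetical column indexes; 3 stands in for plan.row_id_index.
        let row_id_index: usize = 3;
        let new_columns_set: HashSet<usize> = [0, 1, 2, 3].into_iter().collect();

        // The removal stays commented out (as in the diff above), so the
        // row_id column is kept in the set:
        // new_columns_set.remove(&row_id_index);

        assert!(new_columns_set.contains(&row_id_index));
    }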

@@ -60,7 +60,7 @@ explain merge into target_build_optimization as t1 using source_optimization as
 MergeInto:
 target_table: default.default.target_build_optimization
 ├── distributed: false
-├── target_build_optimization: true
+├── target_build_optimization: false
 ├── can_try_update_column_only: true
 ├── matched update: [condition: None,update set a = t2.a (#0),b = t2.b (#1),c = t2.c (#2)]
 └── unmatched insert: [condition: None,insert into (a,b,c) values(CAST(a (#0) AS Int32 NULL),CAST(b (#1) AS String NULL),CAST(c (#2) AS String NULL))]

@@ -135,7 +135,7 @@ explain merge into target_build_optimization as t1 using source_optimization as
 MergeInto:
 target_table: default.default.target_build_optimization
 ├── distributed: false
-├── target_build_optimization: true
+├── target_build_optimization: false
 ├── can_try_update_column_only: true
 ├── matched update: [condition: None,update set a = t2.a (#0),b = t2.b (#1),c = t2.c (#2)]
 └── unmatched insert: [condition: None,insert into (a,b,c) values(CAST(a (#0) AS Int32 NULL),CAST(b (#1) AS String NULL),CAST(c (#2) AS String NULL))]

@@ -174,7 +174,7 @@ on t1.a = t2.a when matched then update set t1.b = t2.b when not matched then in
 MergeInto:
 target_table: default.default.column_only_optimization_target
 ├── distributed: false
-├── target_build_optimization: true
+├── target_build_optimization: false
 ├── can_try_update_column_only: true
 ├── matched update: [condition: None,update set b = t2.b (#1)]
 └── unmatched insert: [condition: None,insert into (a,b) values(CAST(a (#0) AS Int32 NULL),CAST(b (#1) AS String NULL))]

@@ -211,7 +211,7 @@ on t1.a = t2.a when matched then update * when not matched then insert *;
 MergeInto:
 target_table: default.default.column_only_optimization_target
 ├── distributed: false
-├── target_build_optimization: true
+├── target_build_optimization: false
 ├── can_try_update_column_only: true
 ├── matched update: [condition: None,update set a = a (#0),b = b (#1)]
 └── unmatched insert: [condition: None,insert into (a,b) values(CAST(a (#0) AS Int32 NULL),CAST(b (#1) AS String NULL))]