Skip to content

Commit

Permalink
chore: update rust toolchain and dependencies (#854)
Browse files Browse the repository at this point in the history
Signed-off-by: Runji Wang <[email protected]>

---------

Signed-off-by: Runji Wang <[email protected]>
  • Loading branch information
wangrunji0408 authored Nov 23, 2024
1 parent a599403 commit b11a14f
Show file tree
Hide file tree
Showing 20 changed files with 760 additions and 706 deletions.
1,351 changes: 701 additions & 650 deletions Cargo.lock

Large diffs are not rendered by default.

26 changes: 13 additions & 13 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,14 @@ async-recursion = "1"
async-stream = "0.3"
async-trait = "0.1"
binary-heap-plus = "0.5"
bit-set = "0.5"
bit-set = "0.8"
bitvec = { version = "1", features = ["serde"] }
btreemultimap = "0.1"
bytes = "1"
chrono = { version = "0.4", features = ["clock", "std", "wasmbind"] }
clap = { version = "4", features = ["derive"] }
comfy-table = { version = "7", default-features = false }
console-subscriber = "0.2"
console-subscriber = "0.4"
crc32fast = "1"
csv = "1"
dirs = "5"
Expand All @@ -44,31 +44,31 @@ humantime = "2"
indicatif = "0.17"
indoc = "2"
iter-chunks = "0.2"
itertools = "0.12"
itertools = "0.13"
minitrace = { version = "0.6", features = ["enable"] }
moka = { version = "0.12", features = ["future"] }
num-traits = "0.2"
ordered-float = { version = "4", features = ["serde"] }
parking_lot = "0.12"
parse-display = "0.9"
parse-display = "0.10"
paste = "1"
pgwire = "0.20"
pin-project = "1"
pretty-xmlish = "0.1"
prost = "0.12"
pyo3 = { version = "0.21", features = ["extension-module"], optional = true }
prost = "0.13"
pyo3 = { version = "0.22", features = ["extension-module"], optional = true }
ref-cast = "1.0"
regex = "1"
risinglight_proto = "0.2"
rust_decimal = "1"
rustyline = "14"
rustyline = "15"
serde = { version = "1", features = ["derive", "rc"] }
serde_json = "1"
smallvec = { version = "1", features = ["serde"] }
sqllogictest = "0.20"
sqllogictest = "0.23"
sqlparser = { version = "0.45", features = ["serde"] }
thiserror = "1"
tikv-jemallocator = { version = "0.5", optional = true, features = [
thiserror = "2"
tikv-jemallocator = { version = "0.6", optional = true, features = [
"disable_initial_exec_tls",
] }
tokio = { version = "1", features = ["full"] }
Expand All @@ -81,13 +81,13 @@ tracing-subscriber = { version = "0.3", features = [
[dev-dependencies]
criterion = { version = "0.5", features = ["async_tokio"] }
glob = "0.3"
libtest-mimic = "0.7"
sqlplannertest = "0.1"
libtest-mimic = "0.8"
sqlplannertest = "0.2"
tempfile = "3"
test-case = "3"

[build-dependencies]
pyo3-build-config = { version = "0.21", optional = true }
pyo3-build-config = { version = "0.22", optional = true }

[[test]]
name = "sqllogictest"
Expand Down
4 changes: 2 additions & 2 deletions proto/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ keywords = ["sql", "database", "embedded", "cli"]

[dependencies]
bytes = "1"
prost = "0.12"
prost = "0.13"

[build-dependencies]
prost-build = "0.12"
prost-build = "0.13"
2 changes: 1 addition & 1 deletion rust-toolchain
Original file line number Diff line number Diff line change
@@ -1 +1 @@
nightly-2024-02-04
nightly-2024-11-22
2 changes: 1 addition & 1 deletion src/array/primitive_array.rs
Original file line number Diff line number Diff line change
Expand Up @@ -192,7 +192,7 @@ impl PrimitiveArray<bool> {
impl PrimitiveArray<Decimal> {
/// Rescale the decimals.
pub fn rescale(&mut self, scale: u8) {
for v in self.data.iter_mut() {
for v in &mut self.data {
v.rescale(scale as u32);
}
}
Expand Down
6 changes: 3 additions & 3 deletions src/binder/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -252,15 +252,15 @@ pub fn bind_header(mut chunk: array::Chunk, stmt: &Statement) -> array::Chunk {
#[derive(Debug, Default)]
struct Context {
/// Defined CTEs.
/// cte_name -> (query_id, column_alias -> id)
/// `cte_name` -> (`query_id`, `column_alias` -> id)
ctes: HashMap<String, (Id, HashMap<String, Id>)>,
/// Table aliases that can be accessed from the current query.
table_aliases: HashSet<String>,
/// Column aliases that can be accessed from the current query.
/// column_alias -> (table_alias -> id)
/// `column_alias` -> (`table_alias` -> id)
column_aliases: HashMap<String, HashMap<String, Id>>,
/// Column aliases that can be accessed from the outside query.
/// column_alias -> id
/// `column_alias` -> id
output_aliases: HashMap<String, Id>,
}

Expand Down
2 changes: 1 addition & 1 deletion src/executor/copy_from_file.rs
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ impl CopyFromFileExecutor {
return Err(Error::length_mismatch(column_count, record.len()));
}

size_count += record.as_slice().as_bytes().len();
size_count += record.as_slice().len();

// push a raw str row and send it if necessary
if let Some(chunk) = chunk_builder.push_str_row(record.iter())? {
Expand Down
39 changes: 23 additions & 16 deletions src/executor/evaluator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -211,22 +211,6 @@ impl<'a> Evaluator<'a> {

/// Evaluate the aggregation.
fn eval_agg(&self, state: AggState, chunk: &DataChunk) -> Result<AggState, ConvertError> {
impl DataValue {
fn add(self, other: Self) -> Self {
if self.is_null() {
other
} else {
self + other
}
}
fn or(self, other: Self) -> Self {
if self.is_null() {
other
} else {
self
}
}
}
use Expr::*;
Ok(match state {
AggState::Value(state) => AggState::Value(match self.node() {
Expand Down Expand Up @@ -314,3 +298,26 @@ impl AggState {
}
}
}

/// Null-aware combinators used when folding rows into an aggregate state.
trait Ext {
    /// Adds `other` to `self`, treating a null `self` as the additive identity
    /// (i.e. returns `other` when `self` is null, otherwise `self + other`).
    fn add(self, other: Self) -> Self;
    /// Returns `self` unless it is null, in which case `other` is returned
    /// (null-coalescing, like SQL `COALESCE(self, other)`).
    fn or(self, other: Self) -> Self;
}

impl Ext for DataValue {
    /// Null-aware addition: a null accumulator acts as the additive identity,
    /// so the first non-null value simply replaces it.
    fn add(self, other: Self) -> Self {
        match self.is_null() {
            true => other,
            false => self + other,
        }
    }

    /// Null-coalescing: keep the current value unless it is null,
    /// in which case fall back to `other`.
    fn or(self, other: Self) -> Self {
        match self.is_null() {
            true => other,
            false => self,
        }
    }
}
1 change: 0 additions & 1 deletion src/executor/hash_join.rs
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,6 @@ impl<const T: JoinType> HashJoinExecutor<T> {
pub struct HashSemiJoinExecutor {
pub left_keys: RecExpr,
pub right_keys: RecExpr,
pub left_types: Vec<DataType>,
pub anti: bool,
}

Expand Down
1 change: 0 additions & 1 deletion src/executor/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -481,7 +481,6 @@ impl<S: Storage> Builder<S> {
HashSemiJoinExecutor {
left_keys: self.resolve_column_index(lkeys, left),
right_keys: self.resolve_column_index(rkeys, right),
left_types: self.plan_types(left).to_vec(),
anti,
}
.execute(self.build_id(left), self.build_id(right))
Expand Down
1 change: 0 additions & 1 deletion src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
#![feature(proc_macro_hygiene)]
#![feature(trusted_len)]
#![feature(adt_const_params)]
#![feature(lazy_cell)]
#![feature(iterator_try_collect)]
#![feature(let_chains)]
#![feature(impl_trait_in_assoc_type)]
Expand Down
2 changes: 0 additions & 2 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

//! A simple interactive shell of the database.
#![feature(div_duration)]

use std::fs::File;
use std::path::PathBuf;
use std::sync::Arc;
Expand Down
2 changes: 1 addition & 1 deletion src/planner/explain.rs
Original file line number Diff line number Diff line change
Expand Up @@ -377,7 +377,7 @@ impl<'a> Explain<'a> {
}
}

impl<'a> fmt::Display for Explain<'a> {
impl fmt::Display for Explain<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
delegate_fmt(&self.pretty(), f, String::with_capacity(4096))
}
Expand Down
1 change: 0 additions & 1 deletion src/planner/rules/expr.rs
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,6 @@ mod tests {
}

egg::test_fn! {
#[cfg_attr(feature = "simd", ignore)] // FIXME: 'attempt to divide by zero'
constant_folding,
rules(),
"(* (- (+ 1 2) 4) (/ 6 2))" => "-3",
Expand Down
2 changes: 1 addition & 1 deletion src/planner/rules/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ impl Analysis<Expr> for ExprAnalysis {
let merge_columns = merge_small_set(&mut to.columns, from.columns);
let merge_schema = egg::merge_max(&mut to.schema, from.schema);
let merge_rows = egg::merge_min(
unsafe { std::mem::transmute(&mut to.rows) },
unsafe { std::mem::transmute::<&mut f32, &mut F32>(&mut to.rows) },
F32::from(from.rows),
);
let merge_order = egg::merge_max(&mut to.orderby, from.orderby);
Expand Down
6 changes: 3 additions & 3 deletions src/storage/secondary/block/rle_block_iterator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,13 @@ where
/// Indicates current position in the rle block
cur_row: usize,

/// Indicates how many rows get scanned for cur_row
/// Indicates how many rows get scanned for `cur_row`
cur_scanned_count: usize,

/// Indicates the number of rows in the rle block
rle_row_count: usize,

/// Indicates the element of current row get from block_iter
/// Indicates the element of current row get from `block_iter`
cur_element: Option<<A::Item as ToOwned>::Owned>,

/// Indicates how many rows get scanned for this iterator
Expand All @@ -48,7 +48,7 @@ where
/// Total count of elements in block
row_count: usize,

/// If never_used is true, get an item from child iter in the beginning of next_batch()
/// If `never_used` is true, get an item from child iter in the beginning of `next_batch()`
never_used: bool,
}

Expand Down
9 changes: 5 additions & 4 deletions src/storage/secondary/column/concrete_column_iterator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,17 @@ pub trait BlockIteratorFactory<A: Array>: Send + Sync + 'static {

/// Create a [`FakeBlockIterator`](super::super::block::FakeBlockIterator) from block index and
/// seek to `start_pos`.
#[allow(dead_code)]
fn get_fake_iterator(&self, index: &BlockIndex, start_pos: usize) -> Self::BlockIteratorImpl;
}

/// `ConcreteColumnIterator` Statistics
#[derive(Debug, Default)]
pub struct Statistics {
/// next_batch call times
/// `next_batch` call times
next_batch_count: u32,

/// get_block call times
/// `get_block` call times
fetched_block_count: u32,
}

Expand All @@ -49,7 +50,7 @@ pub struct ConcreteColumnIterator<A: Array, F: BlockIteratorFactory<A>> {
/// Block iterator.
block_iterator: F::BlockIteratorImpl,

/// RowID of the current column.
/// `RowID` of the current column.
current_row_id: u32,

/// Indicates whether this iterator has finished or not.
Expand All @@ -58,7 +59,7 @@ pub struct ConcreteColumnIterator<A: Array, F: BlockIteratorFactory<A>> {
/// The factory for creating iterators.
factory: F,

/// Indicate whether current_block_iter is fake.
/// Indicate whether `current_block_iter` is fake.
is_fake_iter: bool,

/// Statistics which used for reporting.
Expand Down
4 changes: 2 additions & 2 deletions src/storage/secondary/options.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ pub struct StorageOptions {
/// Encode type
pub encode_type: EncodeType,

/// Whether record first_key of each block into block_index
/// Whether record `first_key` of each block into `block_index`
pub record_first_key: bool,

/// Whether to disable all disk operations, only for test use
Expand Down Expand Up @@ -117,7 +117,7 @@ pub struct ColumnBuilderOptions {
/// Encode type
pub encode_type: EncodeType,

/// Whether record first_key of each block
/// Whether record `first_key` of each block
pub record_first_key: bool,
}

Expand Down
1 change: 1 addition & 0 deletions src/storage/secondary/txn_iterator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ pub enum SecondaryIterator {
}

#[enum_dispatch(SecondaryIterator)]
#[allow(dead_code)]
pub trait SecondaryIteratorImpl {}

/// An iterator over all data in a transaction.
Expand Down
4 changes: 2 additions & 2 deletions src/storage/secondary/version_manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -106,10 +106,10 @@ pub struct VersionManagerInner {
/// MVCC structure for this, and only record changes compared with last epoch.
status: HashMap<u64, Arc<Snapshot>>,

/// (TableId, RowSetId) -> Object mapping
/// (`TableId`, `RowSetId`) -> Object mapping
rowsets: HashMap<(u32, u32), Arc<DiskRowset>>,

/// (TableId, DVId) -> Object mapping
/// (`TableId`, `DVId`) -> Object mapping
dvs: HashMap<(u32, u64), Arc<DeleteVector>>,

/// Reference count of each epoch.
Expand Down

0 comments on commit b11a14f

Please sign in to comment.