chore: fix nightly clippy (#15088)
xxchan authored Feb 13, 2024
1 parent befeba3 commit aeff596
Showing 19 changed files with 56 additions and 55 deletions.
18 changes: 11 additions & 7 deletions src/common/src/session_config/sink_decouple.rs
@@ -38,12 +38,16 @@ impl FromStr for SinkDecouple {
}
}

impl ToString for SinkDecouple {
fn to_string(&self) -> String {
match self {
Self::Default => "default".to_string(),
Self::Enable => "enable".to_string(),
Self::Disable => "disable".to_string(),
}
impl std::fmt::Display for SinkDecouple {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}",
match self {
Self::Default => "default",
Self::Enable => "enable",
Self::Disable => "disable",
}
)
}
}
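The `ToString` → `Display` rewrites in this commit (here, and again below for `Topic` and `PickerType`) look like fixes for clippy's `to_string_trait_impl` lint, which asks for a `Display` impl instead of a hand-written `ToString`: the standard library's blanket `impl<T: fmt::Display + ?Sized> ToString for T` then supplies `to_string()` for free. A minimal sketch of the pattern with an illustrative stand-in enum (not the real RisingWave type):

```rust
use std::fmt;

enum SinkDecoupleLike {
    Default,
    Enable,
    Disable,
}

impl fmt::Display for SinkDecoupleLike {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::Default => "default",
            Self::Enable => "enable",
            Self::Disable => "disable",
        })
    }
}

fn main() {
    // `to_string()` still works: it comes from std's blanket
    // `impl<T: fmt::Display + ?Sized> ToString for T`.
    assert_eq!(SinkDecoupleLike::Enable.to_string(), "enable");
    println!("{}", SinkDecoupleLike::Disable);
}
```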
7 changes: 4 additions & 3 deletions src/connector/src/source/pulsar/topic.rs
@@ -32,9 +32,10 @@ pub struct Topic {
pub partition_index: Option<i32>,
}

impl ToString for Topic {
fn to_string(&self) -> String {
format!(
impl std::fmt::Display for Topic {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}://{}/{}/{}",
self.domain, self.tenant, self.namespace, self.topic
)
2 changes: 1 addition & 1 deletion src/connector/src/source/reader/fs_reader.rs
@@ -72,7 +72,7 @@ impl FsSourceReader {
.with_context(|| {
format!("Failed to find column id: {} in source: {:?}", id, self)
})
.map(|col| col.clone())
.cloned()
})
.try_collect()
}
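Replacing `.map(|col| col.clone())` with `.cloned()` is the usual fix for clippy's `map_clone` lint. A small self-contained illustration using a plain `Option<&T>` lookup rather than the connector's actual `Result` chain (the `Column` type here is made up):

```rust
#[derive(Clone, Debug, PartialEq)]
struct Column {
    id: i32,
    name: String,
}

fn find_column(columns: &[Column], id: i32) -> Option<Column> {
    // clippy::map_clone flags `.map(|col| col.clone())`;
    // `.cloned()` on an Option<&Column> is equivalent and clearer.
    columns.iter().find(|col| col.id == id).cloned()
}

fn main() {
    let cols = vec![Column { id: 1, name: "a".into() }];
    assert_eq!(find_column(&cols, 1).unwrap().name, "a");
    assert!(find_column(&cols, 2).is_none());
}
```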
1 change: 1 addition & 0 deletions src/expr/core/src/expr/build.rs
@@ -131,6 +131,7 @@ pub(crate) trait Build: Expression + Sized {

/// Build the expression `Self` from protobuf for test, where each child is built with
/// [`build_from_prost`].
#[cfg(test)]
fn build_for_test(prost: &ExprNode) -> Result<Self> {
Self::build(prost, build_from_prost)
}
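Gating `build_for_test` behind `#[cfg(test)]` compiles the helper only for test builds, which is a common way to satisfy dead-code analysis for helpers that nothing outside tests calls. A hedged sketch of the idea with illustrative names (not the crate's real expression API):

```rust
pub struct Expr {
    value: i64,
}

impl Expr {
    pub fn new(value: i64) -> Self {
        Self { value }
    }

    pub fn eval(&self) -> i64 {
        self.value
    }

    /// Only compiled for `cargo test`, so the dead_code lint does not fire
    /// in normal builds where nothing calls it.
    #[cfg(test)]
    fn build_for_test(value: i64) -> Self {
        Self::new(value)
    }
}

fn main() {
    println!("{}", Expr::new(3).eval());
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn eval_works() {
        assert_eq!(Expr::build_for_test(7).eval(), 7);
    }
}
```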
4 changes: 1 addition & 3 deletions src/expr/macro/src/parse.rs
@@ -280,9 +280,7 @@ fn strip_outer_type<'a>(ty: &'a syn::Type, type_: &str) -> Option<&'a syn::Type>
let syn::Type::Path(path) = ty else {
return None;
};
let Some(seg) = path.path.segments.last() else {
return None;
};
let seg = path.path.segments.last()?;
if seg.ident != type_ {
return None;
}
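The `let Some(seg) = … else { return None; }` form is exactly what `?` expresses on an `Option`, which is what clippy's `question_mark` lint suggests here. A minimal sketch, independent of the `syn` types in the real code:

```rust
fn digits_after_prefix(s: &str) -> Option<u32> {
    // Before the clippy::question_mark fix this would read:
    //   let Some(rest) = s.strip_prefix("v") else { return None; };
    // `?` expresses the same early return on None.
    let rest = s.strip_prefix("v")?;
    rest.parse().ok()
}

fn main() {
    assert_eq!(digits_after_prefix("v42"), Some(42));
    assert_eq!(digits_after_prefix("x42"), None);
}
```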
2 changes: 1 addition & 1 deletion src/meta/src/barrier/progress.rs
@@ -38,7 +38,7 @@ type ConsumedRows = u64;
#[derive(Clone, Copy, Debug)]
enum BackfillState {
Init,
ConsumingUpstream(Epoch, ConsumedRows),
ConsumingUpstream(#[allow(dead_code)] Epoch, ConsumedRows),
Done(ConsumedRows),
}

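The `Epoch` payload of `ConsumingUpstream` is stored but apparently never read directly, and newer nightly rustc extended the `dead_code` lint to flag such unread tuple-variant fields; the `#[allow(dead_code)]` keeps the field (presumably for `Debug` output) while silencing the warning. A small sketch of the same pattern with made-up types:

```rust
#[derive(Clone, Copy, Debug)]
enum BackfillStateLike {
    Init,
    // The first payload is stored (and shows up in `Debug` output) but is
    // never read directly, so newer rustc dead_code analysis flags it.
    ConsumingUpstream(#[allow(dead_code)] u64, u64),
    Done(u64),
}

fn consumed_rows(state: BackfillStateLike) -> u64 {
    match state {
        BackfillStateLike::Init => 0,
        BackfillStateLike::ConsumingUpstream(_, rows) => rows,
        BackfillStateLike::Done(rows) => rows,
    }
}

fn main() {
    let state = BackfillStateLike::ConsumingUpstream(42, 1000);
    println!("{state:?} has consumed {} rows", consumed_rows(state));
}
```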
@@ -688,7 +688,7 @@ pub mod tests {
// pick_l0_to_base_level
let mut picker =
ManualCompactionPicker::new(Arc::new(RangeOverlapStrategy::default()), option, 1);
let mut expected = vec![vec![5, 6], vec![7, 8], vec![9, 10]];
let mut expected = [vec![5, 6], vec![7, 8], vec![9, 10]];
expected.reverse();
let result = picker
.pick_compaction(&levels, &levels_handler, &mut local_stats)
@@ -724,7 +724,7 @@
};
let mut picker =
ManualCompactionPicker::new(Arc::new(RangeOverlapStrategy::default()), option, 1);
let mut expected = vec![vec![5, 6], vec![7, 8]];
let mut expected = [vec![5, 6], vec![7, 8]];
expected.reverse();
let result = picker
.pick_compaction(&levels, &levels_handler, &mut local_stats)
@@ -1012,7 +1012,7 @@
}

{
let expected_input_level_sst_ids = vec![vec![4], vec![2]];
let expected_input_level_sst_ids = [vec![4], vec![2]];
let option = ManualCompactionOption {
sst_ids: vec![],
level: input_level,
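The `vec![…]` → `[…]` edits in this and the following test files are the standard response to clippy's `useless_vec` lint: when the collection is only iterated, indexed, or reversed locally, a fixed-size array avoids a heap allocation. A minimal sketch:

```rust
fn main() {
    // clippy::useless_vec: no Vec-specific behavior (push, resize, handing
    // ownership to an API that wants Vec) is needed here, so an array works.
    let mut expected = [vec![5, 6], vec![7, 8], vec![9, 10]];
    expected.reverse();

    for (i, group) in expected.iter().enumerate() {
        println!("group {i}: {group:?}");
    }
    assert_eq!(expected[0], vec![9, 10]);
}
```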
@@ -874,7 +874,7 @@ pub mod tests {

#[test]
fn test_trivial_move_bug() {
let levels = vec![
let levels = [
Level {
level_idx: 1,
level_type: LevelType::Nonoverlapping as i32,
@@ -409,7 +409,7 @@ mod test {
// cut range [3,4] [6] [8,9,10]
levels.member_table_ids = vec![0, 1, 2, 5, 7];
let expect_task_file_count = [2, 1, 4];
let expect_task_sst_id_range = vec![vec![3, 4], vec![6], vec![8, 9, 10, 11]];
let expect_task_sst_id_range = [vec![3, 4], vec![6], vec![8, 9, 10, 11]];
for (index, x) in expect_task_file_count.iter().enumerate() {
// // pick space reclaim
let task = selector
@@ -460,7 +460,7 @@
// cut range [3,4] [6] [8,9,10]
levels.member_table_ids = vec![0, 1, 2, 5, 7];
let expect_task_file_count = [2, 1, 5];
let expect_task_sst_id_range = vec![vec![3, 4], vec![6], vec![7, 8, 9, 10, 11]];
let expect_task_sst_id_range = [vec![3, 4], vec![6], vec![7, 8, 9, 10, 11]];
for (index, x) in expect_task_file_count.iter().enumerate() {
if index == expect_task_file_count.len() - 1 {
levels.member_table_ids = vec![2, 5];
@@ -611,7 +611,7 @@ mod test {
);

let expect_task_file_count = [1, 1, 1];
let expect_task_sst_id_range = vec![vec![2], vec![3], vec![4]];
let expect_task_sst_id_range = [vec![2], vec![3], vec![4]];
for (index, x) in expect_task_file_count.iter().enumerate() {
// // pick ttl reclaim
let task = selector
@@ -694,7 +694,7 @@
);

let expect_task_file_count = [1, 1];
let expect_task_sst_id_range = vec![vec![2], vec![3]];
let expect_task_sst_id_range = [vec![2], vec![3]];
for (index, x) in expect_task_file_count.iter().enumerate() {
if index == expect_task_file_count.len() - 1 {
table_id_to_options.insert(
16 changes: 8 additions & 8 deletions src/meta/src/hummock/compaction/selector/level_selector.rs
@@ -50,14 +50,14 @@ pub enum PickerType {
BottomLevel,
}

impl ToString for PickerType {
fn to_string(&self) -> String {
match self {
PickerType::Tier => String::from("Tier"),
PickerType::Intra => String::from("Intra"),
PickerType::ToBase => String::from("ToBase"),
PickerType::BottomLevel => String::from("BottomLevel"),
}
impl std::fmt::Display for PickerType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(match self {
PickerType::Tier => "Tier",
PickerType::Intra => "Intra",
PickerType::ToBase => "ToBase",
PickerType::BottomLevel => "BottomLevel",
})
}
}

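Two styles of the `Display` conversion appear in this commit: `SinkDecouple` and `Topic` use `write!(f, …)`, while `PickerType` uses `f.write_str(…)` on a matched `&'static str`, which skips the format-string machinery for what is already a plain string. Either form addresses the lint; the difference is a minor style and efficiency choice.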
2 changes: 2 additions & 0 deletions src/storage/benches/bench_fs_operation.rs
@@ -94,6 +94,7 @@ fn gen_tokio_files(path: &Path) -> impl IntoIterator<Item = impl Future<Output =
.read(true)
.write(true)
.create(true)
.truncate(true)
.custom_flags(F_NOCACHE)
.open(file_path)
.await
@@ -104,6 +105,7 @@ fn gen_tokio_files(path: &Path) -> impl IntoIterator<Item = impl Future<Output =
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(file_path)
.await
.unwrap();
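Adding `.truncate(true)` next to `.create(true).write(true)` matches what clippy's `suspicious_open_options` lint (new around this nightly) asks for: opening an existing file for writing without choosing `truncate` or `append` can leave stale bytes beyond the new write. A standalone sketch using std's `OpenOptions`; the benchmark itself uses tokio's builder, which mirrors the same methods:

```rust
use std::fs::OpenOptions;
use std::io::Write;

fn main() -> std::io::Result<()> {
    let path = std::env::temp_dir().join("clippy_truncate_demo.txt");

    // Without `.truncate(true)`, re-running this over an existing, longer
    // file would leave its old tail in place after a shorter write.
    let mut file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .truncate(true)
        .open(&path)?;

    file.write_all(b"fresh contents")?;
    Ok(())
}
```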
2 changes: 2 additions & 0 deletions src/storage/hummock_test/src/test_utils.rs
@@ -111,6 +111,7 @@ impl<S: LocalStateStore> TestIngestBatch for S {
}
}

#[cfg(test)]
#[async_trait::async_trait]
pub(crate) trait HummockStateStoreTestTrait: StateStore {
fn get_pinned_version(&self) -> PinnedVersion;
@@ -120,6 +121,7 @@ pub(crate) trait HummockStateStoreTestTrait: StateStore {
}
}

#[cfg(test)]
impl HummockStateStoreTestTrait for HummockStorage {
fn get_pinned_version(&self) -> PinnedVersion {
self.get_pinned_version()
7 changes: 0 additions & 7 deletions src/storage/hummock_trace/src/write.rs
@@ -30,13 +30,6 @@ pub(crate) static MAGIC_BYTES: MagicBytes = 0x484D5452; // HMTR
pub(crate) trait TraceWriter {
fn write(&mut self, record: Record) -> Result<usize>;
fn flush(&mut self) -> Result<()>;
fn write_all(&mut self, records: Vec<Record>) -> Result<usize> {
let mut total_size = 0;
for r in records {
total_size += self.write(r)?
}
Ok(total_size)
}
}

/// Serializer serializes a record to std write.
6 changes: 3 additions & 3 deletions src/storage/src/hummock/block_cache.rs
@@ -34,9 +34,9 @@ const MIN_BUFFER_SIZE_PER_SHARD: usize = 256 * 1024 * 1024;
type CachedBlockEntry = CacheableEntry<(HummockSstableObjectId, u64), Box<Block>>;

enum BlockEntry {
Cache(CachedBlockEntry),
Owned(Box<Block>),
RefEntry(Arc<Block>),
Cache(#[allow(dead_code)] CachedBlockEntry),
Owned(#[allow(dead_code)] Box<Block>),
RefEntry(#[allow(dead_code)] Arc<Block>),
}

pub struct BlockHolder {
12 changes: 6 additions & 6 deletions src/stream/src/common/table/state_table_cache.rs
@@ -453,23 +453,23 @@ mod tests {

#[test]
fn test_watermark_cache_syncing() {
let v1 = vec![
let v1 = [
Some(Timestamptz::from_secs(1000).unwrap().to_scalar_value()),
Some(1000i64.into()),
];
let v2 = vec![
let v2 = [
Some(Timestamptz::from_secs(3000).unwrap().to_scalar_value()),
Some(1000i64.into()),
];
let v3 = vec![
let v3 = [
Some(Timestamptz::from_secs(2000).unwrap().to_scalar_value()),
Some(1000i64.into()),
];
let mut cache = StateTableWatermarkCache::new(3);
let mut filler = cache.begin_syncing();
filler.insert_unchecked(DefaultOrdered(v1.into_owned_row()), ());
filler.insert_unchecked(DefaultOrdered(v2.into_owned_row()), ());
filler.insert_unchecked(DefaultOrdered(v3.into_owned_row()), ());
filler.insert_unchecked(DefaultOrdered(v1.to_owned_row()), ());
filler.insert_unchecked(DefaultOrdered(v2.to_owned_row()), ());
filler.insert_unchecked(DefaultOrdered(v3.to_owned_row()), ());
filler.finish();
assert_eq!(cache.len(), 3);
assert_eq!(
8 changes: 4 additions & 4 deletions src/stream/src/executor/top_n/top_n_plain.rs
@@ -761,7 +761,7 @@ mod tests {
}

fn create_source_new_before_recovery() -> Box<MockSource> {
let mut chunks = vec![
let mut chunks = [
StreamChunk::from_pretty(
" I I I I
+ 1 1 4 1001",
@@ -792,7 +792,7 @@
}

fn create_source_new_after_recovery() -> Box<MockSource> {
let mut chunks = vec![
let mut chunks = [
StreamChunk::from_pretty(
" I I I I
+ 1 9 1 1003
@@ -1209,7 +1209,7 @@
}

fn create_source_before_recovery() -> Box<MockSource> {
let mut chunks = vec![
let mut chunks = [
StreamChunk::from_pretty(
" I I
+ 1 0
@@ -1248,7 +1248,7 @@
}

fn create_source_after_recovery() -> Box<MockSource> {
let mut chunks = vec![
let mut chunks = [
StreamChunk::from_pretty(
" I I
- 1 0",
6 changes: 3 additions & 3 deletions src/stream/src/executor/top_n/top_n_state.rs
@@ -364,8 +364,8 @@ mod tests {
let row2_bytes = serialize_pk_to_cache_key(row2.clone(), &cache_key_serde);
let row3_bytes = serialize_pk_to_cache_key(row3.clone(), &cache_key_serde);
let row4_bytes = serialize_pk_to_cache_key(row4.clone(), &cache_key_serde);
let rows = vec![row1, row2, row3, row4];
let ordered_rows = vec![row1_bytes, row2_bytes, row3_bytes, row4_bytes];
let rows = [row1, row2, row3, row4];
let ordered_rows = [row1_bytes, row2_bytes, row3_bytes, row4_bytes];
managed_state.insert(rows[3].clone());

// now ("ab", 4)
@@ -446,7 +446,7 @@
let row3_bytes = serialize_pk_to_cache_key(row3.clone(), &cache_key_serde);
let row4_bytes = serialize_pk_to_cache_key(row4.clone(), &cache_key_serde);
let row5_bytes = serialize_pk_to_cache_key(row5.clone(), &cache_key_serde);
let rows = vec![row1, row2, row3, row4, row5];
let rows = [row1, row2, row3, row4, row5];
let ordered_rows = vec![row1_bytes, row2_bytes, row3_bytes, row4_bytes, row5_bytes];

let mut cache = TopNCache::<false>::new(1, 1, data_types);
2 changes: 1 addition & 1 deletion src/utils/pgwire/src/pg_server.rs
@@ -130,7 +130,7 @@ pub struct ExecContext {

/// `ExecContextGuard` holds a `Arc` pointer. Once `ExecContextGuard` is dropped,
/// the inner `Arc<ExecContext>` should not be referred anymore, so that its `Weak` reference (used in `SessionImpl`) will be the same lifecycle of the running sql execution context.
pub struct ExecContextGuard(Arc<ExecContext>);
pub struct ExecContextGuard(#[allow(dead_code)] Arc<ExecContext>);

impl ExecContextGuard {
pub fn new(exec_context: Arc<ExecContext>) -> Self {
