This is an automated email from the ASF dual-hosted git repository.

alamb pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion.git


The following commit(s) were added to refs/heads/main by this push:
     new 97148bd105 Fix clippy lints found by Clippy in Rust `1.78` (#10353)
97148bd105 is described below

commit 97148bd105fc2102b0444f2d67ef535937da5dfe
Author: Andrew Lamb <[email protected]>
AuthorDate: Thu May 2 13:21:45 2024 -0400

    Fix clippy lints found by Clippy in Rust `1.78` (#10353)
    
    * clippy: unused code
    
    * clippy: clone from
    
    * fix: more clippy
    
    * moar
    
    * fix another
---
 datafusion-cli/src/exec.rs                         |  2 +-
 .../datasource/avro_to_arrow/arrow_array_reader.rs |  8 +---
 datafusion/core/src/datasource/file_format/json.rs |  2 +-
 .../core/src/datasource/file_format/parquet.rs     |  2 +-
 .../src/datasource/physical_plan/file_stream.rs    | 13 ------
 datafusion/core/src/execution/context/avro.rs      | 26 ------------
 datafusion/core/src/execution/context/csv.rs       | 18 --------
 datafusion/core/src/execution/context/parquet.rs   | 20 ---------
 .../src/physical_optimizer/enforce_distribution.rs |  2 +-
 datafusion/core/src/physical_planner.rs            |  2 +-
 .../user_defined/user_defined_table_functions.rs   |  2 +-
 datafusion/expr/src/logical_plan/plan.rs           | 48 ----------------------
 datafusion/functions-array/src/resize.rs           |  7 +---
 datafusion/functions-array/src/reverse.rs          |  7 +---
 .../optimizer/src/rewrite_disjunctive_predicate.rs |  2 +-
 15 files changed, 13 insertions(+), 148 deletions(-)

diff --git a/datafusion-cli/src/exec.rs b/datafusion-cli/src/exec.rs
index 19bff0528b..cfbc97ecbe 100644
--- a/datafusion-cli/src/exec.rs
+++ b/datafusion-cli/src/exec.rs
@@ -81,7 +81,7 @@ pub async fn exec_from_lines(
                         Ok(_) => {}
                         Err(err) => eprintln!("{err}"),
                     }
-                    query = "".to_owned();
+                    query = "".to_string();
                 } else {
                     query.push('\n');
                 }
diff --git a/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs 
b/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
index a16c1ae333..3a5d50bba0 100644
--- a/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
+++ b/datafusion/core/src/datasource/avro_to_arrow/arrow_array_reader.rs
@@ -203,13 +203,9 @@ impl<'a, R: Read> AvroArrowArrayReader<'a, R> {
         Arc::new(builder.finish())
     }
 
-    fn build_primitive_array<T: ArrowPrimitiveType + Resolver>(
-        &self,
-        rows: RecordSlice,
-        col_name: &str,
-    ) -> ArrayRef
+    fn build_primitive_array<T>(&self, rows: RecordSlice, col_name: &str) -> 
ArrayRef
     where
-        T: ArrowNumericType,
+        T: ArrowNumericType + Resolver,
         T::Native: num_traits::cast::NumCast,
     {
         Arc::new(
diff --git a/datafusion/core/src/datasource/file_format/json.rs 
b/datafusion/core/src/datasource/file_format/json.rs
index efc0aa4328..9f526e1c87 100644
--- a/datafusion/core/src/datasource/file_format/json.rs
+++ b/datafusion/core/src/datasource/file_format/json.rs
@@ -219,7 +219,7 @@ impl BatchSerializer for JsonSerializer {
 pub struct JsonSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Writer options for underlying Json writer
     writer_options: JsonWriterOptions,
 }
 
diff --git a/datafusion/core/src/datasource/file_format/parquet.rs 
b/datafusion/core/src/datasource/file_format/parquet.rs
index 7ec7d4540f..fa379eb5b4 100644
--- a/datafusion/core/src/datasource/file_format/parquet.rs
+++ b/datafusion/core/src/datasource/file_format/parquet.rs
@@ -536,7 +536,7 @@ async fn fetch_statistics(
 pub struct ParquetSink {
     /// Config options for writing data
     config: FileSinkConfig,
-    ///
+    /// Underlying parquet options
     parquet_options: TableParquetOptions,
     /// File metadata from successfully produced parquet files. The Mutex is 
only used
     /// to allow inserting to HashMap from behind borrowed reference in 
DataSink::write_all.
diff --git a/datafusion/core/src/datasource/physical_plan/file_stream.rs 
b/datafusion/core/src/datasource/physical_plan/file_stream.rs
index 619bcb29e2..9732d08c7a 100644
--- a/datafusion/core/src/datasource/physical_plan/file_stream.rs
+++ b/datafusion/core/src/datasource/physical_plan/file_stream.rs
@@ -519,7 +519,6 @@ mod tests {
     use std::sync::Arc;
 
     use super::*;
-    use crate::datasource::file_format::write::BatchSerializer;
     use crate::datasource::object_store::ObjectStoreUrl;
     use crate::prelude::SessionContext;
     use crate::test::{make_partition, object_store::register_test_store};
@@ -527,8 +526,6 @@ mod tests {
     use arrow_schema::Schema;
     use datafusion_common::{internal_err, Statistics};
 
-    use bytes::Bytes;
-
     /// Test `FileOpener` which will simulate errors during file opening or 
scanning
     #[derive(Default)]
     struct TestOpener {
@@ -974,14 +971,4 @@ mod tests {
 
         Ok(())
     }
-
-    struct TestSerializer {
-        bytes: Bytes,
-    }
-
-    impl BatchSerializer for TestSerializer {
-        fn serialize(&self, _batch: RecordBatch, _initial: bool) -> 
Result<Bytes> {
-            Ok(self.bytes.clone())
-        }
-    }
 }
diff --git a/datafusion/core/src/execution/context/avro.rs 
b/datafusion/core/src/execution/context/avro.rs
index 2703529264..e829f6123e 100644
--- a/datafusion/core/src/execution/context/avro.rs
+++ b/datafusion/core/src/execution/context/avro.rs
@@ -57,29 +57,3 @@ impl SessionContext {
         Ok(())
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    use async_trait::async_trait;
-
-    // Test for compilation error when calling read_* functions from an 
#[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_avro(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_avro(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_avro("dummy", AvroReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
-}
diff --git a/datafusion/core/src/execution/context/csv.rs 
b/datafusion/core/src/execution/context/csv.rs
index 504ebf6d77..6ba1a14600 100644
--- a/datafusion/core/src/execution/context/csv.rs
+++ b/datafusion/core/src/execution/context/csv.rs
@@ -90,7 +90,6 @@ mod tests {
     use crate::assert_batches_eq;
     use crate::test_util::{plan_and_collect, populate_csv_partitions};
 
-    use async_trait::async_trait;
     use tempfile::TempDir;
 
     #[tokio::test]
@@ -125,21 +124,4 @@ mod tests {
 
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an 
#[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_csv(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_csv(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_csv("dummy", CsvReadOptions::new()).await.unwrap()
-        }
-    }
 }
diff --git a/datafusion/core/src/execution/context/parquet.rs 
b/datafusion/core/src/execution/context/parquet.rs
index f7ab15d95b..fef20df6e6 100644
--- a/datafusion/core/src/execution/context/parquet.rs
+++ b/datafusion/core/src/execution/context/parquet.rs
@@ -84,7 +84,6 @@ mod tests {
     use datafusion_common::config::TableParquetOptions;
     use datafusion_execution::config::SessionConfig;
 
-    use async_trait::async_trait;
     use tempfile::tempdir;
 
     #[tokio::test]
@@ -331,23 +330,4 @@ mod tests {
         assert_eq!(total_rows, 5);
         Ok(())
     }
-
-    // Test for compilation error when calling read_* functions from an 
#[async_trait] function.
-    // See https://github.com/apache/datafusion/issues/1154
-    #[async_trait]
-    trait CallReadTrait {
-        async fn call_read_parquet(&self) -> DataFrame;
-    }
-
-    struct CallRead {}
-
-    #[async_trait]
-    impl CallReadTrait for CallRead {
-        async fn call_read_parquet(&self) -> DataFrame {
-            let ctx = SessionContext::new();
-            ctx.read_parquet("dummy", ParquetReadOptions::default())
-                .await
-                .unwrap()
-        }
-    }
 }
diff --git a/datafusion/core/src/physical_optimizer/enforce_distribution.rs 
b/datafusion/core/src/physical_optimizer/enforce_distribution.rs
index e8fc37cc22..c07f2c5dcf 100644
--- a/datafusion/core/src/physical_optimizer/enforce_distribution.rs
+++ b/datafusion/core/src/physical_optimizer/enforce_distribution.rs
@@ -411,7 +411,7 @@ fn adjust_input_keys_ordering(
     } else {
         // By default, push down the parent requirements to children
         for child in requirements.children.iter_mut() {
-            child.data = requirements.data.clone();
+            child.data.clone_from(&requirements.data);
         }
     }
     Ok(Transformed::yes(requirements))
diff --git a/datafusion/core/src/physical_planner.rs 
b/datafusion/core/src/physical_planner.rs
index a041ab31f7..391ded84ea 100644
--- a/datafusion/core/src/physical_planner.rs
+++ b/datafusion/core/src/physical_planner.rs
@@ -2035,7 +2035,7 @@ impl DefaultPhysicalPlanner {
             let config = &session_state.config_options().explain;
 
             if !config.physical_plan_only {
-                stringified_plans = e.stringified_plans.clone();
+                stringified_plans.clone_from(&e.stringified_plans);
                 if e.logical_optimization_succeeded {
                     
stringified_plans.push(e.plan.to_stringified(FinalLogicalPlan));
                 }
diff --git a/datafusion/core/tests/user_defined/user_defined_table_functions.rs 
b/datafusion/core/tests/user_defined/user_defined_table_functions.rs
index b5d10b1c5b..7342851569 100644
--- a/datafusion/core/tests/user_defined/user_defined_table_functions.rs
+++ b/datafusion/core/tests/user_defined/user_defined_table_functions.rs
@@ -185,7 +185,7 @@ impl TableFunctionImpl for SimpleCsvTableFunc {
         for expr in exprs {
             match expr {
                 Expr::Literal(ScalarValue::Utf8(Some(ref path))) => {
-                    filepath = path.clone()
+                    filepath.clone_from(path);
                 }
                 expr => new_exprs.push(expr.clone()),
             }
diff --git a/datafusion/expr/src/logical_plan/plan.rs 
b/datafusion/expr/src/logical_plan/plan.rs
index 64c5b56a40..23f5280377 100644
--- a/datafusion/expr/src/logical_plan/plan.rs
+++ b/datafusion/expr/src/logical_plan/plan.rs
@@ -2961,54 +2961,6 @@ digraph {
             .unwrap()
     }
 
-    /// Extension plan that panic when trying to access its input plan
-    #[derive(Debug)]
-    struct NoChildExtension {
-        empty_schema: DFSchemaRef,
-    }
-
-    impl UserDefinedLogicalNode for NoChildExtension {
-        fn as_any(&self) -> &dyn std::any::Any {
-            unimplemented!()
-        }
-
-        fn name(&self) -> &str {
-            unimplemented!()
-        }
-
-        fn inputs(&self) -> Vec<&LogicalPlan> {
-            panic!("Should not be called")
-        }
-
-        fn schema(&self) -> &DFSchemaRef {
-            &self.empty_schema
-        }
-
-        fn expressions(&self) -> Vec<Expr> {
-            unimplemented!()
-        }
-
-        fn fmt_for_explain(&self, _: &mut fmt::Formatter) -> fmt::Result {
-            unimplemented!()
-        }
-
-        fn from_template(
-            &self,
-            _: &[Expr],
-            _: &[LogicalPlan],
-        ) -> Arc<dyn UserDefinedLogicalNode> {
-            unimplemented!()
-        }
-
-        fn dyn_hash(&self, _: &mut dyn Hasher) {
-            unimplemented!()
-        }
-
-        fn dyn_eq(&self, _: &dyn UserDefinedLogicalNode) -> bool {
-            unimplemented!()
-        }
-    }
-
     #[test]
     fn test_replace_invalid_placeholder() {
         // test empty placeholder
diff --git a/datafusion/functions-array/src/resize.rs 
b/datafusion/functions-array/src/resize.rs
index c5855d0544..561e98e8b7 100644
--- a/datafusion/functions-array/src/resize.rs
+++ b/datafusion/functions-array/src/resize.rs
@@ -112,15 +112,12 @@ pub(crate) fn array_resize_inner(arg: &[ArrayRef]) -> 
Result<ArrayRef> {
 }
 
 /// array_resize keep the original array and append the default element to the 
end
-fn general_list_resize<O: OffsetSizeTrait>(
+fn general_list_resize<O: OffsetSizeTrait + TryInto<i64>>(
     array: &GenericListArray<O>,
     count_array: &Int64Array,
     field: &FieldRef,
     default_element: Option<ArrayRef>,
-) -> Result<ArrayRef>
-where
-    O: TryInto<i64>,
-{
+) -> Result<ArrayRef> {
     let data_type = array.value_type();
 
     let values = array.values();
diff --git a/datafusion/functions-array/src/reverse.rs 
b/datafusion/functions-array/src/reverse.rs
index 8324c407bd..9be6405657 100644
--- a/datafusion/functions-array/src/reverse.rs
+++ b/datafusion/functions-array/src/reverse.rs
@@ -99,13 +99,10 @@ pub fn array_reverse_inner(arg: &[ArrayRef]) -> 
Result<ArrayRef> {
     }
 }
 
-fn general_array_reverse<O: OffsetSizeTrait>(
+fn general_array_reverse<O: OffsetSizeTrait + TryFrom<i64>>(
     array: &GenericListArray<O>,
     field: &FieldRef,
-) -> Result<ArrayRef>
-where
-    O: TryFrom<i64>,
-{
+) -> Result<ArrayRef> {
     let values = array.values();
     let original_data = values.to_data();
     let capacity = Capacities::Array(original_data.len());
diff --git a/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs 
b/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
index b97974c859..ba865fa1e9 100644
--- a/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
+++ b/datafusion/optimizer/src/rewrite_disjunctive_predicate.rs
@@ -288,7 +288,7 @@ fn delete_duplicate_predicates(or_predicates: 
Vec<Predicate>) -> Predicate {
             Predicate::And { args } => {
                 let args_num = args.len();
                 if shortest_exprs.is_empty() || args_num < shortest_exprs_len {
-                    shortest_exprs = (*args).clone();
+                    shortest_exprs.clone_from(args);
                     shortest_exprs_len = args_num;
                 }
             }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to