fix clippy warnings (#8455)
* change get zero to first()

Signed-off-by: Ruihang Xia <[email protected]>

* wake clone to wake_by_ref

Signed-off-by: Ruihang Xia <[email protected]>

* more first()

Signed-off-by: Ruihang Xia <[email protected]>

* try_from() to from()

Signed-off-by: Ruihang Xia <[email protected]>

---------

Signed-off-by: Ruihang Xia <[email protected]>
waynexia authored Dec 8, 2023
1 parent 9be9073 commit c0c9e88
Showing 28 changed files with 51 additions and 59 deletions.
2 changes: 1 addition & 1 deletion datafusion-examples/examples/custom_datasource.rs
@@ -80,7 +80,7 @@ async fn search_accounts(

     timeout(Duration::from_secs(10), async move {
         let result = dataframe.collect().await.unwrap();
-        let record_batch = result.get(0).unwrap();
+        let record_batch = result.first().unwrap();

         assert_eq!(expected_result_length, record_batch.column(1).len());
         dbg!(record_batch.columns());
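For context on the recurring change in this commit: `slice.first()` is equivalent to `slice.get(0)` but states the intent directly, which is what clippy's `get_first` lint asks for. A minimal illustrative sketch (hypothetical function, not from the diff):

    fn demo(batches: &[i32]) {
        let a = batches.get(0);  // clippy::get_first warns here
        let b = batches.first(); // preferred; both return Option<&i32>
        assert_eq!(a, b);
    }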
2 changes: 1 addition & 1 deletion datafusion-examples/examples/memtable.rs
@@ -40,7 +40,7 @@ async fn main() -> Result<()> {

     timeout(Duration::from_secs(10), async move {
         let result = dataframe.collect().await.unwrap();
-        let record_batch = result.get(0).unwrap();
+        let record_batch = result.first().unwrap();

         assert_eq!(1, record_batch.column(0).len());
         dbg!(record_batch.columns());
2 changes: 1 addition & 1 deletion datafusion-examples/examples/simple_udtf.rs
@@ -129,7 +129,7 @@ struct LocalCsvTableFunc {}

 impl TableFunctionImpl for LocalCsvTableFunc {
     fn call(&self, exprs: &[Expr]) -> Result<Arc<dyn TableProvider>> {
-        let Some(Expr::Literal(ScalarValue::Utf8(Some(ref path)))) = exprs.get(0) else {
+        let Some(Expr::Literal(ScalarValue::Utf8(Some(ref path)))) = exprs.first() else {
             return plan_err!("read_csv requires at least one string argument");
         };
4 changes: 2 additions & 2 deletions datafusion/common/src/dfschema.rs
@@ -1476,8 +1476,8 @@ mod tests {
             DFSchema::new_with_metadata([a, b].to_vec(), HashMap::new()).unwrap(),
         );
         let schema: Schema = df_schema.as_ref().clone().into();
-        let a_df = df_schema.fields.get(0).unwrap().field();
-        let a_arrow = schema.fields.get(0).unwrap();
+        let a_df = df_schema.fields.first().unwrap().field();
+        let a_arrow = schema.fields.first().unwrap();
         assert_eq!(a_df.metadata(), a_arrow.metadata())
     }
16 changes: 7 additions & 9 deletions datafusion/common/src/error.rs
@@ -564,18 +564,16 @@ mod test {
         assert_eq!(
             err.split(DataFusionError::BACK_TRACE_SEP)
                 .collect::<Vec<&str>>()
-                .get(0)
+                .first()
                 .unwrap(),
             &"Error during planning: Err"
         );
-        assert!(
-            err.split(DataFusionError::BACK_TRACE_SEP)
-                .collect::<Vec<&str>>()
-                .get(1)
-                .unwrap()
-                .len()
-                > 0
-        );
+        assert!(!err
+            .split(DataFusionError::BACK_TRACE_SEP)
+            .collect::<Vec<&str>>()
+            .get(1)
+            .unwrap()
+            .is_empty());
     }

     #[cfg(not(feature = "backtrace"))]
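Besides `first()`, this hunk also applies clippy's `len_zero` lint: a `len() > 0` comparison reads better as `!is_empty()`. Illustrative sketch:

    fn check(trace: &str) {
        assert!(trace.len() > 0);   // clippy::len_zero warns here
        assert!(!trace.is_empty()); // preferred, same meaning
    }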
4 changes: 2 additions & 2 deletions datafusion/common/src/utils.rs
@@ -135,7 +135,7 @@ pub fn bisect<const SIDE: bool>(
 ) -> Result<usize> {
     let low: usize = 0;
     let high: usize = item_columns
-        .get(0)
+        .first()
         .ok_or_else(|| {
             DataFusionError::Internal("Column array shouldn't be empty".to_string())
         })?
@@ -186,7 +186,7 @@ pub fn linear_search<const SIDE: bool>(
 ) -> Result<usize> {
     let low: usize = 0;
     let high: usize = item_columns
-        .get(0)
+        .first()
         .ok_or_else(|| {
             DataFusionError::Internal("Column array shouldn't be empty".to_string())
         })?
2 changes: 1 addition & 1 deletion datafusion/core/benches/sort_limit_query_sql.rs
@@ -99,7 +99,7 @@ fn create_context() -> Arc<Mutex<SessionContext>> {
         ctx_holder.lock().push(Arc::new(Mutex::new(ctx)))
     });

-    let ctx = ctx_holder.lock().get(0).unwrap().clone();
+    let ctx = ctx_holder.lock().first().unwrap().clone();
     ctx
 }
5 changes: 2 additions & 3 deletions datafusion/core/benches/sql_query_with_io.rs
@@ -93,10 +93,9 @@ async fn setup_files(store: Arc<dyn ObjectStore>) {
     for partition in 0..TABLE_PARTITIONS {
         for file in 0..PARTITION_FILES {
             let data = create_parquet_file(&mut rng, file * FILE_ROWS);
-            let location = Path::try_from(format!(
+            let location = Path::from(format!(
                 "{table_name}/partition={partition}/{file}.parquet"
-            ))
-            .unwrap();
+            ));
             store.put(&location, data).await.unwrap();
         }
     }
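Here the fallible conversion was unnecessary: in the `object_store` crate, `Path` implements an infallible `From<String>`/`From<&str>` that normalizes the input, while `Path::parse` and the `TryFrom` impls return a `Result` for inputs that must not be altered. A sketch under that assumption:

    use object_store::path::Path;

    fn locations() {
        // Infallible: From normalizes the string into a valid path.
        let a = Path::from("table/partition=0/0.parquet");
        // Fallible: parse() errors on invalid input instead of fixing it.
        let b = Path::parse("table/partition=0/0.parquet").unwrap();
        assert_eq!(a, b);
    }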
4 changes: 2 additions & 2 deletions datafusion/core/src/datasource/file_format/parquet.rs
@@ -1803,8 +1803,8 @@ mod tests {
         // there is only one row group in one file.
         assert_eq!(page_index.len(), 1);
         assert_eq!(offset_index.len(), 1);
-        let page_index = page_index.get(0).unwrap();
-        let offset_index = offset_index.get(0).unwrap();
+        let page_index = page_index.first().unwrap();
+        let offset_index = offset_index.first().unwrap();

         // 13 col in one row group
         assert_eq!(page_index.len(), 13);
7 changes: 2 additions & 5 deletions datafusion/core/src/datasource/file_format/write/demux.rs
@@ -264,12 +264,9 @@ async fn hive_style_partitions_demuxer(
             // TODO: upstream RecordBatch::take to arrow-rs
             let take_indices = builder.finish();
             let struct_array: StructArray = rb.clone().into();
-            let parted_batch = RecordBatch::try_from(
+            let parted_batch = RecordBatch::from(
                 arrow::compute::take(&struct_array, &take_indices, None)?.as_struct(),
-            )
-            .map_err(|_| {
-                DataFusionError::Internal("Unexpected error partitioning batch!".into())
-            })?;
+            );

             // Get or create channel for this batch
             let part_tx = match value_map.get_mut(&part_key) {
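The `map_err` dance was dead code because arrow-rs provides an infallible `From<&StructArray> for RecordBatch` (a struct array's fields already form a valid schema). A small sketch, assuming a recent arrow-rs API:

    use std::sync::Arc;
    use arrow::array::{ArrayRef, Int32Array, StructArray};
    use arrow::datatypes::{DataType, Field};
    use arrow::record_batch::RecordBatch;

    fn struct_to_batch() {
        let col: ArrayRef = Arc::new(Int32Array::from(vec![1, 2, 3]));
        let struct_array = StructArray::from(vec![(
            Arc::new(Field::new("a", DataType::Int32, false)),
            col,
        )]);
        // No Result to handle: the conversion cannot fail.
        let batch = RecordBatch::from(&struct_array);
        assert_eq!(batch.num_columns(), 1);
    }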
12 changes: 6 additions & 6 deletions datafusion/core/src/datasource/listing/table.rs
@@ -157,15 +157,15 @@ impl ListingTableConfig {

     /// Infer `ListingOptions` based on `table_path` suffix.
     pub async fn infer_options(self, state: &SessionState) -> Result<Self> {
-        let store = if let Some(url) = self.table_paths.get(0) {
+        let store = if let Some(url) = self.table_paths.first() {
             state.runtime_env().object_store(url)?
         } else {
             return Ok(self);
         };

         let file = self
             .table_paths
-            .get(0)
+            .first()
             .unwrap()
             .list_all_files(state, store.as_ref(), "")
             .await?
@@ -191,7 +191,7 @@ impl ListingTableConfig {
     pub async fn infer_schema(self, state: &SessionState) -> Result<Self> {
         match self.options {
             Some(options) => {
-                let schema = if let Some(url) = self.table_paths.get(0) {
+                let schema = if let Some(url) = self.table_paths.first() {
                     options.infer_schema(state, url).await?
                 } else {
                     Arc::new(Schema::empty())
@@ -710,7 +710,7 @@ impl TableProvider for ListingTable {
             None
         };

-        let object_store_url = if let Some(url) = self.table_paths.get(0) {
+        let object_store_url = if let Some(url) = self.table_paths.first() {
             url.object_store()
         } else {
             return Ok(Arc::new(EmptyExec::new(false, Arc::new(Schema::empty()))));
@@ -835,7 +835,7 @@ impl TableProvider for ListingTable {
         // Multiple sort orders in outer vec are equivalent, so we pass only the first one
         let ordering = self
             .try_create_output_ordering()?
-            .get(0)
+            .first()
             .ok_or(DataFusionError::Internal(
                 "Expected ListingTable to have a sort order, but none found!".into(),
             ))?
@@ -872,7 +872,7 @@ impl ListingTable {
         filters: &'a [Expr],
         limit: Option<usize>,
     ) -> Result<(Vec<Vec<PartitionedFile>>, Statistics)> {
-        let store = if let Some(url) = self.table_paths.get(0) {
+        let store = if let Some(url) = self.table_paths.first() {
             ctx.runtime_env().object_store(url)?
         } else {
             return Ok((vec![], Statistics::new_unknown(&self.file_schema)));
8 changes: 4 additions & 4 deletions datafusion/core/src/datasource/physical_plan/json.rs
@@ -357,9 +357,9 @@ mod tests {
         )
         .unwrap();
         let meta = file_groups
-            .get(0)
+            .first()
             .unwrap()
-            .get(0)
+            .first()
             .unwrap()
             .clone()
             .object_meta;
@@ -391,9 +391,9 @@ mod tests {
         )
         .unwrap();
         let path = file_groups
-            .get(0)
+            .first()
             .unwrap()
-            .get(0)
+            .first()
             .unwrap()
             .object_meta
             .location
2 changes: 1 addition & 1 deletion datafusion/core/src/datasource/physical_plan/mod.rs
@@ -135,7 +135,7 @@ impl DisplayAs for FileScanConfig {
             write!(f, ", infinite_source=true")?;
         }

-        if let Some(ordering) = orderings.get(0) {
+        if let Some(ordering) = orderings.first() {
             if !ordering.is_empty() {
                 let start = if orderings.len() == 1 {
                     ", output_ordering="
2 changes: 1 addition & 1 deletion datafusion/core/src/physical_planner.rs
@@ -2012,7 +2012,7 @@ impl DefaultPhysicalPlanner {
         let mut column_names = StringBuilder::new();
         let mut data_types = StringBuilder::new();
         let mut is_nullables = StringBuilder::new();
-        for (_, field) in table_schema.fields().iter().enumerate() {
+        for field in table_schema.fields() {
             column_names.append_value(field.name());

             // "System supplied type" --> Use debug format of the datatype
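This one is clippy's `unused_enumerate_index` lint: the index produced by `enumerate()` was bound to `_` and never used, so the iterator can be consumed directly. Illustrative sketch (hypothetical function):

    fn print_names(fields: &[String]) {
        for (_, f) in fields.iter().enumerate() { // warns: index unused
            println!("{f}");
        }
        for f in fields { // preferred: no enumerate needed
            println!("{f}");
        }
    }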
6 changes: 3 additions & 3 deletions datafusion/core/tests/parquet/file_statistics.rs
@@ -133,7 +133,7 @@ async fn list_files_with_session_level_cache() {
     assert_eq!(get_list_file_cache_size(&state1), 1);
     let fg = &parquet1.base_config().file_groups;
     assert_eq!(fg.len(), 1);
-    assert_eq!(fg.get(0).unwrap().len(), 1);
+    assert_eq!(fg.first().unwrap().len(), 1);

     //Session 2 first time list files
     //check session 1 cache result not show in session 2
@@ -144,7 +144,7 @@
     assert_eq!(get_list_file_cache_size(&state2), 1);
     let fg2 = &parquet2.base_config().file_groups;
     assert_eq!(fg2.len(), 1);
-    assert_eq!(fg2.get(0).unwrap().len(), 1);
+    assert_eq!(fg2.first().unwrap().len(), 1);

     //Session 1 second time list files
     //check session 1 cache result not show in session 2
@@ -155,7 +155,7 @@
     assert_eq!(get_list_file_cache_size(&state1), 1);
     let fg = &parquet3.base_config().file_groups;
     assert_eq!(fg.len(), 1);
-    assert_eq!(fg.get(0).unwrap().len(), 1);
+    assert_eq!(fg.first().unwrap().len(), 1);
     // List same file no increase
     assert_eq!(get_list_file_cache_size(&state1), 1);
 }
1 change: 0 additions & 1 deletion datafusion/core/tests/sql/mod.rs
@@ -15,7 +15,6 @@
 // specific language governing permissions and limitations
 // under the License.

-use std::convert::TryFrom;
 use std::sync::Arc;

 use arrow::{
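(The deleted import is redundant because `TryFrom` is in the standard prelude as of the Rust 2021 edition, which this crate presumably targets, so the import is flagged as unused.)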
2 changes: 1 addition & 1 deletion datafusion/core/tests/sql/parquet.rs
@@ -263,7 +263,7 @@ async fn parquet_list_columns() {

     assert_eq!(
         as_string_array(&utf8_list_array.value(0)).unwrap(),
-        &StringArray::try_from(vec![Some("abc"), Some("efg"), Some("hij"),]).unwrap()
+        &StringArray::from(vec![Some("abc"), Some("efg"), Some("hij"),])
     );

     assert_eq!(
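As with `Path` above, `StringArray` has an infallible `From<Vec<Option<&str>>>`, so `try_from(..).unwrap()` added nothing. Sketch:

    use arrow::array::{Array, StringArray};

    fn build() {
        // Construction cannot fail; nulls come from the `None` entries.
        let arr = StringArray::from(vec![Some("abc"), None, Some("hij")]);
        assert_eq!(arr.len(), 3);
        assert!(arr.is_null(1));
    }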
2 changes: 1 addition & 1 deletion datafusion/execution/src/cache/cache_unit.rs
@@ -228,7 +228,7 @@ mod tests {

         cache.put(&meta.location, vec![meta.clone()].into());
         assert_eq!(
-            cache.get(&meta.location).unwrap().get(0).unwrap().clone(),
+            cache.get(&meta.location).unwrap().first().unwrap().clone(),
             meta.clone()
         );
     }
1 change: 0 additions & 1 deletion datafusion/expr/src/utils.rs
@@ -501,7 +501,6 @@ pub fn generate_sort_key(
     let res = final_sort_keys
         .into_iter()
         .zip(is_partition_flag)
-        .map(|(lhs, rhs)| (lhs, rhs))
         .collect::<Vec<_>>();
     Ok(res)
 }
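The deleted line is clippy's `map_identity` lint: `zip` already yields `(lhs, rhs)` tuples, so mapping `|(lhs, rhs)| (lhs, rhs)` over it is a no-op. Sketch (hypothetical function):

    fn pair(keys: Vec<i32>, flags: Vec<bool>) -> Vec<(i32, bool)> {
        // zip already yields (i32, bool); no identity map needed.
        keys.into_iter().zip(flags).collect()
    }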
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/eliminate_limit.rs
@@ -97,7 +97,7 @@ mod tests {
     let optimizer = Optimizer::with_rules(vec![Arc::new(EliminateLimit::new())]);
     let optimized_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )?
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/push_down_filter.rs
@@ -1062,7 +1062,7 @@ mod tests {
     ]);
     let mut optimized_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )?
2 changes: 1 addition & 1 deletion datafusion/optimizer/src/push_down_projection.rs
@@ -625,7 +625,7 @@ mod tests {
     let optimizer = Optimizer::with_rules(vec![Arc::new(OptimizeProjections::new())]);
     let optimized_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )?
8 changes: 4 additions & 4 deletions datafusion/optimizer/src/test/mod.rs
@@ -158,7 +158,7 @@ pub fn assert_optimized_plan_eq(
     let optimizer = Optimizer::with_rules(vec![rule.clone()]);
     let optimized_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )?
@@ -199,7 +199,7 @@ pub fn assert_optimized_plan_eq_display_indent(
     let optimizer = Optimizer::with_rules(vec![rule]);
     let optimized_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )
@@ -233,7 +233,7 @@ pub fn assert_optimizer_err(
 ) {
     let optimizer = Optimizer::with_rules(vec![rule]);
     let res = optimizer.optimize_recursively(
-        optimizer.rules.get(0).unwrap(),
+        optimizer.rules.first().unwrap(),
         plan,
         &OptimizerContext::new(),
     );
@@ -255,7 +255,7 @@ pub fn assert_optimization_skipped(
     let optimizer = Optimizer::with_rules(vec![rule]);
     let new_plan = optimizer
         .optimize_recursively(
-            optimizer.rules.get(0).unwrap(),
+            optimizer.rules.first().unwrap(),
             plan,
             &OptimizerContext::new(),
         )?
@@ -309,7 +309,7 @@ impl GroupsAccumulator for GroupsAccumulatorAdapter {

         // double check each array has the same length (aka the
         // accumulator was implemented correctly
-        if let Some(first_col) = arrays.get(0) {
+        if let Some(first_col) = arrays.first() {
             for arr in &arrays {
                 assert_eq!(arr.len(), first_col.len())
             }
4 changes: 2 additions & 2 deletions datafusion/physical-expr/src/array_expressions.rs
@@ -537,7 +537,7 @@ fn general_except<OffsetSize: OffsetSizeTrait>(
         dedup.clear();
     }

-    if let Some(values) = converter.convert_rows(rows)?.get(0) {
+    if let Some(values) = converter.convert_rows(rows)?.first() {
         Ok(GenericListArray::<OffsetSize>::new(
             field.to_owned(),
             OffsetBuffer::new(offsets.into()),
@@ -2088,7 +2088,7 @@ pub fn array_intersect(args: &[ArrayRef]) -> Result<ArrayRef> {
     };
     offsets.push(last_offset + rows.len() as i32);
     let arrays = converter.convert_rows(rows)?;
-    let array = match arrays.get(0) {
+    let array = match arrays.first() {
         Some(array) => array.clone(),
         None => {
             return internal_err!(
2 changes: 1 addition & 1 deletion datafusion/physical-plan/src/memory.rs
@@ -55,7 +55,7 @@ impl fmt::Debug for MemoryExec {
         write!(f, "partitions: [...]")?;
         write!(f, "schema: {:?}", self.projected_schema)?;
         write!(f, "projection: {:?}", self.projection)?;
-        if let Some(sort_info) = &self.sort_information.get(0) {
+        if let Some(sort_info) = &self.sort_information.first() {
             write!(f, ", output_ordering: {:?}", sort_info)?;
         }
         Ok(())
2 changes: 1 addition & 1 deletion datafusion/physical-plan/src/test/exec.rs
@@ -790,7 +790,7 @@ impl Stream for PanicStream {
         } else {
             self.ready = true;
             // get called again
-            cx.waker().clone().wake();
+            cx.waker().wake_by_ref();
             return Poll::Pending;
         }
     }
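`Waker::wake` takes `self` by value, so `cx.waker().clone().wake()` clones only to consume the clone; `wake_by_ref()` wakes through the borrow and skips the clone, which is what clippy suggests here. A sketch of the idiom inside a `poll` implementation (illustrative function, not from the diff):

    use std::task::{Context, Poll};

    fn yield_once(cx: &mut Context<'_>) -> Poll<()> {
        cx.waker().wake_by_ref(); // was: cx.waker().clone().wake()
        Poll::Pending // the executor will poll again
    }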