6 changes: 3 additions & 3 deletions datafusion/common-runtime/src/common.rs
@@ -44,7 +44,7 @@ impl<R: 'static> SpawnedTask<R> {
R: Send,
{
// Ok to use spawn here as SpawnedTask handles aborting/cancelling the task on Drop
#[allow(clippy::disallowed_methods)]
#[expect(clippy::disallowed_methods)]
Contributor comment: ❤️

let inner = tokio::task::spawn(trace_future(task));
Self { inner }
}
@@ -56,7 +56,7 @@ impl<R: 'static> SpawnedTask<R> {
R: Send,
{
// Ok to use spawn_blocking here as SpawnedTask handles aborting/cancelling the task on Drop
#[allow(clippy::disallowed_methods)]
#[expect(clippy::disallowed_methods)]
let inner = tokio::task::spawn_blocking(trace_block(task));
Self { inner }
}
@@ -122,7 +122,7 @@ mod tests {
#[tokio::test]
async fn runtime_shutdown() {
let rt = Runtime::new().unwrap();
#[allow(clippy::async_yields_async)]
#[expect(clippy::async_yields_async)]
let task = rt
.spawn(async {
SpawnedTask::spawn(async {
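The "Ok to use spawn here" comments above rely on the wrapper aborting its task when dropped. A minimal hedged sketch of that pattern, for context only (the `SpawnedTaskSketch` name is hypothetical and the tracing wrappers are omitted; this is not DataFusion's actual implementation):

```rust
use tokio::task::JoinHandle;

// Sketch of a handle that cancels its background task on Drop, which is
// why spawning directly inside it is considered safe.
struct SpawnedTaskSketch<R> {
    inner: JoinHandle<R>,
}

impl<R> Drop for SpawnedTaskSketch<R> {
    fn drop(&mut self) {
        // Abort the tokio task so it cannot outlive the handle that owns it.
        self.inner.abort();
    }
}
```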
1 change: 1 addition & 0 deletions datafusion/common-runtime/src/lib.rs
@@ -18,6 +18,7 @@
// https://github.com/apache/datafusion/issues/18503
#![deny(clippy::needless_pass_by_value)]
#![cfg_attr(test, allow(clippy::needless_pass_by_value))]
#![deny(clippy::allow_attributes)]
#![doc(
html_logo_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg",
html_favicon_url = "https://raw.githubusercontent.com/apache/datafusion/19fe44cf2f30cbdd63d4a4f52c74055163c6cc38/docs/logos/standalone_logo/logo_original.svg"
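With `#![deny(clippy::allow_attributes)]` in place, any bare `#[allow(...)]` in the crate becomes a clippy error, steering contributors toward `#[expect(...)]`. A hedged sketch of the effect (assumes Rust 1.81+ and a clippy with the `allow_attributes` lint; the function names are made up):

```rust
#![deny(clippy::allow_attributes)]

// Rejected by `cargo clippy` under the deny above: clippy asks for
// `#[expect(...)]` (or a justified allow) instead of a bare `#[allow]`.
#[allow(clippy::needless_return)]
fn flagged() -> i32 {
    return 1;
}

// Accepted: the suppression is tracked, and clippy reports it if it ever
// stops being needed.
#[expect(clippy::needless_return)]
fn accepted() -> i32 {
    return 1;
}

fn main() {
    println!("{}", flagged() + accepted());
}
```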
17 changes: 3 additions & 14 deletions datafusion/common/src/config.rs
@@ -157,12 +157,10 @@ macro_rules! config_namespace {
// $(#[allow(deprecated)])?
{
$(let value = $transform(value);)? // Apply transformation if specified
#[allow(deprecated)]
Contributor comment: Hmm, I wonder if there was a historical reason these allow(deprecated) attributes were present; was it anticipating future compatibility? 🤔

Contributor reply: It may have been needed for code that was subsequently updated while the allow(deprecated) annotation was not. That is why I like the expect(deprecated) style: the compiler then tells you when the suppression is no longer actually needed.
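To make that concrete, a small hedged example (hypothetical functions, assuming Rust 1.81+ where `#[expect]` is stable): once the deprecated call is removed from a body, the `expect` variant warns via `unfulfilled_lint_expectations`, while the `allow` variant keeps silently suppressing nothing.

```rust
#[deprecated = "use new_api instead"]
fn old_api() {}

fn new_api() {}

// Stays silent even after `old_api()` is deleted from the body, so the
// stale suppression can linger indefinitely.
#[allow(deprecated)]
fn with_allow() {
    old_api();
}

// Suppresses the lint today, but emits `unfulfilled_lint_expectations`
// as soon as nothing deprecated is used here any more.
#[expect(deprecated)]
fn with_expect() {
    old_api();
}

fn main() {
    with_allow();
    with_expect();
    new_api();
}
```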

let ret = self.$field_name.set(rem, value.as_ref());

$(if !$warn.is_empty() {
let default: $field_type = $default;
#[allow(deprecated)]
if default != self.$field_name {
log::warn!($warn);
}
@@ -181,7 +179,6 @@
$(
let key = format!(concat!("{}.", stringify!($field_name)), key_prefix);
let desc = concat!($($d),*).trim();
#[allow(deprecated)]
self.$field_name.visit(v, key.as_str(), desc);
)*
}
@@ -191,8 +188,7 @@
match key {
$(
stringify!($field_name) => {
#[allow(deprecated)]
{
{
if rem.is_empty() {
let default_value: $field_type = $default;
self.$field_name = default_value;
@@ -213,7 +209,6 @@
}
impl Default for $struct_name {
fn default() -> Self {
#[allow(deprecated)]
Self {
$($field_name: $default),*
}
@@ -1851,8 +1846,7 @@ macro_rules! extensions_options {
// Safely apply deprecated attribute if present
// $(#[allow(deprecated)])?
{
#[allow(deprecated)]
self.$field_name.set(rem, value.as_ref())
self.$field_name.set(rem, value.as_ref())
}
},
)*
@@ -1866,7 +1860,6 @@
$(
let key = stringify!($field_name).to_string();
let desc = concat!($($d),*).trim();
#[allow(deprecated)]
self.$field_name.visit(v, key.as_str(), desc);
)*
}
@@ -2309,7 +2302,6 @@ macro_rules! config_namespace_with_hashmap {
$(
stringify!($field_name) => {
// Handle deprecated fields
#[allow(deprecated)] // Allow deprecated fields
$(let value = $transform(value);)?
self.$field_name.set(rem, value.as_ref())
},
@@ -2325,15 +2317,13 @@
let key = format!(concat!("{}.", stringify!($field_name)), key_prefix);
let desc = concat!($($d),*).trim();
// Handle deprecated fields
#[allow(deprecated)]
self.$field_name.visit(v, key.as_str(), desc);
)*
}
}

impl Default for $struct_name {
fn default() -> Self {
#[allow(deprecated)]
Self {
$($field_name: $default),*
}
@@ -2361,7 +2351,6 @@
$(
let key = format!("{}.{field}::{}", key_prefix, column_name, field = stringify!($field_name));
let desc = concat!($($d),*).trim();
#[allow(deprecated)]
col_options.$field_name.visit(v, key.as_str(), desc);
)*
}
@@ -2940,7 +2929,7 @@ config_namespace! {
pub trait OutputFormatExt: Display {}

#[derive(Debug, Clone, PartialEq)]
#[allow(clippy::large_enum_variant)]
#[cfg_attr(feature = "parquet", expect(clippy::large_enum_variant))]
pub enum OutputFormat {
CSV(CsvOptions),
JSON(JsonOptions),
9 changes: 1 addition & 8 deletions datafusion/common/src/error.rs
@@ -935,14 +935,9 @@ macro_rules! make_error {
}


// Note: Certain macros are used in this crate, but not all.
// This macro generates a use or all of them in case they are needed
// so we allow unused code to avoid warnings when they are not used
#[doc(hidden)]
#[allow(unused)]
pub use $NAME_ERR as [<_ $NAME_ERR>];
#[doc(hidden)]
#[allow(unused)]
pub use $NAME_DF_ERR as [<_ $NAME_DF_ERR>];
}
};
@@ -1238,7 +1233,7 @@ mod test {
// To pass the test the environment variable RUST_BACKTRACE should be set to 1 to enforce backtrace
#[cfg(feature = "backtrace")]
#[test]
#[allow(clippy::unnecessary_literal_unwrap)]
#[expect(clippy::unnecessary_literal_unwrap)]
fn test_enabled_backtrace() {
match std::env::var("RUST_BACKTRACE") {
Ok(val) if val == "1" => {}
@@ -1266,7 +1261,6 @@

#[cfg(not(feature = "backtrace"))]
#[test]
#[allow(clippy::unnecessary_literal_unwrap)]
fn test_disabled_backtrace() {
let res: Result<(), DataFusionError> = plan_err!("Err");
let res = res.unwrap_err().to_string();
@@ -1336,7 +1330,6 @@
}

#[test]
#[allow(clippy::unnecessary_literal_unwrap)]
fn test_make_error_parse_input() {
let res: Result<(), DataFusionError> = plan_err!("Err");
let res = res.unwrap_err();
15 changes: 4 additions & 11 deletions datafusion/common/src/file_options/parquet_writer.rs
@@ -26,8 +26,6 @@ use crate::{

use arrow::datatypes::Schema;
use parquet::arrow::encode_arrow_schema;
// TODO: handle once deprecated
#[allow(deprecated)]
use parquet::{
arrow::ARROW_SCHEMA_META_KEY,
basic::{BrotliLevel, GzipLevel, ZstdLevel},
@@ -176,7 +174,6 @@ impl ParquetOptions {
///
/// Note that this method does not include the key_value_metadata from [`TableParquetOptions`].
pub fn into_writer_properties_builder(&self) -> Result<WriterPropertiesBuilder> {
#[allow(deprecated)]
let ParquetOptions {
data_pagesize_limit,
write_batch_size,
@@ -263,7 +260,7 @@ pub(crate) fn parse_encoding_string(
"plain" => Ok(parquet::basic::Encoding::PLAIN),
"plain_dictionary" => Ok(parquet::basic::Encoding::PLAIN_DICTIONARY),
"rle" => Ok(parquet::basic::Encoding::RLE),
#[allow(deprecated)]
#[expect(deprecated)]
"bit_packed" => Ok(parquet::basic::Encoding::BIT_PACKED),
"delta_binary_packed" => Ok(parquet::basic::Encoding::DELTA_BINARY_PACKED),
"delta_length_byte_array" => {
@@ -404,10 +401,9 @@ pub(crate) fn parse_statistics_string(str_setting: &str) -> Result<EnabledStatis
#[cfg(test)]
mod tests {
use super::*;
use crate::config::{
ConfigFileEncryptionProperties, ParquetColumnOptions, ParquetEncryptionOptions,
ParquetOptions,
};
#[cfg(feature = "parquet_encryption")]
use crate::config::ConfigFileEncryptionProperties;
use crate::config::{ParquetColumnOptions, ParquetEncryptionOptions, ParquetOptions};
use parquet::basic::Compression;
use parquet::file::properties::{
BloomFilterProperties, DEFAULT_BLOOM_FILTER_FPP, DEFAULT_BLOOM_FILTER_NDV,
@@ -440,7 +436,6 @@ mod tests {
"1.0"
};

#[allow(deprecated)] // max_statistics_size
ParquetOptions {
data_pagesize_limit: 42,
write_batch_size: 42,
@@ -486,7 +481,6 @@
) -> ParquetColumnOptions {
let bloom_filter_default_props = props.bloom_filter_properties(&col);

#[allow(deprecated)] // max_statistics_size
ParquetColumnOptions {
bloom_filter_enabled: Some(bloom_filter_default_props.is_some()),
encoding: props.encoding(&col).map(|s| s.to_string()),
@@ -547,7 +541,6 @@
#[cfg(not(feature = "parquet_encryption"))]
let fep = None;

#[allow(deprecated)] // max_statistics_size
TableParquetOptions {
global: ParquetOptions {
// global options
2 changes: 1 addition & 1 deletion datafusion/common/src/instant.rs
@@ -22,7 +22,7 @@
/// under `wasm` feature gate. It provides the same API as [`std::time::Instant`].
pub type Instant = web_time::Instant;

#[allow(clippy::disallowed_types)]
#[expect(clippy::disallowed_types)]
#[cfg(not(target_family = "wasm"))]
/// DataFusion wrapper around [`std::time::Instant`]. This is only a type alias.
pub type Instant = std::time::Instant;
1 change: 1 addition & 0 deletions datafusion/common/src/lib.rs
@@ -26,6 +26,7 @@
// https://github.com/apache/datafusion/issues/18503
#![deny(clippy::needless_pass_by_value)]
#![cfg_attr(test, allow(clippy::needless_pass_by_value))]
#![deny(clippy::allow_attributes)]

mod column;
mod dfschema;
7 changes: 2 additions & 5 deletions datafusion/common/src/scalar/mod.rs
@@ -6330,8 +6330,6 @@ mod tests {
}

#[test]
// despite clippy claiming they are useless, the code doesn't compile otherwise.
#[allow(clippy::useless_vec)]
fn scalar_iter_to_array_boolean() {
check_scalar_iter!(Boolean, BooleanArray, vec![Some(true), None, Some(false)]);
check_scalar_iter!(Float32, Float32Array, vec![Some(1.9), None, Some(-2.1)]);
@@ -6381,12 +6379,12 @@
check_scalar_iter_binary!(
Binary,
BinaryArray,
vec![Some(b"foo"), None, Some(b"bar")]
[Some(b"foo"), None, Some(b"bar")]
);
check_scalar_iter_binary!(
LargeBinary,
LargeBinaryArray,
vec![Some(b"foo"), None, Some(b"bar")]
[Some(b"foo"), None, Some(b"bar")]
);
}

@@ -7766,7 +7764,6 @@
}

#[test]
#[allow(arithmetic_overflow)] // we want to test them
fn test_scalar_negative_overflows() -> Result<()> {
macro_rules! test_overflow_on_value {
($($val:expr),* $(,)?) => {$(
4 changes: 1 addition & 3 deletions datafusion/common/src/stats.rs
@@ -367,7 +367,7 @@ impl Statistics {
return self;
};

#[allow(clippy::large_enum_variant)]
#[expect(clippy::large_enum_variant)]
enum Slot {
/// The column is taken and put into the specified statistics location
Taken(usize),
@@ -978,8 +978,6 @@ mod tests {
// Precision<ScalarValue> is not copy (requires .clone())
let precision: Precision<ScalarValue> =
Precision::Exact(ScalarValue::Int64(Some(42)));
// Clippy would complain about this if it were Copy
#[allow(clippy::redundant_clone)]
let p2 = precision.clone();
assert_eq!(precision, p2);
}