Github user rdblue commented on a diff in the pull request: https://github.com/apache/spark/pull/21556#discussion_r198904089 --- Diff: sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala --- @@ -378,6 +378,22 @@ object SQLConf { .booleanConf .createWithDefault(true) + val PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED = + buildConf("spark.sql.parquet.filterPushdown.decimal") + .doc(s"If true, enables Parquet filter push-down optimization for Decimal. " + + "The default value is false to be compatible with the legacy Parquet format. " + + s"This configuration only has an effect when '${PARQUET_FILTER_PUSHDOWN_ENABLED.key}' is " + + "enabled and Decimal statistics are generated (since Spark 2.4).") + .internal() + .booleanConf + .createWithDefault(true) + + val PARQUET_READ_LEGACY_FORMAT = buildConf("spark.sql.parquet.readLegacyFormat") --- End diff -- This property doesn't mention pushdown, but the description says it is only valid for push-down. Can you make the property name clearer?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org