[ 
https://issues.apache.org/jira/browse/DRILL-6199?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16399011#comment-16399011
 ] 

ASF GitHub Bot commented on DRILL-6199:
---------------------------------------

Github user HanumathRao commented on a diff in the pull request:

    https://github.com/apache/drill/pull/1152#discussion_r174558288
  
    --- Diff: 
exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillFilterItemStarReWriterRule.java
 ---
    @@ -54,83 +44,189 @@
     import static 
org.apache.drill.exec.planner.logical.FieldsReWriterUtil.FieldsReWriter;
     
     /**
    - * Rule will transform filter -> project -> scan call with item star 
fields in filter
    - * into project -> filter -> project -> scan where item star fields are 
pushed into scan
    - * and replaced with actual field references.
    + * Rule will transform item star fields in filter, replacing them with actual 
field references.
      *
     * This will help partition pruning and push down rules to detect fields 
that can be pruned or pushed down.
      * Item star operator appears when sub-select or cte with star are used as 
source.
      */
    -public class DrillFilterItemStarReWriterRule extends RelOptRule {
    +public class DrillFilterItemStarReWriterRule {
     
    -  public static final DrillFilterItemStarReWriterRule INSTANCE = new 
DrillFilterItemStarReWriterRule(
    -      RelOptHelper.some(Filter.class, RelOptHelper.some(Project.class, 
RelOptHelper.any( TableScan.class))),
    -      "DrillFilterItemStarReWriterRule");
    +  public static final DrillFilterItemStarReWriterRule.ProjectOnScan 
PROJECT_ON_SCAN = new ProjectOnScan(
    +          RelOptHelper.some(DrillProjectRel.class, 
RelOptHelper.any(DrillScanRel.class)),
    +          "DrillFilterItemStarReWriterRule.ProjectOnScan");
     
    -  private DrillFilterItemStarReWriterRule(RelOptRuleOperand operand, 
String id) {
    -    super(operand, id);
    -  }
    +  public static final DrillFilterItemStarReWriterRule.FilterOnScan 
FILTER_ON_SCAN = new FilterOnScan(
    +      RelOptHelper.some(DrillFilterRel.class, 
RelOptHelper.any(DrillScanRel.class)),
    +      "DrillFilterItemStarReWriterRule.FilterOnScan");
     
    -  @Override
    -  public void onMatch(RelOptRuleCall call) {
    -    Filter filterRel = call.rel(0);
    -    Project projectRel = call.rel(1);
    -    TableScan scanRel = call.rel(2);
    +  public static final DrillFilterItemStarReWriterRule.FilterOnProject 
FILTER_ON_PROJECT = new FilterOnProject(
    +      RelOptHelper.some(DrillFilterRel.class, 
RelOptHelper.some(DrillProjectRel.class, RelOptHelper.any(DrillScanRel.class))),
    +      "DrillFilterItemStarReWriterRule.FilterOnProject");
     
    -    ItemStarFieldsVisitor itemStarFieldsVisitor = new 
ItemStarFieldsVisitor(filterRel.getRowType().getFieldNames());
    -    filterRel.getCondition().accept(itemStarFieldsVisitor);
     
    -    // there are no item fields, no need to proceed further
    -    if (!itemStarFieldsVisitor.hasItemStarFields()) {
    -      return;
    +  private static class ProjectOnScan extends RelOptRule {
    +
    +    ProjectOnScan(RelOptRuleOperand operand, String id) {
    +      super(operand, id);
         }
     
    -    Map<String, DesiredField> itemStarFields = 
itemStarFieldsVisitor.getItemStarFields();
    +    @Override
    +    public boolean matches(RelOptRuleCall call) {
    +      DrillScanRel scan = call.rel(1);
    +      return scan.getGroupScan() instanceof ParquetGroupScan && 
super.matches(call);
    +    }
     
    -    // create new scan
    -    RelNode newScan = constructNewScan(scanRel, itemStarFields.keySet());
    +    @Override
    +    public void onMatch(RelOptRuleCall call) {
    +      DrillProjectRel projectRel = call.rel(0);
    +      DrillScanRel scanRel = call.rel(1);
    +
    +      ItemStarFieldsVisitor itemStarFieldsVisitor = new 
ItemStarFieldsVisitor(scanRel.getRowType().getFieldNames());
    +      List<RexNode> projects = projectRel.getProjects();
    +      for (RexNode project : projects) {
    +        project.accept(itemStarFieldsVisitor);
    +      }
     
    -    // combine original and new projects
    -    List<RexNode> newProjects = new ArrayList<>(projectRel.getProjects());
    +      Map<String, DesiredField> itemStarFields = 
itemStarFieldsVisitor.getItemStarFields();
     
    -    // prepare node mapper to replace item star calls with new input field 
references
    -    Map<RexNode, Integer> fieldMapper = new HashMap<>();
    +      // if there are no item fields, no need to proceed further
    +      if (itemStarFieldsVisitor.hasNoItemStarFields()) {
    --- End diff --
    
    Can this check be moved to before the getItemStarFields call?


> Filter push down doesn't work with more than one nested subqueries
> ------------------------------------------------------------------
>
>                 Key: DRILL-6199
>                 URL: https://issues.apache.org/jira/browse/DRILL-6199
>             Project: Apache Drill
>          Issue Type: Bug
>    Affects Versions: 1.13.0
>            Reporter: Anton Gozhiy
>            Assignee: Arina Ielchiieva
>            Priority: Major
>             Fix For: 1.14.0
>
>         Attachments: DRILL_6118_data_source.csv
>
>
> *Data set:*
> The data is generated used the attached file: *DRILL_6118_data_source.csv*
> Data gen commands:
> {code:sql}
> create table dfs.tmp.`DRILL_6118_parquet_partitioned_by_folders/d1` (c1, c2, 
> c3, c4, c5) as select cast(columns[0] as int) c1, columns[1] c2, columns[2] 
> c3, columns[3] c4, columns[4] c5 from dfs.tmp.`DRILL_6118_data_source.csv` 
> where columns[0] in (1, 3);
> create table dfs.tmp.`DRILL_6118_parquet_partitioned_by_folders/d2` (c1, c2, 
> c3, c4, c5) as select cast(columns[0] as int) c1, columns[1] c2, columns[2] 
> c3, columns[3] c4, columns[4] c5 from dfs.tmp.`DRILL_6118_data_source.csv` 
> where columns[0]=2;
> create table dfs.tmp.`DRILL_6118_parquet_partitioned_by_folders/d3` (c1, c2, 
> c3, c4, c5) as select cast(columns[0] as int) c1, columns[1] c2, columns[2] 
> c3, columns[3] c4, columns[4] c5 from dfs.tmp.`DRILL_6118_data_source.csv` 
> where columns[0]>3;
> {code}
> *Steps:*
> # Execute the following query:
> {code:sql}
> explain plan for select * from (select * from (select * from 
> dfs.tmp.`DRILL_6118_parquet_partitioned_by_folders`)) where c1<3
> {code}
> *Expected result:*
> numFiles=2, numRowGroups=2, only files from the folders d1 and d2 should be 
> scanned.
> *Actual result:*
> Filter push down doesn't work:
> numFiles=3, numRowGroups=3, scanning from all files



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)

Reply via email to