[ 
https://issues.apache.org/jira/browse/ASTERIXDB-1290?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Till Westmann updated ASTERIXDB-1290:
-------------------------------------
    Description: 
A simple r-tree index join fails to complete on my machine. Unless I am missing something, this job should not be big enough to overwhelm the machine, and the error does not appear to be related to overflows. The AQL, the optimized plan, and the error trace are included below.



{noformat}
drop dataverse channels if exists;
create dataverse channels;
use dataverse channels;

create type UserLocation as closed {
        recordId: uuid,
        location: point,
        user-id: string,
        timeoffset: float
}
create type EmergencyShelter as closed {
        name: string,
        location: point
}

create type EmergencyReport as closed {
        reportId: uuid,
        severity: int,
        impactZone: circle,
        timeoffset: float,
        duration: float,
        message: string,
        emergencyType: string
}

create dataset UserLocations(UserLocation)
primary key recordId autogenerated;

create dataset EmergencyShelters(EmergencyShelter)
primary key name;

create dataset EmergencyReports(EmergencyReport)
primary key reportId autogenerated;

create index locs on EmergencyReports(impactZone) type rtree;

load dataset UserLocations using localfs 
(("path"="asterix_nc1:///Users/stevenjacobs/Desktop/EmergencyDataset/UserLocations.adm"),("format"="adm"));

load dataset EmergencyShelters using localfs
(("path"="asterix_nc1:///Users/stevenjacobs/Desktop/EmergencyDataset/EmergencyShelters.adm"),("format"="adm"));

load dataset EmergencyReports using localfs
(("path"="asterix_nc1:///Users/stevenjacobs/Desktop/EmergencyDataset/EmergencyReports.adm"),("format"="adm"));


for $report in dataset EmergencyReports
for $location in dataset UserLocations
where spatial-intersect($report.impactZone, $location.location)
return {
        "message":$report.message,
        "user at":$location.location,
        "emergency at":$report.impactZone,
        "type":$report.emergencyType
}
{noformat}
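
A possible narrowing step (a sketch only, not part of the original run; the count() wrapper is just an assumption about a useful diagnostic): execute the same join but return only its cardinality, so that almost no data flows through the result writer where the trace below fails. If this variant completes while the full query does not, the problem is more likely on the result-distribution side than in the r-tree join itself.

{noformat}
use dataverse channels;

// Same join as above, but only a single count crosses the result writer.
count(
  for $report in dataset EmergencyReports
  for $location in dataset UserLocations
  where spatial-intersect($report.impactZone, $location.location)
  return 1
)
{noformat}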



{noformat}
distribute result [%0->$$7]
-- DISTRIBUTE_RESULT  |PARTITIONED|
  exchange 
  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
    project ([$$7])
    -- STREAM_PROJECT  |PARTITIONED|
      assign [$$7] <- [function-call: asterix:closed-record-constructor, Args:[AString: {message}, function-call: asterix:field-access-by-index, Args:[%0->$$0, AInt32: {5}], AString: {user at}, %0->$$12, AString: {emergency at}, %0->$$13, AString: {type}, function-call: asterix:field-access-by-index, Args:[%0->$$0, AInt32: {6}]]]
      -- ASSIGN  |PARTITIONED|
        select (function-call: asterix:spatial-intersect, Args:[%0->$$13, %0->$$12])
        -- STREAM_SELECT  |PARTITIONED|
          assign [$$13] <- [function-call: asterix:field-access-by-index, Args:[%0->$$0, AInt32: {2}]]
          -- ASSIGN  |PARTITIONED|
            project ([$$0, $$12])
            -- STREAM_PROJECT  |PARTITIONED|
              exchange 
              -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                unnest-map [$$14, $$0] <- function-call: asterix:index-search, Args:[AString: {EmergencyReports}, AInt32: {0}, AString: {channels}, AString: {EmergencyReports}, ABoolean: {true}, ABoolean: {false}, ABoolean: {false}, AInt32: {1}, %0->$$26, AInt32: {1}, %0->$$26, TRUE, TRUE, TRUE]
                -- BTREE_SEARCH  |PARTITIONED|
                  exchange 
                  -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                    order (ASC, %0->$$26) 
                    -- STABLE_SORT [$$26(ASC)]  |PARTITIONED|
                      exchange 
                      -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                        project ([$$26, $$12])
                        -- STREAM_PROJECT  |PARTITIONED|
                          exchange 
                          -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                            unnest-map [$$22, $$23, $$24, $$25, $$26] <- function-call: asterix:index-search, Args:[AString: {locs}, AInt32: {1}, AString: {channels}, AString: {EmergencyReports}, ABoolean: {true}, ABoolean: {false}, ABoolean: {true}, AInt32: {4}, %0->$$18, %0->$$19, %0->$$20, %0->$$21]
                            -- RTREE_SEARCH  |PARTITIONED|
                              exchange 
                              -- BROADCAST_EXCHANGE  |PARTITIONED|
                                assign [$$18, $$19, $$20, $$21] <- [function-call: asterix:create-mbr, Args:[%0->$$12, AInt32: {2}, AInt32: {0}], function-call: asterix:create-mbr, Args:[%0->$$12, AInt32: {2}, AInt32: {1}], function-call: asterix:create-mbr, Args:[%0->$$12, AInt32: {2}, AInt32: {2}], function-call: asterix:create-mbr, Args:[%0->$$12, AInt32: {2}, AInt32: {3}]]
                                -- ASSIGN  |PARTITIONED|
                                  project ([$$12])
                                  -- STREAM_PROJECT  |PARTITIONED|
                                    assign [$$12] <- [function-call: asterix:field-access-by-index, Args:[%0->$$1, AInt32: {1}]]
                                    -- ASSIGN  |PARTITIONED|
                                      project ([$$1])
                                      -- STREAM_PROJECT  |PARTITIONED|
                                        exchange 
                                        -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                          data-scan []<-[$$15, $$1] <- channels:UserLocations
                                          -- DATASOURCE_SCAN  |PARTITIONED|
                                            exchange 
                                            -- ONE_TO_ONE_EXCHANGE  |PARTITIONED|
                                              empty-tuple-source
                                              -- EMPTY_TUPLE_SOURCE  |PARTITIONED|
{noformat}
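
The plan above broadcasts an MBR per UserLocations record into an RTREE_SEARCH on the locs index of EmergencyReports, sorts the returned primary keys, looks them up via BTREE_SEARCH on the primary index, and then re-checks spatial-intersect. A minimal probe of just that secondary-index path (again a sketch, not from the original report; the probe point is a placeholder and should be replaced with coordinates that actually fall inside the attached data) would be:

{noformat}
use dataverse channels;

// Probe the impactZone predicate with a constant point, taking the join
// and the broadcast out of the picture; the resulting plan should show
// whether the locs r-tree path alone runs cleanly.
for $report in dataset EmergencyReports
where spatial-intersect($report.impactZone, create-point(30.0, 70.0))
return $report.message
{noformat}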



{noformat}
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: java.nio.channels.ClosedChannelException
        at org.apache.hyracks.control.nc.io.IOManager.syncWrite(IOManager.java:108)
        at org.apache.hyracks.control.nc.dataset.ResultState.write(ResultState.java:116)
        at org.apache.hyracks.control.nc.dataset.DatasetPartitionWriter.nextFrame(DatasetPartitionWriter.java:97)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.io.FrameOutputStream.flush(FrameOutputStream.java:61)
        at org.apache.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor$1.nextFrame(ResultWriterOperatorDescriptor.java:103)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.util.FrameUtils.appendProjectionToWriter(FrameUtils.java:235)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendProjectionToFrame(AbstractOneInputOneOutputOneFramePushRuntime.java:95)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendProjectionToFrame(AbstractOneInputOneOutputOneFramePushRuntime.java:90)
        at org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory$1.nextFrame(StreamProjectRuntimeFactory.java:74)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.util.FrameUtils.appendToWriter(FrameUtils.java:162)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendToFrameFromTupleBuilder(AbstractOneInputOneOutputOneFramePushRuntime.java:82)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendToFrameFromTupleBuilder(AbstractOneInputOneOutputOneFramePushRuntime.java:78)
        at org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory$1.nextFrame(AssignRuntimeFactory.java:135)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.io.FrameFixedFieldTupleAppender.flush(FrameFixedFieldTupleAppender.java:146)
        at org.apache.hyracks.dataflow.common.comm.util.FrameUtils.appendToWriter(FrameUtils.java:138)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendTupleToFrame(AbstractOneInputOneOutputOneFramePushRuntime.java:102)
        at org.apache.hyracks.algebricks.runtime.operators.std.StreamSelectRuntimeFactory$1.nextFrame(StreamSelectRuntimeFactory.java:145)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.util.FrameUtils.appendToWriter(FrameUtils.java:162)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendToFrameFromTupleBuilder(AbstractOneInputOneOutputOneFramePushRuntime.java:82)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendToFrameFromTupleBuilder(AbstractOneInputOneOutputOneFramePushRuntime.java:78)
        at org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory$1.nextFrame(AssignRuntimeFactory.java:135)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.dataflow.common.comm.util.FrameUtils.appendProjectionToWriter(FrameUtils.java:235)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendProjectionToFrame(AbstractOneInputOneOutputOneFramePushRuntime.java:95)
        at org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.appendProjectionToFrame(AbstractOneInputOneOutputOneFramePushRuntime.java:90)
        at org.apache.hyracks.algebricks.runtime.operators.std.StreamProjectRuntimeFactory$1.nextFrame(StreamProjectRuntimeFactory.java:74)
        at org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.nextFrame(AlgebricksMetaOperatorDescriptor.java:148)
        at org.apache.hyracks.dataflow.common.comm.io.AbstractFrameAppender.flush(AbstractFrameAppender.java:83)
        at org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:196)
        ... 9 more
Caused by: java.nio.channels.ClosedChannelException
        at sun.nio.ch.FileChannelImpl.ensureOpen(FileChannelImpl.java:110)
        at sun.nio.ch.FileChannelImpl.write(FileChannelImpl.java:757)
        at org.apache.hyracks.control.nc.io.IOManager.syncWrite(IOManager.java:95)
        ... 42 more
{noformat}


> Index Join Query fails to complete on Single Machine Instance
> -------------------------------------------------------------
>
>                 Key: ASTERIXDB-1290
>                 URL: https://issues.apache.org/jira/browse/ASTERIXDB-1290
>             Project: Apache AsterixDB
>          Issue Type: Bug
>            Reporter: Steven Jacobs
>         Attachments: datasets.zip
>



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
