[ https://issues.apache.org/jira/browse/ASTERIXDB-1413?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Wenhai updated ASTERIXDB-1413:
------------------------------
    Description: 
When we use the fuzzyjoin patch in the context of a three-way fuzzy join, the
following error almost always arises.
Patch link:
{noformat}
https://asterix-gerrit.ics.uci.edu/#/c/531/
{noformat}

Dataset
{noformat}
http://yun.baidu.com/share/link?shareid=2678954841&uk=4030601168
{noformat}

Schema
{noformat}
drop dataverse test if exists;

create dataverse test;

use dataverse test;

create type PaperType as open {
  tid: uuid,
  title: string,
  authors: string?,
  year: int?,
  conf: string?,
  idx: string,
  abstract: string?
}

create dataset ACM(PaperType) primary key tid autogenerated;

use dataverse test;
drop dataset ACM if exists;
create dataset ACM(PaperType) primary key tid autogenerated;
load dataset ACM
using localfs
(("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ae"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));


use dataverse test;

create dataset OUTPUT(PaperType) primary key tid autogenerated;

load dataset OUTPUT
using localfs
(("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ae,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.af,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ag"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));


use dataverse test;

create dataset DBLP(PaperType) primary key tid autogenerated;

load dataset DBLP
using localfs
(("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ae,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.af"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));
{noformat}
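
As a quick sanity check that every split was loaded, the record counts of the three datasets can be read back before running the join. This is only an illustrative AQL sketch and is not part of the original reproduction script:
{noformat}
use dataverse test;

let $acm := count(for $t in dataset('ACM') return $t)
let $dblp := count(for $o in dataset('DBLP') return $o)
let $out := count(for $g in dataset('OUTPUT') return $g)
return { "acm": $acm, "dblp": $dblp, "output": $out }
{noformat}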

Query
{noformat}
use dataverse test;
set import-private-functions 'true';
set simthreshold '.9f';
let $s := sum(
for $t in dataset('ACM')
for $o in dataset('DBLP')
for $g in dataset('OUTPUT')
where word-tokens($o.authors) ~= word-tokens($t.authors) and 
word-tokens($t.authors) ~= word-tokens($g.authors)
order by $o.id
return 1)
return $s
{noformat}
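
No simfunction is set, so the ~= predicates presumably rely on the engine default (Jaccard, as far as we can tell). For reference, the same conditions can be spelled out with the public similarity-jaccard builtin; this is only a hedged sketch, and it may not exercise the same fuzzy-join rewrite path as ~=:
{noformat}
use dataverse test;

let $s := sum(
for $t in dataset('ACM')
for $o in dataset('DBLP')
for $g in dataset('OUTPUT')
where similarity-jaccard(word-tokens($o.authors), word-tokens($t.authors)) >= 0.9
and similarity-jaccard(word-tokens($t.authors), word-tokens($g.authors)) >= 0.9
return 1)
return $s
{noformat}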

Error message
{noformat}
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory) [FileNotFoundException]
{noformat}

Tracing information
{noformat}
org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)

        at 
org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
        at 
org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
        at org.apache.hyracks.control.nc.Task.run(Task.java:319)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
        at org.apache.hyracks.control.nc.Task.run(Task.java:263)
        ... 3 more
Caused by: java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.util.concurrent.FutureTask.report(FutureTask.java:122)
        at java.util.concurrent.FutureTask.get(FutureTask.java:192)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
        ... 5 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
        at 
org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
        at 
org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
        at 
org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
        ... 9 more
Caused by: java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.io.RandomAccessFile.open0(Native Method)
        at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
        at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
        at org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
        ... 16 more
org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)

        at 
org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
        at 
org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
        at org.apache.hyracks.control.nc.Task.run(Task.java:319)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
        at org.apache.hyracks.control.nc.Task.run(Task.java:263)
        ... 3 more
Caused by: java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.util.concurrent.FutureTask.report(FutureTask.java:122)
        at java.util.concurrent.FutureTask.get(FutureTask.java:192)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
        ... 5 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
        at 
org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
        at 
org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
        at 
org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
        ... 9 more
Caused by: java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.io.RandomAccessFile.open0(Native Method)
        at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
        at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
        at org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
        ... 16 more
Apr 24, 2016 12:01:39 PM org.apache.asterix.api.http.servlet.APIServlet doPost
SEVERE: Job failed on account of:
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)

org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)

        at 
org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
        at 
org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
        at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
        at org.apache.hyracks.control.nc.Task.run(Task.java:319)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
       at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
        at org.apache.hyracks.control.nc.Task.run(Task.java:263)
        ... 3 more
Caused by: java.util.concurrent.ExecutionException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.util.concurrent.FutureTask.report(FutureTask.java:122)
        at java.util.concurrent.FutureTask.get(FutureTask.java:192)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
        ... 5 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
        at 
org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
        at 
org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        ... 3 more
Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
        at 
org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
        at 
org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
        at 
org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
        at 
org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
        at 
org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
        ... 9 more
Caused by: java.io.FileNotFoundException: 
/home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No such 
file or directory)
        at java.io.RandomAccessFile.open0(Native Method)
        at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
        at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
        at org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
        at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
        ... 16 more

Apr 24, 2016 12:01:49 PM org.apache.hyracks.control.common.dataset.ResultStateSweeper sweep

{noformat}
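
For comparison, the two-way form of the same query (third dataset dropped) is sketched below; it can help narrow down whether the additional join stage is what makes the RelS*.waf file that RunFileReader tries to open go missing. This variant is not part of the original report:
{noformat}
use dataverse test;
set import-private-functions 'true';
set simthreshold '.9f';

let $s := sum(
for $t in dataset('ACM')
for $o in dataset('DBLP')
where word-tokens($o.authors) ~= word-tokens($t.authors)
return 1)
return $s
{noformat}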



> Massively parallel operators can cause a "temporary file not found" error
> ---------------------------------------------------------------------
>
>                 Key: ASTERIXDB-1413
>                 URL: https://issues.apache.org/jira/browse/ASTERIXDB-1413
>             Project: Apache AsterixDB
>          Issue Type: Bug
>          Components: Hyracks
>          Environment: Linux Ubuntu 12.04, 24 cores + 128 GB memory
> 2 NCs x 12 partitions with 10 GB per NC
>            Reporter: Wenhai
>            Assignee: Jianfeng Jia
>
> When we use the fuzzyjoin patch in the context of three-way fuzzyjoin, the 
> following error will "ALMOST" always arise.
> Patch link:
> {noformat}
> https://asterix-gerrit.ics.uci.edu/#/c/531/
> {noformat}
> Dataset
> {noformat}
> http://yun.baidu.com/share/link?shareid=2678954841&uk=4030601168
> {noformat}
> Schema
> {noformat}
> drop dataverse test if exists;
> create dataverse test;
> use dataverse test;
> create type PaperType as open {
>   tid:uuid,
>   title: string,
>   authors: string?,
>   year: int?,
>   conf: string?,
>   idx: string,
>   abstract: string?
> }
> create dataset ACM(PaperType) primary key tid autogenerated;
> use dataverse test;
> drop dataset ACM if exists;
> create dataset ACM(PaperType) primary key tid autogenerated;
> load dataset ACM
> using localfs
> (("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/acm_split.ae"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));
> use dataverse test;
> create dataset OUTPUT(PaperType) primary key tid autogenerated;
> load dataset OUTPUT
> using localfs
> (("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ae,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.af,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/outputacm_raw.ag"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));
> use dataverse test;
> create dataset DBLP(PaperType) primary key tid autogenerated;
> load dataset DBLP
> using localfs
> (("path"="127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.aa,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ab,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ac,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ad,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.ae,127.0.0.1:///home/hadoop/Downloads/doccorpus/reproduce/dblp_split.af"),("format"="delimited-text"),("delimiter"="#"),("quote"="\u0000"));
> {noformat}
> Query
> {noformat}
> use dataverse test;
> set import-private-functions 'true'
> set simthreshold '.9f';
> let $s := sum(
> for $t in dataset ('ACM')
> for $o in dataset('DBLP')
> for $g in dataset('OUTPUT')
> where word-tokens($o.authors) ~= word-tokens($t.authors) and 
> word-tokens($t.authors) ~= word-tokens($g.authors)
> order by $o.id
> return 1)
> return $s
> {noformat}
> Error message
> {noformat}
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory) [FileNotFoundException]
> {noformat}
> Tracing information
> {noformat}
> org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
>         at 
> org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:319)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:263)
>         ... 3 more
> Caused by: java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>         at java.util.concurrent.FutureTask.get(FutureTask.java:192)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
>         ... 5 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
>         at 
> org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
>         ... 9 more
> Caused by: java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.io.RandomAccessFile.open0(Native Method)
>         at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
>         at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
>         at 
> org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
>         ... 16 more
> org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
>         at 
> org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:319)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:263)
>         ... 3 more
> Caused by: java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>         at java.util.concurrent.FutureTask.get(FutureTask.java:192)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
>         ... 5 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
>         at 
> org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
>         ... 9 more
> Caused by: java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.io.RandomAccessFile.open0(Native Method)
>         at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
>         at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
>         at 
> org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
>         ... 16 more
> Apr 24, 2016 12:01:39 PM org.apache.asterix.api.http.servlet.APIServlet doPost
> SEVERE: Job failed on account of:
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
> org.apache.hyracks.api.exceptions.HyracksException: Job failed on account of:
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.cc.job.JobRun.waitForCompletion(JobRun.java:211)
>         at 
> org.apache.hyracks.control.cc.work.WaitForJobCompletionWork$1.run(WaitForJobCompletionWork.java:48)
>         at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>         at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>         at java.lang.Thread.run(Thread.java:745)
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.control.common.utils.ExceptionUtils.setNodeIds(ExceptionUtils.java:45)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:319)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:218)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.initialize(SuperActivityOperatorNodePushable.java:83)
>         at org.apache.hyracks.control.nc.Task.run(Task.java:263)
>         ... 3 more
> Caused by: java.util.concurrent.ExecutionException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.util.concurrent.FutureTask.report(FutureTask.java:122)
>         at java.util.concurrent.FutureTask.get(FutureTask.java:192)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.runInParallel(SuperActivityOperatorNodePushable.java:212)
>         ... 5 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:230)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.std.EmptyTupleSourceRuntimeFactory$1.close(EmptyTupleSourceRuntimeFactory.java:60)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$1.initialize(AlgebricksMetaOperatorDescriptor.java:116)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable.lambda$initialize$0(SuperActivityOperatorNodePushable.java:83)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:205)
>         at 
> org.apache.hyracks.api.rewriter.runtime.SuperActivityOperatorNodePushable$1.call(SuperActivityOperatorNodePushable.java:202)
>         at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>         ... 3 more
> Caused by: org.apache.hyracks.api.exceptions.HyracksDataException: 
> java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:81)
>         at 
> org.apache.hyracks.dataflow.common.io.RunFileReader.open(RunFileReader.java:47)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.applyInMemHashJoin(OptimizedHybridHashJoinOperatorDescriptor.java:670)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.joinPartitionPair(OptimizedHybridHashJoinOperatorDescriptor.java:489)
>         at 
> org.apache.hyracks.dataflow.std.join.OptimizedHybridHashJoinOperatorDescriptor$ProbeAndJoinActivityNode$1.close(OptimizedHybridHashJoinOperatorDescriptor.java:426)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.base.AbstractOneInputOneOutputOneFramePushRuntime.close(AbstractOneInputOneOutputOneFramePushRuntime.java:57)
>         at 
> org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor$2.close(AlgebricksMetaOperatorDescriptor.java:153)
>         at 
> org.apache.hyracks.storage.am.common.dataflow.IndexSearchOperatorNodePushable.close(IndexSearchOperatorNodePushable.java:227)
>         ... 9 more
> Caused by: java.io.FileNotFoundException: 
> /home/hadoop/asterixdb/hadoop/node1/io2/./RelS1188720785205710895.waf (No 
> such file or directory)
>         at java.io.RandomAccessFile.open0(Native Method)
>         at java.io.RandomAccessFile.open(RandomAccessFile.java:316)
>         at java.io.RandomAccessFile.<init>(RandomAccessFile.java:243)
>         at 
> org.apache.hyracks.control.nc.io.FileHandle.open(FileHandle.java:70)
>         at org.apache.hyracks.control.nc.io.IOManager.open(IOManager.java:79)
>         ... 16 more
> Apr 24, 2016 12:01:49 PM 
> org.apache.hyracks.control.common.dataset.ResultStateSweeper sweep
> {noformat}
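
In short, both copies of the trace bottom out the same way: during the probe/close phase of OptimizedHybridHashJoinOperatorDescriptor, RunFileReader.open asks the IOManager to open a spilled run file (the Rel*.waf file above), and the JDK-level open fails because the file is no longer on disk. The snippet below is a minimal standalone sketch of just that bottom-most symptom; it is plain JDK code, not AsterixDB/Hyracks code, and the path is hypothetical (it only mirrors the run-file name from the trace):
{noformat}
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;

public class MissingRunFileDemo {
    public static void main(String[] args) {
        // Hypothetical path; mirrors the Rel*.waf run file named in the trace above.
        String runFile = "/tmp/RelS1188720785205710895.waf";
        try (RandomAccessFile raf = new RandomAccessFile(runFile, "r")) {
            // If the spilled run file were still present, the join could read it back.
            System.out.println("opened " + runFile + ", length=" + raf.length());
        } catch (FileNotFoundException e) {
            // Same exception as at the bottom of the job trace: the run file was
            // deleted (or never written on this node) before the probe phase read it.
            System.err.println(runFile + " (No such file or directory)");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
{noformat}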



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)
