http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
index 2c3e16e..918155d 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializerTaskState.java
@@ -61,7 +61,7 @@ public class MaterializerTaskState extends AbstractStateObject {
     public void open(IHyracksTaskContext ctx) throws HyracksDataException {
         FileReference file = ctx.getJobletContext()
                 .createManagedWorkspaceFile(MaterializerTaskState.class.getSimpleName());
-        out = new RunFileWriter(file, ctx.getIOManager());
+        out = new RunFileWriter(file, ctx.getIoManager());
         out.open();
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
index fd4b094..d3e87d4 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/MaterializingOperatorDescriptor.java
@@ -52,7 +52,7 @@ public class MaterializingOperatorDescriptor extends AbstractOperatorDescriptor
     public MaterializingOperatorDescriptor(IOperatorDescriptorRegistry spec, RecordDescriptor recordDescriptor,
             boolean isSingleActivity) {
         super(spec, 1, 1);
-        recordDescriptors[0] = recordDescriptor;
+        outRecDescs[0] = recordDescriptor;
         this.isSingleActivity = isSingleActivity;
     }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
index 47a05f7..f5176c6 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/misc/SplitVectorOperatorDescriptor.java
@@ -185,7 +185,7 @@ public class SplitVectorOperatorDescriptor extends AbstractOperatorDescriptor {
             RecordDescriptor recordDescriptor) {
         super(spec, 1, 1);
         this.splits = splits;
-        recordDescriptors[0] = recordDescriptor;
+        outRecDescs[0] = recordDescriptor;
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
index 273d5ba..1cd5fc3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/AbstractSorterOperatorDescriptor.java
@@ -68,7 +68,7 @@ public abstract class AbstractSorterOperatorDescriptor extends AbstractOperatorD
         this.sortFields = sortFields;
         this.firstKeyNormalizerFactory = firstKeyNormalizerFactory;
         this.comparatorFactories = comparatorFactories;
-        recordDescriptors[0] = recordDescriptor;
+        outRecDescs[0] = recordDescriptor;
     }
 
     public abstract SortActivity getSortActivity(ActivityId id);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
index 8597ed6..1b66ccf 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortOperatorDescriptor.java
@@ -70,7 +70,7 @@ public class ExternalSortOperatorDescriptor extends AbstractSorterOperatorDescri
             protected AbstractSortRunGenerator getRunGenerator(IHyracksTaskContext ctx,
                     IRecordDescriptorProvider recordDescProvider) throws HyracksDataException {
                 return new ExternalSortRunGenerator(ctx, sortFields, firstKeyNormalizerFactory, comparatorFactories,
-                        recordDescriptors[0], alg, policy, framesLimit, outputLimit);
+                        outRecDescs[0], alg, policy, framesLimit, outputLimit);
             }
         };
     }
@@ -86,7 +86,7 @@ public class ExternalSortOperatorDescriptor extends AbstractSorterOperatorDescri
                     List<GeneratedRunFileReader> runs, IBinaryComparator[] comparators,
                     INormalizedKeyComputer nmkComputer, int necessaryFrames) {
                 return new ExternalSortRunMerger(ctx, sorter, runs, sortFields, comparators, nmkComputer,
-                        recordDescriptors[0], necessaryFrames, outputLimit, writer);
+                        outRecDescs[0], necessaryFrames, outputLimit, writer);
             }
         };
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
index 3075719..b451b1c 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunGenerator.java
@@ -57,7 +57,7 @@ public class ExternalSortRunGenerator extends AbstractExternalSortRunGenerator {
     protected RunFileWriter getRunFileWriter() throws HyracksDataException {
         FileReference file = ctx.getJobletContext()
                 .createManagedWorkspaceFile(ExternalSortRunGenerator.class.getSimpleName());
-        return new RunFileWriter(file, ctx.getIOManager());
+        return new RunFileWriter(file, ctx.getIoManager());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunMerger.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
index e773a3b..2b985b9 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/ExternalSortRunMerger.java
@@ -49,7 +49,7 @@ public class ExternalSortRunMerger extends AbstractExternalSortRunMerger {
     @Override
     protected RunFileWriter prepareIntermediateMergeRunFile() throws HyracksDataException {
         FileReference newRun = ctx.createManagedWorkspaceFile(ExternalSortRunMerger.class.getSimpleName());
-        return new RunFileWriter(newRun, ctx.getIOManager());
+        return new RunFileWriter(newRun, ctx.getIoManager());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HeapSortRunGenerator.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HeapSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HeapSortRunGenerator.java
index 648be02..a058624 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HeapSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HeapSortRunGenerator.java
@@ -78,7 +78,7 @@ public class HeapSortRunGenerator extends AbstractSortRunGenerator {
     protected RunFileWriter getRunFileWriter() throws HyracksDataException {
         FileReference file = ctx.getJobletContext()
                 .createManagedWorkspaceFile(HeapSortRunGenerator.class.getSimpleName());
-        return new RunFileWriter(file, ctx.getIOManager());
+        return new RunFileWriter(file, ctx.getIoManager());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
index 5591ef7..4311128 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/HybridTopKSortRunGenerator.java
@@ -62,7 +62,7 @@ public class HybridTopKSortRunGenerator extends HeapSortRunGenerator {
     protected RunFileWriter getRunFileWriter() throws HyracksDataException {
         FileReference file = ctx.getJobletContext()
                 .createManagedWorkspaceFile(HybridTopKSortRunGenerator.class.getSimpleName());
-        return new RunFileWriter(file, ctx.getIOManager());
+        return new RunFileWriter(file, ctx.getIoManager());
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
index 521c2ae..996101b 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/InMemorySortOperatorDescriptor.java
@@ -68,7 +68,7 @@ public class InMemorySortOperatorDescriptor extends AbstractOperatorDescriptor {
         this.sortFields = sortFields;
         this.firstKeyNormalizerFactory = firstKeyNormalizerFactory;
         this.comparatorFactories = comparatorFactories;
-        recordDescriptors[0] = recordDescriptor;
+        outRecDescs[0] = recordDescriptor;
     }
 
     @Override
@@ -124,7 +124,7 @@ public class InMemorySortOperatorDescriptor extends AbstractOperatorDescriptor {
                             FrameFreeSlotPolicyFactory.createFreeSlotPolicy(EnumFreeSlotPolicy.LAST_FIT));
 
                     state.frameSorter = new FrameSorterMergeSort(ctx, frameBufferManager, sortFields,
-                            firstKeyNormalizerFactory, comparatorFactories, recordDescriptors[0]);
+                            firstKeyNormalizerFactory, comparatorFactories, outRecDescs[0]);
                     state.frameSorter.reset();
                 }
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
index 218faaf..988eea3 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/sort/TopKSorterOperatorDescriptor.java
@@ -54,7 +54,7 @@ public class TopKSorterOperatorDescriptor extends AbstractSorterOperatorDescript
             protected AbstractSortRunGenerator getRunGenerator(IHyracksTaskContext ctx,
                     IRecordDescriptorProvider recordDescProvider) {
                 return new HybridTopKSortRunGenerator(ctx, framesLimit, topK, sortFields, firstKeyNormalizerFactory,
-                        comparatorFactories, recordDescriptors[0]);
+                        comparatorFactories, outRecDescs[0]);
 
             }
         };
@@ -71,7 +71,7 @@ public class TopKSorterOperatorDescriptor extends AbstractSorterOperatorDescript
                     List<GeneratedRunFileReader> runs, IBinaryComparator[] comparators,
                     INormalizedKeyComputer nmkComputer, int necessaryFrames) {
                 return new ExternalSortRunMerger(ctx, sorter, runs, sortFields, comparators, nmkComputer,
-                        recordDescriptors[0], necessaryFrames, topK, writer);
+                        outRecDescs[0], necessaryFrames, topK, writer);
             }
         };
     }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
index f20fe8d..967977e 100644
--- a/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-dataflow-std/src/main/java/org/apache/hyracks/dataflow/std/union/UnionAllOperatorDescriptor.java
@@ -37,7 +37,7 @@ public class UnionAllOperatorDescriptor extends AbstractOperatorDescriptor {
     public UnionAllOperatorDescriptor(IOperatorDescriptorRegistry spec, int nInputs,
             RecordDescriptor recordDescriptor) {
         super(spec, nInputs, 1);
-        recordDescriptors[0] = recordDescriptor;
+        outRecDescs[0] = recordDescriptor;
     }
 
     private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
index 0772d83..48377e3 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -41,14 +41,11 @@ import org.apache.hyracks.dataflow.std.connectors.MToNPartitioningConnectorDescr
 import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import org.apache.hyracks.examples.btree.helper.BTreeHelperStorageManager;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import org.apache.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
 import org.apache.hyracks.storage.common.IStorageManager;
@@ -107,27 +104,24 @@ public class InsertPipelineExample {
         // string
         // we will use field 2 as primary key to fill a clustered index
         RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                new UTF8StringSerializerDeserializer(), // this field will
-                                                           // not go into B-Tree
-                new UTF8StringSerializerDeserializer(), // we will use this
-                                                           // as payload
-                IntegerSerializerDeserializer.INSTANCE, // we will use this
-                                                        // field as key
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload
-                new UTF8StringSerializerDeserializer() // we will use this as
-                                                          // payload
-                });
+                // this field will not go into B-Tree
+                new UTF8StringSerializerDeserializer(),
+                // we will use this as payload
+                new UTF8StringSerializerDeserializer(),
+                // we will use this field as key
+                IntegerSerializerDeserializer.INSTANCE,
+                // we will use this as payload
+                IntegerSerializerDeserializer.INSTANCE,
+                // we will use this as payload
+                new UTF8StringSerializerDeserializer() });
 
         // generate numRecords records with field 2 being unique, integer values
         // in [0, 100000], and strings with max length of 10 characters, and
         // random seed 100
-        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
-                100000, 10, 100);
+        DataGenOperatorDescriptor dataGen =
+                new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 100);
         // run data generator on first nodecontroller given
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
-
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManager storageManager = BTreeHelperStorageManager.INSTANCE;
 
         // prepare insertion into primary index
@@ -150,14 +144,12 @@ public class InsertPipelineExample {
                                                         // B-Tree tuple, etc.
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
 
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
+        IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
 
         // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE,
-                new LinkedMetadataPageManagerFactory());
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert =
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, recDesc, primaryFieldPermutation,
+                        IndexOperation.INSERT, primaryHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
 
         // prepare insertion into secondary index
@@ -175,14 +167,14 @@ public class InsertPipelineExample {
         // the B-Tree expects its keyfields to be at the front of its input
         // tuple
         int[] secondaryFieldPermutation = { 1, 2 };
-        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName);
+        IFileSplitProvider secondarySplitProvider =
+                JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
+        IIndexDataflowHelperFactory secondaryHelperFactory =
+                new IndexDataflowHelperFactory(storageManager, secondarySplitProvider);
         // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
-                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE,
-                new LinkedMetadataPageManagerFactory());
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert =
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, recDesc, secondaryFieldPermutation,
+                        IndexOperation.INSERT, secondaryHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
 
         // end the insert pipeline at this sink operator

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 550af4c..203d22c 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -41,14 +41,11 @@ import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import org.apache.hyracks.examples.btree.helper.BTreeHelperStorageManager;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import org.apache.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.kohsuke.args4j.CmdLineParser;
 import org.kohsuke.args4j.Option;
@@ -83,11 +80,8 @@ public class PrimaryIndexBulkLoadExample {
         Options options = new Options();
         CmdLineParser parser = new CmdLineParser(options);
         parser.parseArgument(args);
-
         IHyracksClientConnection hcc = new HyracksConnection(options.host, options.port);
-
         JobSpecification job = createJob(options);
-
         long start = System.currentTimeMillis();
         JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
@@ -105,23 +99,22 @@ public class PrimaryIndexBulkLoadExample {
         // int, string
         // we will use field-index 2 as primary key to fill a clustered index
         RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                new UTF8StringSerializerDeserializer(), // this field will
-                                                           // not go into B-Tree
-                new UTF8StringSerializerDeserializer(), // we will use this
-                                                           // as payload
-                IntegerSerializerDeserializer.INSTANCE, // we will use this
-                                                        // field as key
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload
-                new UTF8StringSerializerDeserializer() // we will use this as
-                                                          // payload
-                });
+                // this field will not go into B-Tree
+                new UTF8StringSerializerDeserializer(),
+                // we will use this as payload
+                new UTF8StringSerializerDeserializer(),
+                // we will use this field as key
+                IntegerSerializerDeserializer.INSTANCE,
+                // we will use this as payload
+                IntegerSerializerDeserializer.INSTANCE,
+                // we will use this as payload
+                new UTF8StringSerializerDeserializer() });
 
         // generate numRecords records with field 2 being unique, integer values
         // in [0, 100000], and strings with max length of 10 characters, and
         // random seed 50
-        DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
-                100000, 10, 50);
+        DataGenOperatorDescriptor dataGen =
+                new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 50);
         // run data generator on first nodecontroller given
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
 
@@ -131,8 +124,8 @@ public class PrimaryIndexBulkLoadExample {
         // comparators for sort fields
         IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
         comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
-                comparatorFactories, recDesc);
+        ExternalSortOperatorDescriptor sorter =
+                new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
         JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
 
         // tuples to be put into B-Tree shall have 4 fields
@@ -144,7 +137,6 @@ public class PrimaryIndexBulkLoadExample {
         typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
 
         // create providers for B-Tree
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManager storageManager = BTreeHelperStorageManager.INSTANCE;
 
         // the B-Tree expects its keyfields to be at the front of its input
@@ -153,11 +145,9 @@ public class PrimaryIndexBulkLoadExample {
                                                  // to field 0 of B-Tree tuple,
                                                  // etc.
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
+        IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, recDesc,
-                storageManager, lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null,
-                fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory,
-                new LinkedMetadataPageManagerFactory());
+                fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory);
 
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index f15648e..603dc6b 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -39,13 +39,10 @@ import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import org.apache.hyracks.examples.btree.helper.BTreeHelperStorageManager;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.kohsuke.args4j.CmdLineParser;
@@ -106,7 +103,6 @@ public class PrimaryIndexSearchExample {
         comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
         // create roviders for B-Tree
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManager storageManager = BTreeHelperStorageManager.INSTANCE;
 
         // schema of tuples coming out of primary index
@@ -129,8 +125,8 @@ public class PrimaryIndexSearchExample {
                                                                     // high key
         tb.addFieldEndOffset();
 
-        ISerializerDeserializer[] keyRecDescSers = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] keyRecDescSers =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
 
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
@@ -143,11 +139,10 @@ public class PrimaryIndexSearchExample {
                                      // into search op
 
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
-        BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
-                lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null, lowKeyFields,
+        IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
+        BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, lowKeyFields,
                 highKeyFields, true, true, dataflowHelperFactory, false, false, null,
-                NoOpOperationCallbackFactory.INSTANCE, null, null, new LinkedMetadataPageManagerFactory());
+                NoOpOperationCallbackFactory.INSTANCE, null, null, false);
 
         JobHelper.createPartitionConstraint(spec, btreeSearchOp, splitNCs);
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 003e353..7507f10 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -35,14 +35,11 @@ import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import org.apache.hyracks.examples.btree.helper.BTreeHelperStorageManager;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import org.apache.hyracks.storage.am.common.dataflow.TreeIndexDiskOrderScanOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.kohsuke.args4j.CmdLineParser;
@@ -96,19 +93,15 @@ public class SecondaryIndexBulkLoadExample {
         JobSpecification spec = new JobSpecification(options.frameSize);
 
         String[] splitNCs = options.ncs.split(",");
-
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManager storageManager = BTreeHelperStorageManager.INSTANCE;
 
         // schema of tuples that we are retrieving from the primary index
         RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, // we will use this as
-                                                        // payload in secondary
-                                                        // index
-                new UTF8StringSerializerDeserializer(), // we will use this
-                                                           // ask key in
-                                                           // secondary index
-                IntegerSerializerDeserializer.INSTANCE, new UTF8StringSerializerDeserializer() });
+                // we will use this as payload in secondary index
+                IntegerSerializerDeserializer.INSTANCE,
+                // we will use this ask key in secondary index
+                new UTF8StringSerializerDeserializer(), IntegerSerializerDeserializer.INSTANCE,
+                new UTF8StringSerializerDeserializer() });
 
         int primaryFieldCount = 4;
         ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
@@ -124,17 +117,16 @@ public class SecondaryIndexBulkLoadExample {
 
         // use a disk-order scan to read primary index
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
+        IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
         TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
-                recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
-                dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE, new LinkedMetadataPageManagerFactory());
+                recDesc, primaryHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
 
         // sort the tuples as preparation for bulk load into secondary index
         // fields to sort on
         int[] sortFields = { 1, 0 };
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
-                comparatorFactories, recDesc);
+        ExternalSortOperatorDescriptor sorter =
+                new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
         JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
 
         // tuples to be put into B-Tree shall have 2 fields
@@ -147,10 +139,9 @@ public class SecondaryIndexBulkLoadExample {
         // tuple
         int[] fieldPermutation = { 1, 0 };
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
+        IIndexDataflowHelperFactory secondaryHelperFactory = new IndexDataflowHelperFactory(storageManager, btreeSplitProvider);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, null,
-                storageManager, lcManagerProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories, null,
-                fieldPermutation, 0.7f, false, 1000L, true, dataflowHelperFactory,
-                new LinkedMetadataPageManagerFactory());
+                fieldPermutation, 0.7f, false, 1000L, true, secondaryHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
         NullSinkOperatorDescriptor nsOpDesc = new NullSinkOperatorDescriptor(spec);
         JobHelper.createPartitionConstraint(spec, nsOpDesc, splitNCs);

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 94152d1..1e909ef 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/org/apache/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -39,13 +39,10 @@ import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import org.apache.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import org.apache.hyracks.examples.btree.helper.BTreeHelperStorageManager;
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.kohsuke.args4j.CmdLineParser;
@@ -97,7 +94,6 @@ public class SecondaryIndexSearchExample {
 
         String[] splitNCs = options.ncs.split(",");
 
-        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManager storageManager = BTreeHelperStorageManager.INSTANCE;
 
         // schema of tuples coming out of secondary index
@@ -146,14 +142,14 @@ public class SecondaryIndexSearchExample {
 
         tb.reset();
         new UTF8StringSerializerDeserializer().serialize("0", dos); // low
-                                                                       // key
+                                                                    // key
         tb.addFieldEndOffset();
         new UTF8StringSerializerDeserializer().serialize("f", dos); // high
-                                                                       // key
+                                                                    // key
         tb.addFieldEndOffset();
 
-        ISerializerDeserializer[] keyRecDescSers = { new UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] keyRecDescSers =
+                { new UTF8StringSerializerDeserializer(), new UTF8StringSerializerDeserializer() };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
 
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
@@ -167,14 +163,13 @@ public class SecondaryIndexSearchExample {
                                               // tuples going into secondary
                                               // index search op
 
-        IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
-                options.secondaryBTreeName);
-        IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory(true);
+        IFileSplitProvider secondarySplitProvider =
+                JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
+        IIndexDataflowHelperFactory secondaryHelperFactory =
+                new IndexDataflowHelperFactory(storageManager, secondarySplitProvider);
         BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
-                searchComparatorFactories, null, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, null, null,
-                new LinkedMetadataPageManagerFactory());
+                secondaryLowKeyFields, secondaryHighKeyFields, true, true, secondaryHelperFactory, false, false, null,
+                NoOpOperationCallbackFactory.INSTANCE, null, null, false);
 
         JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
 
@@ -188,10 +183,10 @@ public class SecondaryIndexSearchExample {
                                             // op
 
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+        IIndexDataflowHelperFactory primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, primarySplitProvider);
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                null, primaryLowKeyFields, primaryHighKeyFields, true, true, dataflowHelperFactory, false, false, null,
-                NoOpOperationCallbackFactory.INSTANCE, null, null, new LinkedMetadataPageManagerFactory());
+                primaryLowKeyFields, primaryHighKeyFields, true, true, primaryHelperFactory, false, false, null,
+                NoOpOperationCallbackFactory.INSTANCE, null, null, false);
 
         JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
 

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/BTreeHelperStorageManager.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/BTreeHelperStorageManager.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/BTreeHelperStorageManager.java
index 8f05266..73ac7a8 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/BTreeHelperStorageManager.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/BTreeHelperStorageManager.java
@@ -19,11 +19,13 @@
 
 package org.apache.hyracks.examples.btree.helper;
 
-import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.application.INCServiceContext;
+import org.apache.hyracks.storage.common.IIndex;
+import org.apache.hyracks.storage.common.ILocalResourceRepository;
+import org.apache.hyracks.storage.common.IResourceLifecycleManager;
 import org.apache.hyracks.storage.common.IStorageManager;
 import org.apache.hyracks.storage.common.buffercache.IBufferCache;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
-import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 import org.apache.hyracks.storage.common.file.ResourceIdFactory;
 
 public class BTreeHelperStorageManager implements IStorageManager {
@@ -35,22 +37,27 @@ public class BTreeHelperStorageManager implements IStorageManager {
     }
 
     @Override
-    public IBufferCache getBufferCache(IHyracksTaskContext ctx) {
+    public IBufferCache getBufferCache(INCServiceContext ctx) {
         return RuntimeContext.get(ctx).getBufferCache();
     }
 
     @Override
-    public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
+    public IFileMapProvider getFileMapProvider(INCServiceContext ctx) {
         return RuntimeContext.get(ctx).getFileMapManager();
     }
 
     @Override
-    public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
+    public ILocalResourceRepository getLocalResourceRepository(INCServiceContext ctx) {
         return RuntimeContext.get(ctx).getLocalResourceRepository();
     }
 
     @Override
-    public ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
+    public ResourceIdFactory getResourceIdFactory(INCServiceContext ctx) {
         return RuntimeContext.get(ctx).getResourceIdFactory();
     }
+
+    @Override
+    public IResourceLifecycleManager<IIndex> getLifecycleManager(INCServiceContext ctx) {
+        return RuntimeContext.get(ctx).getIndexLifecycleManager();
+    }
 }

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
index 5ef6417..e4d9960 100644
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
+++ b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
@@ -57,7 +57,7 @@ public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDes
         this.intMaxVal = intMaxVal;
         this.maxStrLen = maxStrLen;
         this.randomSeed = randomSeed;
-        recordDescriptors[0] = outputRecord;
+        outRecDescs[0] = outputRecord;
     }
 
     @Override
@@ -65,7 +65,7 @@ public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDes
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
 
         final FrameTupleAppender appender = new FrameTupleAppender(new VSizeFrame(ctx));
-        final RecordDescriptor recDesc = recordDescriptors[0];
+        final RecordDescriptor recDesc = outRecDescs[0];
         final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
         final Random rnd = new Random(randomSeed);
         final int maxUniqueAttempts = 20;

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
----------------------------------------------------------------------
diff --git a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
deleted file mode 100644
index 960a11c..0000000
--- a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.hyracks.examples.btree.helper;
-
-import org.apache.hyracks.api.context.IHyracksTaskContext;
-import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
-
-public enum IndexLifecycleManagerProvider implements IIndexLifecycleManagerProvider {
-    INSTANCE;
-
-    @Override
-    public IResourceLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getIndexLifecycleManager();
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/RuntimeContext.java
----------------------------------------------------------------------
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/RuntimeContext.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/RuntimeContext.java
index 269516a..a2175ee 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/RuntimeContext.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/org/apache/hyracks/examples/btree/helper/RuntimeContext.java
@@ -22,10 +22,11 @@ package org.apache.hyracks.examples.btree.helper;
 import java.util.concurrent.ThreadFactory;
 
 import org.apache.hyracks.api.application.INCServiceContext;
-import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.storage.am.common.api.IResourceLifecycleManager;
 import org.apache.hyracks.storage.am.common.dataflow.IndexLifecycleManager;
+import org.apache.hyracks.storage.common.IIndex;
+import org.apache.hyracks.storage.common.ILocalResourceRepository;
+import org.apache.hyracks.storage.common.IResourceLifecycleManager;
 import org.apache.hyracks.storage.common.buffercache.BufferCache;
 import 
org.apache.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
 import org.apache.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
@@ -35,7 +36,6 @@ import 
org.apache.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
 import org.apache.hyracks.storage.common.buffercache.IPageReplacementStrategy;
 import org.apache.hyracks.storage.common.file.IFileMapManager;
 import org.apache.hyracks.storage.common.file.IFileMapProvider;
-import org.apache.hyracks.storage.common.file.ILocalResourceRepository;
 import org.apache.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
 import org.apache.hyracks.storage.common.file.ResourceIdFactory;
 import org.apache.hyracks.storage.common.file.ResourceIdFactoryProvider;
@@ -46,7 +46,7 @@ public class RuntimeContext {
     private IBufferCache bufferCache;
     private IFileMapManager fileMapManager;
     private ILocalResourceRepository localResourceRepository;
-    private IResourceLifecycleManager lcManager;
+    private IResourceLifecycleManager<IIndex> lcManager;
     private ResourceIdFactory resourceIdFactory;
     private ThreadFactory threadFactory = new ThreadFactory() {
         @Override
@@ -59,8 +59,8 @@ public class RuntimeContext {
         fileMapManager = new TransientFileMapManager();
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
         IPageReplacementStrategy prs = new 
ClockPageReplacementStrategy(allocator, 32768, 50);
-        bufferCache = new BufferCache(appCtx.getIoManager(), prs, new 
DelayPageCleanerPolicy(1000),
-                fileMapManager, 100, threadFactory);
+        bufferCache = new BufferCache(appCtx.getIoManager(), prs, new 
DelayPageCleanerPolicy(1000), fileMapManager, 100,
+                threadFactory);
         ILocalResourceRepositoryFactory localResourceRepositoryFactory = new 
TransientLocalResourceRepositoryFactory();
         localResourceRepository = 
localResourceRepositoryFactory.createRepository();
         resourceIdFactory = (new 
ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
@@ -79,8 +79,8 @@ public class RuntimeContext {
         return fileMapManager;
     }
 
-    public static RuntimeContext get(IHyracksTaskContext ctx) {
-        return (RuntimeContext) 
ctx.getJobletContext().getServiceContext().getApplicationContext();
+    public static RuntimeContext get(INCServiceContext ctx) {
+        return (RuntimeContext) ctx.getApplicationContext();
     }
 
     public ILocalResourceRepository getLocalResourceRepository() {
@@ -91,7 +91,7 @@ public class RuntimeContext {
         return resourceIdFactory;
     }
 
-    public IResourceLifecycleManager getIndexLifecycleManager() {
+    public IResourceLifecycleManager<IIndex> getIndexLifecycleManager() {
         return lcManager;
     }
 }
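
With the IHyracksTaskContext overload removed, callers now reach RuntimeContext through an INCServiceContext. A minimal sketch of the new lookup path, using only the type and method names visible in the hunks above; the wrapper class and method are hypothetical.

import org.apache.hyracks.api.application.INCServiceContext;
import org.apache.hyracks.examples.btree.helper.RuntimeContext;
import org.apache.hyracks.storage.common.IIndex;
import org.apache.hyracks.storage.common.IResourceLifecycleManager;

public final class RuntimeContextLookupSketch {
    private RuntimeContextLookupSketch() {
    }

    // The task-context overload is gone; callers hand over the NC service context directly.
    public static IResourceLifecycleManager<IIndex> lifecycleManagerOf(INCServiceContext serviceCtx) {
        RuntimeContext runtimeCtx = RuntimeContext.get(serviceCtx);
        return runtimeCtx.getIndexLifecycleManager();
    }
}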

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
----------------------------------------------------------------------
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
index 8431640..04226df 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
@@ -19,6 +19,18 @@
 
 package org.apache.hyracks.tests.am.btree;
 
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.inputParserFactories;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.inputRecordDesc;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryFieldPermutation;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryKeyFieldCount;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryRecDesc;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryFieldPermutationA;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryFieldPermutationB;
+import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryRecDesc;
+
+import java.io.DataOutput;
+import java.io.File;
+
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
 import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
@@ -39,95 +51,70 @@ import 
org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescripto
 import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 import org.apache.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import 
org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import org.apache.hyracks.storage.am.common.api.IIndexBuilderFactory;
 import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
+import org.apache.hyracks.storage.am.common.build.IndexBuilderFactory;
 import 
org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import 
org.apache.hyracks.storage.am.common.dataflow.IndexCreateOperatorDescriptor;
 import 
org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import 
org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
 import 
org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import 
org.apache.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import 
org.apache.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
 import 
org.apache.hyracks.storage.am.common.freepage.AppendOnlyLinkedMetadataPageManagerFactory;
-import 
org.apache.hyracks.storage.am.common.freepage.LinkedMetadataPageManagerFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
+import org.apache.hyracks.storage.common.IResourceFactory;
 import org.apache.hyracks.storage.common.IStorageManager;
-import 
org.apache.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
-import org.apache.hyracks.test.support.TestIndexLifecycleManagerProvider;
 import org.apache.hyracks.test.support.TestStorageManager;
 import org.apache.hyracks.test.support.TestStorageManagerComponentHolder;
 import org.apache.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import org.apache.hyracks.tests.am.common.TreeOperatorTestHelper;
 import org.apache.hyracks.tests.integration.AbstractIntegrationTest;
 import org.junit.After;
 import org.junit.Before;
 
-import java.io.DataOutput;
-import java.io.File;
-
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.inputParserFactories;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.inputRecordDesc;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryBloomFilterKeyFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryBtreeFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryComparatorFactories;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryFieldPermutation;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryFilterFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryKeyFieldCount;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryRecDesc;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryTypeTraits;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryBloomFilterKeyFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryBtreeFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryComparatorFactories;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryFieldPermutationA;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryFieldPermutationB;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryFilterFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryRecDesc;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.secondaryTypeTraits;
-
 public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest {
     static {
         TestStorageManagerComponentHolder.init(8192, 20, 20);
     }
 
     protected final IStorageManager storageManager = new TestStorageManager();
-    protected final IIndexLifecycleManagerProvider lcManagerProvider = new 
TestIndexLifecycleManagerProvider();
-    protected IIndexDataflowHelperFactory primaryDataflowHelperFactory;
-    protected IIndexDataflowHelperFactory secondaryDataflowHelperFactory;
+    protected final IPageManagerFactory pageManagerFactory = 
AppendOnlyLinkedMetadataPageManagerFactory.INSTANCE;
 
     // to be set by subclasses
     protected IFileSplitProvider primarySplitProvider;
-    protected IPageManagerFactory pageManagerFactory = 
AppendOnlyLinkedMetadataPageManagerFactory.INSTANCE;
-
+    protected IIndexDataflowHelperFactory primaryHelperFactory;
     protected IFileSplitProvider secondarySplitProvider;
-
+    protected IIndexDataflowHelperFactory secondaryHelperFactory;
     protected ITreeIndexOperatorTestHelper testHelper;
 
     protected ITreeIndexOperatorTestHelper createTestHelper() throws 
HyracksDataException {
-        return new BTreeOperatorTestHelper();
+        return new TreeOperatorTestHelper();
     }
 
     @Before
     public void setup() throws Exception {
         testHelper = createTestHelper();
-        primaryDataflowHelperFactory = 
createDataFlowHelperFactory(primaryBtreeFields, primaryFilterFields);
-        secondaryDataflowHelperFactory = 
createDataFlowHelperFactory(secondaryBtreeFields, secondaryFilterFields);
         String primaryFileName = testHelper.getPrimaryIndexName();
         primarySplitProvider =
                 new ConstantFileSplitProvider(new FileSplit[] { new 
ManagedFileSplit(NC1_ID, primaryFileName) });
         String secondaryFileName = testHelper.getSecondaryIndexName();
+        primaryHelperFactory = new IndexDataflowHelperFactory(storageManager, 
primarySplitProvider);
         secondarySplitProvider =
                 new ConstantFileSplitProvider(new FileSplit[] { new 
ManagedFileSplit(NC1_ID, secondaryFileName) });
+        secondaryHelperFactory = new 
IndexDataflowHelperFactory(storageManager, secondarySplitProvider);
     }
 
-    protected abstract IIndexDataflowHelperFactory 
createDataFlowHelperFactory(int [] btreeFields, int[] filterFields);
+    protected abstract IResourceFactory createPrimaryResourceFactory();
+
+    protected abstract IResourceFactory createSecondaryResourceFactory();
 
     public void createPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider =
-                new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp =
-                new TreeIndexCreateOperatorDescriptor(spec, storageManager, 
lcManagerProvider, primarySplitProvider,
-                        primaryTypeTraits, primaryComparatorFactories, 
primaryBloomFilterKeyFields,
-                        primaryDataflowHelperFactory, 
localResourceFactoryProvider,
-                        NoOpOperationCallbackFactory.INSTANCE, 
pageManagerFactory);
+        IResourceFactory primaryResourceFactory = 
createPrimaryResourceFactory();
+        IIndexBuilderFactory indexBuilderFactory =
+                new IndexBuilderFactory(storageManager, primarySplitProvider, 
primaryResourceFactory, false);
+        IndexCreateOperatorDescriptor primaryCreateOp = new 
IndexCreateOperatorDescriptor(spec, indexBuilderFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryCreateOp, NC1_ID);
         spec.addRoot(primaryCreateOp);
         runTest(spec);
@@ -150,11 +137,9 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
                 ordersDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, 
NC1_ID);
 
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad =
-                new TreeIndexBulkLoadOperatorDescriptor(spec, primaryRecDesc, 
storageManager, lcManagerProvider,
-                        primarySplitProvider, primaryTypeTraits, 
primaryComparatorFactories,
-                        primaryBloomFilterKeyFields, primaryFieldPermutation, 
0.7f, true, 1000L, true,
-                        primaryDataflowHelperFactory, pageManagerFactory);
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new 
TreeIndexBulkLoadOperatorDescriptor(spec,
+                primaryRecDesc, fieldPermutation, 0.7f, true, 1000L, true, 
primaryHelperFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryBtreeBulkLoad, NC1_ID);
 
         NullSinkOperatorDescriptor nsOpDesc = new 
NullSinkOperatorDescriptor(spec);
@@ -170,13 +155,10 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
 
     public void createSecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TransientLocalResourceFactoryProvider localResourceFactoryProvider =
-                new TransientLocalResourceFactoryProvider();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp =
-                new TreeIndexCreateOperatorDescriptor(spec, storageManager, 
lcManagerProvider, secondarySplitProvider,
-                        secondaryTypeTraits, secondaryComparatorFactories, 
secondaryBloomFilterKeyFields,
-                        secondaryDataflowHelperFactory, 
localResourceFactoryProvider,
-                        NoOpOperationCallbackFactory.INSTANCE, 
pageManagerFactory);
+        IResourceFactory secondaryResourceFactory = 
createSecondaryResourceFactory();
+        IIndexBuilderFactory indexBuilderFactory =
+                new IndexBuilderFactory(storageManager, 
secondarySplitProvider, secondaryResourceFactory, false);
+        IndexCreateOperatorDescriptor secondaryCreateOp = new 
IndexCreateOperatorDescriptor(spec, indexBuilderFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
secondaryCreateOp, NC1_ID);
         spec.addRoot(secondaryCreateOp);
         runTest(spec);
@@ -195,21 +177,17 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
 
         RecordDescriptor keyRecDesc = secondaryRecDesc;
 
-        ConstantTupleSourceOperatorDescriptor keyProviderOp =
-                new ConstantTupleSourceOperatorDescriptor(spec, keyRecDesc, 
tb.getFieldEndOffsets(), tb.getByteArray(),
-                        tb.getSize());
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new 
ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), 
tb.getSize());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
keyProviderOp, NC1_ID);
 
         int[] lowKeyFields = null; // - infinity
         int[] highKeyFields = null; // + infinity
 
         // scan primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp =
-                new BTreeSearchOperatorDescriptor(spec, primaryRecDesc, 
storageManager, lcManagerProvider,
-                        primarySplitProvider, primaryTypeTraits, 
primaryComparatorFactories,
-                        primaryBloomFilterKeyFields, lowKeyFields, 
highKeyFields, true, true,
-                        secondaryDataflowHelperFactory, false, false, null, 
NoOpOperationCallbackFactory.INSTANCE, null,
-                        null, new LinkedMetadataPageManagerFactory());
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new 
BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                lowKeyFields, highKeyFields, true, true, primaryHelperFactory, 
false, false, null,
+                NoOpOperationCallbackFactory.INSTANCE, null, null, false);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryBtreeSearchOp, NC1_ID);
 
         // sort based on secondary keys
@@ -217,16 +195,14 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
                 new ExternalSortOperatorDescriptor(spec, 1000, 
secondaryFieldPermutationA,
                         new IBinaryComparatorFactory[] {
                                 
PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                                
PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, 
primaryRecDesc);
+                                
PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                        primaryRecDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, 
NC1_ID);
 
         // load secondary index
-        int[] fieldPermutation = secondaryFieldPermutationA;
-        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad =
-                new TreeIndexBulkLoadOperatorDescriptor(spec, 
secondaryRecDesc, storageManager, lcManagerProvider,
-                        secondarySplitProvider, secondaryTypeTraits, 
secondaryComparatorFactories,
-                        secondaryBloomFilterKeyFields, fieldPermutation, 0.7f, 
true, 1000L, true,
-                        secondaryDataflowHelperFactory, pageManagerFactory);
+        int[] fieldPermutation = { 3, 0 };
+        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new 
TreeIndexBulkLoadOperatorDescriptor(spec,
+                secondaryRecDesc, fieldPermutation, 0.7f, true, 1000L, true, 
secondaryHelperFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
secondaryBtreeBulkLoad, NC1_ID);
 
         NullSinkOperatorDescriptor nsOpDesc = new 
NullSinkOperatorDescriptor(spec);
@@ -256,20 +232,15 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
 
         // insert into primary index
         TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp =
-                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, 
ordersDesc, storageManager, lcManagerProvider,
-                        primarySplitProvider, primaryTypeTraits, 
primaryComparatorFactories,
-                        primaryBloomFilterKeyFields, primaryFieldPermutation, 
pipelineOperation,
-                        primaryDataflowHelperFactory, null, 
NoOpOperationCallbackFactory.INSTANCE, pageManagerFactory);
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, 
ordersDesc, primaryFieldPermutation,
+                        pipelineOperation, primaryHelperFactory, null, 
NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryBtreeInsertOp, NC1_ID);
 
         // first secondary index
         int[] fieldPermutationB = secondaryFieldPermutationB;
         TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp =
-                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, 
ordersDesc, storageManager, lcManagerProvider,
-                        secondarySplitProvider, secondaryTypeTraits, 
secondaryComparatorFactories,
-                        secondaryBloomFilterKeyFields, fieldPermutationB, 
pipelineOperation,
-                        secondaryDataflowHelperFactory, null, 
NoOpOperationCallbackFactory.INSTANCE,
-                        pageManagerFactory);
+                new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, 
ordersDesc, fieldPermutationB,
+                        pipelineOperation, secondaryHelperFactory, null, 
NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
secondaryInsertOp, NC1_ID);
 
         NullSinkOperatorDescriptor nullSink = new 
NullSinkOperatorDescriptor(spec);
@@ -287,9 +258,7 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
 
     protected void destroyPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor primaryDropOp =
-                new IndexDropOperatorDescriptor(spec, storageManager, 
lcManagerProvider, primarySplitProvider,
-                        primaryDataflowHelperFactory, pageManagerFactory);
+        IndexDropOperatorDescriptor primaryDropOp = new 
IndexDropOperatorDescriptor(spec, primaryHelperFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryDropOp, NC1_ID);
         spec.addRoot(primaryDropOp);
         runTest(spec);
@@ -297,9 +266,7 @@ public abstract class AbstractBTreeOperatorTest extends 
AbstractIntegrationTest
 
     protected void destroySecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        IndexDropOperatorDescriptor secondaryDropOp =
-                new IndexDropOperatorDescriptor(spec, storageManager, 
lcManagerProvider, secondarySplitProvider,
-                        secondaryDataflowHelperFactory, pageManagerFactory);
+        IndexDropOperatorDescriptor secondaryDropOp = new 
IndexDropOperatorDescriptor(spec, secondaryHelperFactory);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
secondaryDropOp, NC1_ID);
         spec.addRoot(secondaryDropOp);
         runTest(spec);
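
The refactored test now assembles index jobs from an IResourceFactory plus IndexBuilderFactory for creation, and a plain IndexDataflowHelperFactory for bulk load, insert/update/delete, and drop. The sketch below isolates those constructor shapes exactly as they appear in the hunks above; the JobSpecification import package is assumed, and the wrapper class and method names are hypothetical.

import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
import org.apache.hyracks.storage.am.common.api.IIndexBuilderFactory;
import org.apache.hyracks.storage.am.common.build.IndexBuilderFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.dataflow.IndexCreateOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import org.apache.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.ophelpers.IndexOperation;
import org.apache.hyracks.storage.common.IResourceFactory;
import org.apache.hyracks.storage.common.IStorageManager;

public final class IndexJobWiringSketch {
    private IndexJobWiringSketch() {
    }

    // Creation: the resource factory carries the index definition; the builder factory
    // pairs it with the storage manager and file splits.
    public static IndexCreateOperatorDescriptor createOp(JobSpecification spec, IStorageManager storageManager,
            IFileSplitProvider splits, IResourceFactory resourceFactory) {
        IIndexBuilderFactory builderFactory = new IndexBuilderFactory(storageManager, splits, resourceFactory, false);
        return new IndexCreateOperatorDescriptor(spec, builderFactory);
    }

    // Every other operator only needs the dataflow helper factory built from the same splits.
    public static IIndexDataflowHelperFactory helperFactory(IStorageManager storageManager,
            IFileSplitProvider splits) {
        return new IndexDataflowHelperFactory(storageManager, splits);
    }

    // Bulk load keeps its numeric and boolean tuning arguments unchanged from the hunk above
    // but no longer takes type traits, comparators, or a lifecycle-manager provider.
    public static TreeIndexBulkLoadOperatorDescriptor bulkLoadOp(JobSpecification spec, RecordDescriptor recDesc,
            int[] fieldPermutation, IIndexDataflowHelperFactory helperFactory) {
        return new TreeIndexBulkLoadOperatorDescriptor(spec, recDesc, fieldPermutation, 0.7f, true, 1000L, true,
                helperFactory);
    }

    // Insert/update/delete likewise shrinks to the permutation, the operation, and the helper factory.
    public static TreeIndexInsertUpdateDeleteOperatorDescriptor modifyOp(JobSpecification spec,
            RecordDescriptor inputRecDesc, int[] fieldPermutation, IndexOperation op,
            IIndexDataflowHelperFactory helperFactory) {
        return new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, inputRecDesc, fieldPermutation, op,
                helperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
    }

    // Drop is now a two-argument constructor.
    public static IndexDropOperatorDescriptor dropOp(JobSpecification spec,
            IIndexDataflowHelperFactory helperFactory) {
        return new IndexDropOperatorDescriptor(spec, helperFactory);
    }
}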

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
----------------------------------------------------------------------
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
deleted file mode 100644
index a503e74..0000000
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.hyracks.tests.am.btree;
-
-import org.apache.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import 
org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import org.apache.hyracks.tests.am.common.TreeOperatorTestHelper;
-
-public class BTreeOperatorTestHelper extends TreeOperatorTestHelper {
-
-    public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
-        return new BTreeDataflowHelperFactory(true);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/asterixdb/blob/735532e4/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
----------------------------------------------------------------------
diff --git 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
index 605366d..336204c 100644
--- 
a/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
+++ 
b/hyracks-fullstack/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/org/apache/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -19,6 +19,8 @@
 
 package org.apache.hyracks.tests.am.btree;
 
+import java.io.DataOutput;
+
 import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
@@ -32,20 +34,13 @@ import 
org.apache.hyracks.dataflow.std.file.ConstantFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
 import org.apache.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import 
org.apache.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import org.apache.hyracks.storage.am.btree.dataflow.BTreeResourceFactory;
 import 
org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import 
org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
 import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.common.IResourceFactory;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.DataOutput;
-
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryBloomFilterKeyFields;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryComparatorFactories;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryKeyFieldCount;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryRecDesc;
-import static 
org.apache.hyracks.tests.am.btree.DataSetConstants.primaryTypeTraits;
-
 public class BTreePrimaryIndexScanOperatorTest extends 
AbstractBTreeOperatorTest {
 
     @Override
@@ -61,15 +56,15 @@ public class BTreePrimaryIndexScanOperatorTest extends 
AbstractBTreeOperatorTest
         JobSpecification spec = new JobSpecification();
 
         // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        ArrayTupleBuilder tb = new 
ArrayTupleBuilder(DataSetConstants.primaryKeyFieldCount * 2);
         DataOutput dos = tb.getDataOutput();
 
         tb.reset();
         new UTF8StringSerializerDeserializer().serialize("0", dos);
         tb.addFieldEndOffset();
 
-        ISerializerDeserializer[] keyRecDescSers = { new 
UTF8StringSerializerDeserializer(),
-                new UTF8StringSerializerDeserializer() };
+        ISerializerDeserializer[] keyRecDescSers =
+                { new UTF8StringSerializerDeserializer(), new 
UTF8StringSerializerDeserializer() };
         RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
 
         ConstantTupleSourceOperatorDescriptor keyProviderOp = new 
ConstantTupleSourceOperatorDescriptor(spec,
@@ -79,10 +74,9 @@ public class BTreePrimaryIndexScanOperatorTest extends 
AbstractBTreeOperatorTest
         int[] lowKeyFields = null; // - infinity
         int[] highKeyFields = null; // + infinity
 
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new 
BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, lcManagerProvider, primarySplitProvider, 
primaryTypeTraits, primaryComparatorFactories,
-                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, 
true, true, primaryDataflowHelperFactory, false,
-                false, null, NoOpOperationCallbackFactory.INSTANCE, null, 
null, pageManagerFactory);
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new 
BTreeSearchOperatorDescriptor(spec,
+                DataSetConstants.primaryRecDesc, lowKeyFields, highKeyFields, 
true, true, primaryHelperFactory, false,
+                false, null, NoOpOperationCallbackFactory.INSTANCE, null, 
null, false);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, 
primaryBtreeSearchOp, NC1_ID);
 
         IFileSplitProvider outSplits = new ConstantFileSplitProvider(new 
FileSplit[] { createFile(nc1) });
@@ -97,12 +91,19 @@ public class BTreePrimaryIndexScanOperatorTest extends 
AbstractBTreeOperatorTest
     }
 
     @Override
-    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(int[] 
btreeFields, int[] filterFields) {
-        return ((BTreeOperatorTestHelper) 
testHelper).createDataFlowHelperFactory();
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+    }
+
+    @Override
+    protected IResourceFactory createPrimaryResourceFactory() {
+        return new BTreeResourceFactory(storageManager, 
DataSetConstants.primaryTypeTraits,
+                DataSetConstants.primaryComparatorFactories, 
pageManagerFactory);
     }
 
     @Override
-    public void cleanup() throws Exception {
-        destroyPrimaryIndex();
+    protected IResourceFactory createSecondaryResourceFactory() {
+        return new BTreeResourceFactory(storageManager, 
DataSetConstants.secondaryTypeTraits,
+                DataSetConstants.secondaryComparatorFactories, 
pageManagerFactory);
     }
 }
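
In this test the type traits and comparator factories move into the BTreeResourceFactory, while the search operator takes only the record descriptor, key ranges, and helper factory. A sketch of the two signatures as they appear in the hunks above; the ITypeTraits and JobSpecification import packages and the wrapper class are assumptions.

import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import org.apache.hyracks.api.dataflow.value.ITypeTraits;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.storage.am.btree.dataflow.BTreeResourceFactory;
import org.apache.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import org.apache.hyracks.storage.am.common.api.IPageManagerFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
import org.apache.hyracks.storage.common.IResourceFactory;
import org.apache.hyracks.storage.common.IStorageManager;

public final class BTreeSearchWiringSketch {
    private BTreeSearchWiringSketch() {
    }

    // The index definition (type traits, comparators, page manager) now lives in the resource factory.
    public static IResourceFactory btreeResource(IStorageManager storageManager, ITypeTraits[] typeTraits,
            IBinaryComparatorFactory[] comparatorFactories, IPageManagerFactory pageManagerFactory) {
        return new BTreeResourceFactory(storageManager, typeTraits, comparatorFactories, pageManagerFactory);
    }

    // Full scan: null key fields stand for -infinity / +infinity, mirroring the test above.
    public static BTreeSearchOperatorDescriptor fullScan(JobSpecification spec, RecordDescriptor recDesc,
            IIndexDataflowHelperFactory helperFactory) {
        int[] lowKeyFields = null; // - infinity
        int[] highKeyFields = null; // + infinity
        return new BTreeSearchOperatorDescriptor(spec, recDesc, lowKeyFields, highKeyFields, true, true,
                helperFactory, false, false, null, NoOpOperationCallbackFactory.INSTANCE, null, null, false);
    }
}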
