/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.kylin.storage.hbase.util;

import java.io.IOException;
import java.util.List;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ResourceTool;
import org.apache.kylin.common.util.AbstractApplication;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.cube.CubeDescManager;
import org.apache.kylin.cube.CubeInstance;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.invertedindex.IIDescManager;
import org.apache.kylin.invertedindex.IIInstance;
import org.apache.kylin.invertedindex.IIManager;
import org.apache.kylin.job.dao.ExecutableDao;
import org.apache.kylin.job.dao.ExecutablePO;
import org.apache.kylin.job.exception.PersistentException;
import org.apache.kylin.metadata.MetadataManager;
import org.apache.kylin.metadata.model.DataModelDesc;
import org.apache.kylin.metadata.model.TableDesc;
import org.apache.kylin.metadata.project.ProjectInstance;
import org.apache.kylin.metadata.project.ProjectManager;
import org.apache.kylin.metadata.project.RealizationEntry;
import org.apache.kylin.metadata.realization.IRealization;
import org.apache.kylin.metadata.realization.RealizationRegistry;
import org.apache.kylin.storage.hybrid.HybridInstance;
import org.apache.kylin.storage.hybrid.HybridManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.collect.Lists;

/**
 * Collects the metadata resource paths that make up a cube, hybrid, or whole
 * project (models, tables, cube descriptors, segment dictionaries/snapshots,
 * and optionally job records), then optionally copies them to a destination
 * metadata store. Intended for debugging and for distributing cube metadata.
 *
 * TODO: deal with II case, deal with Streaming case
 */
public class CubeMetaExtractor extends AbstractApplication {

    private static final Logger logger = LoggerFactory.getLogger(CubeMetaExtractor.class);

    @SuppressWarnings("static-access")
    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify which cube to extract").create("cube");
    @SuppressWarnings("static-access")
    private static final Option OPTION_HYBRID = OptionBuilder.withArgName("hybrid").hasArg().isRequired(false).withDescription("Specify which hybrid to extract").create("hybrid");
    @SuppressWarnings("static-access")
    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify realizations in which project to extract").create("project");

    @SuppressWarnings("static-access")
    private static final Option OPTION_INCLUDE_SEGMENTS = OptionBuilder.withArgName("includeSegments").hasArg().isRequired(false).withDescription("set this to true if want extract the segments info, related dicts, etc.").create("includeSegments");
    @SuppressWarnings("static-access")
    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("set this to true if want to extract job info/outputs too").create("includeJobs");

    @SuppressWarnings("static-access")
    private static final Option OPTION_DEST = OptionBuilder.withArgName("destDir").hasArg().isRequired(false).withDescription("specify the dest dir to save the related metadata").create("destDir");

    private Options options = null;
    private KylinConfig kylinConfig;
    private MetadataManager metadataManager;
    private ProjectManager projectManager;
    private HybridManager hybridManager;
    private CubeManager cubeManager;
    private CubeDescManager cubeDescManager;
    private IIManager iiManager;
    private IIDescManager iiDescManager;
    private ExecutableDao executableDao;
    RealizationRegistry realizationRegistry;

    public CubeMetaExtractor() {
        options = new Options();

        // Exactly one of -cube / -project / -hybrid must be supplied.
        OptionGroup realizationOrProject = new OptionGroup();
        realizationOrProject.addOption(OPTION_CUBE);
        realizationOrProject.addOption(OPTION_PROJECT);
        realizationOrProject.addOption(OPTION_HYBRID);
        realizationOrProject.setRequired(true);

        options.addOptionGroup(realizationOrProject);
        options.addOption(OPTION_INCLUDE_SEGMENTS);
        options.addOption(OPTION_INCLUDE_JOB);
        options.addOption(OPTION_DEST);

    }

    @Override
    protected Options getOptions() {
        return options;
    }

    /**
     * Resolves the requested realization(s), gathers their resource paths and
     * hands them to {@link #executeExtraction}.
     *
     * @throws IllegalArgumentException if the named project/cube/hybrid does not exist
     * @throws RuntimeException         skipping segments is not supported yet
     */
    @Override
    protected void execute(OptionsHelper optionsHelper) throws Exception {
        // Both flags default to true when not supplied on the command line.
        boolean includeSegments = optionsHelper.hasOption(OPTION_INCLUDE_SEGMENTS) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_SEGMENTS)) : true;
        boolean includeJobs = optionsHelper.hasOption(OPTION_INCLUDE_JOB) ? Boolean.valueOf(optionsHelper.getOptionValue(OPTION_INCLUDE_JOB)) : true;
        String dest = null;
        if (optionsHelper.hasOption(OPTION_DEST)) {
            dest = optionsHelper.getOptionValue(OPTION_DEST);
        }

        if (!includeSegments) {
            throw new RuntimeException("Does not support skip segments for now");
        }

        kylinConfig = KylinConfig.getInstanceFromEnv();
        metadataManager = MetadataManager.getInstance(kylinConfig);
        projectManager = ProjectManager.getInstance(kylinConfig);
        hybridManager = HybridManager.getInstance(kylinConfig);
        cubeManager = CubeManager.getInstance(kylinConfig);
        cubeDescManager = CubeDescManager.getInstance(kylinConfig);
        iiManager = IIManager.getInstance(kylinConfig);
        iiDescManager = IIDescManager.getInstance(kylinConfig);
        executableDao = ExecutableDao.getInstance(kylinConfig);
        realizationRegistry = RealizationRegistry.getInstance(kylinConfig);

        List<String> requiredResources = Lists.newArrayList();
        List<String> optionalResources = Lists.newArrayList();

        if (optionsHelper.hasOption(OPTION_PROJECT)) {
            // Project mode: extract every realization registered in the project.
            ProjectInstance projectInstance = projectManager.getProject(optionsHelper.getOptionValue(OPTION_PROJECT));
            if (projectInstance == null) {
                throw new IllegalArgumentException("Project " + optionsHelper.getOptionValue(OPTION_PROJECT) + " does not exist");
            }
            addRequired(requiredResources, ProjectInstance.concatResourcePath(projectInstance.getName()));
            List<RealizationEntry> realizationEntries = projectInstance.getRealizationEntries();
            for (RealizationEntry realizationEntry : realizationEntries) {
                retrieveResourcePath(getRealization(realizationEntry), includeSegments, includeJobs, requiredResources, optionalResources);
            }
        } else if (optionsHelper.hasOption(OPTION_CUBE)) {
            String cubeName = optionsHelper.getOptionValue(OPTION_CUBE);
            IRealization realization;

            if ((realization = cubeManager.getRealization(cubeName)) != null) {
                retrieveResourcePath(realization, includeSegments, includeJobs, requiredResources, optionalResources);
            } else {
                throw new IllegalArgumentException("No cube found with name of " + cubeName);
            }
        } else if (optionsHelper.hasOption(OPTION_HYBRID)) {
            String hybridName = optionsHelper.getOptionValue(OPTION_HYBRID);
            IRealization realization;

            if ((realization = hybridManager.getRealization(hybridName)) != null) {
                retrieveResourcePath(realization, includeSegments, includeJobs, requiredResources, optionalResources);
            } else {
                // FIX: message previously lacked the trailing space, producing
                // "...name ofmyHybrid"; now consistent with the cube message above.
                throw new IllegalArgumentException("No hybrid found with name of " + hybridName);
            }
        }

        executeExtraction(requiredResources, optionalResources, dest);
    }

    /**
     * Logs the collected resource paths and, when a destination is given,
     * copies metadata there via {@link ResourceTool}.
     *
     * NOTE(review): ResourceTool.copy is invoked on the whole store; the
     * required/optional path lists are only logged here — confirm whether a
     * filtered copy was intended.
     */
    private void executeExtraction(List<String> requiredPaths, List<String> optionalPaths, String dest) {
        logger.info("The resource paths going to be extracted:");
        for (String s : requiredPaths) {
            logger.info(s + "(required)");
        }
        for (String s : optionalPaths) {
            logger.info(s + "(optional)");
        }

        if (dest == null) {
            logger.info("Dest is not set, exit directly without extracting");
        } else {
            try {
                ResourceTool.copy(KylinConfig.getInstanceFromEnv(), KylinConfig.createInstanceFromUri(dest));
            } catch (IOException e) {
                throw new RuntimeException("IOException", e);
            }
        }
    }

    /** Looks up the concrete realization behind a project realization entry. */
    private IRealization getRealization(RealizationEntry realizationEntry) {
        return realizationRegistry.getRealization(realizationEntry.getType(), realizationEntry.getRealization());
    }

    /**
     * Recursively collects the resource paths backing one realization:
     * tables + model + cube descriptor, per-segment dictionaries/snapshots/
     * statistics, and (optionally) the build job records of each segment.
     * Hybrids recurse into their child realizations; II is not supported.
     */
    private void retrieveResourcePath(IRealization realization, boolean includeSegments, boolean includeJobs, List<String> requiredResources, List<String> optionalResources) {

        logger.info("Deal with realization {} of type {}", realization.getName(), realization.getType());

        if (realization instanceof CubeInstance) {
            CubeInstance cube = (CubeInstance) realization;
            String descName = cube.getDescName();
            CubeDesc cubeDesc = cubeDescManager.getCubeDesc(descName);
            String modelName = cubeDesc.getModelName();
            DataModelDesc modelDesc = metadataManager.getDataModelDesc(modelName);

            // Table definitions are required; their "exd" (extended) info is
            // best-effort only.
            for (String tableName : modelDesc.getAllTables()) {
                addRequired(requiredResources, TableDesc.concatResourcePath(tableName));
                addOptional(optionalResources, TableDesc.concatExdResourcePath(tableName));
            }

            addRequired(requiredResources, DataModelDesc.concatResourcePath(modelDesc.getName()));
            addRequired(requiredResources, CubeDesc.concatResourcePath(cubeDesc.getName()));

            if (includeSegments) {
                addRequired(requiredResources, CubeInstance.concatResourcePath(cube.getName()));
                for (CubeSegment segment : cube.getSegments()) {
                    for (String dictPath : segment.getDictionaryPaths()) {
                        addRequired(requiredResources, dictPath);
                    }
                    for (String snapshotPath : segment.getSnapshotPaths()) {
                        addRequired(requiredResources, snapshotPath);
                    }
                    addRequired(requiredResources, segment.getStatisticsResourcePath());

                    if (includeJobs) {
                        String lastJobId = segment.getLastBuildJobID();
                        // FIX: the original condition was inverted
                        // (!StringUtils.isEmpty): it warned "No job exist" for
                        // segments that HAD a job and called getJob("") for
                        // segments without one. Warn on the empty id; fetch
                        // the job (and its sub-tasks) otherwise.
                        if (StringUtils.isEmpty(lastJobId)) {
                            logger.warn("No job exist for segment {}", segment);
                        } else {
                            try {
                                ExecutablePO executablePO = executableDao.getJob(lastJobId);
                                addRequired(requiredResources, ExecutableDao.pathOfJob(lastJobId));
                                addRequired(requiredResources, ExecutableDao.pathOfJobOutput(lastJobId));
                                for (ExecutablePO task : executablePO.getTasks()) {
                                    addRequired(requiredResources, ExecutableDao.pathOfJob(task.getUuid()));
                                    addRequired(requiredResources, ExecutableDao.pathOfJobOutput(task.getUuid()));
                                }
                            } catch (PersistentException e) {
                                throw new RuntimeException("PersistentException", e);
                            }
                        }
                    } else {
                        logger.info("Job info will not be extracted");
                    }
                }
            } else {
                if (includeJobs) {
                    logger.warn("It's useless to set includeJobs to true when includeSegments is set to false");
                }

                throw new IllegalStateException("Does not support skip segments now");
            }
        } else if (realization instanceof HybridInstance) {
            HybridInstance hybridInstance = (HybridInstance) realization;
            addRequired(requiredResources, HybridInstance.concatResourcePath(hybridInstance.getName()));
            for (IRealization iRealization : hybridInstance.getRealizations()) {
                retrieveResourcePath(iRealization, includeSegments, includeJobs, requiredResources, optionalResources);
            }
        } else if (realization instanceof IIInstance) {
            throw new IllegalStateException("Does not support extract II instance or hybrid that contains II");
        } else {
            throw new IllegalStateException("Unknown realization type: " + realization.getType());
        }
    }

    /** Records a must-have resource path (logged for traceability). */
    private void addRequired(List<String> resourcePaths, String record) {
        logger.info("adding required resource {}", record);
        resourcePaths.add(record);
    }

    /** Records a best-effort resource path (logged for traceability). */
    private void addOptional(List<String> optionalPaths, String record) {
        logger.info("adding optional resource {}", record);
        optionalPaths.add(record);
    }

    public static void main(String[] args) {
        CubeMetaExtractor extractor = new CubeMetaExtractor();
        extractor.execute(args);
    }
}
http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/ITStorageTest.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/ITStorageTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/ITStorageTest.java deleted file mode 100644 index f8aab6a..0000000 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/common/ITStorageTest.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
-*/ - -package org.apache.kylin.storage.hbase.common; - -import static org.junit.Assert.*; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; - -import org.apache.kylin.common.KylinConfig; -import org.apache.kylin.cube.CubeInstance; -import org.apache.kylin.cube.CubeManager; -import org.apache.kylin.metadata.filter.TupleFilter; -import org.apache.kylin.metadata.model.FunctionDesc; -import org.apache.kylin.metadata.model.MeasureDesc; -import org.apache.kylin.metadata.model.TblColRef; -import org.apache.kylin.metadata.realization.SQLDigest; -import org.apache.kylin.metadata.tuple.ITuple; -import org.apache.kylin.metadata.tuple.ITupleIterator; -import org.apache.kylin.storage.IStorageQuery; -import org.apache.kylin.storage.StorageContext; -import org.apache.kylin.storage.StorageFactory; -import org.apache.kylin.storage.cache.StorageMockUtils; -import org.apache.kylin.storage.exception.ScanOutOfLimitException; -import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.Ignore; -import org.junit.Test; - -public class ITStorageTest extends HBaseMetadataTestCase { - - private IStorageQuery storageEngine; - private CubeInstance cube; - private StorageContext context; - - @BeforeClass - public static void setupResource() throws Exception { - } - - @AfterClass - public static void tearDownResource() { - } - - @Before - public void setUp() throws Exception { - this.createTestMetadata(); - - CubeManager cubeMgr = CubeManager.getInstance(getTestConfig()); - cube = cubeMgr.getCube("test_kylin_cube_without_slr_left_join_empty"); - Assert.assertNotNull(cube); - storageEngine = StorageFactory.createQuery(cube); - String url = KylinConfig.getInstanceFromEnv().getStorageUrl(); - context = new StorageContext(); - context.setConnUrl(url); - } - - @After - public void tearDown() throws 
Exception { - this.cleanupTestMetadata(); - } - - @Test(expected = ScanOutOfLimitException.class) - @Ignore - public void testScanOutOfLimit() { - context.setThreshold(1); - List<TblColRef> groups = StorageMockUtils.buildGroups(); - List<FunctionDesc> aggregations = StorageMockUtils.buildAggregations(); - - search(groups, aggregations, null, context); - } - - @Test - public void test01() { - List<TblColRef> groups = StorageMockUtils.buildGroups(); - List<FunctionDesc> aggregations = StorageMockUtils.buildAggregations(); - TupleFilter filter = StorageMockUtils.buildFilter1(groups.get(0)); - - int count = search(groups, aggregations, filter, context); - assertTrue(count > 0); - } - - /* - @Test - public void test02() { - List<TblColRef> groups = buildGroups(); - List<FunctionDesc> aggregations = buildAggregations(); - TupleFilter filter = buildFilter2(groups.get(1)); - - int count = search(groups, aggregations, filter, context); - assertTrue(count > 0); - } - - @Test - public void test03() { - List<TblColRef> groups = buildGroups(); - List<FunctionDesc> aggregations = buildAggregations(); - TupleFilter filter = buildAndFilter(groups); - - int count = search(groups, aggregations, filter, context); - assertTrue(count > 0); - } - - @Test - public void test04() { - List<TblColRef> groups = buildGroups(); - List<FunctionDesc> aggregations = buildAggregations(); - TupleFilter filter = buildOrFilter(groups); - - int count = search(groups, aggregations, filter, context); - assertTrue(count > 0); - } - - @Test - public void test05() { - List<TblColRef> groups = buildGroups(); - List<FunctionDesc> aggregations = buildAggregations(); - - int count = search(groups, aggregations, null, context); - assertTrue(count > 0); - } - */ - private int search(List<TblColRef> groups, List<FunctionDesc> aggregations, TupleFilter filter, StorageContext context) { - int count = 0; - ITupleIterator iterator = null; - try { - SQLDigest sqlDigest = new SQLDigest("default.test_kylin_fact", filter, 
null, Collections.<TblColRef> emptySet(), groups, Collections.<TblColRef> emptySet(), Collections.<TblColRef> emptySet(), aggregations, new ArrayList<MeasureDesc>(), new ArrayList<SQLDigest.OrderEnum>()); - iterator = storageEngine.search(context, sqlDigest, StorageMockUtils.newTupleInfo(groups, aggregations)); - while (iterator.hasNext()) { - ITuple tuple = iterator.next(); - System.out.println("Tuple = " + tuple); - count++; - } - } catch (Exception e) { - e.printStackTrace(); - } finally { - if (iterator != null) - iterator.close(); - } - return count; - } - -} http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java deleted file mode 100644 index e9812da..0000000 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/ii/ITInvertedIndexHBaseTest.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
-*/ - -package org.apache.kylin.storage.hbase.ii; - -import java.util.List; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.client.HConnection; -import org.apache.hadoop.hbase.client.HConnectionManager; -import org.apache.kylin.common.KylinConfig; -import org.apache.kylin.common.util.BytesUtil; -import org.apache.kylin.invertedindex.IIInstance; -import org.apache.kylin.invertedindex.IIManager; -import org.apache.kylin.invertedindex.IISegment; -import org.apache.kylin.invertedindex.index.RawTableRecord; -import org.apache.kylin.invertedindex.index.Slice; -import org.apache.kylin.invertedindex.index.TableRecord; -import org.apache.kylin.invertedindex.index.TableRecordInfo; -import org.apache.kylin.invertedindex.model.IIDesc; -import org.apache.kylin.invertedindex.model.IIKeyValueCodec; -import org.apache.kylin.storage.hbase.HBaseConnection; -import org.apache.kylin.storage.hbase.cube.v1.HBaseClientKVIterator; -import org.apache.kylin.storage.hbase.steps.HBaseMetadataTestCase; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import com.google.common.collect.Lists; - -/** - * @author yangli9 - */ -public class ITInvertedIndexHBaseTest extends HBaseMetadataTestCase { - - IIInstance ii; - IISegment seg; - HConnection hconn; - - TableRecordInfo info; - - @Before - public void setup() throws Exception { - this.createTestMetadata(); - - this.ii = IIManager.getInstance(getTestConfig()).getII("test_kylin_ii_left_join"); - this.seg = ii.getFirstSegment(); - - Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration(); - hconn = HConnectionManager.createConnection(hconf); - - this.info = new TableRecordInfo(seg); - } - - @After - public void after() throws Exception { - this.cleanupTestMetadata(); - } - - @Test - public void testLoad() throws Exception { - - String tableName = seg.getStorageLocationIdentifier(); - IIKeyValueCodec codec = new IIKeyValueCodec(info.getDigest()); - - List<Slice> slices = 
Lists.newArrayList(); - HBaseClientKVIterator kvIterator = new HBaseClientKVIterator(hconn, tableName, IIDesc.HBASE_FAMILY_BYTES); - try { - for (Slice slice : codec.decodeKeyValue(kvIterator)) { - slices.add(slice); - } - } finally { - kvIterator.close(); - } - - List<TableRecord> records = iterateRecords(slices); - //dump(records); - System.out.println("table name:" + tableName + " has " + records.size() + " records"); - } - - private List<TableRecord> iterateRecords(List<Slice> slices) { - List<TableRecord> records = Lists.newArrayList(); - for (Slice slice : slices) { - for (RawTableRecord rec : slice) { - records.add(new TableRecord((RawTableRecord) rec.clone(), info)); - } - } - return records; - } - - @SuppressWarnings("unused") - private void dump(Iterable<TableRecord> records) { - for (TableRecord rec : records) { - byte[] x = rec.getBytes(); - String y = BytesUtil.toReadableText(x); - System.out.println(y); - System.out.println(); - } - } - -} http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HBaseMetadataTestCase.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HBaseMetadataTestCase.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HBaseMetadataTestCase.java deleted file mode 100644 index 2a0adc3..0000000 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/HBaseMetadataTestCase.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. -*/ - -package org.apache.kylin.storage.hbase.steps; - -import java.io.File; - -import org.apache.kylin.common.KylinConfig; -import org.apache.kylin.common.util.AbstractKylinTestCase; -import org.apache.kylin.common.util.ClassUtil; - -/** - * @author ysong1 - */ -public class HBaseMetadataTestCase extends AbstractKylinTestCase { - - static { - try { - ClassUtil.addClasspath(new File("../examples/test_case_data/sandbox/").getAbsolutePath()); - } catch (Exception e) { - e.printStackTrace(); - } - } - - @Override - public void createTestMetadata() throws Exception { - staticCreateTestMetadata(); - } - - @Override - public void cleanupTestMetadata() { - staticCleanupTestMetadata(); - } - - public static void staticCreateTestMetadata() throws Exception { - staticCreateTestMetadata(SANDBOX_TEST_DATA); - } - - public static void staticCreateTestMetadata(String kylinConfigFolder) { - - KylinConfig.destoryInstance(); - - if (System.getProperty(KylinConfig.KYLIN_CONF) == null && System.getenv(KylinConfig.KYLIN_CONF) == null) - System.setProperty(KylinConfig.KYLIN_CONF, kylinConfigFolder); - - } - -} http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHBaseResourceStoreTest.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHBaseResourceStoreTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHBaseResourceStoreTest.java deleted file mode 100644 index 
c21bf78..0000000 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHBaseResourceStoreTest.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. -*/ - -package org.apache.kylin.storage.hbase.steps; - -import static org.junit.Assert.*; - -import java.io.DataInputStream; -import java.io.DataOutputStream; -import java.io.IOException; -import java.util.ArrayList; - -import org.apache.commons.lang.StringUtils; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.kylin.common.KylinConfig; -import org.apache.kylin.common.persistence.ResourceStore; -import org.apache.kylin.common.persistence.RootPersistentEntity; -import org.apache.kylin.common.persistence.Serializer; -import org.apache.kylin.storage.hbase.HBaseConnection; -import org.apache.kylin.storage.hbase.HBaseResourceStore; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -public class ITHBaseResourceStoreTest extends HBaseMetadataTestCase { - - @Before - public void setup() throws Exception { - this.createTestMetadata(); - } - - @After - public void 
after() throws Exception { - this.cleanupTestMetadata(); - } - - @Test - public void testHBaseStore() throws Exception { - testAStore(ResourceStore.getStore(KylinConfig.getInstanceFromEnv())); - } - - @Test - public void testHBaseStoreWithLargeCell() throws Exception { - String path = "/cube/_test_large_cell.json"; - String largeContent = "THIS_IS_A_LARGE_CELL"; - StringEntity content = new StringEntity(largeContent); - KylinConfig config = KylinConfig.getInstanceFromEnv(); - int origSize = config.getHBaseKeyValueSize(); - ResourceStore store = ResourceStore.getStore(KylinConfig.getInstanceFromEnv()); - - try { - config.setProperty("kylin.hbase.client.keyvalue.maxsize", String.valueOf(largeContent.length() - 1)); - - store.deleteResource(path); - - store.putResource(path, content, StringEntity.serializer); - assertTrue(store.exists(path)); - StringEntity t = store.getResource(path, StringEntity.class, StringEntity.serializer); - assertEquals(content, t); - - Path redirectPath = ((HBaseResourceStore) store).bigCellHDFSPath(path); - Configuration hconf = HBaseConnection.getCurrentHBaseConfiguration(); - FileSystem fileSystem = FileSystem.get(hconf); - assertTrue(fileSystem.exists(redirectPath)); - - FSDataInputStream in = fileSystem.open(redirectPath); - assertEquals(largeContent, in.readUTF()); - in.close(); - - store.deleteResource(path); - } finally { - config.setProperty("kylin.hbase.client.keyvalue.maxsize", "" + origSize); - store.deleteResource(path); - } - } - - void testAStore(ResourceStore store) throws IOException { - String dir1 = "/cube"; - String path1 = "/cube/_test.json"; - StringEntity content1 = new StringEntity("anything"); - String dir2 = "/table"; - String path2 = "/table/_test.json"; - StringEntity content2 = new StringEntity("something"); - - // cleanup legacy if any - store.deleteResource(path1); - store.deleteResource(path2); - - StringEntity t; - - // put/get - store.putResource(path1, content1, StringEntity.serializer); - 
assertTrue(store.exists(path1)); - t = store.getResource(path1, StringEntity.class, StringEntity.serializer); - assertEquals(content1, t); - - store.putResource(path2, content2, StringEntity.serializer); - assertTrue(store.exists(path2)); - t = store.getResource(path2, StringEntity.class, StringEntity.serializer); - assertEquals(content2, t); - - // overwrite - t.str = "new string"; - store.putResource(path2, t, StringEntity.serializer); - - // write conflict - try { - t.setLastModified(t.getLastModified() - 1); - store.putResource(path2, t, StringEntity.serializer); - fail("write conflict should trigger IllegalStateException"); - } catch (IllegalStateException e) { - // expected - } - - // list - ArrayList<String> list; - - list = store.listResources(dir1); - assertTrue(list.contains(path1)); - assertTrue(list.contains(path2) == false); - - list = store.listResources(dir2); - assertTrue(list.contains(path2)); - assertTrue(list.contains(path1) == false); - - list = store.listResources("/"); - assertTrue(list.contains(dir1)); - assertTrue(list.contains(dir2)); - assertTrue(list.contains(path1) == false); - assertTrue(list.contains(path2) == false); - - list = store.listResources(path1); - assertNull(list); - list = store.listResources(path2); - assertNull(list); - - // delete/exist - store.deleteResource(path1); - assertTrue(store.exists(path1) == false); - list = store.listResources(dir1); - assertTrue(list == null || list.contains(path1) == false); - - store.deleteResource(path2); - assertTrue(store.exists(path2) == false); - list = store.listResources(dir2); - assertTrue(list == null || list.contains(path2) == false); - } - - public static class StringEntity extends RootPersistentEntity { - - static final Serializer<StringEntity> serializer = new Serializer<StringEntity>() { - @Override - public void serialize(StringEntity obj, DataOutputStream out) throws IOException { - out.writeUTF(obj.str); - } - - @Override - public StringEntity deserialize(DataInputStream 
in) throws IOException { - String str = in.readUTF(); - return new StringEntity(str); - } - }; - - String str; - - public StringEntity(String str) { - this.str = str; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = super.hashCode(); - result = prime * result + ((str == null) ? 0 : str.hashCode()); - return result; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) - return true; - if (!(obj instanceof StringEntity)) - return false; - return StringUtils.equals(this.str, ((StringEntity) obj).str); - } - - @Override - public String toString() { - return str; - } - } - -} http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHdfsOpsTest.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHdfsOpsTest.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHdfsOpsTest.java deleted file mode 100644 index 25ac2c6..0000000 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/ITHdfsOpsTest.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. -*/ - -package org.apache.kylin.storage.hbase.steps; - -import java.io.IOException; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.kylin.common.KylinConfig; -import org.apache.kylin.engine.mr.HadoopUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -/** - */ -public class ITHdfsOpsTest extends HBaseMetadataTestCase { - - FileSystem fileSystem; - - @Before - public void setup() throws Exception { - - this.createTestMetadata(); - - Configuration hconf = HadoopUtil.getCurrentConfiguration(); - - fileSystem = FileSystem.get(hconf); - } - - @Test - public void TestPath() throws IOException { - String hdfsWorkingDirectory = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(); - Path coprocessorDir = new Path(hdfsWorkingDirectory, "test"); - fileSystem.mkdirs(coprocessorDir); - - Path newFile = new Path(coprocessorDir, "test_file"); - newFile = newFile.makeQualified(fileSystem.getUri(), null); - FSDataOutputStream stream = fileSystem.create(newFile); - stream.write(new byte[] { 0, 1, 2 }); - stream.close(); - } - - @After - public void after() throws Exception { - this.cleanupTestMetadata(); - } -} http://git-wip-us.apache.org/repos/asf/kylin/blob/1428bbc4/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java ---------------------------------------------------------------------- diff --git a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java index a003d6a..7b3b698 100644 --- a/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java +++ 
b/storage-hbase/src/test/java/org/apache/kylin/storage/hbase/steps/SandboxMetastoreCLI.java @@ -20,11 +20,13 @@ package org.apache.kylin.storage.hbase.steps; import java.io.File; +import org.apache.commons.lang3.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.kylin.common.KylinConfig; import org.apache.kylin.common.persistence.ResourceTool; import org.apache.kylin.common.util.ClassUtil; +import org.apache.kylin.common.util.HBaseMetadataTestCase; /** * This is a helper class for developer to directly manipulate the metadata store in sandbox @@ -34,14 +36,14 @@ import org.apache.kylin.common.util.ClassUtil; * It is desinged to run in hadoop CLI, both in sandbox or in real hadoop environment */ public class SandboxMetastoreCLI { - + private static final Log logger = LogFactory.getLog(SandboxMetastoreCLI.class); public static void main(String[] args) throws Exception { logger.info("Adding to classpath: " + new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath()); ClassUtil.addClasspath(new File(HBaseMetadataTestCase.SANDBOX_TEST_DATA).getAbsolutePath()); System.setProperty(KylinConfig.KYLIN_CONF, "../examples/test_case_data/sandbox"); - if (System.getProperty("hdp.version") == null) { + if (StringUtils.isEmpty(System.getProperty("hdp.version"))) { throw new RuntimeException("No hdp.version set; Please set hdp.version in your jvm option, for example: -Dhdp.version=2.2.4.2-2"); }