http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala new file mode 100755 index 0000000..ba4782c --- /dev/null +++ b/typesystem/src/test/scala/org/apache/atlas/typesystem/json/TypesSerializationTest.scala @@ -0,0 +1,300 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.typesystem.json + +import com.google.common.collect.ImmutableList +import org.apache.atlas.typesystem.types._ +import org.junit.{Assert, Test} + +class TypesSerializationTest extends BaseTest with TypeHelpers { + + @Test def test1: Unit = { + + val ts = getTypeSystem + + val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE), + optionalAttr("e", DataTypes.INT_TYPE), + optionalAttr("f", DataTypes.INT_TYPE), + optionalAttr("g", DataTypes.LONG_TYPE), + optionalAttr("h", DataTypes.FLOAT_TYPE), + optionalAttr("i", DataTypes.DOUBLE_TYPE), + optionalAttr("j", DataTypes.BIGINTEGER_TYPE), + optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("l", DataTypes.DATE_TYPE), + optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), + optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), + optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) + + + ts.defineTypes(ImmutableList.of[StructTypeDefinition](sDef), + ImmutableList.of[HierarchicalTypeDefinition[TraitType]], + ImmutableList.of[HierarchicalTypeDefinition[ClassType]] + ) + + val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(), + requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE)) + val B: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE)) + val C: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE)) + val D: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE)) + + defineTraits(ts, A, B, C, D) + + ts.defineEnumType("HiveObjectType", + new EnumValue("GLOBAL", 1), + new EnumValue("DATABASE", 2), + new EnumValue("TABLE", 3), + new EnumValue("PARTITION", 4), + new EnumValue("COLUMN", 5)) + + 
ts.defineEnumType("PrincipalType", + new EnumValue("USER", 1), + new EnumValue("ROLE", 2), + new EnumValue("GROUP", 3)) + + ts.defineEnumType("TxnState", + new EnumValue("COMMITTED", 1), + new EnumValue("ABORTED", 2), + new EnumValue("OPEN", 3)) + + ts.defineEnumType("LockLevel", + new EnumValue("DB", 1), + new EnumValue("TABLE", 2), + new EnumValue("PARTITION", 3)) + + defineClassType(ts, createClassTypeDef("t4", List(), + requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE), + optionalAttr("enum1", ts.getDataType(classOf[EnumType], "HiveObjectType")), + optionalAttr("e", DataTypes.INT_TYPE), + optionalAttr("f", DataTypes.INT_TYPE), + optionalAttr("g", DataTypes.LONG_TYPE), + optionalAttr("enum2", ts.getDataType(classOf[EnumType], "PrincipalType")), + optionalAttr("h", DataTypes.FLOAT_TYPE), + optionalAttr("i", DataTypes.DOUBLE_TYPE), + optionalAttr("j", DataTypes.BIGINTEGER_TYPE), + optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("enum3", ts.getDataType(classOf[EnumType], "TxnState")), + optionalAttr("l", DataTypes.DATE_TYPE), + optionalAttr("m", ts.defineArrayType(DataTypes.INT_TYPE)), + optionalAttr("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), + optionalAttr("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), + optionalAttr("enum4", ts.getDataType(classOf[EnumType], "LockLevel")))) + + val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(), + requiredAttr("name", DataTypes.STRING_TYPE), + new AttributeDefinition("employees", String.format("array<%s>", "Person"), + Multiplicity.COLLECTION, true, "department")) + val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(), + requiredAttr("name", DataTypes.STRING_TYPE), + new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"), + new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates") + ) + val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"), + new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), + Multiplicity.COLLECTION, false, "manager") + ) + val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("SecurityClearance", List(), + requiredAttr("level", DataTypes.INT_TYPE) + ) + ts.defineTypes(ImmutableList.of[StructTypeDefinition], + ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef), + ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)) + + val ser = TypesSerialization.toJson(ts, _ => true) + + val typesDef1 = TypesSerialization.fromJson(ser) + + val ts1 = TypeSystem.getInstance() + ts1.reset() + + typesDef1.enumTypes.foreach(ts1.defineEnumType(_)) + + ts1.defineTypes(ImmutableList.copyOf(typesDef1.structTypes.toArray), + ImmutableList.copyOf(typesDef1.traitTypes.toArray), + ImmutableList.copyOf(typesDef1.classTypes.toArray) + ) + val ser2 = TypesSerialization.toJson(ts1, _ => true) + val typesDef2 = TypesSerialization.fromJson(ser2) + + Assert.assertEquals(typesDef1, typesDef2) + } + + @Test def test2: Unit = { + + val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE), + optionalAttr("e", DataTypes.INT_TYPE), 
+ optionalAttr("f", DataTypes.INT_TYPE), + optionalAttr("g", DataTypes.LONG_TYPE), + optionalAttr("h", DataTypes.FLOAT_TYPE), + optionalAttr("i", DataTypes.DOUBLE_TYPE), + optionalAttr("j", DataTypes.BIGINTEGER_TYPE), + optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("l", DataTypes.DATE_TYPE), + optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), + optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), + optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) + + + + val ser2 = TypesSerialization.toJson(sDef) + val typesDef2 = TypesSerialization.fromJson(ser2) + + Assert.assertEquals(sDef, typesDef2.structTypes(0)) + } + + @Test def test3: Unit = { + + val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE), + optionalAttr("e", DataTypes.INT_TYPE), + optionalAttr("f", DataTypes.INT_TYPE), + optionalAttr("g", DataTypes.LONG_TYPE), + optionalAttr("h", DataTypes.FLOAT_TYPE), + optionalAttr("i", DataTypes.DOUBLE_TYPE), + optionalAttr("j", DataTypes.BIGINTEGER_TYPE), + optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("l", DataTypes.DATE_TYPE), + optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), + optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), + optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) + + + + val ser2 = TypesSerialization.toJson(sDef) + val typesDef2 = TypesSerialization.fromJson(ser2) + + Assert.assertEquals(sDef, typesDef2.structTypes(0)) + } + + @Test def test4 : Unit = { + + val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(), + requiredAttr("a", DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE)) + val B: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE)) + val C: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE)) + val D: HierarchicalTypeDefinition[TraitType] = + createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE)) + + val typDefs = Seq(A,B,C,D) + typDefs.foreach { tDef => + val ser2 = TypesSerialization.toJson(tDef, true) + val typesDef2 = TypesSerialization.fromJson(ser2) + Assert.assertEquals(tDef, typesDef2.traitTypes(0)) + + } + } + + @Test def test5 : Unit = { + val e1 = new EnumTypeDefinition("HiveObjectType", + new EnumValue("GLOBAL", 1), + new EnumValue("DATABASE", 2), + new EnumValue("TABLE", 3), + new EnumValue("PARTITION", 4), + new EnumValue("COLUMN", 5)) + + val e2 = new EnumTypeDefinition("PrincipalType", + new EnumValue("USER", 1), + new EnumValue("ROLE", 2), + new EnumValue("GROUP", 3)) + + val e3 = new EnumTypeDefinition("TxnState", + new EnumValue("COMMITTED", 1), + new EnumValue("ABORTED", 2), + new EnumValue("OPEN", 3)) + + val e4 = new EnumTypeDefinition("LockLevel", + new EnumValue("DB", 1), + new EnumValue("TABLE", 2), + new EnumValue("PARTITION", 3)) + + val typDefs = Seq(e1,e2,e3,e4) + typDefs.foreach { tDef => + val ser2 = TypesSerialization.toJson(tDef) + val typesDef2 = TypesSerialization.fromJson(ser2) + Assert.assertEquals(tDef, typesDef2.enumTypes(0)) + + } + } + + @Test def test6 : Unit = { + val typDef = createClassTypeDef("t4", List(), + requiredAttr("a", 
DataTypes.INT_TYPE), + optionalAttr("b", DataTypes.BOOLEAN_TYPE), + optionalAttr("c", DataTypes.BYTE_TYPE), + optionalAttr("d", DataTypes.SHORT_TYPE), + optionalAttr("enum1", "HiveObjectType"), + optionalAttr("e", DataTypes.INT_TYPE), + optionalAttr("f", DataTypes.INT_TYPE), + optionalAttr("g", DataTypes.LONG_TYPE), + optionalAttr("enum2", "PrincipalType"), + optionalAttr("h", DataTypes.FLOAT_TYPE), + optionalAttr("i", DataTypes.DOUBLE_TYPE), + optionalAttr("j", DataTypes.BIGINTEGER_TYPE), + optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("enum3", "TxnState"), + optionalAttr("l", DataTypes.DATE_TYPE), + optionalAttr("m", DataTypes.INT_TYPE), + optionalAttr("n", DataTypes.BIGDECIMAL_TYPE), + optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), + optionalAttr("enum4", "LockLevel")) + + val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(), + requiredAttr("name", DataTypes.STRING_TYPE), + new AttributeDefinition("employees", String.format("array<%s>", "Person"), + Multiplicity.COLLECTION, true, "department")) + val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(), + requiredAttr("name", DataTypes.STRING_TYPE), + new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"), + new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates") + ) + val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"), + new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), + Multiplicity.COLLECTION, false, "manager") + ) + + val typDefs = Seq(typDef, deptTypeDef, personTypeDef, managerTypeDef) + typDefs.foreach { tDef => + val ser2 = TypesSerialization.toJson(tDef, false) + val typesDef2 = TypesSerialization.fromJson(ser2) + Assert.assertEquals(tDef, typesDef2.classTypes(0)) + + } + } +}
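
The six tests above all exercise the same round-trip invariant: build type definitions, serialize them with TypesSerialization.toJson, parse the JSON back with TypesSerialization.fromJson, and assert that the parsed definitions equal the originals. A minimal standalone sketch of that pattern follows; it uses only API calls visible in this diff (TypeSystem.getInstance, reset, defineEnumType, toJson, fromJson), and the object name RoundTripSketch is illustrative, not part of the commit:

    import org.apache.atlas.typesystem.json.TypesSerialization
    import org.apache.atlas.typesystem.types.{EnumValue, TypeSystem}

    object RoundTripSketch {
      def main(args: Array[String]): Unit = {
        val ts = TypeSystem.getInstance()
        ts.reset()

        // Register one type; an enum is the smallest case the tests cover.
        ts.defineEnumType("TxnState",
          new EnumValue("COMMITTED", 1),
          new EnumValue("ABORTED", 2),
          new EnumValue("OPEN", 3))

        // Serialize every registered type to JSON, then parse it back.
        val json = TypesSerialization.toJson(ts, _ => true)
        val typesDef = TypesSerialization.fromJson(json)

        // Serialization should be stable: a second round trip yields an
        // equal TypesDef, which is what test1 asserts at larger scale.
        val json2 = TypesSerialization.toJson(ts, _ => true)
        assert(typesDef == TypesSerialization.fromJson(json2))
      }
    }

test1 goes further than this sketch: it re-registers the parsed enum, struct, trait, and class definitions into a fresh TypeSystem (hence the typesDef1.enumTypes.foreach(ts1.defineEnumType(_)) call) before serializing a second time, so the equality check also covers the define-from-JSON path.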
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/BuilderTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/BuilderTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/BuilderTest.scala deleted file mode 100644 index 496ebf9..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/BuilderTest.scala +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.builders - -import org.apache.hadoop.metadata.typesystem.TypesDef -import org.apache.hadoop.metadata.typesystem.types.TypeSystem -import org.scalatest.{BeforeAndAfter, FunSuite} - -abstract class BuilderTest extends FunSuite with BeforeAndAfter { - - var tDef : TypesDef = null - - before { - TypeSystem.getInstance().reset() - - val b = new TypesBuilder - import b._ - - tDef = types { - - _trait("Dimension") {} - _trait("PII") {} - _trait("Metric") {} - _trait("ETL") {} - _trait("JdbcAccess") {} - - _class("DB") { - "name" ~ (string, required, indexed, unique) - "owner" ~ (string) - "createTime" ~ (int) - } - - _class("StorageDesc") { - "inputFormat" ~ (string, required) - "outputFormat" ~ (string, required) - } - - _class("Column") { - "name" ~ (string, required) - "dataType" ~ (string, required) - "sd" ~ ("StorageDesc", required) - } - - _class("Table", List()) { - "name" ~ (string, required, indexed) - "db" ~ ("DB", required) - "sd" ~ ("StorageDesc", required) - } - - _class("LoadProcess") { - "name" ~ (string, required) - "inputTables" ~ (array("Table"), collection) - "outputTable" ~ ("Table", required) - - } - - _class("View") { - "name" ~ (string, required) - "inputTables" ~ (array("Table"), collection) - } - } - } - -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilderTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilderTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilderTest.scala deleted file mode 100644 index 227b524..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/InstanceBuilderTest.scala +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.builders - -import org.apache.hadoop.metadata.typesystem.types.{Multiplicity, ClassType, TypeSystem} - - -class InstanceBuilderTest extends BuilderTest { - - test("test1") { - TypeSystem.getInstance().defineTypes(tDef) - - val b = new InstanceBuilder - import b._ - - val instances = b create { - - val salesDB = instance("DB") { // use instance to create Referenceables. use closure to - // set attributes of instance - 'name ~ "Sales" // use '~' to set attributes. Use a Symbol (names starting with ') for - // attribute names. - 'owner ~ "John ETL" - 'createTime ~ 1000 - } - - val salesFact = instance("Table") { - 'name ~ "sales_fact" - 'db ~ salesDB - val sd = instance("StorageDesc") { // any valid scala allowed in closure. - 'inputFormat ~ "TextIputFormat" - 'outputFormat ~ "TextOutputFormat" - } - 'sd ~ sd // use ~ to set references, collections and maps. - val columns = Seq( - instance("Column") { - 'name ~ "time_id" - 'dataType ~ "int" - 'sd ~ sd - }, - instance("Column") { - 'name ~ "product_id" - 'dataType ~ "int" - 'sd ~ sd - }, - instance("Column") { - 'name ~ "customer_id" - 'dataType ~ "int" - 'sd ~ sd - }, - instance("Column", "Metric") { - 'name ~ "sales" - 'dataType ~ "int" - 'sd ~ sd - 'Metric("x") ~ 1 // use 'TraitName("attrName") to set values on traits. - } - ) - - 'columns ~ columns - - } - - salesFact.sd.inputFormat ~ "TextInputFormat" // use dot navigation to alter attributes in the object graph. - // here I am fixing the typo in "TextInputFormat" - // dot navigation also works for arrays. - // here I am fixing column(3). Metric trait has no attributes. - val c = salesFact.columns - c(3) = instance("Column", "Metric") { - 'name ~ "sales" - 'dataType ~ "int" - 'sd ~ salesFact.sd - } - - } - - val ts = TypeSystem.getInstance() - - import scala.collection.JavaConversions._ - val typedInstances = instances.map { i => - val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName) - iTyp.convert(i, Multiplicity.REQUIRED) - } - - typedInstances.foreach { i => - println(i) - } - - } - -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/MultiplicityTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/MultiplicityTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/MultiplicityTest.scala deleted file mode 100644 index 73d912b..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/MultiplicityTest.scala +++ /dev/null @@ -1,125 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.builders - -import org.apache.hadoop.metadata.MetadataException -import org.apache.hadoop.metadata.typesystem.types.{Multiplicity, ClassType, TypeSystem} -import org.scalatest.{BeforeAndAfterAll, FunSuite} - - -class MultiplicityTest extends FunSuite with BeforeAndAfterAll { - - override def beforeAll() = { - TypeSystem.getInstance().reset() - - val b = new TypesBuilder - import b._ - - val tDef = types { - - _trait("Dimension") {} - _trait("PII") {} - _trait("Metric") {} - _trait("ETL") {} - _trait("JdbcAccess") {} - - _class("DB") { - "name" ~ (string, required, indexed, unique) - "owner" ~ (string) - "createTime" ~ (int) - } - - _class("StorageDesc") { - "inputFormat" ~ (string, required) - "outputFormat" ~ (string, required) - } - - _class("Column") { - "name" ~ (string, required) - "dataType" ~ (string, required) - "sd" ~ ("StorageDesc", required) - } - - _class("Table", List()) { - "name" ~ (string, required, indexed) - "db" ~ ("DB", required) - "sd" ~ ("StorageDesc", required) - } - - _class("LoadProcess") { - "name" ~ (string, required) - "inputTables" ~ (array("Table"), collection) - "outputTable" ~ ("Table", required) - - } - - _class("View") { - "name" ~ (string, required) - "inputTables" ~ (array("Table"), collection) - } - - _class("AT") { - "name" ~ (string, required) - "stringSet" ~ (array("string"), multiplicty(0, Int.MaxValue, true)) - } - } - - TypeSystem.getInstance().defineTypes(tDef) - } - - test("test1") { - - val b = new InstanceBuilder - import b._ - - val instances = b create { - val a = instance("AT") { // use instance to create Referenceables. use closure to - // set attributes of instance - 'name ~ "A1" // use '~' to set attributes. 
Use a Symbol (names starting with ') for - 'stringSet ~ Seq("a", "a") - } - } - - val ts = TypeSystem.getInstance() - import scala.collection.JavaConversions._ - val typedInstances = instances.map { i => - val iTyp = ts.getDataType(classOf[ClassType], i.getTypeName) - iTyp.convert(i, Multiplicity.REQUIRED) - } - - typedInstances.foreach { i => - println(i) - } - } - - test("WrongMultiplicity") { - val b = new TypesBuilder - import b._ - val tDef = types { - _class("Wrong") { - "name" ~ (string, required) - "stringSet" ~ (string, multiplicty(0, Int.MaxValue, true)) - } - } - val me = intercept[MetadataException] { - TypeSystem.getInstance().defineTypes(tDef) - } - assert("A multiplicty of more than one requires a collection type for attribute 'stringSet'" == me.getMessage) - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilderTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilderTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilderTest.scala deleted file mode 100644 index f1f9024..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/builders/TypesBuilderTest.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.builders - -import org.apache.hadoop.metadata.typesystem.json.TypesSerialization -import org.apache.hadoop.metadata.typesystem.types.{TypeSystem, BaseTest} -import org.junit.runner.RunWith -import org.scalatest.{BeforeAndAfter, FunSuite} -import org.scalatest.junit.JUnitRunner - -@RunWith(classOf[JUnitRunner]) -class TypesBuilderTest extends BuilderTest { - - - test("test1") { - TypeSystem.getInstance().defineTypes(tDef) - - println(TypesSerialization.toJson(TypeSystem.getInstance(), x => true)) - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/SerializationTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/SerializationTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/SerializationTest.scala deleted file mode 100755 index 86cdb72..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/SerializationTest.scala +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. 
See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.metadata.typesystem.json - -import com.google.common.collect.ImmutableList -import org.apache.hadoop.metadata.typesystem.persistence.{ReferenceableInstance, StructInstance, Id} -import org.apache.hadoop.metadata.typesystem.types._ -import org.apache.hadoop.metadata.typesystem.types.utils.TypesUtil -import org.apache.hadoop.metadata.typesystem.{ITypedReferenceableInstance, ITypedStruct, Referenceable, Struct} -import org.json4s.native.JsonMethods._ -import org.json4s.native.Serialization.{write => swrite, _} -import org.json4s.{NoTypeHints, _} -import org.junit.{Assert, Before, Test} - -class SerializationTest extends BaseTest { - - private[metadata] var structType: StructType = null - private[metadata] var recursiveStructType: StructType = null - - @Before - override def setup { - super.setup - structType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_1).asInstanceOf[StructType] - recursiveStructType = getTypeSystem.getDataType(classOf[StructType], BaseTest.STRUCT_TYPE_2).asInstanceOf[StructType] - } - - @Test def test1 { - val s: Struct = BaseTest.createStruct() - val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED) - - Assert.assertEquals(ts.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.1, 1.1]\n\to : \t{b=2.0, a=1.0}\n}") - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new BigDecimalSerializer + new BigIntegerSerializer - - //Json representation - val ser = swrite(ts) - Assert.assertEquals(ser, "{\"$typeName$\":\"t1\",\"e\":1,\"n\":[1.1,1.1],\"h\":1.0,\"b\":true,\"k\":1,\"j\":1,\"d\":2,\"m\":[1,1],\"g\":1,\"a\":1,\"i\":1.0,\"c\":1,\"l\":\"" + BaseTest.TEST_DATE + "\",\"f\":1,\"o\":{\"b\":2.0,\"a\":1.0}}") - - // Typed Struct read back - val ts1 = read[StructInstance](ser) - Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t" + BaseTest.TEST_DATE + "\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}") - } - - @Test def test2 { - val s: Struct = BaseTest.createStruct() - val ts: ITypedStruct = structType.convert(s, Multiplicity.REQUIRED) - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new BigDecimalSerializer + new BigIntegerSerializer - - val ts1 = read[StructInstance]( - """ - {"$typeName$":"t1","e":1,"n":[1.1,1.1],"h":1.0,"b":true,"k":1,"j":1,"d":2,"m":[1,1],"g":1,"a":1,"i":1.0, - 
"c":1,"l":"2014-12-03T19:38:55.053Z","f":1,"o":{"b":2.0,"a":1.0}}""") - // Typed Struct read from string - Assert.assertEquals(ts1.toString, "{\n\ta : \t1\n\tb : \ttrue\n\tc : \t1\n\td : \t2\n\te : \t1\n\tf : \t1\n\tg : \t1\n\th : \t1.0\n\ti : \t1.0\n\tj : \t1\n\tk : \t1\n\tl : \t2014-12-03T19:38:55.053Z\n\tm : \t[1, 1]\n\tn : \t[1.100000000000000088817841970012523233890533447265625, 1.100000000000000088817841970012523233890533447265625]\n\to : \t{b=2.0, a=1.0}\n}") - } - - @Test def testTrait { - val A: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef("A", null, - TypesUtil.createRequiredAttrDef("a", DataTypes.INT_TYPE), - TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE), - TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE), - TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE)) - val B: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef( - "B", ImmutableList.of[String]("A"), - TypesUtil.createOptionalAttrDef("b", DataTypes.BOOLEAN_TYPE)) - val C: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef( - "C", ImmutableList.of[String]("A"), - TypesUtil.createOptionalAttrDef("c", DataTypes.BYTE_TYPE)) - val D: HierarchicalTypeDefinition[TraitType] = TypesUtil.createTraitTypeDef( - "D", ImmutableList.of[String]("B", "C"), - TypesUtil.createOptionalAttrDef("d", DataTypes.SHORT_TYPE)) - - defineTraits(A, B, C, D) - - val DType: TraitType = getTypeSystem.getDataType(classOf[TraitType], "D").asInstanceOf[TraitType] - val s1: Struct = new Struct("D") - s1.set("d", 1) - s1.set("c", 1) - s1.set("b", true) - s1.set("a", 1) - s1.set("A.B.D.b", true) - s1.set("A.B.D.c", 2) - s1.set("A.B.D.d", 2) - s1.set("A.C.D.a", 3) - s1.set("A.C.D.b", false) - s1.set("A.C.D.c", 3) - s1.set("A.C.D.d", 3) - - val s: Struct = BaseTest.createStruct() - val ts: ITypedStruct = DType.convert(s1, Multiplicity.REQUIRED) - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new BigDecimalSerializer + new BigIntegerSerializer - - // Typed Struct : - Assert.assertEquals(ts.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}") - - // Json representation : - val ser = swrite(ts) - Assert.assertEquals(ser, "{\"$typeName$\":\"D\",\"A.C.D.d\":3,\"A.B.D.c\":2,\"b\":true,\"A.C.D.c\":3,\"d\":1,\"A.B.D.b\":true,\"a\":1,\"A.C.D.b\":false,\"A.B.D.d\":2,\"c\":1,\"A.C.D.a\":3}") - - val ts1 = read[StructInstance]( - """ - {"$typeName$":"D","A.C.D.d":3,"A.B.D.c":2,"b":true,"A.C.D.c":3,"d":1, - "A.B.D.b":true,"a":1,"A.C.D.b":false,"A.B.D.d":2,"c":1,"A.C.D.a":3}""") - // Typed Struct read from string: - Assert.assertEquals(ts1.toString, "{\n\td : \t1\n\tb : \ttrue\n\tc : \t1\n\ta : \t1\n\tA.B.D.b : \ttrue\n\tA.B.D.c : \t2\n\tA.B.D.d : \t2\n\tA.C.D.a : \t3\n\tA.C.D.b : \tfalse\n\tA.C.D.c : \t3\n\tA.C.D.d : \t3\n}") - } - - def defineHRTypes(ts: TypeSystem) : Unit = { - val deptTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef( - "Department", - ImmutableList.of[String], - TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), - new AttributeDefinition("employees", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, true, "department")) - val personTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef( - "Person", ImmutableList.of[String], - TypesUtil.createRequiredAttrDef("name", DataTypes.STRING_TYPE), 
- new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"), - new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates")) - val managerTypeDef: HierarchicalTypeDefinition[ClassType] = TypesUtil.createClassTypeDef( - "Manager", ImmutableList.of[String]("Person"), - new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, false, "manager")) - val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] = - TypesUtil.createTraitTypeDef("SecurityClearance", ImmutableList.of[String], - TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)) - - ts.defineTypes(ImmutableList.of[StructTypeDefinition], - ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef), - ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef) - ) - - } - - def defineHRDept() : Referenceable = { - val hrDept: Referenceable = new Referenceable("Department") - val john: Referenceable = new Referenceable("Person") - val jane: Referenceable = new Referenceable("Manager", "SecurityClearance") - hrDept.set("name", "hr") - john.set("name", "John") - john.set("department", hrDept.getId) - jane.set("name", "Jane") - jane.set("department", hrDept.getId) - john.set("manager", jane.getId) - hrDept.set("employees", ImmutableList.of[Referenceable](john, jane)) - jane.set("subordinates", ImmutableList.of[Id](john.getId)) - jane.getTrait("SecurityClearance").set("level", 1) - hrDept - } - - @Test def testClass { - - val ts: TypeSystem = getTypeSystem - defineHRTypes(ts) - val hrDept: Referenceable = defineHRDept() - - val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department") - val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED) - - println(s"HR Dept Object Graph:\n${hrDept2}\n") - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer - - val ser = swrite(hrDept2) - println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n") - - println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n") - } - - @Test def testReference { - - val ts: TypeSystem = getTypeSystem - defineHRTypes(ts) - val hrDept: Referenceable = defineHRDept() - - - val jsonStr = InstanceSerialization.toJson(hrDept) - val hrDept2 = InstanceSerialization.fromJsonReferenceable(jsonStr) - - val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department") - val hrDept3: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED) - - println(s"HR Dept Object Graph:\n${hrDept3}\n") - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer - - val ser = swrite(hrDept3) - println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n") - - println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n") - } - - @Test def testReference2 { - - val ts: TypeSystem = getTypeSystem - defineHRTypes(ts) - val hrDept: Referenceable = defineHRDept() - - val deptType: ClassType = ts.getDataType(classOf[ClassType], "Department") - val hrDept2: ITypedReferenceableInstance = deptType.convert(hrDept, Multiplicity.REQUIRED) - - val jsonStr = InstanceSerialization.toJson(hrDept2) - val 
hrDept3 = InstanceSerialization.fromJsonReferenceable(jsonStr) - - val hrDept4: ITypedReferenceableInstance = deptType.convert(hrDept2, Multiplicity.REQUIRED) - - println(s"HR Dept Object Graph:\n${hrDept4}\n") - - implicit val formats = org.json4s.native.Serialization.formats(NoTypeHints) + new TypedStructSerializer + - new TypedReferenceableInstanceSerializer + new BigDecimalSerializer + new BigIntegerSerializer - - val ser = swrite(hrDept4) - println(s"HR Dept JSON:\n${pretty(render(parse(ser)))}\n") - - println(s"HR Dept Object Graph read from JSON:${read[ReferenceableInstance](ser)}\n") - - } - -} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/TypesSerializationTest.scala ---------------------------------------------------------------------- diff --git a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/TypesSerializationTest.scala b/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/TypesSerializationTest.scala deleted file mode 100755 index 7b4e44d..0000000 --- a/typesystem/src/test/scala/org/apache/hadoop/metadata/typesystem/json/TypesSerializationTest.scala +++ /dev/null @@ -1,300 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.hadoop.metadata.typesystem.json - -import com.google.common.collect.ImmutableList -import org.apache.hadoop.metadata.typesystem.types._ -import org.junit.{Assert, Test} - -class TypesSerializationTest extends BaseTest with TypeHelpers { - - @Test def test1: Unit = { - - val ts = getTypeSystem - - val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE), - optionalAttr("e", DataTypes.INT_TYPE), - optionalAttr("f", DataTypes.INT_TYPE), - optionalAttr("g", DataTypes.LONG_TYPE), - optionalAttr("h", DataTypes.FLOAT_TYPE), - optionalAttr("i", DataTypes.DOUBLE_TYPE), - optionalAttr("j", DataTypes.BIGINTEGER_TYPE), - optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("l", DataTypes.DATE_TYPE), - optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), - optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), - optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) - - - ts.defineTypes(ImmutableList.of[StructTypeDefinition](sDef), - ImmutableList.of[HierarchicalTypeDefinition[TraitType]], - ImmutableList.of[HierarchicalTypeDefinition[ClassType]] - ) - - val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(), - requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE)) - val B: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE)) - val C: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE)) - val D: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE)) - - defineTraits(ts, A, B, C, D) - - ts.defineEnumType("HiveObjectType", - new EnumValue("GLOBAL", 1), - new EnumValue("DATABASE", 2), - new EnumValue("TABLE", 3), - new EnumValue("PARTITION", 4), - new EnumValue("COLUMN", 5)) - - ts.defineEnumType("PrincipalType", - new EnumValue("USER", 1), - new EnumValue("ROLE", 2), - new EnumValue("GROUP", 3)) - - ts.defineEnumType("TxnState", - new EnumValue("COMMITTED", 1), - new EnumValue("ABORTED", 2), - new EnumValue("OPEN", 3)) - - ts.defineEnumType("LockLevel", - new EnumValue("DB", 1), - new EnumValue("TABLE", 2), - new EnumValue("PARTITION", 3)) - - defineClassType(ts, createClassTypeDef("t4", List(), - requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE), - optionalAttr("enum1", ts.getDataType(classOf[EnumType], "HiveObjectType")), - optionalAttr("e", DataTypes.INT_TYPE), - optionalAttr("f", DataTypes.INT_TYPE), - optionalAttr("g", DataTypes.LONG_TYPE), - optionalAttr("enum2", ts.getDataType(classOf[EnumType], "PrincipalType")), - optionalAttr("h", DataTypes.FLOAT_TYPE), - optionalAttr("i", DataTypes.DOUBLE_TYPE), - optionalAttr("j", DataTypes.BIGINTEGER_TYPE), - optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("enum3", ts.getDataType(classOf[EnumType], "TxnState")), - optionalAttr("l", DataTypes.DATE_TYPE), - optionalAttr("m", ts.defineArrayType(DataTypes.INT_TYPE)), - optionalAttr("n", ts.defineArrayType(DataTypes.BIGDECIMAL_TYPE)), - optionalAttr("o", ts.defineMapType(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), - 
optionalAttr("enum4", ts.getDataType(classOf[EnumType], "LockLevel")))) - - val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(), - requiredAttr("name", DataTypes.STRING_TYPE), - new AttributeDefinition("employees", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, true, "department")) - val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(), - requiredAttr("name", DataTypes.STRING_TYPE), - new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"), - new AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates") - ) - val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"), - new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, false, "manager") - ) - val securityClearanceTypeDef: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("SecurityClearance", List(), - requiredAttr("level", DataTypes.INT_TYPE) - ) - ts.defineTypes(ImmutableList.of[StructTypeDefinition], - ImmutableList.of[HierarchicalTypeDefinition[TraitType]](securityClearanceTypeDef), - ImmutableList.of[HierarchicalTypeDefinition[ClassType]](deptTypeDef, personTypeDef, managerTypeDef)) - - val ser = TypesSerialization.toJson(ts, _ => true) - - val typesDef1 = TypesSerialization.fromJson(ser) - - val ts1 = TypeSystem.getInstance() - ts1.reset() - - typesDef1.enumTypes.foreach(ts1.defineEnumType(_)) - - ts1.defineTypes(ImmutableList.copyOf(typesDef1.structTypes.toArray), - ImmutableList.copyOf(typesDef1.traitTypes.toArray), - ImmutableList.copyOf(typesDef1.classTypes.toArray) - ) - val ser2 = TypesSerialization.toJson(ts1, _ => true) - val typesDef2 = TypesSerialization.fromJson(ser2) - - Assert.assertEquals(typesDef1, typesDef2) - } - - @Test def test2: Unit = { - - val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE), - optionalAttr("e", DataTypes.INT_TYPE), - optionalAttr("f", DataTypes.INT_TYPE), - optionalAttr("g", DataTypes.LONG_TYPE), - optionalAttr("h", DataTypes.FLOAT_TYPE), - optionalAttr("i", DataTypes.DOUBLE_TYPE), - optionalAttr("j", DataTypes.BIGINTEGER_TYPE), - optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("l", DataTypes.DATE_TYPE), - optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), - optionalAttr("n", DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), - optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) - - - - val ser2 = TypesSerialization.toJson(sDef) - val typesDef2 = TypesSerialization.fromJson(ser2) - - Assert.assertEquals(sDef, typesDef2.structTypes(0)) - } - - @Test def test3: Unit = { - - val sDef = structDef("ts1", requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE), - optionalAttr("e", DataTypes.INT_TYPE), - optionalAttr("f", DataTypes.INT_TYPE), - optionalAttr("g", DataTypes.LONG_TYPE), - optionalAttr("h", DataTypes.FLOAT_TYPE), - optionalAttr("i", DataTypes.DOUBLE_TYPE), - optionalAttr("j", DataTypes.BIGINTEGER_TYPE), - optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("l", DataTypes.DATE_TYPE), - optionalAttr("m", DataTypes.arrayTypeName(DataTypes.INT_TYPE)), - optionalAttr("n", 
DataTypes.arrayTypeName(DataTypes.BIGDECIMAL_TYPE)), - optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE))) - - - - val ser2 = TypesSerialization.toJson(sDef) - val typesDef2 = TypesSerialization.fromJson(ser2) - - Assert.assertEquals(sDef, typesDef2.structTypes(0)) - } - - @Test def test4 : Unit = { - - val A: HierarchicalTypeDefinition[TraitType] = createTraitTypeDef("A", List(), - requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE)) - val B: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("B", Seq("A"), optionalAttr("b", DataTypes.BOOLEAN_TYPE)) - val C: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("C", Seq("A"), optionalAttr("c", DataTypes.BYTE_TYPE)) - val D: HierarchicalTypeDefinition[TraitType] = - createTraitTypeDef("D", Seq("B", "C"), optionalAttr("d", DataTypes.SHORT_TYPE)) - - val typDefs = Seq(A,B,C,D) - typDefs.foreach { tDef => - val ser2 = TypesSerialization.toJson(tDef, true) - val typesDef2 = TypesSerialization.fromJson(ser2) - Assert.assertEquals(tDef, typesDef2.traitTypes(0)) - - } - } - - @Test def test5 : Unit = { - val e1 = new EnumTypeDefinition("HiveObjectType", - new EnumValue("GLOBAL", 1), - new EnumValue("DATABASE", 2), - new EnumValue("TABLE", 3), - new EnumValue("PARTITION", 4), - new EnumValue("COLUMN", 5)) - - val e2 = new EnumTypeDefinition("PrincipalType", - new EnumValue("USER", 1), - new EnumValue("ROLE", 2), - new EnumValue("GROUP", 3)) - - val e3 = new EnumTypeDefinition("TxnState", - new EnumValue("COMMITTED", 1), - new EnumValue("ABORTED", 2), - new EnumValue("OPEN", 3)) - - val e4 = new EnumTypeDefinition("LockLevel", - new EnumValue("DB", 1), - new EnumValue("TABLE", 2), - new EnumValue("PARTITION", 3)) - - val typDefs = Seq(e1,e2,e3,e4) - typDefs.foreach { tDef => - val ser2 = TypesSerialization.toJson(tDef) - val typesDef2 = TypesSerialization.fromJson(ser2) - Assert.assertEquals(tDef, typesDef2.enumTypes(0)) - - } - } - - @Test def test6 : Unit = { - val typDef = createClassTypeDef("t4", List(), - requiredAttr("a", DataTypes.INT_TYPE), - optionalAttr("b", DataTypes.BOOLEAN_TYPE), - optionalAttr("c", DataTypes.BYTE_TYPE), - optionalAttr("d", DataTypes.SHORT_TYPE), - optionalAttr("enum1", "HiveObjectType"), - optionalAttr("e", DataTypes.INT_TYPE), - optionalAttr("f", DataTypes.INT_TYPE), - optionalAttr("g", DataTypes.LONG_TYPE), - optionalAttr("enum2", "PrincipalType"), - optionalAttr("h", DataTypes.FLOAT_TYPE), - optionalAttr("i", DataTypes.DOUBLE_TYPE), - optionalAttr("j", DataTypes.BIGINTEGER_TYPE), - optionalAttr("k", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("enum3", "TxnState"), - optionalAttr("l", DataTypes.DATE_TYPE), - optionalAttr("m", DataTypes.INT_TYPE), - optionalAttr("n", DataTypes.BIGDECIMAL_TYPE), - optionalAttr("o", DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.DOUBLE_TYPE)), - optionalAttr("enum4", "LockLevel")) - - val deptTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Department", List(), - requiredAttr("name", DataTypes.STRING_TYPE), - new AttributeDefinition("employees", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, true, "department")) - val personTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Person", List(), - requiredAttr("name", DataTypes.STRING_TYPE), - new AttributeDefinition("department", "Department", Multiplicity.REQUIRED, false, "employees"), - new 
AttributeDefinition("manager", "Manager", Multiplicity.OPTIONAL, false, "subordinates") - ) - val managerTypeDef: HierarchicalTypeDefinition[ClassType] = createClassTypeDef("Manager", List("Person"), - new AttributeDefinition("subordinates", String.format("array<%s>", "Person"), - Multiplicity.COLLECTION, false, "manager") - ) - - val typDefs = Seq(typDef, deptTypeDef, personTypeDef, managerTypeDef) - typDefs.foreach { tDef => - val ser2 = TypesSerialization.toJson(tDef, false) - val typesDef2 = TypesSerialization.fromJson(ser2) - Assert.assertEquals(tDef, typesDef2.classTypes(0)) - - } - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/Main.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/Main.java b/webapp/src/main/java/org/apache/atlas/Main.java new file mode 100755 index 0000000..76ca965 --- /dev/null +++ b/webapp/src/main/java/org/apache/atlas/Main.java @@ -0,0 +1,144 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas; + +import org.apache.atlas.web.service.EmbeddedServer; +import org.apache.commons.cli.CommandLine; +import org.apache.commons.cli.GnuParser; +import org.apache.commons.cli.Option; +import org.apache.commons.cli.Options; +import org.apache.commons.cli.ParseException; +import org.apache.commons.configuration.PropertiesConfiguration; +import org.apache.commons.lang.StringUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.Iterator; + +/** + * Driver for running Metadata as a standalone server with embedded jetty server. + */ +public final class Main { + private static final Logger LOG = LoggerFactory.getLogger(Main.class); + private static final String APP_PATH = "app"; + private static final String APP_PORT = "port"; + private static final String METADATA_HOME = "atlas.home"; + private static final String METADATA_LOG_DIR = "atlas.log.dir"; + + /** + * Prevent users from constructing this. 
+ */ + private Main() { + } + + private static CommandLine parseArgs(String[] args) throws ParseException { + Options options = new Options(); + Option opt; + + opt = new Option(APP_PATH, true, "Application Path"); + opt.setRequired(false); + options.addOption(opt); + + opt = new Option(APP_PORT, true, "Application Port"); + opt.setRequired(false); + options.addOption(opt); + + return new GnuParser().parse(options, args); + } + + public static void main(String[] args) throws Exception { + CommandLine cmd = parseArgs(args); + PropertiesConfiguration buildConfiguration = + new PropertiesConfiguration("atlas-buildinfo.properties"); + String appPath = "webapp/target/atlas-webapp-" + getProjectVersion(buildConfiguration); + + if (cmd.hasOption(APP_PATH)) { + appPath = cmd.getOptionValue(APP_PATH); + } + + setApplicationHome(); + PropertiesConfiguration configuration = PropertiesUtil.getApplicationProperties(); + final String enableTLSFlag = configuration.getString("atlas.enableTLS"); + final int appPort = getApplicationPort(cmd, enableTLSFlag); + final boolean enableTLS = isTLSEnabled(enableTLSFlag, appPort); + configuration.setProperty("atlas.enableTLS", String.valueOf(enableTLS)); + + showStartupInfo(buildConfiguration, enableTLS, appPort); + EmbeddedServer server = EmbeddedServer.newServer(appPort, appPath, enableTLS); + server.start(); + } + + private static void setApplicationHome() { + if (System.getProperty(METADATA_HOME) == null) { + System.setProperty(METADATA_HOME, "target"); + } + if (System.getProperty(METADATA_LOG_DIR) == null) { + System.setProperty(METADATA_LOG_DIR, "target/logs"); + } + } + + public static String getProjectVersion(PropertiesConfiguration buildConfiguration) { + return buildConfiguration.getString("project.version"); + } + + private static int getApplicationPort(CommandLine cmd, String enableTLSFlag) { + final int appPort; + if (cmd.hasOption(APP_PORT)) { + appPort = Integer.valueOf(cmd.getOptionValue(APP_PORT)); + } else { + // default : metadata.enableTLS is true + appPort = StringUtils.isEmpty(enableTLSFlag) + || enableTLSFlag.equals("true") ? 21443 : 21000; + } + + return appPort; + } + + private static boolean isTLSEnabled(String enableTLSFlag, int appPort) { + return Boolean.valueOf(StringUtils.isEmpty(enableTLSFlag) + ? System + .getProperty("atlas.enableTLS", (appPort % 1000) == 443 ? "true" : "false") + : enableTLSFlag); + } + + private static void showStartupInfo(PropertiesConfiguration buildConfiguration, + boolean enableTLS, int appPort) { + StringBuilder buffer = new StringBuilder(); + buffer.append("\n############################################"); + buffer.append("############################################"); + buffer.append("\n DGI Server (STARTUP)"); + buffer.append("\n"); + try { + final Iterator<String> keys = buildConfiguration.getKeys(); + while (keys.hasNext()) { + String key = keys.next(); + buffer.append('\n').append('\t').append(key). + append(":\t").append(buildConfiguration.getProperty(key)); + } + } catch (Throwable e) { + buffer.append("*** Unable to get build info ***"); + } + buffer.append("\n############################################"); + buffer.append("############################################"); + LOG.info(buffer.toString()); + LOG.info(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>"); + LOG.info("Server starting with TLS ? 
{} on port {}", enableTLS, appPort); + LOG.info("<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"); + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/30711973/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java ---------------------------------------------------------------------- diff --git a/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java new file mode 100755 index 0000000..4c0ffe8 --- /dev/null +++ b/webapp/src/main/java/org/apache/atlas/examples/QuickStart.java @@ -0,0 +1,478 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.examples; + +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; +import org.apache.atlas.MetadataServiceClient; +import org.apache.atlas.typesystem.Referenceable; +import org.apache.atlas.typesystem.TypesDef; +import org.apache.atlas.typesystem.json.InstanceSerialization; +import org.apache.atlas.typesystem.json.TypesSerialization; +import org.apache.atlas.typesystem.persistence.Id; +import org.apache.atlas.typesystem.types.AttributeDefinition; +import org.apache.atlas.typesystem.types.ClassType; +import org.apache.atlas.typesystem.types.DataTypes; +import org.apache.atlas.typesystem.types.EnumTypeDefinition; +import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition; +import org.apache.atlas.typesystem.types.IDataType; +import org.apache.atlas.typesystem.types.Multiplicity; +import org.apache.atlas.typesystem.types.StructTypeDefinition; +import org.apache.atlas.typesystem.types.TraitType; +import org.apache.atlas.typesystem.types.TypeUtils; +import org.apache.atlas.typesystem.types.utils.TypesUtil; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONObject; + +import java.util.List; + +/** + * A driver that sets up sample types and data for testing purposes. + * Please take a look at QueryDSL in docs for the Meta Model. + * todo - move this to examples module. 
+ */
+public class QuickStart {
+
+    public static void main(String[] args) throws Exception {
+        String baseUrl = getServerUrl(args);
+        QuickStart quickStart = new QuickStart(baseUrl);
+
+        // Shows how to create types in DGI for your meta model
+        quickStart.createTypes();
+
+        // Shows how to create entities (instances) for the added types in DGI
+        quickStart.createEntities();
+
+        // Shows some search queries using DSL based on types
+        quickStart.search();
+    }
+
+    static String getServerUrl(String[] args) {
+        String baseUrl = "http://localhost:21000";
+        if (args.length > 0) {
+            baseUrl = args[0];
+        }
+
+        return baseUrl;
+    }
+
+    private static final String DATABASE_TYPE = "DB";
+    private static final String COLUMN_TYPE = "Column";
+    private static final String TABLE_TYPE = "Table";
+    private static final String VIEW_TYPE = "View";
+    private static final String LOAD_PROCESS_TYPE = "LoadProcess";
+    private static final String STORAGE_DESC_TYPE = "StorageDesc";
+
+    private static final String[] TYPES = {
+            DATABASE_TYPE, TABLE_TYPE, STORAGE_DESC_TYPE, COLUMN_TYPE, LOAD_PROCESS_TYPE, VIEW_TYPE,
+            "JdbcAccess", "ETL", "Metric", "PII", "Fact", "Dimension"
+    };
+
+    private final MetadataServiceClient metadataServiceClient;
+
+    QuickStart(String baseUrl) {
+        metadataServiceClient = new MetadataServiceClient(baseUrl);
+    }
+
+    void createTypes() throws Exception {
+        TypesDef typesDef = createTypeDefinitions();
+
+        String typesAsJSON = TypesSerialization.toJson(typesDef);
+        System.out.println("typesAsJSON = " + typesAsJSON);
+        metadataServiceClient.createType(typesAsJSON);
+
+        // verify types created
+        verifyTypesCreated();
+    }
+
+    TypesDef createTypeDefinitions() throws Exception {
+        HierarchicalTypeDefinition<ClassType> dbClsDef
+                = TypesUtil.createClassTypeDef(DATABASE_TYPE, null,
+                attrDef("name", DataTypes.STRING_TYPE),
+                attrDef("description", DataTypes.STRING_TYPE),
+                attrDef("locationUri", DataTypes.STRING_TYPE),
+                attrDef("owner", DataTypes.STRING_TYPE),
+                attrDef("createTime", DataTypes.INT_TYPE)
+        );
+
+        HierarchicalTypeDefinition<ClassType> storageDescClsDef =
+                TypesUtil.createClassTypeDef(STORAGE_DESC_TYPE, null,
+                        attrDef("location", DataTypes.STRING_TYPE),
+                        attrDef("inputFormat", DataTypes.STRING_TYPE),
+                        attrDef("outputFormat", DataTypes.STRING_TYPE),
+                        attrDef("compressed", DataTypes.STRING_TYPE,
+                                Multiplicity.REQUIRED, false, null)
+                );
+
+        HierarchicalTypeDefinition<ClassType> columnClsDef =
+                TypesUtil.createClassTypeDef(COLUMN_TYPE, null,
+                        attrDef("name", DataTypes.STRING_TYPE),
+                        attrDef("dataType", DataTypes.STRING_TYPE),
+                        attrDef("comment", DataTypes.STRING_TYPE)
+                );
+
+        HierarchicalTypeDefinition<ClassType> tblClsDef =
+                TypesUtil.createClassTypeDef(TABLE_TYPE, ImmutableList.of("DataSet"),
+                        new AttributeDefinition("db", DATABASE_TYPE,
+                                Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("sd", STORAGE_DESC_TYPE,
+                                Multiplicity.REQUIRED, true, null),
+                        attrDef("owner", DataTypes.STRING_TYPE),
+                        attrDef("createTime", DataTypes.INT_TYPE),
+                        attrDef("lastAccessTime", DataTypes.INT_TYPE),
+                        attrDef("retention", DataTypes.INT_TYPE),
+                        attrDef("viewOriginalText", DataTypes.STRING_TYPE),
+                        attrDef("viewExpandedText", DataTypes.STRING_TYPE),
+                        attrDef("tableType", DataTypes.STRING_TYPE),
+                        attrDef("temporary", DataTypes.BOOLEAN_TYPE),
+                        new AttributeDefinition("columns",
+                                DataTypes.arrayTypeName(COLUMN_TYPE),
+                                Multiplicity.COLLECTION, true, null)
+                );
+
+        HierarchicalTypeDefinition<ClassType> loadProcessClsDef =
+                TypesUtil.createClassTypeDef(LOAD_PROCESS_TYPE, ImmutableList.of("Process"),
+                        attrDef("userName", DataTypes.STRING_TYPE),
+                        attrDef("startTime", DataTypes.INT_TYPE),
+                        attrDef("endTime", DataTypes.INT_TYPE),
+                        attrDef("queryText", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryPlan", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryId", DataTypes.STRING_TYPE, Multiplicity.REQUIRED),
+                        attrDef("queryGraph", DataTypes.STRING_TYPE, Multiplicity.REQUIRED)
+                );
+
+        HierarchicalTypeDefinition<ClassType> viewClsDef =
+                TypesUtil.createClassTypeDef(VIEW_TYPE, null,
+                        attrDef("name", DataTypes.STRING_TYPE),
+                        new AttributeDefinition("db", DATABASE_TYPE,
+                                Multiplicity.REQUIRED, false, null),
+                        new AttributeDefinition("inputTables",
+                                DataTypes.arrayTypeName(TABLE_TYPE),
+                                Multiplicity.COLLECTION, false, null)
+                );
+
+        HierarchicalTypeDefinition<TraitType> dimTraitDef =
+                TypesUtil.createTraitTypeDef("Dimension", null);
+
+        HierarchicalTypeDefinition<TraitType> factTraitDef =
+                TypesUtil.createTraitTypeDef("Fact", null);
+
+        HierarchicalTypeDefinition<TraitType> piiTraitDef =
+                TypesUtil.createTraitTypeDef("PII", null);
+
+        HierarchicalTypeDefinition<TraitType> metricTraitDef =
+                TypesUtil.createTraitTypeDef("Metric", null);
+
+        HierarchicalTypeDefinition<TraitType> etlTraitDef =
+                TypesUtil.createTraitTypeDef("ETL", null);
+
+        HierarchicalTypeDefinition<TraitType> jdbcTraitDef =
+                TypesUtil.createTraitTypeDef("JdbcAccess", null);
+
+        return TypeUtils.getTypesDef(
+                ImmutableList.<EnumTypeDefinition>of(),
+                ImmutableList.<StructTypeDefinition>of(),
+                ImmutableList.of(dimTraitDef, factTraitDef,
+                        piiTraitDef, metricTraitDef, etlTraitDef, jdbcTraitDef),
+                ImmutableList.of(dbClsDef, storageDescClsDef, columnClsDef,
+                        tblClsDef, loadProcessClsDef, viewClsDef)
+        );
+    }
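Since createTypeDefinitions() feeds TypesSerialization.toJson() in createTypes(), it is worth noting that the same JSON can be parsed back with TypesSerialization.fromJson() - exactly the round trip the TypesSerializationTest in this commit asserts on. A minimal sketch of that check, which could live inside QuickStart (illustration only):

    // Sketch: the types JSON round-trips through the serializer.
    void verifyTypesRoundTrip() throws Exception {
        TypesDef typesDef = createTypeDefinitions();
        String json = TypesSerialization.toJson(typesDef);
        TypesDef parsed = TypesSerialization.fromJson(json);
        // TypesSerializationTest in this commit performs this same
        // serialize/parse cycle and asserts the two results are equal.
    }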
+    AttributeDefinition attrDef(String name, IDataType dT) {
+        return attrDef(name, dT, Multiplicity.OPTIONAL, false, null);
+    }
+
+    AttributeDefinition attrDef(String name, IDataType dT, Multiplicity m) {
+        return attrDef(name, dT, m, false, null);
+    }
+
+    AttributeDefinition attrDef(String name, IDataType dT,
+                                Multiplicity m, boolean isComposite, String reverseAttributeName) {
+        Preconditions.checkNotNull(name);
+        Preconditions.checkNotNull(dT);
+        return new AttributeDefinition(name, dT.getName(), m, isComposite, reverseAttributeName);
+    }
+
+    void createEntities() throws Exception {
+        Id salesDB = database(
+                "Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
+
+        Referenceable sd = rawStorageDescriptor("hdfs://host:8000/apps/warehouse/sales",
+                "TextInputFormat", "TextOutputFormat", true);
+
+        List<Referenceable> salesFactColumns = ImmutableList.of(
+                rawColumn("time_id", "int", "time id"),
+                rawColumn("product_id", "int", "product id"),
+                rawColumn("customer_id", "int", "customer id", "PII"),
+                rawColumn("sales", "double", "sales", "Metric")
+        );
+
+        Id salesFact = table("sales_fact", "sales fact table",
+                salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
+
+        List<Referenceable> productDimColumns = ImmutableList.of(
+                rawColumn("product_id", "int", "product id"),
+                rawColumn("product_name", "string", "product name"),
+                rawColumn("brand_name", "int", "brand name")
+        );
+
+        Id productDim = table("product_dim", "product dimension table",
+                salesDB, sd, "John Doe", "Managed", productDimColumns, "Dimension");
+
+        List<Referenceable> timeDimColumns = ImmutableList.of(
+                rawColumn("time_id", "int", "time id"),
+                rawColumn("dayOfYear", "int", "day Of Year"),
+                rawColumn("weekDay", "int", "week Day")
+        );
+
+        Id timeDim = table("time_dim", "time dimension table",
+                salesDB, sd, "John Doe", "External", timeDimColumns, "Dimension");
+
+        List<Referenceable> customerDimColumns = ImmutableList.of(
+                rawColumn("customer_id", "int", "customer id", "PII"),
+                rawColumn("name", "string", "customer name", "PII"),
+                rawColumn("address", "string", "customer address", "PII")
+        );
+
+        Id customerDim = table("customer_dim", "customer dimension table",
+                salesDB, sd, "fetl", "External", customerDimColumns, "Dimension");
+
+        Id reportingDB = database("Reporting", "reporting database", "Jane BI",
+                "hdfs://host:8000/apps/warehouse/reporting");
+
+        Id salesFactDaily = table("sales_fact_daily_mv",
+                "sales fact daily materialized view", reportingDB, sd,
+                "Joe BI", "Managed", salesFactColumns, "Metric");
+
+        loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL",
+                ImmutableList.of(salesFact, timeDim), ImmutableList.of(salesFactDaily),
+                "create table as select ", "plan", "id", "graph",
+                "ETL");
+
+        view("product_dim_view", reportingDB,
+                ImmutableList.of(productDim), "Dimension", "JdbcAccess");
+
+        view("customer_dim_view", reportingDB,
+                ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
+
+        Id salesFactMonthly = table("sales_fact_monthly_mv",
+                "sales fact monthly materialized view",
+                reportingDB, sd, "Jane BI", "Managed", salesFactColumns, "Metric");
+
+        loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL",
+                ImmutableList.of(salesFactDaily), ImmutableList.of(salesFactMonthly),
+                "create table as select ", "plan", "id", "graph",
+                "ETL");
+    }
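One detail in createEntities() that is easy to miss: the "db" attribute of each table is passed as an Id (a reference to the already-created database entity), while "sd" is passed as a full Referenceable, matching the isComposite = true flag on the "sd" attribute in createTypeDefinitions(). A minimal contrast, lifted from the table() helper further down (fragment for illustration, not standalone code):

    // "db" was defined with isComposite = false: pass a reference (Id)
    // to an entity that was created separately and has its own guid.
    referenceable.set("db", salesDB);   // salesDB is an Id
    // "sd" was defined with isComposite = true: the storage descriptor
    // is owned by the table and is submitted inline as a Referenceable.
    referenceable.set("sd", sd);        // sd is a Referenceable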
+    private Id createInstance(Referenceable referenceable) throws Exception {
+        String typeName = referenceable.getTypeName();
+
+        String entityJSON = InstanceSerialization.toJson(referenceable, true);
+        System.out.println("Submitting new entity= " + entityJSON);
+        JSONObject jsonObject = metadataServiceClient.createEntity(entityJSON);
+        String guid = jsonObject.getString(MetadataServiceClient.GUID);
+        System.out.println("created instance for type " + typeName + ", guid: " + guid);
+
+        // return the Id for created instance with guid
+        return new Id(guid, referenceable.getId().getVersion(), referenceable.getTypeName());
+    }
+
+    Id database(String name, String description,
+                String owner, String locationUri,
+                String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(DATABASE_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("locationUri", locationUri);
+        referenceable.set("createTime", System.currentTimeMillis());
+
+        return createInstance(referenceable);
+    }
+
+    Referenceable rawStorageDescriptor(String location, String inputFormat,
+                                       String outputFormat,
+                                       boolean compressed) throws Exception {
+        Referenceable referenceable = new Referenceable(STORAGE_DESC_TYPE);
+        referenceable.set("location", location);
+        referenceable.set("inputFormat", inputFormat);
+        referenceable.set("outputFormat", outputFormat);
+        referenceable.set("compressed", compressed);
+
+        return referenceable;
+    }
+
+    Referenceable rawColumn(String name, String dataType, String comment,
+                            String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(COLUMN_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set("dataType", dataType);
+        referenceable.set("comment", comment);
+
+        return referenceable;
+    }
+    Id table(String name, String description,
+             Id dbId, Referenceable sd,
+             String owner, String tableType,
+             List<Referenceable> columns,
+             String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(TABLE_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("owner", owner);
+        referenceable.set("tableType", tableType);
+        referenceable.set("createTime", System.currentTimeMillis());
+        referenceable.set("lastAccessTime", System.currentTimeMillis());
+        referenceable.set("retention", System.currentTimeMillis());
+        referenceable.set("db", dbId);
+        referenceable.set("sd", sd);
+        referenceable.set("columns", columns);
+
+        return createInstance(referenceable);
+    }
+
+    Id loadProcess(String name, String description, String user,
+                   List<Id> inputTables,
+                   List<Id> outputTables,
+                   String queryText, String queryPlan,
+                   String queryId, String queryGraph,
+                   String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(LOAD_PROCESS_TYPE, traitNames);
+        // super type attributes
+        referenceable.set("name", name);
+        referenceable.set("description", description);
+        referenceable.set("inputs", inputTables);
+        referenceable.set("outputs", outputTables);
+
+        // the LoadProcess type declares this attribute as "userName";
+        // setting "user" would not match any declared attribute
+        referenceable.set("userName", user);
+        referenceable.set("startTime", System.currentTimeMillis());
+        referenceable.set("endTime", System.currentTimeMillis() + 10000);
+
+        referenceable.set("queryText", queryText);
+        referenceable.set("queryPlan", queryPlan);
+        referenceable.set("queryId", queryId);
+        referenceable.set("queryGraph", queryGraph);
+
+        return createInstance(referenceable);
+    }
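Because LOAD_PROCESS_TYPE extends the built-in Process super type (see ImmutableList.of("Process") in createTypeDefinitions()), the "inputs" and "outputs" lists set above are what tie tables together for lineage - the relationship the commented-out lineage queries in getDSLQueries() below would traverse. A minimal, hypothetical call using the helpers above (table and column names invented; assumes the salesDB/sd/salesFactColumns variables from createEntities() are in scope):

    // Hypothetical sketch: a process that reads one table and writes
    // another, creating the edge that lineage queries follow.
    Id staged = table("sales_staging", "staging table", salesDB, sd,
            "John ETL", "Managed", salesFactColumns);
    Id cleaned = table("sales_clean", "cleaned table", salesDB, sd,
            "John ETL", "Managed", salesFactColumns);
    loadProcess("cleanSales", "normalize sales rows", "John ETL",
            ImmutableList.of(staged), ImmutableList.of(cleaned),
            "insert overwrite table sales_clean select ...", "plan", "id", "graph",
            "ETL");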
+    Id view(String name, Id dbId,
+            List<Id> inputTables,
+            String... traitNames) throws Exception {
+        Referenceable referenceable = new Referenceable(VIEW_TYPE, traitNames);
+        referenceable.set("name", name);
+        referenceable.set("db", dbId);
+
+        referenceable.set("inputTables", inputTables);
+
+        return createInstance(referenceable);
+    }
+
+    private void verifyTypesCreated() throws Exception {
+        List<String> types = metadataServiceClient.listTypes();
+        for (String type : TYPES) {
+            // an explicit check rather than assert, which is a no-op
+            // unless the JVM runs with -ea
+            Preconditions.checkState(types.contains(type), "type %s is not registered", type);
+        }
+    }
+
+    private String[] getDSLQueries() {
+        return new String[]{
+                "from DB",
+                "DB",
+                "DB where name=\"Reporting\"",
+                "DB where DB.name=\"Reporting\"",
+                "DB name = \"Reporting\"",
+                "DB DB.name = \"Reporting\"",
+                "DB where name=\"Reporting\" select name, owner",
+                "DB where DB.name=\"Reporting\" select name, owner",
+                "DB has name",
+                "DB where DB has name",
+                "DB, Table",
+                "DB is JdbcAccess",
+                /*
+                "DB, hive_process has name",
+                "DB as db1, Table where db1.name = \"Reporting\"",
+                "DB where DB.name=\"Reporting\" and DB.createTime < " + System.currentTimeMillis()},
+                */
+                "from Table",
+                "Table",
+                "Table is Dimension",
+                "Column where Column isa PII",
+                "View is Dimension",
+                /*"Column where Column isa PII select Column.name",*/
+                "Column select Column.name",
+                "Column select name",
+                "Column where Column.name=\"customer_id\"",
+                "from Table select Table.name",
+                "DB where (name = \"Reporting\")",
+                "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1",
+                "DB where DB is JdbcAccess",
+                "DB where DB has name",
+                "DB Table",
+                "DB where DB has name",
+                "DB as db1 Table where (db1.name = \"Reporting\")",
+                "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ",
+                /*
+                todo: does not work
+                "DB where (name = \"Reporting\") and ((createTime + 1) > 0)",
+                "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
+                "DB as db1 Table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name as dbName, tab.name as tabName",
+                "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
+                "DB as db1 Table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner select db1.name as dbName, tab.name as tabName",
+                */
+                // trait searches
+                "Dimension",
+                /*"Fact", - todo: does not work*/
+                "JdbcAccess",
+                "ETL",
+                "Metric",
+                "PII",
+                /*
+                // Lineage - todo - fix this, it's not working
+                "Table hive_process outputTables",
+                "Table loop (hive_process outputTables)",
+                "Table as _loop0 loop (hive_process outputTables) withPath",
+                "Table as src loop (hive_process outputTables) as dest select src.name as srcTable, dest.name as destTable withPath",
+                */
+                "Table where name=\"sales_fact\", columns",
+                "Table where name=\"sales_fact\", columns as column select column.name, column.dataType, column.comment",
+                "from DataSet",
+                "from Process",
+        };
+    }
+
+    private void search() throws Exception {
+        for (String dslQuery : getDSLQueries()) {
+            JSONObject response = metadataServiceClient.searchEntity(dslQuery);
+            JSONObject results = response.getJSONObject(MetadataServiceClient.RESULTS);
+            if (!results.isNull("rows")) {
+                JSONArray rows = results.getJSONArray("rows");
+                System.out.println("query [" + dslQuery + "] returned [" + rows.length() + "] rows");
+            } else {
+                System.out.println("query [" + dslQuery + "] failed, results:" + results.toString());
+            }
+        }
+    }
+}
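For anyone trying the example end to end: QuickStart takes the server base URL as its only (optional) argument, falling back to http://localhost:21000 as getServerUrl() shows. A rough usage sketch, with the classpath left unspecified since the webapp packaging is not shown in this commit:

    // Run against a locally started server (see Main above), e.g.:
    //   java -cp <atlas-webapp-classpath> org.apache.atlas.examples.QuickStart
    //   java -cp <atlas-webapp-classpath> org.apache.atlas.examples.QuickStart http://somehost:21000
    //
    // With the two databases created in createEntities() ("Sales" and
    // "Reporting"), a successful run should print lines such as:
    //   query [from DB] returned [2] rows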
