http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
new file mode 100755
index 0000000..98a7abc
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/EntityV2JerseyResourceIT.java
@@ -0,0 +1,769 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.integration;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Lists;
+import com.sun.jersey.api.client.ClientResponse;
+import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.EntityAuditEvent;
+import org.apache.atlas.kafka.NotificationProvider;
+import org.apache.atlas.model.instance.AtlasClassification;
+import org.apache.atlas.model.instance.AtlasClassification.AtlasClassifications;
+import org.apache.atlas.model.instance.AtlasEntity;
+import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
+import org.apache.atlas.model.instance.AtlasEntityHeader;
+import org.apache.atlas.model.instance.AtlasObjectId;
+import org.apache.atlas.model.instance.EntityMutationResponse;
+import org.apache.atlas.model.instance.EntityMutations;
+import org.apache.atlas.model.typedef.AtlasClassificationDef;
+import org.apache.atlas.model.typedef.AtlasEntityDef;
+import org.apache.atlas.model.typedef.AtlasTypesDef;
+import org.apache.atlas.notification.NotificationConsumer;
+import org.apache.atlas.notification.NotificationInterface;
+import org.apache.atlas.notification.entity.EntityNotification;
+import org.apache.atlas.type.AtlasTypeUtil;
+import org.apache.atlas.typesystem.types.TypeUtils;
+import org.apache.commons.lang.RandomStringUtils;
+import org.codehaus.jettison.json.JSONArray;
+import org.joda.time.DateTime;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import static org.testng.Assert.*;
+
+
+/**
+ * Integration tests for Entity Jersey Resource.
+ */ +public class EntityV2JerseyResourceIT extends BaseResourceIT { + + private static final Logger LOG = LoggerFactory.getLogger(EntityV2JerseyResourceIT.class); + + private final String DATABASE_NAME = "db" + randomString(); + private final String TABLE_NAME = "table" + randomString(); + private String traitName; + + private AtlasEntity dbEntity; + private AtlasEntity tableEntity; + private NotificationInterface notificationInterface = NotificationProvider.get(); + private NotificationConsumer<EntityNotification> notificationConsumer; + + @BeforeClass + public void setUp() throws Exception { + super.setUp(); + + createTypeDefinitionsV2(); + + List<NotificationConsumer<EntityNotification>> consumers = + notificationInterface.createConsumers(NotificationInterface.NotificationType.ENTITIES, 1); + + notificationConsumer = consumers.iterator().next(); + } + + @Test + public void testSubmitEntity() throws Exception { + TypeUtils.Pair dbAndTable = createDBAndTable(); + assertNotNull(dbAndTable); + assertNotNull(dbAndTable.left); + assertNotNull(dbAndTable.right); + } + + @Test + public void testCreateNestedEntities() throws Exception { + AtlasEntity.AtlasEntitiesWithExtInfo entities = new AtlasEntity.AtlasEntitiesWithExtInfo(); + + AtlasEntity databaseInstance = new AtlasEntity(DATABASE_TYPE_V2, "name", "db1"); + databaseInstance.setAttribute("name", "db1"); + databaseInstance.setAttribute("description", "foo database"); + databaseInstance.setAttribute("owner", "user1"); + databaseInstance.setAttribute("locationUri", "/tmp"); + databaseInstance.setAttribute("createTime",1000); + entities.addEntity(databaseInstance); + + int nTables = 5; + int colsPerTable=3; + + for(int i = 0; i < nTables; i++) { + String tableName = "db1-table-" + i; + + AtlasEntity tableInstance = new AtlasEntity(HIVE_TABLE_TYPE_V2, "name", tableName); + tableInstance.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); + tableInstance.setAttribute("db", AtlasTypeUtil.getAtlasObjectId(databaseInstance)); + tableInstance.setAttribute("description", tableName + " table"); + entities.addEntity(tableInstance); + + List<AtlasObjectId> columns = new ArrayList<>(); + for(int j = 0; j < colsPerTable; j++) { + AtlasEntity columnInstance = new AtlasEntity(COLUMN_TYPE_V2); + columnInstance.setAttribute("name", tableName + "-col-" + j); + columnInstance.setAttribute("dataType", "String"); + columnInstance.setAttribute("comment", "column " + j + " for table " + i); + + columns.add(AtlasTypeUtil.getAtlasObjectId(columnInstance)); + + entities.addReferredEntity(columnInstance); + } + tableInstance.setAttribute("columns", columns); + } + + //Create the tables. The database and columns should be created automatically, since + //the tables reference them. 
+ + EntityMutationResponse response = atlasClientV2.createEntities(entities); + Assert.assertNotNull(response); + + Map<String,String> guidsCreated = response.getGuidAssignments(); + assertEquals(guidsCreated.size(), nTables * colsPerTable + nTables + 1); + assertNotNull(guidsCreated.get(databaseInstance.getGuid())); + + for(AtlasEntity r : entities.getEntities()) { + assertNotNull(guidsCreated.get(r.getGuid())); + } + + for(AtlasEntity r : entities.getReferredEntities().values()) { + assertNotNull(guidsCreated.get(r.getGuid())); + } + } + + @Test + public void testRequestUser() throws Exception { + AtlasEntity hiveDBInstanceV2 = createHiveDB(randomString()); + List<EntityAuditEvent> events = atlasClientV1.getEntityAuditEvents(hiveDBInstanceV2.getGuid(), (short) 10); + assertEquals(events.size(), 1); + assertEquals(events.get(0).getUser(), "admin"); + } + + @Test + public void testEntityDeduping() throws Exception { + JSONArray results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); + assertEquals(results.length(), 1); + + final AtlasEntity hiveDBInstanceV2 = createHiveDB(); + // Do the notification thing here + waitForNotification(notificationConsumer, MAX_WAIT_TIME, new NotificationPredicate() { + @Override + public boolean evaluate(EntityNotification notification) throws Exception { + return notification != null && notification.getEntity().getId()._getId().equals(hiveDBInstanceV2.getGuid()); + } + }); + + + results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); + assertEquals(results.length(), 1); + + //Test the same across references + final String tableName = randomString(); + AtlasEntity hiveTableInstanceV2 = createHiveTableInstanceV2(hiveDBInstanceV2, tableName); + hiveTableInstanceV2.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, tableName); + + EntityMutationResponse entity = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(hiveTableInstanceV2)); + assertNotNull(entity); + assertNotNull(entity.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); + results = searchByDSL(String.format("%s where name='%s'", DATABASE_TYPE_V2, DATABASE_NAME)); + assertEquals(results.length(), 1); + } + + private void assertEntityAudit(String dbid, EntityAuditEvent.EntityAuditAction auditAction) + throws Exception { + List<EntityAuditEvent> events = atlasClientV1.getEntityAuditEvents(dbid, (short) 100); + for (EntityAuditEvent event : events) { + if (event.getAction() == auditAction) { + return; + } + } + fail("Expected audit event with action = " + auditAction); + } + + @Test + public void testEntityDefinitionAcrossTypeUpdate() throws Exception { + //create type + AtlasEntityDef entityDef = AtlasTypeUtil + .createClassTypeDef(randomString(), + ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string") + ); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getEntityDefs().add(entityDef); + + AtlasTypesDef created = atlasClientV2.createAtlasTypeDefs(typesDef); + assertNotNull(created); + assertNotNull(created.getEntityDefs()); + assertEquals(created.getEntityDefs().size(), 1); + + //create entity for the type + AtlasEntity instance = new AtlasEntity(entityDef.getName()); + instance.setAttribute("name", randomString()); + EntityMutationResponse mutationResponse = atlasClientV2.createEntity(new AtlasEntityWithExtInfo(instance)); + assertNotNull(mutationResponse); + assertNotNull(mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); + 
assertEquals(mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size(),1 ); + String guid = mutationResponse.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0).getGuid(); + + //update type - add attribute + entityDef = AtlasTypeUtil.createClassTypeDef(entityDef.getName(), ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), + AtlasTypeUtil.createOptionalAttrDef("description", "string")); + + typesDef = new AtlasTypesDef(); + typesDef.getEntityDefs().add(entityDef); + + AtlasTypesDef updated = atlasClientV2.updateAtlasTypeDefs(typesDef); + assertNotNull(updated); + assertNotNull(updated.getEntityDefs()); + assertEquals(updated.getEntityDefs().size(), 1); + + //Get definition after type update - new attributes should be null + AtlasEntity entityByGuid = getEntityByGuid(guid); + assertNull(entityByGuid.getAttribute("description")); + assertEquals(entityByGuid.getAttribute("name"), instance.getAttribute("name")); + } + + @Test + public void testEntityInvalidValue() throws Exception { + AtlasEntity databaseInstance = new AtlasEntity(DATABASE_TYPE_V2); + String dbName = randomString(); + String nullString = null; + String emptyString = ""; + databaseInstance.setAttribute("name", dbName); + databaseInstance.setAttribute("description", nullString); + AtlasEntityHeader created = createEntity(databaseInstance); + + // null valid value for required attr - description + assertNull(created); + + databaseInstance.setAttribute("description", emptyString); + created = createEntity(databaseInstance); + + // empty string valid value for required attr + assertNotNull(created); + + databaseInstance.setGuid(created.getGuid()); + databaseInstance.setAttribute("owner", nullString); + databaseInstance.setAttribute("locationUri", emptyString); + + created = updateEntity(databaseInstance); + + // null/empty string valid value for optional attr + assertNotNull(created); + } + + @Test + public void testGetEntityByAttribute() throws Exception { + AtlasEntity hiveDB = createHiveDB(); + String qualifiedName = (String) hiveDB.getAttribute(NAME); + //get entity by attribute + + AtlasEntity byAttribute = atlasClientV2.getEntityByAttribute(DATABASE_TYPE_V2, toMap(NAME, qualifiedName)).getEntity(); + assertEquals(byAttribute.getTypeName(), DATABASE_TYPE_V2); + assertEquals(byAttribute.getAttribute(NAME), qualifiedName); + } + + @Test + public void testSubmitEntityWithBadDateFormat() throws Exception { + AtlasEntity hiveDBEntity = createHiveDBInstanceV2("db" + randomString()); + AtlasEntityHeader hiveDBHeader = createEntity(hiveDBEntity); + hiveDBEntity.setGuid(hiveDBHeader.getGuid()); + + AtlasEntity tableInstance = createHiveTableInstanceV2(hiveDBEntity, "table" + randomString()); + //Dates with an invalid format are simply nulled out. This does not produce + //an error. See AtlasBuiltInTypes.AtlasDateType.getNormalizedValue(). 
+ tableInstance.setAttribute("lastAccessTime", 1107201407); + AtlasEntityHeader tableEntityHeader = createEntity(tableInstance); + assertNotNull(tableEntityHeader); + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testAddProperty() throws Exception { + //add property + String description = "bar table - new desc"; + addProperty(createHiveTable().getGuid(), "description", description); + + AtlasEntity entityByGuid = getEntityByGuid(createHiveTable().getGuid()); + Assert.assertNotNull(entityByGuid); + + entityByGuid.setAttribute("description", description); + + // TODO: This behavior should've been consistent across APIs +// //invalid property for the type +// try { +// addProperty(table.getGuid(), "invalid_property", "bar table"); +// Assert.fail("Expected AtlasServiceException"); +// } catch (AtlasServiceException e) { +// assertNotNull(e.getStatus()); +// assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST); +// } + + //non-string property, update + Object currentTime = new DateTime(); + addProperty(createHiveTable().getGuid(), "createTime", currentTime); + + entityByGuid = getEntityByGuid(createHiveTable().getGuid()); + Assert.assertNotNull(entityByGuid); + } + + @Test + public void testAddNullPropertyValue() throws Exception { + // FIXME: Behavior has changed between v1 and v2 + //add property +// try { + addProperty(createHiveTable().getGuid(), "description", null); +// Assert.fail("Expected AtlasServiceException"); +// } catch(AtlasServiceException e) { +// Assert.assertEquals(e.getStatus().getStatusCode(), Response.Status.BAD_REQUEST.getStatusCode()); +// } + } + + @Test(expectedExceptions = AtlasServiceException.class) + public void testGetInvalidEntityDefinition() throws Exception { + getEntityByGuid("blah"); + } + + @Test(dependsOnMethods = "testSubmitEntity", enabled = false) + public void testGetEntityList() throws Exception { + // TODO: Can only be done when there's a search API exposed from entity REST + } + + @Test(enabled = false) + public void testGetEntityListForBadEntityType() throws Exception { + // FIXME: Complete test when search interface is in place + } + + @Test(enabled = false) + public void testGetEntityListForNoInstances() throws Exception { + // FIXME: Complete test when search interface is in place + /* + String typeName = ""; + + ClientResponse clientResponse = + service.path(ENTITIES).queryParam("type", typeName).accept(Servlets.JSON_MEDIA_TYPE) + .type(Servlets.JSON_MEDIA_TYPE).method(HttpMethod.GET, ClientResponse.class); + Assert.assertEquals(clientResponse.getStatus(), Response.Status.OK.getStatusCode()); + + String responseAsString = clientResponse.getEntity(String.class); + Assert.assertNotNull(responseAsString); + + JSONObject response = new JSONObject(responseAsString); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + final JSONArray list = response.getJSONArray(AtlasClient.RESULTS); + Assert.assertEquals(list.length(), 0); + */ + } + + private String addNewType() throws Exception { + String typeName = "test" + randomString(); + AtlasEntityDef classTypeDef = AtlasTypeUtil + .createClassTypeDef(typeName, ImmutableSet.<String>of(), + AtlasTypeUtil.createRequiredAttrDef("name", "string"), + AtlasTypeUtil.createRequiredAttrDef("description", "string")); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getEntityDefs().add(classTypeDef); + createType(typesDef); + return typeName; + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testGetTraitNames() throws Exception { + 
AtlasClassifications classifications = atlasClientV2.getClassifications(createHiveTable().getGuid()); + assertNotNull(classifications); + assertTrue(classifications.getList().size() > 0); + assertEquals(classifications.getList().size(), 8); + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testCommonAttributes() throws Exception{ + AtlasEntity entity = getEntityByGuid(createHiveTable().getGuid()); + Assert.assertNotNull(entity.getStatus()); + Assert.assertNotNull(entity.getVersion()); + Assert.assertNotNull(entity.getCreatedBy()); + Assert.assertNotNull(entity.getCreateTime()); + Assert.assertNotNull(entity.getUpdatedBy()); + Assert.assertNotNull(entity.getUpdateTime()); + } + + private void addProperty(String guid, String property, Object value) throws AtlasServiceException { + + AtlasEntity entityByGuid = getEntityByGuid(guid); + entityByGuid.setAttribute(property, value); + EntityMutationResponse response = atlasClientV2.updateEntity(new AtlasEntityWithExtInfo(entityByGuid)); + assertNotNull(response); + assertNotNull(response.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); + } + + private AtlasEntity createHiveDB() { + if (dbEntity == null) { + dbEntity = createHiveDB(DATABASE_NAME); + } + return dbEntity; + } + + private AtlasEntity createHiveDB(String dbName) { + AtlasEntity hiveDBInstanceV2 = createHiveDBInstanceV2(dbName); + AtlasEntityHeader entityHeader = createEntity(hiveDBInstanceV2); + assertNotNull(entityHeader); + assertNotNull(entityHeader.getGuid()); + hiveDBInstanceV2.setGuid(entityHeader.getGuid()); + return hiveDBInstanceV2; + } + + private TypeUtils.Pair<AtlasEntity, AtlasEntity> createDBAndTable() throws Exception { + AtlasEntity dbInstanceV2 = createHiveDB(); + AtlasEntity hiveTableInstanceV2 = createHiveTable(); + return TypeUtils.Pair.of(dbInstanceV2, hiveTableInstanceV2); + } + + private AtlasEntity createHiveTable() throws Exception { + if (tableEntity == null) { + tableEntity = createHiveTable(createHiveDB(), TABLE_NAME); + } + return tableEntity; + + } + + private AtlasEntity createHiveTable(AtlasEntity dbInstanceV2, String tableName) throws Exception { + AtlasEntity hiveTableInstanceV2 = createHiveTableInstanceV2(dbInstanceV2, tableName); + AtlasEntityHeader createdHeader = createEntity(hiveTableInstanceV2); + assertNotNull(createdHeader); + assertNotNull(createdHeader.getGuid()); + hiveTableInstanceV2.setGuid(createdHeader.getGuid()); + tableEntity = hiveTableInstanceV2; + return hiveTableInstanceV2; + } + + @Test(dependsOnMethods = "testGetTraitNames") + public void testAddTrait() throws Exception { + traitName = "PII_Trait" + randomString(); + AtlasClassificationDef piiTrait = + AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of()); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getClassificationDefs().add(piiTrait); + createType(typesDef); + + atlasClientV2.addClassifications(createHiveTable().getGuid(), ImmutableList.of(new AtlasClassification(piiTrait.getName()))); + + assertEntityAudit(createHiveTable().getGuid(), EntityAuditEvent.EntityAuditAction.TAG_ADD); + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testGetTraitDefinitionForEntity() throws Exception{ + traitName = "PII_Trait" + randomString(); + AtlasClassificationDef piiTrait = + AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of()); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getClassificationDefs().add(piiTrait); + createType(typesDef); + + AtlasClassificationDef 
classificationByName = atlasClientV2.getClassificationDefByName(traitName); + assertNotNull(classificationByName); + + AtlasEntity hiveTable = createHiveTable(); + assertEquals(hiveTable.getClassifications().size(), 7); + + AtlasClassification piiClassification = new AtlasClassification(piiTrait.getName()); + + atlasClientV2.addClassifications(hiveTable.getGuid(), Lists.newArrayList(piiClassification)); + + AtlasClassifications classifications = atlasClientV2.getClassifications(hiveTable.getGuid()); + assertNotNull(classifications); + assertTrue(classifications.getList().size() > 0); + assertEquals(classifications.getList().size(), 8); + } + + + @Test(dependsOnMethods = "testGetTraitNames") + public void testAddTraitWithAttribute() throws Exception { + final String traitName = "PII_Trait" + randomString(); + AtlasClassificationDef piiTrait = AtlasTypeUtil + .createTraitTypeDef(traitName, ImmutableSet.<String>of(), + AtlasTypeUtil.createRequiredAttrDef("type", "string")); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getClassificationDefs().add(piiTrait); + createType(typesDef); + + AtlasClassification traitInstance = new AtlasClassification(traitName); + traitInstance.setAttribute("type", "SSN"); + + final String guid = createHiveTable().getGuid(); + atlasClientV2.addClassifications(guid, ImmutableList.of(traitInstance)); + + // verify the response + AtlasEntity withAssociationByGuid = atlasClientV2.getEntityByGuid(guid).getEntity(); + assertNotNull(withAssociationByGuid); + assertFalse(withAssociationByGuid.getClassifications().isEmpty()); + + boolean found = false; + for (AtlasClassification atlasClassification : withAssociationByGuid.getClassifications()) { + String attribute = (String)atlasClassification.getAttribute("type"); + if (attribute != null && attribute.equals("SSN")) { + found = true; + break; + } + } + assertTrue(found); + } + + @Test(expectedExceptions = AtlasServiceException.class) + public void testAddTraitWithNoRegistration() throws Exception { + final String traitName = "PII_Trait" + randomString(); + AtlasTypeUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of()); + + AtlasClassification traitInstance = new AtlasClassification(traitName); + + atlasClientV2.addClassifications("random", ImmutableList.of(traitInstance)); + } + + @Test(dependsOnMethods = "testAddTrait") + public void testDeleteTrait() throws Exception { + final String guid = createHiveTable().getGuid(); + + try { + atlasClientV2.deleteClassification(guid, traitName); + } catch (AtlasServiceException ex) { + fail("Deletion should've succeeded"); + } + assertEntityAudit(guid, EntityAuditEvent.EntityAuditAction.TAG_DELETE); + } + + @Test + public void testDeleteTraitNonExistent() throws Exception { + final String traitName = "blah_trait"; + + try { + atlasClientV2.deleteClassification("random", traitName); + fail("Deletion for bogus names shouldn't have succeeded"); + } catch (AtlasServiceException ex) { + assertNotNull(ex.getStatus()); +// assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); + assertEquals(ex.getStatus(), ClientResponse.Status.BAD_REQUEST); + // Should it be a 400 or 404 + } + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testDeleteExistentTraitNonExistentForEntity() throws Exception { + + final String guid = createHiveTable().getGuid(); + final String traitName = "PII_Trait" + randomString(); + AtlasClassificationDef piiTrait = AtlasTypeUtil + .createTraitTypeDef(traitName, ImmutableSet.<String>of(), + 
AtlasTypeUtil.createRequiredAttrDef("type", "string")); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getClassificationDefs().add(piiTrait); + createType(typesDef); + + try { + atlasClientV2.deleteClassification(guid, traitName); + fail("Deletion should've failed for non-existent trait association"); + } catch (AtlasServiceException ex) { + Assert.assertNotNull(ex.getStatus()); + assertEquals(ex.getStatus(), ClientResponse.Status.NOT_FOUND); + } + } + + private String random() { + return RandomStringUtils.random(10); + } + + @Test + public void testUTF8() throws Exception { + String classType = randomString(); + String attrName = random(); + String attrValue = random(); + + AtlasEntityDef classTypeDef = AtlasTypeUtil + .createClassTypeDef(classType, ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef(attrName, "string")); + AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); + atlasTypesDef.getEntityDefs().add(classTypeDef); + createType(atlasTypesDef); + + AtlasEntity instance = new AtlasEntity(classType); + instance.setAttribute(attrName, attrValue); + AtlasEntityHeader entity = createEntity(instance); + assertNotNull(entity); + assertNotNull(entity.getGuid()); + + AtlasEntity entityByGuid = getEntityByGuid(entity.getGuid()); + assertEquals(entityByGuid.getAttribute(attrName), attrValue); + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testPartialUpdate() throws Exception { + final List<AtlasEntity> columns = new ArrayList<>(); + Map<String, Object> values = new HashMap<>(); + values.put("name", "col1"); + values.put(NAME, "qualifiedName.col1"); + values.put("type", "string"); + values.put("comment", "col1 comment"); + + AtlasEntity colEntity = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values); + columns.add(colEntity); + AtlasEntity hiveTable = createHiveTable(); + AtlasEntity tableUpdated = hiveTable; + + hiveTable.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); + + AtlasEntityWithExtInfo entityInfo = new AtlasEntityWithExtInfo(tableUpdated); + entityInfo.addReferredEntity(colEntity); + + LOG.debug("Full Update entity= " + tableUpdated); + EntityMutationResponse updateResult = atlasClientV2.updateEntity(entityInfo); + assertNotNull(updateResult); + assertNotNull(updateResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); + assertTrue(updateResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size() > 0); + + String guid = hiveTable.getGuid(); + AtlasEntity entityByGuid1 = getEntityByGuid(guid); + assertNotNull(entityByGuid1); + entityByGuid1.getAttribute("columns"); + + values.put("type", "int"); + colEntity = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values); + columns.clear(); + columns.add(colEntity); + + tableUpdated = new AtlasEntity(HIVE_TABLE_TYPE_V2, "name", entityByGuid1.getAttribute("name")); + tableUpdated.setGuid(entityByGuid1.getGuid()); + tableUpdated.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); + + // tableUpdated = hiveTable; + // tableUpdated.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); + + LOG.debug("Partial Update entity by unique attributes= " + tableUpdated); + Map<String, String> uniqAttributes = new HashMap<>(); + uniqAttributes.put(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, (String) hiveTable.getAttribute("name")); + + entityInfo = new AtlasEntityWithExtInfo(tableUpdated); + entityInfo.addReferredEntity(colEntity); + + EntityMutationResponse updateResponse = 
atlasClientV2.updateEntityByAttribute(BaseResourceIT.HIVE_TABLE_TYPE_V2, uniqAttributes, entityInfo); + + assertNotNull(updateResponse); + assertNotNull(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE)); + assertTrue(updateResponse.getEntitiesByOperation(EntityMutations.EntityOperation.PARTIAL_UPDATE).size() > 0); + + AtlasEntity entityByGuid2 = getEntityByGuid(guid); + assertNotNull(entityByGuid2); + } + + private AtlasEntity getEntityByGuid(String guid) throws AtlasServiceException { + return atlasClientV2.getEntityByGuid(guid).getEntity(); + } + + @Test(dependsOnMethods = "testSubmitEntity") + public void testCompleteUpdate() throws Exception { + final List<AtlasEntity> columns = new ArrayList<>(); + Map<String, Object> values1 = new HashMap<>(); + values1.put("name", "col3"); + values1.put(NAME, "qualifiedName.col3"); + values1.put("type", "string"); + values1.put("comment", "col3 comment"); + + Map<String, Object> values2 = new HashMap<>(); + values2.put("name", "col4"); + values2.put(NAME, "qualifiedName.col4"); + values2.put("type", "string"); + values2.put("comment", "col4 comment"); + + AtlasEntity colEntity1 = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values1); + AtlasEntity colEntity2 = new AtlasEntity(BaseResourceIT.COLUMN_TYPE_V2, values2); + columns.add(colEntity1); + columns.add(colEntity2); + AtlasEntity hiveTable = createHiveTable(); + hiveTable.setAttribute("columns", AtlasTypeUtil.toObjectIds(columns)); + + AtlasEntityWithExtInfo entityInfo = new AtlasEntityWithExtInfo(hiveTable); + entityInfo.addReferredEntity(colEntity1); + entityInfo.addReferredEntity(colEntity2); + + EntityMutationResponse updateEntityResult = atlasClientV2.updateEntity(entityInfo); + assertNotNull(updateEntityResult); + assertNotNull(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE)); + assertNotNull(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE)); + //2 columns are being created, and 1 hiveTable is being updated + assertEquals(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.UPDATE).size(), 1); + assertEquals(updateEntityResult.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).size(), 2); + + AtlasEntity entityByGuid = getEntityByGuid(hiveTable.getGuid()); + List<AtlasObjectId> refs = (List<AtlasObjectId>) entityByGuid.getAttribute("columns"); + assertEquals(refs.size(), 2); + } + + @Test + public void testDeleteEntities() throws Exception { + // Create 2 database entities + AtlasEntity db1 = new AtlasEntity(DATABASE_TYPE_V2); + String dbName1 = randomString(); + db1.setAttribute("name", dbName1); + db1.setAttribute(NAME, dbName1); + db1.setAttribute("clusterName", randomString()); + db1.setAttribute("description", randomString()); + AtlasEntityHeader entity1Header = createEntity(db1); + AtlasEntity db2 = new AtlasEntity(DATABASE_TYPE_V2); + String dbName2 = randomString(); + db2.setAttribute("name", dbName2); + db2.setAttribute(NAME, dbName2); + db2.setAttribute("clusterName", randomString()); + db2.setAttribute("description", randomString()); + AtlasEntityHeader entity2Header = createEntity(db2); + + // Delete the database entities + EntityMutationResponse deleteResponse = atlasClientV2.deleteEntitiesByGuids(ImmutableList.of(entity1Header.getGuid(), entity2Header.getGuid())); + + // Verify that deleteEntities() response has database entity guids + assertNotNull(deleteResponse); + 
assertNotNull(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE)); + assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).size(), 2); + + // Verify entities were deleted from the repository. + } + + @Test + public void testDeleteEntityByUniqAttribute() throws Exception { + // Create database entity + AtlasEntity hiveDB = createHiveDB(DATABASE_NAME + random()); + + // Delete the database entity + EntityMutationResponse deleteResponse = atlasClientV2.deleteEntityByAttribute(DATABASE_TYPE_V2, toMap(NAME, (String) hiveDB.getAttribute(NAME))); + + // Verify that deleteEntities() response has database entity guids + assertNotNull(deleteResponse); + assertNotNull(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE)); + assertEquals(deleteResponse.getEntitiesByOperation(EntityMutations.EntityOperation.DELETE).size(), 1); + + // Verify entities were deleted from the repository. + } + + private Map<String, String> toMap(final String name, final String value) { + return new HashMap<String, String>() {{ + put(name, value); + }}; + } +}
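For orientation, the sketch below shows the basic V2 create-and-fetch round trip that EntityV2JerseyResourceIT exercises. It is not part of the patch above: the server URL, the admin/admin credentials, and the "hive_db_v2" type name are assumptions (the tests use BaseResourceIT's DATABASE_TYPE_V2 and a server started by the integration-test harness); the AtlasClientV2 calls themselves are the ones used in the tests.

import org.apache.atlas.AtlasClientV2;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.instance.AtlasEntity.AtlasEntityWithExtInfo;
import org.apache.atlas.model.instance.EntityMutationResponse;
import org.apache.atlas.model.instance.EntityMutations;

public class EntityV2RoundTripSketch {
    public static void main(String[] args) throws Exception {
        // Assumed server location and basic-auth credentials.
        AtlasClientV2 client = new AtlasClientV2(new String[]{"http://localhost:21000"},
                                                 new String[]{"admin", "admin"});

        // "hive_db_v2" is a placeholder type name; it must already be registered on the server.
        AtlasEntity db = new AtlasEntity("hive_db_v2");
        db.setAttribute("name", "db_" + System.currentTimeMillis());
        db.setAttribute("description", "example database");

        // Create the entity and read back the GUID assigned by the server,
        // mirroring createEntity(...) and getEntitiesByOperation(CREATE) in the tests.
        EntityMutationResponse created = client.createEntity(new AtlasEntityWithExtInfo(db));
        String guid = created.getEntitiesByOperation(EntityMutations.EntityOperation.CREATE).get(0).getGuid();

        // Fetch it again by GUID, as getEntityByGuid() does in the tests above.
        AtlasEntity fetched = client.getEntityByGuid(guid).getEntity();
        System.out.println(fetched.getTypeName() + ": " + fetched.getAttribute("name"));
    }
}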
http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
----------------------------------------------------------------------
diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
new file mode 100755
index 0000000..3fa85b1
--- /dev/null
+++ b/webapp/src/test/java/org/apache/atlas/web/integration/MetadataDiscoveryJerseyResourceIT.java
@@ -0,0 +1,267 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.atlas.web.integration;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.core.util.MultivaluedMapImpl;
+import org.apache.atlas.AtlasClient;
+import org.apache.atlas.AtlasServiceException;
+import org.apache.atlas.typesystem.Referenceable;
+import org.apache.atlas.typesystem.Struct;
+import org.apache.atlas.typesystem.TypesDef;
+import org.apache.atlas.typesystem.persistence.Id;
+import org.apache.atlas.typesystem.types.ClassType;
+import org.apache.atlas.typesystem.types.DataTypes;
+import org.apache.atlas.typesystem.types.EnumTypeDefinition;
+import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
+import org.apache.atlas.typesystem.types.StructTypeDefinition;
+import org.apache.atlas.typesystem.types.TraitType;
+import org.apache.atlas.typesystem.types.utils.TypesUtil;
+import org.codehaus.jettison.json.JSONArray;
+import org.codehaus.jettison.json.JSONObject;
+import org.testng.Assert;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import javax.ws.rs.core.MultivaluedMap;
+import java.util.List;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+import static org.testng.Assert.fail;
+
+/**
+ * Search Integration Tests.
+ */ +public class MetadataDiscoveryJerseyResourceIT extends BaseResourceIT { + + private String tagName; + private String dbName; + + @BeforeClass + public void setUp() throws Exception { + super.setUp(); + dbName = "db"+randomString(); + createTypes(); + createInstance( createHiveDBInstanceV1(dbName) ); + } + + @Test + public void testSearchByDSL() throws Exception { + String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\""; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", dslQuery); + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams); + + Assert.assertNotNull(response); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + assertEquals(response.getString("query"), dslQuery); + assertEquals(response.getString("queryType"), "dsl"); + + JSONArray results = response.getJSONArray(AtlasClient.RESULTS); + assertNotNull(results); + assertEquals(results.length(), 1); + + int numRows = response.getInt(AtlasClient.COUNT); + assertEquals(numRows, 1); + } + + @Test + public void testSearchDSLLimits() throws Exception { + + //search without new parameters of limit and offset should work + String dslQuery = "from "+ DATABASE_TYPE + " name=\"" + dbName + "\""; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", dslQuery); + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams); + assertNotNull(response); + + //higher limit, all results returned + JSONArray results = atlasClientV1.searchByDSL(dslQuery, 10, 0); + assertEquals(results.length(), 1); + + //default limit and offset -1, all results returned + results = atlasClientV1.searchByDSL(dslQuery, -1, -1); + assertEquals(results.length(), 1); + + //uses the limit parameter passed + results = atlasClientV1.searchByDSL(dslQuery, 1, 0); + assertEquals(results.length(), 1); + + //uses the offset parameter passed + results = atlasClientV1.searchByDSL(dslQuery, 10, 1); + assertEquals(results.length(), 0); + + //limit > 0 + try { + atlasClientV1.searchByDSL(dslQuery, 0, 10); + fail("Expected BAD_REQUEST"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus()); + } + + //limit > maxlimit + try { + atlasClientV1.searchByDSL(dslQuery, Integer.MAX_VALUE, 10); + fail("Expected BAD_REQUEST"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus()); + } + + //offset >= 0 + try { + atlasClientV1.searchByDSL(dslQuery, 10, -2); + fail("Expected BAD_REQUEST"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus(), ClientResponse.Status.BAD_REQUEST, "Got " + e.getStatus()); + } + } + + @Test(expectedExceptions = AtlasServiceException.class) + public void testSearchByDSLForUnknownType() throws Exception { + String dslQuery = "from blah"; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", dslQuery); + atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_DSL, queryParams); + } + + @Test + public void testSearchUsingGremlin() throws Exception { + String query = "g.V.has('type', '" + BaseResourceIT.HIVE_TABLE_TYPE + "').toList()"; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", query); + + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.GREMLIN_SEARCH, queryParams); + + 
assertNotNull(response); + assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + assertEquals(response.getString("query"), query); + assertEquals(response.getString("queryType"), "gremlin"); + } + + @Test + public void testSearchUsingDSL() throws Exception { + //String query = "from dsl_test_type"; + String query = "from "+ DATABASE_TYPE + " name=\"" + dbName +"\""; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", query); + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH, queryParams); + + Assert.assertNotNull(response); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + assertEquals(response.getString("query"), query); + assertEquals(response.getString("queryType"), "dsl"); + } + + @Test + public void testSearchFullTextOnDSLFailure() throws Exception { + String query = "*"; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", query); + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH, queryParams); + + Assert.assertNotNull(response); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + assertEquals(response.getString("query"), query); + assertEquals(response.getString("queryType"), "full-text"); + } + + @Test(dependsOnMethods = "testSearchDSLLimits") + public void testSearchUsingFullText() throws Exception { + JSONObject response = atlasClientV1.searchByFullText(dbName, 10, 0); + assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + assertEquals(response.getString("query"), dbName); + assertEquals(response.getString("queryType"), "full-text"); + + JSONArray results = response.getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 1, "Results: " + results); + + JSONObject row = results.getJSONObject(0); + assertNotNull(row.get("guid")); + assertEquals(row.getString("typeName"), DATABASE_TYPE); + assertNotNull(row.get("score")); + + int numRows = response.getInt(AtlasClient.COUNT); + assertEquals(numRows, 1); + + //API works without limit and offset + String query = dbName; + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("query", query); + response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.SEARCH_FULL_TEXT, queryParams); + results = response.getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 1); + + //verify passed in limits and offsets are used + //higher limit and 0 offset returns all results + results = atlasClientV1.searchByFullText(query, 10, 0).getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 1); + + //offset is used + results = atlasClientV1.searchByFullText(query, 10, 1).getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 0); + + //limit is used + results = atlasClientV1.searchByFullText(query, 1, 0).getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 1); + + //higher offset returns 0 results + results = atlasClientV1.searchByFullText(query, 1, 2).getJSONArray(AtlasClient.RESULTS); + assertEquals(results.length(), 0); + } + + private void createTypes() throws Exception { + createTypeDefinitionsV1(); + + HierarchicalTypeDefinition<ClassType> dslTestTypeDefinition = TypesUtil + .createClassTypeDef("dsl_test_type", ImmutableSet.<String>of(), + TypesUtil.createUniqueRequiredAttrDef("name", DataTypes.STRING_TYPE), + TypesUtil.createRequiredAttrDef("description", DataTypes.STRING_TYPE)); + + HierarchicalTypeDefinition<TraitType> classificationTraitDefinition 
= TypesUtil + .createTraitTypeDef("Classification", ImmutableSet.<String>of(), + TypesUtil.createRequiredAttrDef("tag", DataTypes.STRING_TYPE)); + TypesDef typesDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), + ImmutableList.of(classificationTraitDefinition), ImmutableList.of(dslTestTypeDefinition)); + createType(typesDef); + } + + private Id createInstance() throws Exception { + Referenceable entityInstance = new Referenceable("dsl_test_type", "Classification"); + entityInstance.set("name", randomString()); + entityInstance.set("description", randomString()); + + + Struct traitInstance = (Struct) entityInstance.getTrait("Classification"); + tagName = randomString(); + traitInstance.set("tag", tagName); + + List<String> traits = entityInstance.getTraits(); + assertEquals(traits.size(), 1); + + return createInstance(entityInstance); + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java new file mode 100644 index 0000000..c46689c --- /dev/null +++ b/webapp/src/test/java/org/apache/atlas/web/integration/TypedefsJerseyResourceIT.java @@ -0,0 +1,370 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.atlas.web.integration; + +import com.google.common.collect.ImmutableSet; +import com.sun.jersey.core.util.MultivaluedMapImpl; +import org.apache.atlas.AtlasClientV2; +import org.apache.atlas.AtlasServiceException; +import org.apache.atlas.model.SearchFilter; +import org.apache.atlas.model.TypeCategory; +import org.apache.atlas.model.typedef.AtlasBaseTypeDef; +import org.apache.atlas.model.typedef.AtlasClassificationDef; +import org.apache.atlas.model.typedef.AtlasEntityDef; +import org.apache.atlas.model.typedef.AtlasEnumDef; +import org.apache.atlas.model.typedef.AtlasStructDef; +import org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef; +import org.apache.atlas.model.typedef.AtlasStructDef.AtlasConstraintDef; +import org.apache.atlas.model.typedef.AtlasTypesDef; +import org.apache.atlas.type.AtlasTypeUtil; +import org.apache.atlas.typesystem.types.DataTypes; +import org.apache.atlas.utils.AuthenticationUtil; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang.StringUtils; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.core.Response; +import java.util.Collections; + +import static org.apache.atlas.model.typedef.AtlasStructDef.AtlasAttributeDef.Cardinality; +import static org.apache.atlas.type.AtlasTypeUtil.createClassTypeDef; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.assertTrue; +import static org.testng.Assert.fail; + +/** + * Integration test for types jersey resource. + */ +public class TypedefsJerseyResourceIT extends BaseResourceIT { + + private AtlasTypesDef typeDefinitions; + + private AtlasClientV2 clientV2; + + @BeforeClass + public void setUp() throws Exception { + super.setUp(); + + typeDefinitions = createHiveTypesV2(); + + if (!AuthenticationUtil.isKerberosAuthenticationEnabled()) { + clientV2 = new AtlasClientV2(atlasUrls, new String[]{"admin", "admin"}); + } else { + clientV2 = new AtlasClientV2(atlasUrls); + } + } + + @AfterClass + public void tearDown() throws Exception { + emptyTypeDefs(typeDefinitions); + } + + @Test + public void testCreate() throws Exception { + createType(typeDefinitions); + + for (AtlasEnumDef enumDef : typeDefinitions.getEnumDefs()) { + AtlasEnumDef byName = atlasClientV2.getEnumDefByName(enumDef.getName()); + assertNotNull(byName); + } + for (AtlasStructDef structDef : typeDefinitions.getStructDefs()) { + AtlasStructDef byName = atlasClientV2.getStructDefByName(structDef.getName()); + assertNotNull(byName); + } + for (AtlasClassificationDef classificationDef : typeDefinitions.getClassificationDefs()) { + AtlasClassificationDef byName = atlasClientV2.getClassificationDefByName(classificationDef.getName()); + assertNotNull(byName); + } + for (AtlasEntityDef entityDef : typeDefinitions.getEntityDefs()) { + AtlasEntityDef byName = atlasClientV2.getEntityDefByName(entityDef.getName()); + assertNotNull(byName); + } + + } + + @Test + public void testDuplicateCreate() throws Exception { + AtlasEntityDef type = createClassTypeDef(randomString(), + ImmutableSet.<String>of(), AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")); + AtlasTypesDef typesDef = new AtlasTypesDef(); + typesDef.getEntityDefs().add(type); + + AtlasTypesDef created = clientV2.createAtlasTypeDefs(typesDef); + assertNotNull(created); + + try { + created = 
clientV2.createAtlasTypeDefs(typesDef); + fail("Expected 409"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.CONFLICT.getStatusCode()); + } + } + + @Test + public void testUpdate() throws Exception { + String entityType = randomString(); + AtlasEntityDef typeDefinition = + createClassTypeDef(entityType, ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string")); + + AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); + atlasTypesDef.getEntityDefs().add(typeDefinition); + + AtlasTypesDef createdTypeDefs = clientV2.createAtlasTypeDefs(atlasTypesDef); + assertNotNull(createdTypeDefs); + assertEquals(createdTypeDefs.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); + + //Add attribute description + typeDefinition = createClassTypeDef(typeDefinition.getName(), + ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), + AtlasTypeUtil.createOptionalAttrDef("description", "string")); + + emptyTypeDefs(atlasTypesDef); + + atlasTypesDef.getEntityDefs().add(typeDefinition); + + AtlasTypesDef updatedTypeDefs = clientV2.updateAtlasTypeDefs(atlasTypesDef); + assertNotNull(updatedTypeDefs); + assertEquals(updatedTypeDefs.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); + assertEquals(updatedTypeDefs.getEntityDefs().get(0).getName(), atlasTypesDef.getEntityDefs().get(0).getName()); + + MultivaluedMap<String, String> filterParams = new MultivaluedMapImpl(); + filterParams.add(SearchFilter.PARAM_TYPE, "ENTITY"); + AtlasTypesDef allTypeDefs = clientV2.getAllTypeDefs(new SearchFilter(filterParams)); + assertNotNull(allTypeDefs); + Boolean entityDefFound = false; + for (AtlasEntityDef atlasEntityDef : allTypeDefs.getEntityDefs()){ + if (atlasEntityDef.getName().equals(typeDefinition.getName())) { + assertEquals(atlasEntityDef.getAttributeDefs().size(), 2); + entityDefFound = true; + break; + } + } + assertTrue(entityDefFound, "Required entityDef not found."); + } + + @Test(dependsOnMethods = "testCreate") + public void testGetDefinition() throws Exception { + if (CollectionUtils.isNotEmpty(typeDefinitions.getEnumDefs())) { + for (AtlasEnumDef atlasEnumDef : typeDefinitions.getEnumDefs()) { + verifyByNameAndGUID(atlasEnumDef); + } + } + + if (CollectionUtils.isNotEmpty(typeDefinitions.getStructDefs())) { + for (AtlasStructDef structDef : typeDefinitions.getStructDefs()) { + verifyByNameAndGUID(structDef); + } + } + + if (CollectionUtils.isNotEmpty(typeDefinitions.getClassificationDefs())) { + for (AtlasClassificationDef classificationDef : typeDefinitions.getClassificationDefs()) { + verifyByNameAndGUID(classificationDef); + } + } + + if (CollectionUtils.isNotEmpty(typeDefinitions.getEntityDefs())) { + for (AtlasEntityDef entityDef : typeDefinitions.getEntityDefs()) { + verifyByNameAndGUID(entityDef); + } + } + } + + @Test + public void testInvalidGets() throws Exception { + try { + AtlasEnumDef byName = clientV2.getEnumDefByName("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasEnumDef byGuid = clientV2.getEnumDefByGuid("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + 
AtlasStructDef byName = clientV2.getStructDefByName("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasStructDef byGuid = clientV2.getStructDefByGuid("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasClassificationDef byName = clientV2.getClassificationDefByName("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasClassificationDef byGuid = clientV2.getClassificationDefByGuid("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasEntityDef byName = clientV2.getEntityDefByName("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + try { + AtlasEntityDef byGuid = clientV2.getEntityDefByGuid("blah"); + fail("Get for invalid name should have reported a failure"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(), + "Should've returned a 404"); + } + + + } + + @Test + public void testListTypesByFilter() throws Exception { + AtlasAttributeDef attr = AtlasTypeUtil.createOptionalAttrDef("attr", "string"); + AtlasEntityDef classDefA = AtlasTypeUtil.createClassTypeDef("A" + randomString(), ImmutableSet.<String>of(), attr); + AtlasEntityDef classDefA1 = AtlasTypeUtil.createClassTypeDef("A1" + randomString(), ImmutableSet.of(classDefA.getName()), attr); + AtlasEntityDef classDefB = AtlasTypeUtil.createClassTypeDef("B" + randomString(), ImmutableSet.<String>of(), attr); + AtlasEntityDef classDefC = AtlasTypeUtil.createClassTypeDef("C" + randomString(), ImmutableSet.of(classDefB.getName(), classDefA.getName()), attr); + + AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); + atlasTypesDef.getEntityDefs().add(classDefA); + atlasTypesDef.getEntityDefs().add(classDefA1); + atlasTypesDef.getEntityDefs().add(classDefB); + atlasTypesDef.getEntityDefs().add(classDefC); + + AtlasTypesDef created = clientV2.createAtlasTypeDefs(atlasTypesDef); + assertNotNull(created); + assertEquals(created.getEntityDefs().size(), atlasTypesDef.getEntityDefs().size()); + + MultivaluedMap<String, String> searchParams = new MultivaluedMapImpl(); + searchParams.add(SearchFilter.PARAM_TYPE, "CLASS"); + searchParams.add(SearchFilter.PARAM_SUPERTYPE, classDefA.getName()); + SearchFilter searchFilter = new SearchFilter(searchParams); + AtlasTypesDef searchDefs = clientV2.getAllTypeDefs(searchFilter); + assertNotNull(searchDefs); + assertEquals(searchDefs.getEntityDefs().size(), 2); + + searchParams.add(SearchFilter.PARAM_NOT_SUPERTYPE, classDefB.getName()); + searchFilter = new SearchFilter(searchParams); + searchDefs = clientV2.getAllTypeDefs(searchFilter); + assertNotNull(searchDefs); + 
assertEquals(searchDefs.getEntityDefs().size(), 1); + } + + private AtlasTypesDef createHiveTypesV2() throws Exception { + AtlasTypesDef atlasTypesDef = new AtlasTypesDef(); + + AtlasEntityDef databaseTypeDefinition = + createClassTypeDef("database", ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), + AtlasTypeUtil.createRequiredAttrDef("description", "string")); + atlasTypesDef.getEntityDefs().add(databaseTypeDefinition); + + AtlasEntityDef tableTypeDefinition = + createClassTypeDef("table", ImmutableSet.<String>of(), + AtlasTypeUtil.createUniqueRequiredAttrDef("name", "string"), + AtlasTypeUtil.createRequiredAttrDef("description", "string"), + AtlasTypeUtil.createOptionalAttrDef("columnNames", DataTypes.arrayTypeName("string")), + AtlasTypeUtil.createOptionalAttrDef("created", "date"), + AtlasTypeUtil.createOptionalAttrDef("parameters", + DataTypes.mapTypeName("string", "string")), + AtlasTypeUtil.createRequiredAttrDef("type", "string"), + new AtlasAttributeDef("database", "database", + false, + Cardinality.SINGLE, 1, 1, + true, true, + Collections.<AtlasConstraintDef>emptyList())); + atlasTypesDef.getEntityDefs().add(tableTypeDefinition); + + AtlasClassificationDef fetlTypeDefinition = AtlasTypeUtil + .createTraitTypeDef("fetl", ImmutableSet.<String>of(), + AtlasTypeUtil.createRequiredAttrDef("level", "int")); + atlasTypesDef.getClassificationDefs().add(fetlTypeDefinition); + + return atlasTypesDef; + } + + private void verifyByNameAndGUID(AtlasBaseTypeDef typeDef) { + try { + AtlasBaseTypeDef byName = null; + if (typeDef.getCategory() == TypeCategory.ENUM) { + byName = clientV2.getEnumDefByName(typeDef.getName()); + } else if (typeDef.getCategory() == TypeCategory.ENTITY) { + byName = clientV2.getEntityDefByName(typeDef.getName()); + } else if (typeDef.getCategory() == TypeCategory.CLASSIFICATION) { + byName = clientV2.getClassificationDefByName(typeDef.getName()); + } else if (typeDef.getCategory() == TypeCategory.STRUCT) { + byName = clientV2.getStructDefByName(typeDef.getName()); + } + assertNotNull(byName); + } catch (AtlasServiceException e) { + fail("Get byName should've succeeded", e); + } + if (StringUtils.isNotBlank(typeDef.getGuid())) { + try { + AtlasBaseTypeDef byGuid = null; + if (typeDef.getCategory() == TypeCategory.ENUM) { + byGuid = clientV2.getEnumDefByGuid(typeDef.getGuid()); + } else if (typeDef.getCategory() == TypeCategory.ENTITY) { + byGuid = clientV2.getEntityDefByGuid(typeDef.getGuid()); + } else if (typeDef.getCategory() == TypeCategory.CLASSIFICATION) { + byGuid = clientV2.getClassificationDefByGuid(typeDef.getGuid()); + } else if (typeDef.getCategory() == TypeCategory.STRUCT) { + byGuid = clientV2.getStructDefByGuid(typeDef.getGuid()); + } + assertNotNull(byGuid); + } catch (AtlasServiceException e) { + fail("Get byGuid should've succeeded", e); + } + } + } + + private void emptyTypeDefs(AtlasTypesDef def) { + def.getEnumDefs().clear(); + def.getStructDefs().clear(); + def.getClassificationDefs().clear(); + def.getEntityDefs().clear(); + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java new file mode 100755 index 0000000..ded3e05 --- /dev/null +++ 
b/webapp/src/test/java/org/apache/atlas/web/integration/TypesJerseyResourceIT.java @@ -0,0 +1,262 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.atlas.web.integration; + +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; +import com.sun.jersey.core.util.MultivaluedMapImpl; +import org.apache.atlas.AtlasClient; +import org.apache.atlas.AtlasServiceException; +import org.apache.atlas.typesystem.TypesDef; +import org.apache.atlas.typesystem.json.TypesSerialization; +import org.apache.atlas.typesystem.json.TypesSerialization$; +import org.apache.atlas.typesystem.types.AttributeDefinition; +import org.apache.atlas.typesystem.types.ClassType; +import org.apache.atlas.typesystem.types.DataTypes; +import org.apache.atlas.typesystem.types.EnumTypeDefinition; +import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition; +import org.apache.atlas.typesystem.types.Multiplicity; +import org.apache.atlas.typesystem.types.StructTypeDefinition; +import org.apache.atlas.typesystem.types.TraitType; +import org.apache.atlas.typesystem.types.utils.TypesUtil; +import org.codehaus.jettison.json.JSONArray; +import org.codehaus.jettison.json.JSONObject; +import org.testng.Assert; +import org.testng.annotations.AfterClass; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Test; + +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.core.Response; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import static org.apache.atlas.typesystem.types.utils.TypesUtil.createOptionalAttrDef; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertNotNull; +import static org.testng.Assert.fail; + +/** + * Integration test for types jersey resource. 
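+ * Covers create, duplicate-create, update, get and list operations on the V1 types REST API.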
+ */ +public class TypesJerseyResourceIT extends BaseResourceIT { + + private List<HierarchicalTypeDefinition> typeDefinitions; + + @BeforeClass + public void setUp() throws Exception { + super.setUp(); + + typeDefinitions = createHiveTypes(); + } + + @AfterClass + public void tearDown() throws Exception { + typeDefinitions.clear(); + } + + @Test + public void testSubmit() throws Exception { + for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) { + try{ + atlasClientV1.getType(typeDefinition.typeName); + } catch (AtlasServiceException ase){ + String typesAsJSON = TypesSerialization.toJson(typeDefinition, false); + System.out.println("typesAsJSON = " + typesAsJSON); + + JSONObject response = atlasClientV1.callAPIWithBody(AtlasClient.API.CREATE_TYPE, typesAsJSON); + Assert.assertNotNull(response); + + + JSONArray typesAdded = response.getJSONArray(AtlasClient.TYPES); + assertEquals(typesAdded.length(), 1); + assertEquals(typesAdded.getJSONObject(0).getString(NAME), typeDefinition.typeName); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID));} + } + } + + @Test + public void testDuplicateSubmit() throws Exception { + HierarchicalTypeDefinition<ClassType> type = TypesUtil.createClassTypeDef(randomString(), + ImmutableSet.<String>of(), TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE)); + TypesDef typesDef = + TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), ImmutableList.<StructTypeDefinition>of(), + ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), ImmutableList.of(type)); + atlasClientV1.createType(typesDef); + + try { + atlasClientV1.createType(typesDef); + fail("Expected 409"); + } catch (AtlasServiceException e) { + assertEquals(e.getStatus().getStatusCode(), Response.Status.CONFLICT.getStatusCode()); + } + } + + @Test + public void testUpdate() throws Exception { + HierarchicalTypeDefinition<ClassType> typeDefinition = TypesUtil + .createClassTypeDef(randomString(), ImmutableSet.<String>of(), + TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE)); + List<String> typesCreated = atlasClientV1.createType(TypesSerialization.toJson(typeDefinition, false)); + assertEquals(typesCreated.size(), 1); + assertEquals(typesCreated.get(0), typeDefinition.typeName); + + //Add attribute description + typeDefinition = TypesUtil.createClassTypeDef(typeDefinition.typeName, + ImmutableSet.<String>of(), + TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE), + createOptionalAttrDef(DESCRIPTION, DataTypes.STRING_TYPE)); + TypesDef typeDef = TypesUtil.getTypesDef(ImmutableList.<EnumTypeDefinition>of(), + ImmutableList.<StructTypeDefinition>of(), ImmutableList.<HierarchicalTypeDefinition<TraitType>>of(), + ImmutableList.of(typeDefinition)); + List<String> typesUpdated = atlasClientV1.updateType(typeDef); + assertEquals(typesUpdated.size(), 1); + Assert.assertTrue(typesUpdated.contains(typeDefinition.typeName)); + + TypesDef updatedTypeDef = atlasClientV1.getType(typeDefinition.typeName); + assertNotNull(updatedTypeDef); + + HierarchicalTypeDefinition<ClassType> updatedType = updatedTypeDef.classTypesAsJavaList().get(0); + assertEquals(updatedType.attributeDefinitions.length, 2); + } + + @Test(dependsOnMethods = "testSubmit") + public void testGetDefinition() throws Exception { + for (HierarchicalTypeDefinition typeDefinition : typeDefinitions) { + System.out.println("typeName = " + typeDefinition.typeName); + + JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.LIST_TYPES, null, 
typeDefinition.typeName); + + Assert.assertNotNull(response); + Assert.assertNotNull(response.get(AtlasClient.DEFINITION)); + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + String typesJson = response.getString(AtlasClient.DEFINITION); + final TypesDef typesDef = TypesSerialization.fromJson(typesJson); + List<HierarchicalTypeDefinition<ClassType>> hierarchicalTypeDefinitions = typesDef.classTypesAsJavaList(); + for (HierarchicalTypeDefinition<ClassType> classType : hierarchicalTypeDefinitions) { + for (AttributeDefinition attrDef : classType.attributeDefinitions) { + if (NAME.equals(attrDef.name)) { + assertEquals(attrDef.isIndexable, true); + assertEquals(attrDef.isUnique, true); + } + } + } + } + } + + @Test(expectedExceptions = AtlasServiceException.class) + public void testGetDefinitionForNonexistentType() throws Exception { + JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.LIST_TYPES, null, "blah"); + } + + @Test(dependsOnMethods = "testSubmit") + public void testGetTypeNames() throws Exception { + JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.LIST_TYPES, null, (String[]) null); + Assert.assertNotNull(response); + + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + final JSONArray list = response.getJSONArray(AtlasClient.RESULTS); + Assert.assertNotNull(list); + + //Verify that primitive and core types are not returned + String typesString = list.join(" "); + Assert.assertFalse(typesString.contains(" \"__IdType\" ")); + Assert.assertFalse(typesString.contains(" \"string\" ")); + } + + @Test + public void testGetTraitNames() throws Exception { + String[] traitsAdded = addTraits(); + + MultivaluedMap<String, String> queryParams = new MultivaluedMapImpl(); + queryParams.add("type", DataTypes.TypeCategory.TRAIT.name()); + + JSONObject response = atlasClientV1.callAPIWithQueryParams(AtlasClient.API.LIST_TYPES, queryParams); + Assert.assertNotNull(response); + + Assert.assertNotNull(response.get(AtlasClient.REQUEST_ID)); + + final JSONArray list = response.getJSONArray(AtlasClient.RESULTS); + Assert.assertNotNull(list); + Assert.assertTrue(list.length() >= traitsAdded.length); + } + + @Test + public void testListTypesByFilter() throws Exception { + AttributeDefinition attr = TypesUtil.createOptionalAttrDef("attr", DataTypes.STRING_TYPE); + String a = createType(TypesSerialization.toJson( + TypesUtil.createClassTypeDef("A" + randomString(), ImmutableSet.<String>of(), attr), false)).get(0); + String a1 = createType(TypesSerialization.toJson( + TypesUtil.createClassTypeDef("A1" + randomString(), ImmutableSet.of(a), attr), false)).get(0); + String b = createType(TypesSerialization.toJson( + TypesUtil.createClassTypeDef("B" + randomString(), ImmutableSet.<String>of(), attr), false)).get(0); + String c = createType(TypesSerialization.toJson( + TypesUtil.createClassTypeDef("C" + randomString(), ImmutableSet.of(a, b), attr), false)).get(0); + + List<String> results = atlasClientV1.listTypes(DataTypes.TypeCategory.CLASS, a, b); + assertEquals(results, Arrays.asList(a1), "Results: " + results); + } + + private String[] addTraits() throws Exception { + String[] traitNames = {"class_trait", "secure_trait", "pii_trait", "ssn_trait", "salary_trait", "sox_trait",}; + + for (String traitName : traitNames) { + HierarchicalTypeDefinition<TraitType> traitTypeDef = + TypesUtil.createTraitTypeDef(traitName, ImmutableSet.<String>of()); + String json = TypesSerialization$.MODULE$.toJson(traitTypeDef, true); + 
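+            // register the serialized trait definition with Atlas so it appears in the trait-name listing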
createType(json); + } + + return traitNames; + } + + private List<HierarchicalTypeDefinition> createHiveTypes() throws Exception { + ArrayList<HierarchicalTypeDefinition> typeDefinitions = new ArrayList<>(); + + HierarchicalTypeDefinition<ClassType> databaseTypeDefinition = TypesUtil + .createClassTypeDef("database", ImmutableSet.<String>of(), + TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE), + TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE), + TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, DataTypes.STRING_TYPE)); + typeDefinitions.add(databaseTypeDefinition); + + HierarchicalTypeDefinition<ClassType> tableTypeDefinition = TypesUtil + .createClassTypeDef("table", ImmutableSet.<String>of(), + TypesUtil.createUniqueRequiredAttrDef(NAME, DataTypes.STRING_TYPE), + TypesUtil.createRequiredAttrDef(DESCRIPTION, DataTypes.STRING_TYPE), + TypesUtil.createRequiredAttrDef(QUALIFIED_NAME, DataTypes.STRING_TYPE), + createOptionalAttrDef("columnNames", DataTypes.arrayTypeName(DataTypes.STRING_TYPE)), + createOptionalAttrDef("created", DataTypes.DATE_TYPE), + createOptionalAttrDef("parameters", + DataTypes.mapTypeName(DataTypes.STRING_TYPE, DataTypes.STRING_TYPE)), + TypesUtil.createRequiredAttrDef("type", DataTypes.STRING_TYPE), + new AttributeDefinition("database", "database", Multiplicity.REQUIRED, false, "database")); + typeDefinitions.add(tableTypeDefinition); + + HierarchicalTypeDefinition<TraitType> fetlTypeDefinition = TypesUtil + .createTraitTypeDef("fetl", ImmutableSet.<String>of(), + TypesUtil.createRequiredAttrDef("level", DataTypes.INT_TYPE)); + typeDefinitions.add(fetlTypeDefinition); + + return typeDefinitions; + } +} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/listeners/TestGuiceServletConfig.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/listeners/TestGuiceServletConfig.java b/webapp/src/test/java/org/apache/atlas/web/listeners/TestGuiceServletConfig.java deleted file mode 100644 index da221fc..0000000 --- a/webapp/src/test/java/org/apache/atlas/web/listeners/TestGuiceServletConfig.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.apache.atlas.web.listeners; - -import javax.servlet.ServletContextEvent; - -import org.apache.atlas.ApplicationProperties; -import org.apache.atlas.AtlasException; -import org.apache.atlas.repository.graph.AtlasGraphProvider; -import org.apache.atlas.repository.graphdb.AtlasGraph; -import org.apache.commons.configuration.Configuration; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.google.inject.Module; - -public class TestGuiceServletConfig extends GuiceServletConfig { - - private static final Logger LOG = LoggerFactory.getLogger(TestGuiceServletConfig.class); - private boolean servicesEnabled; - - @Override - public void contextInitialized(ServletContextEvent servletContextEvent) { - LOG.info("Initializing test servlet listener"); - super.contextInitialized(servletContextEvent); - } - - @Override - public void contextDestroyed(ServletContextEvent servletContextEvent) { - super.contextDestroyed(servletContextEvent); - - if(injector != null) { - AtlasGraph graph = AtlasGraphProvider.getGraphInstance(); - - LOG.info("Clearing graph store"); - try { - AtlasGraphProvider.cleanup(); - } catch (Exception e) { - LOG.warn("Clearing graph store failed ", e); - } - } - } - - @Override - protected Module getRepositoryModule() { - return new TestModule(); - } - - @Override - protected void startServices() { - try { - Configuration conf = ApplicationProperties.get(); - servicesEnabled = conf.getBoolean("atlas.services.enabled", true); - if (servicesEnabled) { - super.startServices(); - } - } catch (AtlasException e) { - throw new RuntimeException(e); - } - } - - @Override - protected void stopServices() { - if (servicesEnabled) { - super.stopServices(); - } - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/listeners/TestModule.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/listeners/TestModule.java b/webapp/src/test/java/org/apache/atlas/web/listeners/TestModule.java deleted file mode 100644 index b00080f..0000000 --- a/webapp/src/test/java/org/apache/atlas/web/listeners/TestModule.java +++ /dev/null @@ -1,34 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * <p/> - * http://www.apache.org/licenses/LICENSE-2.0 - * <p/> - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.atlas.web.listeners; - -import com.google.inject.Binder; - -import org.apache.atlas.RepositoryMetadataModule; -import org.apache.atlas.repository.audit.EntityAuditRepository; -import org.apache.atlas.repository.audit.InMemoryEntityAuditRepository; -import org.apache.commons.configuration.Configuration; - -public class TestModule extends RepositoryMetadataModule { - @Override - protected void bindAuditRepository(Binder binder, Configuration configuration) { - //Map EntityAuditRepository interface to hbase based implementation - binder.bind(EntityAuditRepository.class).to(InMemoryEntityAuditRepository.class).asEagerSingleton(); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/resources/AdminJerseyResourceIT.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/resources/AdminJerseyResourceIT.java b/webapp/src/test/java/org/apache/atlas/web/resources/AdminJerseyResourceIT.java deleted file mode 100755 index 177785c..0000000 --- a/webapp/src/test/java/org/apache/atlas/web/resources/AdminJerseyResourceIT.java +++ /dev/null @@ -1,49 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.atlas.web.resources; - -import org.apache.atlas.AtlasClient; -import org.apache.commons.configuration.PropertiesConfiguration; -import org.codehaus.jettison.json.JSONObject; -import org.testng.Assert; -import org.testng.annotations.BeforeClass; -import org.testng.annotations.Test; - -/** - * Integration test for Admin jersey resource. 
- */ -public class AdminJerseyResourceIT extends BaseResourceIT { - - @BeforeClass - public void setUp() throws Exception { - super.setUp(); - } - - @Test - public void testGetVersion() throws Exception { - JSONObject response = atlasClientV1.callAPIWithBodyAndParams(AtlasClient.API.VERSION, null, (String[]) null); - Assert.assertNotNull(response); - - PropertiesConfiguration buildConfiguration = new PropertiesConfiguration("atlas-buildinfo.properties"); - - Assert.assertEquals(response.get("Version"), buildConfiguration.getString("build.version")); - Assert.assertEquals(response.get("Name"), buildConfiguration.getString("project.name")); - Assert.assertEquals(response.get("Description"), buildConfiguration.getString("project.description")); - } -} http://git-wip-us.apache.org/repos/asf/incubator-atlas/blob/35e5828f/webapp/src/test/java/org/apache/atlas/web/resources/AdminResourceTest.java ---------------------------------------------------------------------- diff --git a/webapp/src/test/java/org/apache/atlas/web/resources/AdminResourceTest.java b/webapp/src/test/java/org/apache/atlas/web/resources/AdminResourceTest.java index c0bbf09..1fe3119 100644 --- a/webapp/src/test/java/org/apache/atlas/web/resources/AdminResourceTest.java +++ b/webapp/src/test/java/org/apache/atlas/web/resources/AdminResourceTest.java @@ -48,7 +48,7 @@ public class AdminResourceTest { when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.ACTIVE); - AdminResource adminResource = new AdminResource(serviceState, null, null, null, null); + AdminResource adminResource = new AdminResource(serviceState, null, null, null, null, null); Response response = adminResource.getStatus(); assertEquals(response.getStatus(), HttpServletResponse.SC_OK); JSONObject entity = (JSONObject) response.getEntity(); @@ -59,7 +59,7 @@ public class AdminResourceTest { public void testResourceGetsValueFromServiceState() throws JSONException { when(serviceState.getState()).thenReturn(ServiceState.ServiceStateValue.PASSIVE); - AdminResource adminResource = new AdminResource(serviceState, null, null, null, null); + AdminResource adminResource = new AdminResource(serviceState, null, null, null, null, null); Response response = adminResource.getStatus(); verify(serviceState).getState();
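The six try/fail/catch blocks in the typedefs test earlier in this patch repeat the same 404 assertion for struct, classification and entity definitions looked up by name and by GUID. As a minimal sketch only — assuming the clientV2 getters and TestNG asserts already used in that test, Java 8 lambdas, and a hypothetical helper name assertTypeDefNotFound — the pattern could be collapsed to:

// Hypothetical helper (not part of the patch above); assumes:
//   import java.util.concurrent.Callable;
//   import javax.ws.rs.core.Response;
//   import static org.testng.Assert.*;
private void assertTypeDefNotFound(Callable<?> lookup) {
    try {
        lookup.call();
        fail("Get for invalid name/guid should have reported a failure");
    } catch (AtlasServiceException e) {
        // lookup of a non-existent typedef is expected to come back as HTTP 404
        assertEquals(e.getStatus().getStatusCode(), Response.Status.NOT_FOUND.getStatusCode(),
                "Should've returned a 404");
    } catch (Exception e) {
        fail("Lookup failed with an unexpected exception", e);
    }
}

// Example usage with the same invalid key used in the test:
// assertTypeDefNotFound(() -> clientV2.getStructDefByName("blah"));
// assertTypeDefNotFound(() -> clientV2.getEntityDefByGuid("blah"));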
