yuqi1129 commented on code in PR #4948:
URL: https://github.com/apache/gravitino/pull/4948#discussion_r1771275905
##########
authorizations/authorization-ranger/src/test/java/org/apache/gravitino/authorization/ranger/integration/test/RangerHiveE2EIT.java:
##########
@@ -77,63 +78,207 @@ public class RangerHiveE2EIT extends AbstractIT {
private static RangerAuthorizationPlugin rangerAuthPlugin;
public static final String metalakeName =
-
GravitinoITUtils.genRandomName("RangerHiveAuthIT_metalake").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_metalake").toLowerCase();
public static final String catalogName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_catalog").toLowerCase();
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_catalog").toLowerCase();
public static final String schemaName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_schema").toLowerCase();
- public static final String tableName =
- GravitinoITUtils.genRandomName("RangerHiveAuthIT_table").toLowerCase();
-
- public static final String HIVE_COL_NAME1 = "hive_col_name1";
- public static final String HIVE_COL_NAME2 = "hive_col_name2";
- public static final String HIVE_COL_NAME3 = "hive_col_name3";
+ GravitinoITUtils.genRandomName("RangerHiveE2EIT_schema").toLowerCase();
private static GravitinoMetalake metalake;
private static Catalog catalog;
private static final String provider = "hive";
private static String HIVE_METASTORE_URIS;
+ private static SparkSession sparkSession = null;
+ private final AuditInfo auditInfo =
+
AuditInfo.builder().withCreator("test").withCreateTime(Instant.now()).build();
+ private static final String HADOOP_USER_NAME = "HADOOP_USER_NAME";
+ private static final String TEST_USER_NAME = "e2e_it_user";
+
+ private static final String SQL_SHOW_DATABASES =
+ String.format("SHOW DATABASES like '%s'", schemaName);
+
+ private static String RANGER_ADMIN_URL = null;
+
@BeforeAll
public static void startIntegrationTest() throws Exception {
+ // Enable Gravitino Authorization mode
Map<String, String> configs = Maps.newHashMap();
configs.put(Configs.ENABLE_AUTHORIZATION.getKey(), String.valueOf(true));
- configs.put(Configs.SERVICE_ADMINS.getKey(), AuthConstants.ANONYMOUS_USER);
+ configs.put(Configs.SERVICE_ADMINS.getKey(), RangerITEnv.HADOOP_USER_NAME);
+ configs.put(Configs.AUTHENTICATORS.getKey(),
AuthenticatorType.SIMPLE.name().toLowerCase());
+ configs.put("SimpleAuthUserName", TEST_USER_NAME);
registerCustomConfigs(configs);
AbstractIT.startIntegrationTest();
RangerITEnv.setup();
- containerSuite.startHiveContainer();
+ RangerITEnv.startHiveRangerContainer();
+
+ RANGER_ADMIN_URL =
+ String.format(
+ "http://%s:%d",
+ containerSuite.getRangerContainer().getContainerIpAddress(),
RANGER_SERVER_PORT);
+
HIVE_METASTORE_URIS =
String.format(
"thrift://%s:%d",
- containerSuite.getHiveContainer().getContainerIpAddress(),
+ containerSuite.getHiveRangerContainer().getContainerIpAddress(),
HiveContainer.HIVE_METASTORE_PORT);
+ generateRangerSparkSecurityXML();
+
+ sparkSession =
+ SparkSession.builder()
+ .master("local[1]")
+ .appName("Hive Catalog integration test")
+ .config("hive.metastore.uris", HIVE_METASTORE_URIS)
+ .config(
+ "spark.sql.warehouse.dir",
+ String.format(
+ "hdfs://%s:%d/user/hive/warehouse",
+
containerSuite.getHiveRangerContainer().getContainerIpAddress(),
+ HiveContainer.HDFS_DEFAULTFS_PORT))
+ .config("spark.sql.storeAssignmentPolicy", "LEGACY")
+ .config("mapreduce.input.fileinputformat.input.dir.recursive",
"true")
+ .config(
+ "spark.sql.extensions",
+
"org.apache.kyuubi.plugin.spark.authz.ranger.RangerSparkExtension")
+ .enableHiveSupport()
+ .getOrCreate();
+
createMetalake();
createCatalogAndRangerAuthPlugin();
- createSchema();
- createHiveTable();
+ }
+
+ private static void generateRangerSparkSecurityXML() throws IOException {
+ String templatePath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "src",
+ "test",
+ "resources",
+ "ranger-spark-security.xml.template");
+ String xmlPath =
+ String.join(
+ File.separator,
+ System.getenv("GRAVITINO_ROOT_DIR"),
+ "authorizations",
+ "authorization-ranger",
+ "build",
+ "resources",
+ "test",
+ "ranger-spark-security.xml");
+
+ FileInputStream inputStream = new FileInputStream(templatePath);
+ String templateContext = null;
+ try {
+ templateContext = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
+
+ templateContext = templateContext.replace("__REPLACE__RANGER_ADMIN_URL",
RANGER_ADMIN_URL);
+ templateContext =
+ templateContext.replace(
+ "__REPLACE__RANGER_HIVE_REPO_NAME",
RangerITEnv.RANGER_HIVE_REPO_NAME);
+ } finally {
+ inputStream.close();
+ }
+
+ FileOutputStream outputStream = new FileOutputStream(xmlPath);
+ try {
+ IOUtils.write(templateContext, outputStream, StandardCharsets.UTF_8);
+ } finally {
+ outputStream.close();
Review Comment:
```suggestion
    String templateContext =
        FileUtils.readFileToString(new File(templatePath), StandardCharsets.UTF_8);
    templateContext =
        templateContext
            .replace("__REPLACE__RANGER_ADMIN_URL", RANGER_ADMIN_URL)
            .replace("__REPLACE__RANGER_HIVE_REPO_NAME", RangerITEnv.RANGER_HIVE_REPO_NAME);
    FileUtils.writeStringToFile(new File(xmlPath), templateContext, StandardCharsets.UTF_8);
```
##########
core/src/main/java/org/apache/gravitino/storage/relational/database/H2Database.java:
##########
@@ -54,6 +54,12 @@ public String startH2Database(Config config) {
String connectionUrl = constructH2URI(originalJDBCUrl, storagePath);
+ try {
+ Class.forName("org.h2.Driver");
Review Comment:
Why do we need to explicitly load the driver? I remember that the driver JAR
is already on the classpath.
##########
integration-test-common/build.gradle.kts:
##########
@@ -54,25 +61,41 @@ dependencies {
exclude("org.elasticsearch")
exclude("org.elasticsearch.client")
exclude("org.elasticsearch.plugin")
+ exclude("com.amazonaws", "aws-java-sdk-bundle")
}
+ testImplementation(libs.apiguardian.api)
+ testImplementation(libs.bundles.metrics)
+ testImplementation(libs.junit.jupiter.params)
+ testImplementation(libs.junit.jupiter.api)
+ testImplementation(libs.junit.jupiter.engine)
+ testImplementation(libs.bundles.jersey)
Review Comment:
Please keep them in alphabetical order.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]