haospotai commented on issue #1284: [SUPPORT]
URL: https://github.com/apache/incubator-hudi/issues/1284#issuecomment-579010412
 
 
   Hello @n3nash
   
   ```
   import os

   from dotenv import load_dotenv
   from pyspark.sql import SparkSession

   # ROOT_DIR is assumed to be defined elsewhere in the project.

   class FunHudi:
       def __init__(self, app_name):
           load_dotenv('../.env')
           # the Hudi bundle jar passed to Spark:
           # hudi-spark-bundle-0.5.0-incubating.jar
           spark_bundle = ROOT_DIR + "/resources/" + os.environ['HUDI_SPARK_BUNDLE']
           self.host = os.environ['HDFS_HOST']
           self.hive_base_path = os.environ['HIVE_BASH_PATH']
           self.spark = SparkSession.builder \
               .master(os.environ['SPARK_MASTER']) \
               .appName(app_name) \
               .config("spark.jars", spark_bundle) \
               .config("spark.driver.extraClassPath", spark_bundle) \
               .getOrCreate()

       def insert_data_hudi_sync_hive(self, tablename, data_hdfs):
           df = self.spark.read.json(data_hdfs)
           df.write.format("org.apache.hudi") \
               .option("hoodie.datasource.write.precombine.field", "uuid") \
               .option("hoodie.table.name", tablename) \
               .option("hoodie.datasource.hive_sync.partition_fields", "partitionpath") \
               .option("hoodie.datasource.write.partitionpath.field", "partitionpath") \
               .option("hoodie.datasource.hive_sync.database", "default") \
               .option("hoodie.datasource.hive_sync.enable", "true") \
               .option("hoodie.datasource.hive_sync.table", tablename) \
               .option("hoodie.datasource.hive_sync.jdbcurl", os.environ['HIVE_JDBC_URL']) \
               .option("hoodie.datasource.hive_sync.username", os.environ['HIVE_USER']) \
               .option("hoodie.datasource.hive_sync.password", os.environ['HIVE_PASSWORD']) \
               .mode("overwrite") \
               .save(self.host + self.hive_base_path)
   ```
   
   ```
   import os
   import unittest

   from dotenv import load_dotenv

   # FunHudi is the class shown above; its module/import path is project-specific.


   class TestSoptaiHudi(unittest.TestCase):
       def setUp(self) -> None:
           load_dotenv('.env')
           self.host = os.getenv('HDFS_HOST')
           self.hudiclient = FunHudi("testApp")
           self.path = '/data/peoplejson.json'

       def test_insert_data_hudi_sync_hive(self):
           self.hudiclient.insert_data_hudi_sync_hive("soptaitest", self.path)
   ```
   The exception is thrown once `hoodie.datasource.hive_sync.enable` is set to `"true"`,
   because the job syncs the data to Hive at the end of the write.
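
   For reference, here is a minimal sketch of the same write with the `hive_sync.*` options
   left out (so `hoodie.datasource.hive_sync.enable` keeps its default of `"false"`), to check
   that the Hudi write itself succeeds and only the Hive-sync step at the end fails. The input
   path, table name, and base path below are placeholders based on the snippets above, and the
   Hudi bundle jar is assumed to be on the classpath as in `FunHudi`.

   ```
   from pyspark.sql import SparkSession

   # Same bundle as in FunHudi; the jar path here is a placeholder.
   spark = SparkSession.builder \
       .appName("hudiWriteWithoutHiveSync") \
       .config("spark.jars", "hudi-spark-bundle-0.5.0-incubating.jar") \
       .getOrCreate()

   # Placeholder input path, mirroring the test's /data/peoplejson.json.
   df = spark.read.json("hdfs:///data/peoplejson.json")

   # Identical Hudi options minus the hive_sync.* ones, so no Hive sync runs.
   df.write.format("org.apache.hudi") \
       .option("hoodie.table.name", "soptaitest") \
       .option("hoodie.datasource.write.precombine.field", "uuid") \
       .option("hoodie.datasource.write.partitionpath.field", "partitionpath") \
       .mode("overwrite") \
       .save("hdfs:///tmp/hudi/soptaitest")  # placeholder base path
   ```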
