http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/griffin-doc/service/postman/griffin_environment.json
----------------------------------------------------------------------
diff --git a/griffin-doc/service/postman/griffin_environment.json b/griffin-doc/service/postman/griffin_environment.json
new file mode 100644
index 0000000..9a3da5b
--- /dev/null
+++ b/griffin-doc/service/postman/griffin_environment.json
@@ -0,0 +1,16 @@
+{
+  "id": "b0a42a84-0418-4bb6-226d-ca9d6d5f23d7",
+  "name": "Griffin Environment",
+  "values": [
+    {
+      "enabled": true,
+      "key": "BASE_PATH",
+      "value": "http://localhost:8080";,
+      "type": "text"
+    }
+  ],
+  "timestamp": 1508998036167,
+  "_postman_variable_scope": "environment",
+  "_postman_exported_at": "2017-10-30T01:58:11.275Z",
+  "_postman_exported_using": "Postman/5.3.2"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/griffin-doc/ui/dockerUIguide.md
----------------------------------------------------------------------
diff --git a/griffin-doc/ui/dockerUIguide.md b/griffin-doc/ui/dockerUIguide.md
new file mode 100644
index 0000000..2d434e3
--- /dev/null
+++ b/griffin-doc/ui/dockerUIguide.md
@@ -0,0 +1,50 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+## Docker webUI Guide
+
+### Preparatory work
+
+Follow the steps [here](https://github.com/apache/incubator-griffin#how-to-run-in-docker) to prepare your docker container of griffin and get your webUI ready.
+
+### webUI test case guide
+
+1.  Click "Data Assets" at the top right corner, to watch all the exist data 
assets.  
+    In docker, we've prepared two data asset in Hive, through this page, you 
can see all the table metadata in Hive.
+
+2.  Click "Measures" button at the top left corner to watch all the measures 
here, and you can also create a new DQ measurement by following steps.  
+    1) Click "Create Measure" button at the top left corner, choose the top 
left block "Accuracy", at current we only support accuracy type.  
+    2) Choose Source: find "demo_src" in the left tree, select some or all 
attributes in the right block, click "Next".  
+    3) Choose Target: find "demo_tgt" in the left tree, select the matching 
attributes with source data asset in the right block, click "Next".  
+    4) Mapping Source and Target: select "Source Fields" of each row, to match 
the corresponding field in target table, e.g. id maps to id, age maps to age, 
desc maps to desc.   
+    Finish all the mapping, click "Next".  
+    5) Fill out the required table as required, "Organization" is the group of 
this measurement.  
+    Submit and save, you can see your new DQ measurement created in the 
measures list.  
+
+3.  Now that you've created a new DQ measurement, it needs to be scheduled to run in the docker container. Click the "Jobs" button to view all the jobs; currently there is no job, so you need to create a new one.
+    Click the "Create Job" button at the top left corner and fill out all the blocks as below.
+    ```
+    "Source Partition": YYYYMMdd-HH
+    "Target Partition": YYYYMMdd-HH
+    "Measure Name": <choose the measure you just created>
+    "Start After(s)": 0
+    "Interval": 300
+    ```
+    The source and target partitions define the partition pattern of the demo data, which is based on timestamp. "Start After(s)" means the job will start after n seconds, and "Interval" is the job interval in seconds. In the example above, the job runs every 5 minutes.
+
+    Wait for about 1 minute; after the calculation, results are published to the web UI, and you can view the dashboard by clicking "DQ Metrics" at the top right corner.

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/griffin-doc/ui/test-case.md
----------------------------------------------------------------------
diff --git a/griffin-doc/ui/test-case.md b/griffin-doc/ui/test-case.md
new file mode 100644
index 0000000..3b703eb
--- /dev/null
+++ b/griffin-doc/ui/test-case.md
@@ -0,0 +1,78 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+# Functional Test Cases
+
+
+|TestCase ID|Test Page|Test Case Description|Test Case Steps|Test Data|Expected Result|Actual Result|Test Result|Jira Bug ID|
+|---|---|---|---|---|---|---|---|---|
+|0101|login page|invalid corp id - check user cannot log in to the system with an invalid corp id.|1. Input invalid corp id.<br>2. Input password.<br>3. Click 'log in' button.||1. Login failed.||Passed||
+|0102|login page|invalid password - check user cannot log in to the system with an invalid password.|1. Input valid corp id.<br>2. Input invalid password.<br>3. Click 'log in' button.||1. Login failed.||Passed||
+|0103|login page|valid corp id and password - check user can log in to the system with a valid corp id and password.|1. Input the corp id and password.<br>2. Click 'log in' button.||1. Login successfully.||Passed||
+|0104|login page|remember password|1. Input the corp id and password.<br>2. Select 'remember password'.<br>3. Click 'log in' button.<br>4. Close the browser.<br>5. Open the browser again.<br>6. Visit the griffin page.||1. The id and password are valid.<br>2. 'remember password' is checked.<br>3. Logged in to the griffin homepage.<br>4. The browser is closed.<br>5. The browser is reopened.<br>6. The griffin homepage is opened, instead of the login page.||Passed||
+|0105|login page|not remember password|1. Input the corp id and password.<br>2. Unselect 'remember password'.<br>3. Click 'log in' button.<br>4. Close the browser.<br>5. Open the browser again.<br>6. Visit the griffin page.||1. The id and password are valid.<br>2. 'remember password' is unchecked.<br>3. Logged in to the griffin homepage.<br>4. The browser is closed.<br>5. The browser is reopened.<br>6. The login page is opened.||Passed||
+|0201|main page|menu bar - check all links in the menu work.|1. Click 'health'.<br>2. Click 'models'.<br>3. Click 'Data profiling'.<br>4. Click your username -> 'API Docs'.||1. Show 'health' page.<br>2. Show 'models' page.<br>3. Show 'data profiling' page.<br>4. Open new page for API page.||Passed||
+|0202|main page|menu bar - search|1. Input a word in the search box.<br>2. Do search.||1. Show search result.|unimplemented|||
+|0203|main page|menu bar - user profile|1. Click username -> 'user profile'.||1. Show user profile page.|unimplemented|||
+|0204|main page|menu bar - setting|1. Click username -> 'setting'.||1. Show setting page.|unimplemented|||
+|0205|main page|right side - DataAssets|1. Click '*** DataAssets' link.||1. Show the data assets page.||Passed||
+|0206|main page|right side - DQ Metrics|1. Click '*** DQ Metrics' link.||1. Show DQ Metrics page.||Passed||
+|0207|main page|right side - health percentage|1. Check the pie for the health percentage.||1. Show the health percentage.||Passed||
+|0208|main page|right side - issue tracking|1. Click 'issue tracking'.||1. Show 'issue tracking' page.|unimplemented|||
+|0209|main page|right side - statistics for the DQ data.|1. Check the DQ data with the name, last updated time, and the data quality.<br>2. Show more for one item, check the dq trend chart.<br>3. Click the chart.<br>4. Close the zoomed-in chart.||1. Show all the dq data.<br>2. Show the latest dq trend chart for the item.<br>3. The dq chart is zoomed in.<br>4. The zoomed-in chart is closed.||Passed||
+|0210|main page|right side - report issue.|1. Click 'Report issue'.||1. Open the jira page.||Passed||
+|0301|health page|heatmap|1. Open 'heatmap' tab.<br>2. Check the data quality metrics heatmap.<br>3. Click inside the heatmap.||1. Show the heatmap.<br>2. All the data are shown successfully.<br>3. Show the metrics page.||Passed||
+|0302|health page|Topology|1. Open 'Topology' tab.<br>2. Check the data.||1. Show topology.|unimplemented|||
+|0303|health page|check the UI layout when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. Display the page correctly.||Passed||
+|0401|metrics page|check metrics data|1. Check the dq charts for the metrics.<br>2. Click one chart.||1. All the data in the dq charts are correct.<br>2. The chart is zoomed in.||Passed||
+|0402|metrics page|Download Sample|1. Click 'download sample'.||1. The sample is downloaded to the local path.|unimplemented|||
+|0403|metrics page|Profiling|1. Click 'profiling'.||1. Show 'profiling'.|unimplemented|||
+|0404|metrics page|check the UI layout when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. Display the page correctly.||Passed||
+|0501|models page|check the models data|1. Check whether all the columns are correct.<br>2. Click one model name.||1. All the data are correct.<br>2. Show more information of the model.||Passed||
+|0502|models page|edit model|1. Click 'edit' icon.||1. Open the edit page.|unimplemented|||
+|0503|models page|delete model|1. Click 'delete' icon for one model.<br>2. Confirm to delete the model.||1. Open delete confirmation page.<br>2. The model is removed from the models table.||Passed||
+|0504|models page|subscribe|1. Click 'subscribe' icon for one model.||1. Open subscribe page.|unimplemented|||
+|0505|models page|table paging|1. Click other pages in the models table.||1. All the data in the other pages are shown correctly.||Passed||
+|0506|models page|create DQ model|1. Click 'create DQ model' button.||1. Open 'create DQ model' page.||Passed||
+|0507|models page|check the UI layout when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. Display the page correctly.||Passed||
+|0601|create dq model - accuracy|create accuracy|1. Click 'models' -> 'create DQ model' -> 'Accuracy'.<br>2. Choose the source. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one or more attributes, e.g. uid, site_id.<br>4. Click 'next'.<br>5. Choose the target. Select a schema, e.g. 'appollo -> Bullseye -> adchoice_user_pref'.<br>6. Select one or more attributes, e.g. user_id, scope.<br>7. Click 'next'.<br>8. Select a primary key, e.g. Bullseye.achoice_user_pref.user_id.<br>9. Select 'Map To' exactly.<br>10. Select a source field for each target.<br>11. Click 'next'.<br>12. Input the required information, e.g. model name 'atest', notification email 'a...@ebay.com'.<br>13. Click 'submit'.<br>14. Confirm to save.|source schema: 'apollo -> Sojorner -> sog_search_event'.<br>source attributes: uid, site_id.<br>target schema: 'appollo -> Bullseye -> adchoice_user_pref'.<br>target attributes, e.g. user_id, scope.<br>primary key: Bullseye.achoice_user_pref.user_id.<br>model name: 'atest',<br>notification email: 'a...@ebay.com'.|1. Open 'create accuracy' page.<br>2. The source schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The source attributes are selected.<br>4. Go to 'choose target' step.<br>5. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>6. The target attributes are selected.<br>7. Go to 'mapping source and target' step.<br>8. The PK is selected.<br>9. Exactly map to the source.<br>10. The source field is selected for each target.<br>11. Go to 'configuration' step.<br>12. The required info is input correctly.<br>13. Open a confirmation page.<br>14. The new model 'atest' is created. It is shown in the models table.||Passed||
+|0602|create dq model - accuracy|show error message if no source attribute is selected.|1. Click 'models' -> 'create DQ model' -> 'Accuracy'.<br>2. Click 'next'.||1. Open 'create accuracy' page.<br>2. Show error message to select at least one attribute.||Passed||
+|0603|create dq model - accuracy|show error message if no target attribute is selected.|1. Click 'models' -> 'create DQ model' -> 'Accuracy'.<br>2. Choose the source. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one or more attributes, e.g. uid, site_id.<br>4. Click 'next'.<br>5. In the 'target' step, click 'next'.|source schema: 'apollo -> Sojorner -> sog_search_event'.<br>source attributes: uid, site_id.|1. Open 'create accuracy' page.<br>2. The source schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The source attributes are selected.<br>4. Go to 'choose target' step.<br>5. Show error message to select at least one attribute.||Passed||
+|0604|create dq model - accuracy|show error message if 'map fields' is not set.|1. Click 'models' -> 'create DQ model' -> 'Accuracy'.<br>2. Choose the source. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one or more attributes, e.g. uid, site_id.<br>4. Click 'next'.<br>5. Choose the target. Select a schema, e.g. 'appollo -> Bullseye -> adchoice_user_pref'.<br>6. Select one or more attributes, e.g. user_id, scope.<br>7. Click 'next'.<br>8. Make no selection; click 'next'.<br>9. Select a primary key. Click 'next'.|source schema: 'apollo -> Sojorner -> sog_search_event'.<br>source attributes: uid, site_id.<br>target schema: 'appollo -> Bullseye -> adchoice_user_pref'.<br>target attributes, e.g. user_id, scope.<br>primary key: Bullseye.achoice_user_pref.user_id.|1. Open 'create accuracy' page.<br>2. The source schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The source attributes are selected.<br>4. Go to 'choose target' step.<br>5. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>6. The target attributes are selected.<br>7. Go to 'mapping source and target' step.<br>8. No PK is selected.<br>9. Show error message.||Passed||
+|0605|create dq model - accuracy|show error if the configuration is invalid.|1. Click 'models' -> 'create DQ model' -> 'Accuracy'.<br>2. Choose the source. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one or more attributes, e.g. uid, site_id.<br>4. Click 'next'.<br>5. Choose the target. Select a schema, e.g. 'appollo -> Bullseye -> adchoice_user_pref'.<br>6. Select one or more attributes, e.g. user_id, scope.<br>7. Click 'next'.<br>8. Select a primary key, e.g. Bullseye.achoice_user_pref.user_id.<br>9. Select 'Map To' exactly.<br>10. Select a source field for each target.<br>11. Click 'next'.<br>12. Input an invalid value for each field, e.g. model name 'a test', notification email 'aa'.|source schema: 'apollo -> Sojorner -> sog_search_event'.<br>source attributes: uid, site_id.<br>target schema: 'appollo -> Bullseye -> adchoice_user_pref'.<br>target attributes, e.g. user_id, scope.<br>primary key: Bullseye.achoice_user_pref.user_id.<br>model name: 'a test',<br>notification email: 'aa'.|1. Open 'create accuracy' page.<br>2. The source schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The source attributes are selected.<br>4. Go to 'choose target' step.<br>5. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>6. The target attributes are selected.<br>7. Go to 'mapping source and target' step.<br>8. The PK is selected.<br>9. Exactly map to the source.<br>10. The source field is selected for each target.<br>11. Go to 'configuration' step.<br>12. Show error for the invalid values.||Passed||
+|0606|create dq model - accuracy|check the link to add new data asset.|1. Click the link for adding a new data asset.||1. Go to the 'register data asset' page.||Passed||
+|0607|create dq model - accuracy|check the UI layout for all the steps when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. All the steps in the page can be shown correctly.||Passed||
+|0701|create dq model - validity|check dq model with validity type can be created.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Choose the target. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one attribute, e.g. uid.<br>4. Click 'next'.<br>5. Choose one validity model, e.g. unique count.<br>6. Click 'next'.<br>7. Input valid values for the configuration fields, e.g. model name 'avalidity', email 'a...@b.com'.<br>8. Click 'submit'.<br>9. Click 'save'.|schema: 'apollo -> Sojorner -> sog_search_event'.<br>attribute: uid.<br>validity model: unique count.<br>model name: 'avalidity',<br>email: 'a...@b.com'.|1. Open 'create validity' page.<br>2. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The attribute is selected.<br>4. Go to 'select model' page.<br>5. The validity model is selected. The description of the model is shown as well.<br>6. Go to 'configuration' step.<br>7. All the values are valid.<br>8. Open a confirmation page.<br>9. The new model 'avalidity' is created successfully. It is shown in the models page.||Passed||
+|0702|create dq model - validity|show error if no target is selected.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Do not choose the target.<br>3. Click 'next'.||1. Open 'create validity' page.<br>2. No target schema is selected.<br>3. Show error.||Passed||
+|0703|create dq model - validity|show error if any field is invalid.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Choose the target. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Select one attribute, e.g. uid.<br>4. Click 'next'.<br>5. Choose one validity model, e.g. unique count.<br>6. Click 'next'.<br>7. Input invalid values for the configuration fields.|schema: 'apollo -> Sojorner -> sog_search_event'.<br>validity model: unique count.<br>attribute: uid.<br>model name: 'a validity',<br>email: 'aa'.|1. Open 'create validity' page.<br>2. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>3. The attribute is selected.<br>4. Go to 'select model' page.<br>5. The validity model is selected. The description of the model is shown as well.<br>6. Go to 'configuration' step.<br>7. Show error for the invalid values.||Passed||
+|0704|create dq model - validity|check the UI layout for all the steps when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. All the steps in the page can be shown correctly.||Passed||
+|0801|create dq model - anomaly detection|check the dq model with anomaly detection can be created.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Choose the target. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Click 'next'.<br>4. Choose one statistical technique, e.g. bollinger bands detection.<br>5. Click 'next'.<br>6. Input valid values for the configuration fields, e.g. model name 'anomaly', email 'a...@b.com'.<br>7. Click 'submit'.<br>8. Click 'save'.|schema: 'apollo -> Sojorner -> sog_search_event'.<br>statistical technique: bollinger bands detection.<br>model name: 'anomaly',<br>email: 'a...@b.com'.|1. Open 'create validity' page.<br>2. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>3. Go to 'select model' page.<br>4. The validity model is selected. The description of the model is shown as well.<br>5. Go to 'configuration' step.<br>6. All the values are valid.<br>7. Open a confirmation page.<br>8. Two new models, 'anomaly' with 'anomaly detection' type and 'Count_anomaly_1' with 'validity' type, are created successfully. They are shown in the models page.||Passed||
+|0802|create dq model - anomaly detection|show error if no target is selected.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Do not choose the target.<br>3. Click 'next'.||1. Open 'create validity' page.<br>2. No target schema is selected.<br>3. Show error.||Passed||
+|0803|create dq model - anomaly detection|show error if any field is invalid.|1. Click 'models' -> 'create DQ model' -> Validity.<br>2. Choose the target. Select a schema, e.g. 'apollo -> Sojorner -> sog_search_event'.<br>3. Click 'next'.<br>4. Choose one statistical technique, e.g. bollinger bands detection.<br>5. Click 'next'.<br>6. Input invalid values for the configuration fields.|schema: 'apollo.Sojorner.sog_search_event'.<br>model name: 'a nomaly',<br>email: 'aa'.|1. Open 'create validity' page.<br>2. The target schema is selected. The corresponding attributes are shown in the attributes table.<br>3. Go to 'select model' page.<br>4. The validity model is selected. The description of the model is shown as well.<br>5. Go to 'configuration' step.<br>6. Show error for the invalid values.||Passed||
+|0804|create dq model - anomaly detection|check the UI layout for all the steps when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. All the steps in the page can be shown correctly.||Passed||
+|0901|create dq model - publish DQ data directly|check the dq model with publish type can be created.|1. Click 'models' -> 'create DQ model' -> publish DQ data directly.<br>2. Input valid values for the configuration fields.<br>3. Click 'submit'.<br>4. Click 'save'.|model name: 'apu',<br>organization: 'hadoop',<br>email: 'a...@b.com'.|1. Open 'create validity' page.<br>2. All the values are valid.<br>3. Open a confirmation page.<br>4. The new model 'apu' is created successfully. It is shown in the models page.||Passed||
+|0902|create dq model - publish DQ data directly|show error if any field is invalid.|1. Click 'models' -> 'create DQ model' -> publish DQ data directly.<br>2. Input invalid values for the configuration fields.|model name: 'a pu', email: 'aa'.|1. Open 'create validity' page.<br>2. Show error for the invalid values.||Passed||
+|0903|create dq model - publish DQ data directly|check the UI layout for all the steps when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. All the steps in the page can be shown correctly.||Passed||
+|1001|data assets|check the data assets information|1. Check whether all the columns are correct.<br>2. Show more for an asset.||1. All the data are correct.<br>2. Show the schemas of the asset.||Passed||
+|1002|data assets|edit asset|1. Click 'edit' icon for an asset, e.g. 'abc'.<br>2. Edit the schema description and sample.<br>3. Click 'submit'.<br>4. Confirm to save.<br>5. In the asset table, show more for the asset 'abc'.||1. Open the edit page.<br>2. The schema description and sample are valid.<br>3. Open a confirmation page.<br>4. The asset info is saved.<br>5. The schema info is updated.||Passed||
+|1003|data assets|delete asset|1. Click 'delete' icon for an asset, e.g. 'abc'.<br>2. Confirm to delete the asset.||1. Open delete confirmation page.<br>2. The asset is removed from the table.||Passed||
+|1004|data assets|table paging|1. Click other pages in the table.||1. All the data in the other pages are shown correctly.||Passed||
+|1005|data assets|check the UI layout when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. Display the page correctly.||Passed||
+|1101|register data asset|check data asset can be registered.|1. Click 'register data asset' in the 'data assets' page.<br>2. Input valid values.<br>3. Click 'submit'.<br>4. Confirm to save.|asset name: 'atest',<br>type: 'hdfsfile',<br>HDFS path: '/var',<br>data folder pattern: '16-06-01',<br>platform: 'Apollo',<br>organization: 'GPS',<br>schema: name 'dmg', type 'string'|1. Open 'register data asset' page.<br>2. All the values are valid.<br>3. Open a confirmation page.<br>4. The new asset is registered successfully. It is shown in the assets table.||Passed||
+|1102|register data asset|show error if any field is invalid.|1. Click 'register data asset' in the 'data assets' page.<br>2. Input some invalid values.<br>3. Click 'submit'.|asset name: 'a test',<br>type: 'hdfsfile',<br>HDFS path: '/var',<br>data folder pattern: '16-06-01',<br>platform: 'Apollo',<br>organization: null,<br>schema: name 'dmg', type 'string'|1. Open 'register data asset' page.<br>2. Some values are invalid.<br>3. Show error for the invalid values.||Passed||
+|1103|register data asset|check the UI layout when the page is zoomed in and out.|1. Zoom in the page.<br>2. Zoom out the page.||1. Display the page correctly.||Passed||

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/griffin-doc/ui/user-guide.md
----------------------------------------------------------------------
diff --git a/griffin-doc/ui/user-guide.md b/griffin-doc/ui/user-guide.md
new file mode 100644
index 0000000..651af3e
--- /dev/null
+++ b/griffin-doc/ui/user-guide.md
@@ -0,0 +1,193 @@
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+-->
+# Apache Griffin User Guide
+
+## 1 Introduction & Access
+
+- Apache Griffin is an open source Data Quality solution for distributed data systems at any scale, in both streaming and batch data contexts.
+- Users will primarily access this application from a PC.
+
+## 2 Procedures
+
+![bpmn](../img/userguide/Capture.PNG)
+
+After you log into the system, you may follow these steps:
+
+1. First, create a new measure.
+2. Then, create a job to process the measure periodically.
+3. Finally, the heatmap and dashboard will show the data diagrams of the measures.
+
+### 2.1 Data Asset
+
+You can check the data assets by clicking "DataAssets" at the top right corner.
+
+<img src="../img/userguide/data asset new.png" >
+
+Then you can see all the data assets listed here.
+
+<img src="../img/userguide/asset list.png">
+
+### 2.2 Create measure
+
+By clicking "Measures", and then choose "Create Measure". You can use the 
measure to process data and get the result you want.
+
+<img src="img/userguide/create measure.png">
+
+There are mainly four kinds of measures for you to choose from:
+
+1. If you want to measure the match rate between source and target, choose accuracy.
+2. If you want to check specific values of the data (such as a null column count), choose validity.
+3. If you want to detect wrong data information, choose anomaly detection.
+4. If you have already processed the data by yourself, choose publish DQ metrics directly; POST it any time you want, and see the trend from the data diagrams.
+
+Currently we only support accuracy measure creation from the UI.
+
+#### **2.2.1 Accuracy** [[1]](#accuracy)
+
+<img src="img/userguide/accuracy.png" >
+
+**Definition:**
+
+Measured by how the values agree with an identified source of truth.
+
+**Steps:**
+
+1) Choose source
+
+Select the source dataset and fields which will be used for comparison.
+
+<img src="../img/userguide/source new.png">
+
+For example, we choose 2 columns here.
+
+2) Choose target
+
+Select the target dataset and fields which will be used for comparison.
+
+<img src="../img/userguide/target new.png">
+
+3) Mapping source and target
+
+- Step 1: "Map To": select the rule for matching the source and the target. There are 6 options to choose from (a sketch illustrating them follows the figure below):
+  1. == : data of the two columns should match exactly.
+  2. !== : data of the two columns should be different.
+  3. \> : the target column data should be bigger than the source one.
+  4. \>= : the target column data should be bigger than or equal to the source one.
+  5. < : the target column data should be smaller than the source one.
+  6. <= : the target column data should be smaller than or equal to the source one.
+- Step 2: "Source fields": choose the source column that you want to compare with the target column.
+
+<img src="../img/userguide/mapping.png">
+
+4) Configuration
+
+Set up the required measure information.  
+The organization is the group of your measure; you can manage your measurement dashboard by group later.
+
+<img src="../img/userguide/confirm new.png">
+
+5) Measure information
+
+After you create a new accuracy measure, you can check it by selecting it in the measures list page.
+
+<img src="../img/userguide/measure info.png" >
+
+**Example:**
+
+Suppose the source table A has 1000 records, and the target table B has only 999 records which perfectly match A in the selected fields; then the accuracy rate = 999/1000 * 100% = 99.9%.
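+Restating the example above as a general formula:
+
+```latex
+\text{accuracy rate} = \frac{N_{\text{matched target records}}}{N_{\text{source records}}} \times 100\% = \frac{999}{1000} \times 100\% = 99.9\%
+```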
+
+### 2.3 Create Job
+
+Click "Jobs", then choose "Create Job". You can submit a job to execute your measure periodically.
+
+<img src="../img/userguide/create job.png">
+
+Currently we only support simple periodic scheduling jobs for measures.
+
+<img src="../img/userguide/job config.png">
+
+Fill out the job configuration fields:
+- Source Partition: the pattern of the source data partition based on timestamp. "YYYYMMdd-HH" means the data source partitions are "dt" and "hour"; for example, the partitions of the data look like "dt = 20170605, hour = 15" (see the sketch after this list).
+- Target Partition: the pattern of the target data partition based on timestamp.
+- Measure Name: the name of the measure you want to schedule. You need to choose it from the list of measures you've created before.
+- Start After(s): after how many seconds the job starts. It should not be negative; for example, 0 means start now.
+- Interval: the interval of the job scheduling in seconds. It should be an integer; for example, 300 means schedule this job every 5 minutes.
+
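+For illustration, a minimal sketch (an assumed helper, not Griffin's scheduler code) of how a timestamp maps to the "dt" and "hour" partitions under the "YYYYMMdd-HH" pattern:
+
+```java
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.TimeZone;
+
+public class PartitionPatternSketch {
+    public static void main(String[] args) {
+        Date ts = new Date(1496674800000L); // 2017-06-05 15:00:00 UTC
+        // The date part of "YYYYMMdd-HH" becomes the "dt" partition value,
+        // and the hour part becomes the "hour" partition value.
+        SimpleDateFormat dtFmt = new SimpleDateFormat("yyyyMMdd");
+        SimpleDateFormat hourFmt = new SimpleDateFormat("HH");
+        dtFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
+        hourFmt.setTimeZone(TimeZone.getTimeZone("UTC"));
+        // Prints: dt = 20170605, hour = 15
+        System.out.println("dt = " + dtFmt.format(ts) + ", hour = " + hourFmt.format(ts));
+    }
+}
+```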
+After you submit the job, Griffin schedules it in the background, and after the calculation you can monitor the dashboard to view the results on the UI.
+
+## 3 Metrics dashboard
+
+After the processing work is done, there are 3 ways to view the data diagrams.
+
+1. Click on "Health"; it shows the heatmap of metrics data. The blocks in green represent healthy and the blocks in red represent unhealthy.
+
+   <img src="img/userguide/heatmap.png" >
+
+2. Click on "DQ Metrics".
+
+   <img src="img/userguide/dq metrics.png">  
+
+   You can see the diagrams of metrics.
+
+   <img src="img/userguide/metrics dashboard.png" >
+
+   By clicking on a diagram, you can get a zoomed-in picture of it and see the metrics in the selected time window.
+
+   <img src="../img/userguide/dashboard big.png" >
+
+3. The metrics are shown on the right side of the page. By clicking on a measure, you can get the diagram and details about the measure result.
+
+   <img src="../img/userguide/right bar.png" >
+
+## References
+
+### Six core data quality dimensions
+
+*Content adapted from [THE SIX PRIMARY DIMENSIONS FOR DATA QUALITY ASSESSMENT](http://www.damauk.org/RWFilePub.php?&cat=403&dx=1&ob=3&rpn=catviewleafpublic403&id=106193), DAMA, UK*
+
+<a id="accuracy" name="accuracy"></a>
+
+| Title     | Accuracy |
+| ----------|----------|
+| Definition  | The degree to which data correctly describes the "real world" object or event being described. |
+| Reference | Ideally the "real world" truth is established through primary research. However, as this is often not practical, it is common to use 3rd party reference data from sources which are deemed trustworthy and of the same chronology. |
+| Measure  | The degree to which the data mirrors the characteristics of the real world object or objects it represents. |
+| Scope  | Any "real world" object or objects that may be characterized or described by data, held as data item, record, data set or database. |
+| Unit of Measure | The percentage of data entries that pass the data accuracy rules. |
+| Type of Measure: <br/><ul><li>Assessment</li><li>Continuous</li><li>Discrete</li></ul>| Assessment, e.g. primary research or reference against trusted data. Continuous Measurement, e.g. age of students derived from the relationship between the students’ dates of birth and the current date. Discrete Measurement, e.g. date of birth recorded. |
+| Related Dimension | Validity is a related dimension because, in order to be accurate, values must be valid, the right value and in the correct representation. |
+| Optionality | Mandatory because - when inaccurate - data may not be fit for use. |
+| Applicability | |
+| Example(s) | A European school is receiving applications for its annual September intake and requires students to be aged 5 before the 31st August of the intake year. <br/><br/>In this scenario, the parent, a US Citizen, applying to a European school completes the Date of Birth (D.O.B) on the application form in the US date format, MM/DD/YYYY, rather than the European DD/MM/YYYY format, causing the representation of days and months to be reversed. <br/><br/>As a result, 09/08/YYYY really meant 08/09/YYYY, causing the student to be accepted as the age of 5 on the 31st August in YYYY. <br/><br/>The representation of the student’s D.O.B. – whilst valid in its US context – means that in Europe the age was not derived correctly and the value recorded was consequently not accurate. |
+| Pseudo code | ((Count of accurate objects) / (Count of accurate objects + Count of inaccurate objects)) x 100 <br/> Example: (Count of children who applied aged 5 before August 31st YYYY) / (Count of children who applied aged 5 before August 31st YYYY + Count of children who applied aged 5 after August 31st YYYY and before December 31st YYYY) x 100 |
+
+<a id="validity" name="validity"></a>
+
+| Title   | Validity |
+| ------- | -------- |
+| Definition| Data are valid if it conforms to the syntax (format, type, range) of its definition.|
+| Reference| Database, metadata or documentation rules as to the allowable types (string, integer, floating point etc.), the format (length, number of digits etc.) and range (minimum, maximum or contained within a set of allowable values).|
+| Measure | Comparison between the data and the metadata or documentation for the data item.|
+| Scope | All data can typically be measured for Validity. Validity applies at the data item level and record level (for combinations of valid values).|
+| Unit of Measure |Percentage of data items deemed Valid to Invalid. |
+| Type of Measure: <br/><ul><li>Assessment</li><li>Continuous</li><li>Discrete</li></ul>|Assessment, Continuous and Discrete|
+|Related dimension|Accuracy, Completeness, Consistency and Uniqueness|
+|Optionality|Mandatory|
+|Applicability| |
+|Example(s)|Each class in a UK secondary school is allocated a class identifier; this consists of the 3 initials of the teacher plus a two digit year group number of the class. It is declared as AAA99 (3 Alpha characters and two numeric characters).<br/><br/>Scenario 1:<br/>A new year 9 teacher, Sally Hearn (without a middle name) is appointed therefore there are only two initials. A decision must be made as to how to represent two initials or the rule will fail and the database will reject the class identifier of “SH09”. It is decided that an additional character “Z” will be added to pad the letters to 3: “SZH09”, however this could break the accuracy rule. A better solution would be to amend the database to accept 2 or 3 initials and 1 or 2 numbers.<br/><br/>Scenario 2:<br/>The age at entry to a UK primary & junior school is captured on the form for school applications. This is entered into a database and checked that it is between 4 and 11. If it were captured on the form as 14 or N/A it would be rejected as invalid.|
+|Pseudo code|Scenario 1:<br/>Evaluate that the Class Identifier is 2 or 3 letters a-z followed by 1 or 2 numbers 7 – 11.<br/><br/>Scenario 2:<br/>Evaluate that the age is numeric and that it is greater than or equal to 4 and less than or equal to 11. |

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/griffin-doc/userguide.md
----------------------------------------------------------------------
diff --git a/griffin-doc/userguide.md b/griffin-doc/userguide.md
deleted file mode 100644
index a27f00f..0000000
--- a/griffin-doc/userguide.md
+++ /dev/null
@@ -1,193 +0,0 @@
-<!--
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
--->
-# Apache Griffin User Guide
-
-## 1 Introduction & Access
-
-- Apache Griffin is an open source Data Quality solution for distributed data 
systems at any scale in both streaming or batch data context.
-- Users will primarily access this application from a PC.
-
-## 2 Procedures
-
-![bpmn](img/userguide/Capture.PNG)
-
-After you log into the system, you may follow the steps:
-
-1. First, create a new measure.
-2. Then, create a job to process the measure periodically.
-3. Finally, the heatmap and dashboard will show the data diagram of the 
measures.
-
-### 2.1 Data Asset
-
-You can check the data assets by clicking "DataAssets" on the top right corner.
-
-<img src="img/userguide/data asset new.png" >
-
-Then you can see all the data assets listed here.
-
-<img src="img/userguide/asset list.png">
-
-### 2.2 Create measure
-
-By clicking "Measures", and then choose "Create Measure". You can use the 
measure to process data and get the result you want.
-
-<img src="img/userguide/create measure.png">
-
-There are mainly four kinds of measures for you to choose, which are:
-
-1. if you want to measure the match rate between source and target, choose 
accuracy.
-2. if you want to check the specific value of the data(such as: null column 
count), choose validity.
-3. if you want to detect wrong data information, choose anomaly detection.
-4. if you have already processed the data by yourself, choose publish DQ 
metrics directly, POST it any time you want, and see the trend from data 
diagrams.
-
-At current we only support accuracy measure creation from UI.
-
-#### **2.2.1 Accuracy** [[1]](#accuracy)
-
-<img src="img/userguide/accuracy.png" >
-
-**Definition:**
-
-Measured by how the values agree with an identified source of truth.
-
-**Steps:**
-
-1) Choose source
-
-Select the source dataset and fields which will be used for comparision.
-
-<img src="img/userguide/source new.png">
-
-For example, we choose 2 columns here.
-
-2) Choose target:
-
-Select the target dataset and fields which will be used for comparision.
-
-<img src="img/userguide/target new.png">
-
-3) Mapping source and target
-
-- Step1: "Map To": Select which rule to match the source and the target. Here 
are 6 options to choose:
-  1. == : data of the two columns should be exactly matched.
-  2. !== : data of the two columns should be different.
-  3. \> : the target column data should be bigger than the source one.
-  3. \>= : the target column data should be bigger than or equal to the source 
one.
-  3. < : the target column data should be smaller than the source one.
-  3. <= : the target column data should be smaller than or equal to the source 
one.
-- Step2: "Source fields": choose the source column that you want to compare 
with the target column.
-
-<img src="img/userguide/mapping.png">
-
-4) Configuration
-
-Set up the measure required information.  
-The organization means the group of your measure, you can manage your 
measurement dashboard by group later.
-
-<img src="img/userguide/confirm new.png">
-
-5) Measure information
-
-After you create a new accuracy measure, you can check the measure you've 
created by selecting it in the listed measurements' page.
-
-<img src="img/userguide/measure info.png" >
-
-**Example:**
-
-Suppose the source table A has 1000 records and the target table B only has 
999 records which can perfectly match with A in selected fields, then the 
accuracy rate=999/1000*100%=99.9%.
-
-### 2.3 Create Job
-
-By clicking "Jobs", and then choose "Create Job". You can submit a job to 
execute your measure periodically.
-
-<img src="img/userguide/create job.png">
-
-At current we only support simple periodically scheduling job for measures.
-
-<img src="img/userguide/job config.png">
-
-Fill out the block of job configuration.
-- Source Partition: the pattern of the source data partition based on 
timestamp. "YYYYMMdd-HH" means the data source partitions are "dt" and "hour", 
for example, the partitions of data is like "dt = 20170605, hour = 15".
-- Target Partition: the pattern of the target data partition based on 
timestamp.
-- Measure Name: name of the measure you want to schedule. you need to choose 
it from the list of measures you've created before.
-- Start After(s): after how much time in seconds the job starts. It should not 
be negative. For example: 0 means start now.
-- Interval: the interval time in seconds of the job scheduling. It should be a 
integer. For example: 300 means schedule this job every 5 minutes.
-
-After submit the job, griffin will schedule the job in background, and after 
calculation, you can monitor the dashboard to view the result on UI.
-
-## 3 Metrics dashboard
-
-After the processing work has done, here are 3 ways to show the data diagram.  
 
-
-1. Click on "Health", it shows the heatmap of metrics data. The blocks in 
green represent health and the blocks in red represent unhealthy.    
-
-   <img src="img/userguide/heatmap.png" >
-
-2. Click on "DQ Metrics".
-
-   <img src="img/userguide/dq metrics.png">  
-
-   You can see the diagrams of metrics.
-
-   <img src="img/userguide/metrics dashboard.png" >
-
-   By clicking on the diagram, you can get the zoom-in picture of it, and know 
the metrics at the selected time window.  
-
-   <img src="img/userguide/dashboard big.png" >
-
-3. The metrics is shown on the right side of the page. By clicking on the 
measure, you can get the diagram and details about the measure result.   
-
-   <img src="img/userguide/right bar.png" >
-
-## References
-###Six core data quality dimensions
-
-*Content adapted from [THE SIX PRIMARY DIMENSIONS FOR DATA QUALITY 
ASSESSMENT](http://www.damauk.org/RWFilePub.php?&cat=403&dx=1&ob=3&rpn=catviewleafpublic403&id=106193),
 DAMA, UK*
-
-<a id="accuracy" name="accuracy"></a>
-
-| Title     | Accuracy |
-| ----------|----------|
-| Definition  | The degree to which data correctly describes the "real world" 
object or event being described.    |
-| Reference | Ideally the "real world" truth is established through primary 
research. However, as this is often not practical, it is common to use 3rd 
party reference data from sources which are deemed trustworthy and of the same 
chronology. |
-| Measure  | The degree to which the data mirrors the characteristics of the 
real world object or objects it represents. |
-|   Scope  | Any "real world" object or objects that may be characterized or 
described by data, held as data item, record, data set or database. |
-| Unit of Measure | The percentage of data entries that pass the data accuracy 
rules. |
-| Type of Measure: 
<br/><ul><li>Assessment</li><li>Continuous</li><li>Discrete</li></ul>| 
Assessment, e.g. primary research or reference against trusted data. Continuous 
Measurement, e.g. age of students derived from the relationship between the 
students’ dates of birth and the current date. Discrete Measurement, e.g. 
date of birth recorded. |
-| Related Dimension | Validity is a related dimension because, in order to be 
accurate, values must be valid, the right value and in the correct 
representation.|
-| Optionality | Mandatory because - when inaccurate - data may not be fit for 
use. |
-| Applicability | |
-| Example(s) | A European school is receiving applications for its annual 
September intake and requires students to be aged 5 before the 31st August of 
the intake year. <br/> <br/>In this scenario, the parent, a US Citizen, 
applying to a European school completes the Date of Birth (D.O.B) on the 
application form in the US date format, MM/DD/YYYY rather than the European 
DD/MM/YYYY format, causing the representation of days and months to be 
reversed. <br/> <br/> As a result, 09/08/YYYY really meant 08/09/YYYY causing 
the student to be accepted as the age of 5 on the 31st August in YYYY. <br/> 
<br/> The representation of the student’s D.O.B.–whilst valid in its US 
context–means that in Europe the age was not derived correctly and the value 
recorded was consequently not accurate |
-| Pseudo code | ((Count of accurate objects)/ (Count of accurate objects + 
Counts of inaccurate objects)) x 100 <br/> Example: (Count of children who 
applied aged 5 before August/YYYY)/ (Count of children who applied aged 5 
before August 31st YYYY+ Count of children who applied aged 5 after August 
/YYYY and before December 31st/YYYY) x 100|
-
-<a id="validity" name="validity"></a>
-
-| Title   | Validity |
-| ------- | -------- |
-| Definition| Data are valid if it conforms to the syntax (format, type, 
range) of its definition.|
-| Reference| Database, metadata or documentation rules as to the allowable 
types (string, integer, floating point etc.), the format (length, number of 
digits etc.) and range (minimum, maximum or contained within a set of allowable 
values).|
-| Measure | Comparison between the data and the metadata or documentation for 
the data item.|
-| Scope | All data can typically be measured for Validity. Validity applies at 
the data item level and record level (for combinations of valid values).|
-| Unit of Measure |Percentage of data items deemed Valid to Invalid. |
-| Type of Measure: 
<br/><ul><li>Assessment</li><li>Continuous</li><li>Discrete</li></ul>|Assessment,
 Continuous and Discrete|
-|Related dimension|Accuracy, Completeness, Consistency and Uniqueness|
-|Optionality|Mandatory|
-|Applicability| |
-|Example(s)|Each class in a UK secondary school is allocated a class 
identifier; this consists of the 3 initials of the teacher plus a two digit 
year group number of the class. It is declared as AAA99 (3 Alpha characters and 
two numeric characters).<br/><br/>Scenario 1:<br/>A new year 9 teacher, Sally 
Hearn (without a middle name) is appointed therefore there are only two 
initials. A decision must be made as to how to represent two initials or the 
rule will fail and the database will reject the class identifier of “SH09”. 
It is decided that an additional character “Z” will be added to pad the 
letters to 3: “SZH09”, however this could break the accuracy rule. A better 
solution would be to amend the database to accept 2 or 3 initials and 1 or 2 
numbers.<br/><br/> Scenario 2:<br/>The age at entry to a UK primary & junior 
school is captured on the form for school applications. This is entered into a 
database and checked that it is between 4 and 11. If it were captured on the
  form as 14 or N/A it would be rejected as invalid.|
-|Pseudo code|Scenario 1:<br/>Evaluate that the Class Identifier is 2 or 3 
letters a-z followed by 1 or 2 numbers 7 – 11.<br/><br/>Scenario 
2:<br/>Evaluate that the age is numeric and that it is greater than or equal to 
4 and less than or equal to 11. |

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/pom.xml
----------------------------------------------------------------------
diff --git a/service/pom.xml b/service/pom.xml
index ebd8c23..502b312 100644
--- a/service/pom.xml
+++ b/service/pom.xml
@@ -17,7 +17,8 @@ KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
@@ -34,7 +35,7 @@ under the License.
     <properties>
         <java.version>1.8</java.version>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <hadoop.version>2.6.0</hadoop.version>
+        <hadoop.version>2.7.1</hadoop.version>
         <hive.version>1.2.1</hive.version>
         <scala.version>2.10</scala.version>
         <spring.boot.version>1.5.1.RELEASE</spring.boot.version>
@@ -44,6 +45,7 @@ under the License.
         <powermock.version>1.6.6</powermock.version>
        <mockito.version>1.10.19</mockito.version>
        <spring-boot-maven-plugin.version>1.5.1.RELEASE</spring-boot-maven-plugin.version>
+        <derby.version>10.14.1.0</derby.version>
     </properties>
 
     <repositories>
@@ -88,6 +90,12 @@ under the License.
         </dependency>
 
         <dependency>
+            <groupId>org.apache.derby</groupId>
+            <artifactId>derbyclient</artifactId>
+            <version>${derby.version}</version>
+        </dependency>
+
+        <dependency>
             <groupId>com.fasterxml.jackson.core</groupId>
             <artifactId>jackson-databind</artifactId>
             <version>2.6.3</version>
@@ -97,6 +105,7 @@ under the License.
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-client</artifactId>
             <version>${hadoop.version}</version>
+            <!--<scope>provided</scope>-->
             <exclusions>
                 <exclusion>
                     <artifactId>servlet-api</artifactId>
@@ -170,6 +179,11 @@ under the License.
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.elasticsearch.client</groupId>
+            <artifactId>elasticsearch-rest-client</artifactId>
+            <version>6.0.1</version>
+        </dependency>
     </dependencies>
     <profiles>
     </profiles>
@@ -186,6 +200,11 @@ under the License.
                         </goals>
                     </execution>
                 </executions>
+                <configuration>
+                    <fork>true</fork>
+                    <layout>ZIP</layout>
+                    <mainClass>org.apache.griffin.core.GriffinWebApplication</mainClass>
+                </configuration>
             </plugin>
             <plugin>
                 <groupId>org.apache.maven.plugins</groupId>

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java b/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java
index 35d68bb..a5b0137 100644
--- a/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java
+++ b/service/src/main/java/org/apache/griffin/core/config/CacheConfig.java
@@ -18,10 +18,24 @@ under the License.
 */
 package org.apache.griffin.core.config;
 
+import org.apache.commons.lang.StringUtils;
 import org.springframework.cache.annotation.EnableCaching;
+import org.springframework.cache.interceptor.KeyGenerator;
+import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 
 @Configuration
 @EnableCaching
 public class CacheConfig {
+    @Bean
+    public KeyGenerator cacheKeyGenerator() {
+        return (o, method, objects) -> {
+            StringBuilder sb = new StringBuilder(method.getName());
+            String params = StringUtils.join(objects);
+            if (!StringUtils.isEmpty(params)) {
+                sb.append(params);
+            }
+            return sb.toString();
+        };
+    }
 }
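For reference, a minimal usage sketch (hypothetical service and cache names, not part of this commit) showing how a cached method can reference the generator above by its bean name, which defaults to the factory method name "cacheKeyGenerator":

```java
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;

@Service
public class HiveMetaStoreServiceSketch {

    // With the generator above, getTable("default", "demo_src") caches
    // under the key "getTabledefaultdemo_src" (method name + joined args).
    @Cacheable(cacheNames = "hive", keyGenerator = "cacheKeyGenerator")
    public String getTable(String db, String table) {
        return db + "." + table; // stands in for an expensive metastore call
    }
}
```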

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java b/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
new file mode 100644
index 0000000..95b8676
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/config/PropertiesConfig.java
@@ -0,0 +1,96 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.config;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.griffin.core.util.PropertiesUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.io.ClassPathResource;
+import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.util.Properties;
+
+@Configuration
+public class PropertiesConfig {
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(PropertiesConfig.class);
+
+    private String location;
+
+    public PropertiesConfig(@Value("${external.config.location}") String location) {
+        LOGGER.info("external.config.location : {}", location);
+        this.location = location;
+    }
+
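+    /**
+     * Returns the path of the file named {@code name} under the external
+     * config {@code location} if it exists there; otherwise returns
+     * {@code defaultPath}. Fails if {@code location} cannot be listed or
+     * contains no files.
+     */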
+    private String getPath(String defaultPath, String name) {
+        String path = defaultPath;
+        File file = new File(location);
+        LOGGER.info("File absolute path:" + file.getAbsolutePath());
+        File[] files = file.listFiles();
+        if (files == null || files.length == 0) {
+            LOGGER.error("The defaultPath {} does not exist. Please check your config in application.properties.", location);
+            throw new NullPointerException();
+        }
+        for (File f : files) {
+            if (f.getName().equals(name)) {
+                path = location + File.separator + name;
+                LOGGER.info("config real path: {}", path);
+            }
+        }
+        return path;
+    }
+
+
+    @Bean(name = "appConf")
+    public Properties appConf() {
+        String path = "/application.properties";
+        return PropertiesUtil.getProperties(path, new ClassPathResource(path));
+    }
+
+    @Bean(name = "livyConf")
+    public Properties livyConf() throws FileNotFoundException {
+        String path = "/sparkJob.properties";
+        if (StringUtils.isEmpty(location)) {
+            return PropertiesUtil.getProperties(path, new ClassPathResource(path));
+        }
+        path = getPath(path, "sparkJob.properties");
+        Resource resource = new InputStreamResource(new FileInputStream(path));
+        return PropertiesUtil.getProperties(path, resource);
+    }
+
+    @Bean(name = "quartzConf")
+    public Properties quartzConf() throws FileNotFoundException {
+        String path = "/quartz.properties";
+        if (StringUtils.isEmpty(location)) {
+            return PropertiesUtil.getProperties(path, new ClassPathResource(path));
+        }
+        path = getPath(path, "quartz.properties");
+        Resource resource = new InputStreamResource(new FileInputStream(path));
+        return PropertiesUtil.getProperties(path, resource);
+    }
+}

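The pattern in PropertiesConfig is a two-step lookup: if external.config.location is unset, each properties bean loads its file from the classpath; otherwise getPath scans that directory and the bean reads the file from disk through an InputStreamResource. A self-contained sketch of the same fallback, under the assumption of a hypothetical /etc/griffin override directory:

    import java.io.File;
    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.util.Properties;

    public class ExternalConfigDemo {
        // Load `name` from `location` when an override exists there,
        // otherwise fall back to the copy bundled on the classpath.
        static Properties load(String location, String name) throws Exception {
            Properties props = new Properties();
            File candidate = (location == null) ? null : new File(location, name);
            if (candidate != null && candidate.isFile()) {
                try (InputStream in = new FileInputStream(candidate)) {
                    props.load(in);    // external copy wins
                }
            } else {
                // assumes the bundled resource exists; stays empty otherwise
                try (InputStream in = ExternalConfigDemo.class.getResourceAsStream("/" + name)) {
                    if (in != null) {
                        props.load(in);
                    }
                }
            }
            return props;
        }

        public static void main(String[] args) throws Exception {
            Properties livy = load("/etc/griffin", "sparkJob.properties");  // hypothetical path
            System.out.println(livy.stringPropertyNames());
        }
    }
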
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java b/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
new file mode 100644
index 0000000..896ab00
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/config/SchedulerConfig.java
@@ -0,0 +1,60 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.config;
+
+import org.apache.griffin.core.job.factory.AutowiringSpringBeanJobFactory;
+import org.apache.griffin.core.util.PropertiesUtil;
+import org.quartz.spi.JobFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.ApplicationContext;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.scheduling.quartz.SchedulerFactoryBean;
+
+import javax.sql.DataSource;
+import java.util.Properties;
+
+@Configuration
+public class SchedulerConfig {
+
+    @Autowired
+    @Qualifier("quartzConf")
+    private Properties quartzConf;
+
+    @Bean
+    public JobFactory jobFactory(ApplicationContext applicationContext) {
+        AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
+        jobFactory.setApplicationContext(applicationContext);
+        return jobFactory;
+    }
+
+    @Bean
+    public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource, JobFactory jobFactory) {
+        SchedulerFactoryBean factory = new SchedulerFactoryBean();
+        factory.setOverwriteExistingJobs(true);
+        factory.setDataSource(dataSource);
+        factory.setJobFactory(jobFactory);
+        factory.setQuartzProperties(quartzConf);
+        return factory;
+    }
+
+
+}

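SchedulerConfig hands Quartz a Spring-aware JobFactory so that @Autowired fields inside Job classes (such as JobInstance further down) are injected when Quartz instantiates them. The relocated AutowiringSpringBeanJobFactory, whose old copy is deleted in the next hunk, implements that pattern; a condensed sketch of its core (the class name here is illustrative):

    import org.quartz.spi.TriggerFiredBundle;
    import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
    import org.springframework.scheduling.quartz.SpringBeanJobFactory;

    // After Quartz creates the job instance, hand it to Spring so
    // @Autowired fields resolve against the application context.
    public class AutowiringJobFactorySketch extends SpringBeanJobFactory {
        private final AutowireCapableBeanFactory beanFactory;

        public AutowiringJobFactorySketch(AutowireCapableBeanFactory beanFactory) {
            this.beanFactory = beanFactory;
        }

        @Override
        protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
            Object job = super.createJobInstance(bundle);
            beanFactory.autowireBean(job);  // field injection happens here
            return job;
        }
    }
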
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java b/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
deleted file mode 100644
index be2c02d..0000000
--- a/service/src/main/java/org/apache/griffin/core/config/jobConfig/AutowiringSpringBeanJobFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.config.jobConfig;
-
-import org.quartz.spi.TriggerFiredBundle;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
-import org.springframework.context.ApplicationContext;
-import org.springframework.context.ApplicationContextAware;
-import org.springframework.scheduling.quartz.SpringBeanJobFactory;
-
-public final class AutowiringSpringBeanJobFactory extends SpringBeanJobFactory
-        implements ApplicationContextAware {
-    private static final Logger LOGGER = LoggerFactory.getLogger(AutowiringSpringBeanJobFactory.class);
-
-    private transient AutowireCapableBeanFactory beanFactory;
-
-    @Override
-    public void setApplicationContext(final ApplicationContext context) {
-        beanFactory = context.getAutowireCapableBeanFactory();
-    }
-
-    @Override
-    protected Object createJobInstance(final TriggerFiredBundle bundle) {
-
-        try {
-            final Object job = super.createJobInstance(bundle);
-            beanFactory.autowireBean(job);
-            return job;
-
-        } catch (Exception e) {
-            LOGGER.error("fail to create job instance. {}", e.getMessage());
-        }
-        return null;
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/jobConfig/SchedulerConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SchedulerConfig.java b/service/src/main/java/org/apache/griffin/core/config/jobConfig/SchedulerConfig.java
deleted file mode 100644
index ef71fe1..0000000
--- a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SchedulerConfig.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.config.jobConfig;
-
-import org.apache.griffin.core.util.JsonUtil;
-import org.apache.griffin.core.util.PropertiesUtil;
-import org.quartz.spi.JobFactory;
-import org.springframework.context.ApplicationContext;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-import org.springframework.scheduling.quartz.SchedulerFactoryBean;
-
-import javax.sql.DataSource;
-import java.util.Properties;
-
-@Configuration
-public class SchedulerConfig {
-
-       @Bean
-       public JobFactory jobFactory(ApplicationContext applicationContext) {
-               AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
-               jobFactory.setApplicationContext(applicationContext);
-               return jobFactory;
-       }
-
-       @Bean
-       public SchedulerFactoryBean schedulerFactoryBean(DataSource dataSource, JobFactory jobFactory) {
-               SchedulerFactoryBean factory = new SchedulerFactoryBean();
-               factory.setOverwriteExistingJobs(true);
-               factory.setDataSource(dataSource);
-               factory.setJobFactory(jobFactory);
-
-               factory.setQuartzProperties(quartzProperties());
-
-               return factory;
-       }
-
-       @Bean
-       public Properties quartzProperties() {
-               return PropertiesUtil.getProperties("/quartz.properties");
-       }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java b/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
deleted file mode 100644
index ffaef70..0000000
--- a/service/src/main/java/org/apache/griffin/core/config/jobConfig/SparkJobConfig.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing,
-software distributed under the License is distributed on an
-"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-KIND, either express or implied.  See the License for the
-specific language governing permissions and limitations
-under the License.
-*/
-
-package org.apache.griffin.core.config.jobConfig;
-
-import org.apache.griffin.core.util.PropertiesUtil;
-import org.springframework.context.annotation.Bean;
-import org.springframework.context.annotation.Configuration;
-
-import java.util.Properties;
-
-@Configuration
-public class SparkJobConfig {
-    @Bean(name = "sparkJobProps")
-    public Properties sparkJobProperties() {
-        return PropertiesUtil.getProperties("/sparkJob.properties");
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java b/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
new file mode 100644
index 0000000..a97d812
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/job/FileExistPredicator.java
@@ -0,0 +1,67 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.job;
+
+import org.apache.griffin.core.job.entity.SegmentPredicate;
+import org.apache.griffin.core.util.FSUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.apache.griffin.core.job.JobInstance.PATH_CONNECTOR_CHARACTER;
+
+public class FileExistPredicator implements Predicator {
+    private static final Logger LOGGER = LoggerFactory.getLogger(FileExistPredicator.class);
+
+    public static final String PREDICT_PATH = "path";
+    public static final String PREDICT_ROOT_PATH = "root.path";
+
+    private SegmentPredicate predicate;
+
+    public FileExistPredicator(SegmentPredicate predicate) {
+        this.predicate = predicate;
+    }
+
+    @Override
+    public boolean predicate() throws IOException {
+        Map<String, String> config = predicate.getConfigMap();
+        String[] paths = null;
+        if (config.get(PREDICT_PATH) != null) {
+            paths = config.get(PREDICT_PATH).split(PATH_CONNECTOR_CHARACTER);
+        }
+        String rootPath = config.get(PREDICT_ROOT_PATH);
+        if (paths == null || rootPath == null) {
+            LOGGER.error("Predicate path is null.Please check predicates 
config root.path and path.");
+            throw new NullPointerException();
+        }
+        for (String path : paths) {
+            String hdfsPath = rootPath + path;
+            LOGGER.info("Predicate path: {}", hdfsPath);
+            if (!FSUtil.isFileExist(hdfsPath)) {
+                LOGGER.info("Predicate path: " + hdfsPath + " doesn't exist.");
+                return false;
+            }
+            LOGGER.info("Predicate path: " + hdfsPath + " exists.");
+        }
+        return true;
+    }
+}

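FileExistPredicator reads two keys from the predicate's config map: root.path and a comma-separated path list (the comma is JobInstance.PATH_CONNECTOR_CHARACTER), and it reports the segment ready only when every root.path + path exists on HDFS. A sketch of the expansion, with hypothetical paths:

    import java.util.HashMap;
    import java.util.Map;

    public class PredicatePathDemo {
        public static void main(String[] args) {
            Map<String, String> config = new HashMap<>();
            config.put("root.path", "hdfs:///griffin/demo_src");            // hypothetical
            config.put("path", "/dt=20171015/_DONE,/dt=20171016/_DONE");    // hypothetical
            // Same expansion as FileExistPredicator.predicate():
            for (String p : config.get("path").split(",")) {
                System.out.println("check " + config.get("root.path") + p);
            }
            // predicate() returns true only if every printed path exists.
        }
    }
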
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/job/JobController.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobController.java b/service/src/main/java/org/apache/griffin/core/job/JobController.java
index 222006e..50f6614 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobController.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobController.java
@@ -19,50 +19,49 @@ under the License.
 
 package org.apache.griffin.core.job;
 
+import org.apache.griffin.core.job.entity.JobDataBean;
 import org.apache.griffin.core.job.entity.JobHealth;
-import org.apache.griffin.core.job.entity.JobInstance;
-import org.apache.griffin.core.job.entity.JobRequestBody;
+import org.apache.griffin.core.job.entity.JobInstanceBean;
+import org.apache.griffin.core.job.entity.JobSchedule;
 import org.apache.griffin.core.util.GriffinOperationMessage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.*;
 
-import java.io.Serializable;
 import java.util.List;
-import java.util.Map;
 
 @RestController
-@RequestMapping("/api/v1/jobs")
+@RequestMapping("/api/v1")
 public class JobController {
-    private static final Logger LOGGER = LoggerFactory.getLogger(JobController.class);
 
     @Autowired
     private JobService jobService;
 
-    @RequestMapping(value = "", method = RequestMethod.GET)
-    public List<Map<String, Serializable>> getJobs() {
+    @RequestMapping(value = "/jobs", method = RequestMethod.GET)
+    public List<JobDataBean> getJobs() {
         return jobService.getAliveJobs();
     }
 
-    @RequestMapping(value = "", method = RequestMethod.POST)
-    public GriffinOperationMessage addJob(@RequestParam("group") String groupName, @RequestParam("jobName") String jobName,
-                                          @RequestParam("measureId") Long measureId, @RequestBody JobRequestBody jobRequestBody) {
-        return jobService.addJob(groupName, jobName, measureId, jobRequestBody);
+    @RequestMapping(value = "/jobs", method = RequestMethod.POST)
+    public GriffinOperationMessage addJob(@RequestBody JobSchedule jobSchedule) {
+        return jobService.addJob(jobSchedule);
     }
 
-    @RequestMapping(value = "", method = RequestMethod.DELETE)
-    public GriffinOperationMessage deleteJob(@RequestParam("group") String group, @RequestParam("jobName") String jobName) {
-        return jobService.deleteJob(group, jobName);
+    @RequestMapping(value = "/jobs", method = RequestMethod.DELETE)
+    public GriffinOperationMessage deleteJob(@RequestParam("jobName") String jobName) {
+        return jobService.deleteJob(jobName);
     }
 
-    @RequestMapping(value = "/instances", method = RequestMethod.GET)
-    public List<JobInstance> findInstancesOfJob(@RequestParam("group") String group, @RequestParam("jobName") String jobName,
-                                                @RequestParam("page") int page, @RequestParam("size") int size) {
-        return jobService.findInstancesOfJob(group, jobName, page, size);
+    @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE)
+    public GriffinOperationMessage deleteJob(@PathVariable("id") Long id) {
+        return jobService.deleteJob(id);
     }
 
-    @RequestMapping(value = "/health", method = RequestMethod.GET)
+    @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET)
+    public List<JobInstanceBean> findInstancesOfJob(@RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size) {
+        return jobService.findInstancesOfJob(id, page, size);
+    }
+
+    @RequestMapping(value = "/jobs/health", method = RequestMethod.GET)
     public JobHealth getHealthInfo() {
         return jobService.getHealthInfo();
     }

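With this change the job REST API is keyed by job id or job name instead of group/name pairs, and the endpoints hang off /api/v1. A sketch of calling the new delete endpoints, assuming the service listens on localhost:8080 (job id 1 and the job name are hypothetical):

    import java.io.IOException;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class JobApiDemo {
        static int delete(String url) throws IOException {
            HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
            conn.setRequestMethod("DELETE");
            return conn.getResponseCode();   // HTTP status code
        }

        public static void main(String[] args) throws IOException {
            String base = "http://localhost:8080/api/v1";
            System.out.println(delete(base + "/jobs/1"));                  // DELETE /jobs/{id}
            System.out.println(delete(base + "/jobs?jobName=demo_job"));   // DELETE /jobs?jobName=...
        }
    }
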
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobInstance.java b/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
new file mode 100644
index 0000000..a785fbf
--- /dev/null
+++ b/service/src/main/java/org/apache/griffin/core/job/JobInstance.java
@@ -0,0 +1,282 @@
+/*
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+
+  http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+*/
+
+package org.apache.griffin.core.job;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.commons.lang.StringUtils;
+import org.apache.griffin.core.job.entity.*;
+import org.apache.griffin.core.job.repo.GriffinJobRepo;
+import org.apache.griffin.core.job.repo.JobScheduleRepo;
+import org.apache.griffin.core.measure.entity.DataConnector;
+import org.apache.griffin.core.measure.entity.DataSource;
+import org.apache.griffin.core.measure.entity.GriffinMeasure;
+import org.apache.griffin.core.measure.repo.GriffinMeasureRepo;
+import org.apache.griffin.core.util.JsonUtil;
+import org.apache.griffin.core.util.TimeUtil;
+import org.quartz.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.scheduling.quartz.SchedulerFactoryBean;
+
+import java.io.IOException;
+import java.text.ParseException;
+import java.util.*;
+
+import static org.apache.griffin.core.job.JobServiceImpl.GRIFFIN_JOB_ID;
+import static org.apache.griffin.core.job.JobServiceImpl.JOB_SCHEDULE_ID;
+import static org.quartz.JobBuilder.newJob;
+import static org.quartz.JobKey.jobKey;
+import static org.quartz.TriggerBuilder.newTrigger;
+import static org.quartz.TriggerKey.triggerKey;
+
+@PersistJobDataAfterExecution
+@DisallowConcurrentExecution
+public class JobInstance implements Job {
+    private static final Logger LOGGER = LoggerFactory.getLogger(JobInstance.class);
+    static final String MEASURE_KEY = "measure";
+    static final String PREDICATES_KEY = "predicts";
+    static final String PREDICATE_JOB_NAME = "predicateJobName";
+    static final String JOB_NAME = "jobName";
+    static final String PATH_CONNECTOR_CHARACTER = ",";
+
+    @Autowired
+    private SchedulerFactoryBean factory;
+    @Autowired
+    private GriffinMeasureRepo measureRepo;
+    @Autowired
+    private GriffinJobRepo jobRepo;
+    @Autowired
+    private JobScheduleRepo jobScheduleRepo;
+    @Autowired
+    @Qualifier("appConf")
+    private Properties appConfProps;
+
+    private JobSchedule jobSchedule;
+    private GriffinMeasure measure;
+    private GriffinJob griffinJob;
+    private List<SegmentPredicate> mPredicts;
+    private Long jobStartTime;
+
+
+    @Override
+    public void execute(JobExecutionContext context) throws JobExecutionException {
+        try {
+            initParam(context);
+            setSourcesPartitionsAndPredicates(measure.getDataSources());
+            createJobInstance(jobSchedule.getConfigMap());
+        } catch (Exception e) {
+            LOGGER.error("Create predicate job failure.", e);
+        }
+    }
+
+    private void initParam(JobExecutionContext context) throws SchedulerException {
+        mPredicts = new ArrayList<>();
+        JobDetail jobDetail = context.getJobDetail();
+        Long jobScheduleId = jobDetail.getJobDataMap().getLong(JOB_SCHEDULE_ID);
+        Long griffinJobId = jobDetail.getJobDataMap().getLong(GRIFFIN_JOB_ID);
+        jobSchedule = jobScheduleRepo.findOne(jobScheduleId);
+        Long measureId = jobSchedule.getMeasureId();
+        griffinJob = jobRepo.findOne(griffinJobId);
+        measure = measureRepo.findOne(measureId);
+        setJobStartTime(jobDetail);
+
+    }
+
+    private void setJobStartTime(JobDetail jobDetail) throws SchedulerException {
+        Scheduler scheduler = factory.getObject();
+        JobKey jobKey = jobDetail.getKey();
+        List<Trigger> triggers = (List<Trigger>) scheduler.getTriggersOfJob(jobKey);
+        Date triggerTime = triggers.get(0).getPreviousFireTime();
+        jobStartTime = triggerTime.getTime();
+    }
+
+
+    private void setSourcesPartitionsAndPredicates(List<DataSource> sources) throws Exception {
+        boolean isFirstBaseline = true;
+        for (JobDataSegment jds : jobSchedule.getSegments()) {
+            if (jds.getBaseline() && isFirstBaseline) {
+                Long tsOffset = TimeUtil.str2Long(jds.getSegmentRange().getBegin());
+                measure.setTimestamp(jobStartTime + tsOffset);
+                isFirstBaseline = false;
+            }
+            for (DataSource ds : sources) {
+                setDataSourcePartitions(jds, ds);
+            }
+        }
+    }
+
+    private void setDataSourcePartitions(JobDataSegment jds, DataSource ds) throws Exception {
+        List<DataConnector> connectors = ds.getConnectors();
+        for (DataConnector dc : connectors) {
+            setDataConnectorPartitions(jds, dc);
+        }
+    }
+
+
+    private void setDataConnectorPartitions(JobDataSegment jds, DataConnector dc) throws Exception {
+        String dcName = jds.getDataConnectorName();
+        if (dcName.equals(dc.getName())) {
+            Long[] sampleTs = genSampleTs(jds.getSegmentRange(), dc);
+            setConnectorConf(dc, sampleTs);
+            setConnectorPredicates(dc, sampleTs);
+        }
+    }
+
+    /**
+     * Split data into several parts and get each part's start timestamp.
+     *
+     * @param segRange config of data
+     * @param dc       data connector
+     * @return split timestamps of data
+     */
+    private Long[] genSampleTs(SegmentRange segRange, DataConnector dc) throws IOException {
+        Long offset = TimeUtil.str2Long(segRange.getBegin());
+        Long range = TimeUtil.str2Long(segRange.getLength());
+        Long dataUnit = TimeUtil.str2Long(dc.getDataUnit());
+        // the begin offset is usually negative
+        Long dataStartTime = jobStartTime + offset;
+        if (range < 0) {
+            dataStartTime += range;
+            range = Math.abs(range);
+        }
+        if (Math.abs(dataUnit) >= range || dataUnit == 0) {
+            return new Long[]{dataStartTime};
+        }
+        int count = (int) (range / dataUnit);
+        Long[] timestamps = new Long[count];
+        for (int index = 0; index < count; index++) {
+            timestamps[index] = dataStartTime + index * dataUnit;
+        }
+        return timestamps;
+    }
+
+    /**
+     * set data connector predicates
+     *
+     * @param sampleTs collection of data split start timestamp
+     */
+    private void setConnectorPredicates(DataConnector dc, Long[] sampleTs) throws IOException {
+        List<SegmentPredicate> predicates = dc.getPredicates();
+        if (predicates != null) {
+            for (SegmentPredicate predicate : predicates) {
+                genConfMap(predicate.getConfigMap(), sampleTs);
+                // do not forget to update the original string config as well
+                predicate.setConfigMap(predicate.getConfigMap());
+                mPredicts.add(predicate);
+            }
+        }
+    }
+
+    /**
+     * set data connector configs
+     *
+     * @param sampleTs collection of data split start timestamp
+     */
+    private void setConnectorConf(DataConnector dc, Long[] sampleTs) throws IOException {
+        genConfMap(dc.getConfigMap(), sampleTs);
+        dc.setConfigMap(dc.getConfigMap());
+    }
+
+
+    /**
+     * @param conf     map with file predicate, data split and partition info
+     * @param sampleTs collection of data split start timestamps
+     * @return all config data combined, like {"where": "year=2017 AND month=11 AND dt=15 AND hour=09,year=2017 AND month=11 AND dt=15 AND hour=10"}
+     * or like {"path": "/year=#2017/month=11/dt=15/hour=09/_DONE,/year=#2017/month=11/dt=15/hour=10/_DONE"}
+     */
+    private void genConfMap(Map<String, String> conf, Long[] sampleTs) {
+        for (Map.Entry<String, String> entry : conf.entrySet()) {
+            String value = entry.getValue();
+            Set<String> set = new HashSet<>();
+            for (Long timestamp : sampleTs) {
+                set.add(TimeUtil.format(value, timestamp));
+            }
+            conf.put(entry.getKey(), StringUtils.join(set, PATH_CONNECTOR_CHARACTER));
+        }
+    }
+
+    private boolean createJobInstance(Map<String, Object> confMap) throws Exception {
+        Map<String, Object> config = (Map<String, Object>) confMap.get("checkdonefile.schedule");
+        Long interval = TimeUtil.str2Long((String) config.get("interval"));
+        Integer repeat = Integer.valueOf(config.get("repeat").toString());
+        String groupName = "PG";
+        String jobName = griffinJob.getJobName() + "_predicate_" + System.currentTimeMillis();
+        Scheduler scheduler = factory.getObject();
+        TriggerKey triggerKey = triggerKey(jobName, groupName);
+        return !(scheduler.checkExists(triggerKey)
+                || !saveGriffinJob(jobName, groupName)
+                || !createJobInstance(scheduler, triggerKey, interval, repeat, jobName));
+    }
+
+    private boolean saveGriffinJob(String pName, String pGroup) {
+        List<JobInstanceBean> instances = griffinJob.getJobInstances();
+        Long tms = System.currentTimeMillis();
+        Long expireTms = Long.valueOf(appConfProps.getProperty("jobInstance.expired.milliseconds")) + tms;
+        instances.add(new JobInstanceBean(LivySessionStates.State.finding, pName, pGroup, tms, expireTms));
+        griffinJob = jobRepo.save(griffinJob);
+        return true;
+    }
+
+    private boolean createJobInstance(Scheduler scheduler, TriggerKey triggerKey, Long interval, Integer repeatCount, String pJobName) throws Exception {
+        JobDetail jobDetail = addJobDetail(scheduler, triggerKey, pJobName);
+        scheduler.scheduleJob(newTriggerInstance(triggerKey, jobDetail, interval, repeatCount));
+        return true;
+    }
+
+
+    private Trigger newTriggerInstance(TriggerKey triggerKey, JobDetail jd, Long interval, Integer repeatCount) throws ParseException {
+        return newTrigger()
+                .withIdentity(triggerKey)
+                .forJob(jd)
+                .startNow()
+                .withSchedule(SimpleScheduleBuilder.simpleSchedule()
+                        .withIntervalInMilliseconds(interval)
+                        .withRepeatCount(repeatCount)
+                )
+                .build();
+    }
+
+    private JobDetail addJobDetail(Scheduler scheduler, TriggerKey triggerKey, String pJobName) throws SchedulerException, JsonProcessingException {
+        JobKey jobKey = jobKey(triggerKey.getName(), triggerKey.getGroup());
+        JobDetail jobDetail;
+        Boolean isJobKeyExist = scheduler.checkExists(jobKey);
+        if (isJobKeyExist) {
+            jobDetail = scheduler.getJobDetail(jobKey);
+        } else {
+            jobDetail = newJob(SparkSubmitJob.class)
+                    .storeDurably()
+                    .withIdentity(jobKey)
+                    .build();
+        }
+        setJobDataMap(jobDetail, pJobName);
+        scheduler.addJob(jobDetail, isJobKeyExist);
+        return jobDetail;
+    }
+
+    private void setJobDataMap(JobDetail jobDetail, String pJobName) throws JsonProcessingException {
+        JobDataMap dataMap = jobDetail.getJobDataMap();
+        dataMap.put(MEASURE_KEY, JsonUtil.toJson(measure));
+        dataMap.put(PREDICATES_KEY, JsonUtil.toJson(mPredicts));
+        dataMap.put(JOB_NAME, griffinJob.getJobName());
+        dataMap.put(PREDICATE_JOB_NAME, pJobName);
+    }
+
+}

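The heart of JobInstance is genSampleTs: it converts the segment's begin/length offsets and the connector's data.unit into one timestamp per data split, relative to the trigger's previous fire time. A worked sketch of that arithmetic with hypothetical values (begin = "-2h", length = "2h", data.unit = "1h"):

    public class SampleTsDemo {
        public static void main(String[] args) {
            long jobStartTime = 1509000000000L;       // hypothetical trigger fire time (ms)
            long offset = -2L * 60 * 60 * 1000;       // begin: "-2h" (usually negative)
            long range = 2L * 60 * 60 * 1000;         // length: "2h"
            long dataUnit = 60L * 60 * 1000;          // data.unit: "1h"

            long dataStartTime = jobStartTime + offset;
            if (range < 0) {                          // negative length means "look backwards"
                dataStartTime += range;
                range = Math.abs(range);
            }
            if (Math.abs(dataUnit) >= range || dataUnit == 0) {
                System.out.println(dataStartTime);    // single split
                return;
            }
            int count = (int) (range / dataUnit);     // here: 2 hourly splits
            for (int i = 0; i < count; i++) {
                System.out.println(dataStartTime + i * dataUnit);
            }
        }
    }

genConfMap then stamps each timestamp into the connector's config templates and joins the results with a comma, producing values shaped like the "where"/"path" examples in the javadoc above.
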
http://git-wip-us.apache.org/repos/asf/incubator-griffin/blob/e7e4c3a7/service/src/main/java/org/apache/griffin/core/job/JobService.java
----------------------------------------------------------------------
diff --git a/service/src/main/java/org/apache/griffin/core/job/JobService.java b/service/src/main/java/org/apache/griffin/core/job/JobService.java
index ca6f2f9..a210105 100644
--- a/service/src/main/java/org/apache/griffin/core/job/JobService.java
+++ b/service/src/main/java/org/apache/griffin/core/job/JobService.java
@@ -19,28 +19,28 @@ under the License.
 
 package org.apache.griffin.core.job;
 
+import org.apache.griffin.core.job.entity.JobDataBean;
 import org.apache.griffin.core.job.entity.JobHealth;
-import org.apache.griffin.core.job.entity.JobInstance;
-import org.apache.griffin.core.job.entity.JobRequestBody;
+import org.apache.griffin.core.job.entity.JobInstanceBean;
+import org.apache.griffin.core.job.entity.JobSchedule;
 import org.apache.griffin.core.util.GriffinOperationMessage;
+import org.quartz.SchedulerException;
 
-import java.io.Serializable;
 import java.util.List;
-import java.util.Map;
 
 public interface JobService {
 
-    List<Map<String, Serializable>> getAliveJobs();
+    List<JobDataBean> getAliveJobs();
 
-    GriffinOperationMessage addJob(String groupName, String jobName, Long measureId, JobRequestBody jobRequestBody);
+    GriffinOperationMessage addJob(JobSchedule jobSchedule);
 
-    GriffinOperationMessage pauseJob(String group, String name);
+    boolean pauseJob(String group, String name) throws SchedulerException;
 
-    GriffinOperationMessage deleteJob(String groupName, String jobName);
+    GriffinOperationMessage deleteJob(Long jobId);
 
-    List<JobInstance> findInstancesOfJob(String group, String name, int page, int size);
+    GriffinOperationMessage deleteJob(String jobName);
 
-    Map<String, List<Map<String, Serializable>>> getJobDetailsGroupByMeasureId();
+    List<JobInstanceBean> findInstancesOfJob(Long jobId, int page, int size);
 
     JobHealth getHealthInfo();
 }

