e-site.xml into hive-site.xml
put hive-site.xml kylin/conf/ directory
apache_...@163.com
From: Billy Liu
Date: 2017-11-05 10:59
To: dev
Subject: Re: cube run 3 step:org.apache.thrift.transport.TTransportException
Try copy all properties in hivemetastore-site.xml into hive-site.xml
2017-11-
INFO [Job 8557407a-8e87-49aa-9cff-b7e3f58c2f12-102]
execution.ExecutableManager:425 : job
id:8557407a-8e87-49aa-9cff-b7e3f58c2f12-02 from RUNNING to ERROR
apache_...@163.com
c2f12-102]
execution.ExecutableManager:425 : job
id:8557407a-8e87-49aa-9cff-b7e3f58c2f12-02 from RUNNING to ERROR
apache_...@163.com
765 ms
/etc/hosts:
127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
xx.xx.xx.xx bd05
xx.xx.xx.xx bd03
xx.xx.xx.xx bd02
xx.xx.xx.xx bd04
xx.xx.xx.xx bd01
apache_...@163.com
_NAME",
"table": "DIM_ENT_SCA",
"column": null,
"derived": [
"ENT_SCA_NAME"
]
},
{
"name": "INDU_CD",
"table": "DIM_INDU",
"column": "INDU_CD",
"derived": null
},
{
"name": "INDU_NAME",
"table": "DIM_INDU",
"column": null,
"derived": [
"INDU_NAME"
]
},
{
"name": "INDU_LV",
"table": "DIM_INDU",
"column": null,
"derived": [
"INDU_LV"
]
},
{
"name": "INDU_CD_LV1",
"table": "DIM_INDU",
"column": null,
"derived": [
"INDU_CD_LV1"
]
},
{
"name": "INDU_NAME_LV1",
"table": "DIM_INDU",
"column": null,
"derived": [
"INDU_NAME_LV1"
]
},
{
"name": "ENT_PSN_CD",
"table": "DIM_ENT_PSN",
"column": "ENT_PSN_CD",
"derived": null
},
{
"name": "ENT_PSN_NAME",
"table": "DIM_ENT_PSN",
"column": null,
"derived": [
"ENT_PSN_NAME"
]
},
{
"name": "PER_LOAN_CD",
"table": "DIM_PER_LOAN",
"column": "PER_LOAN_CD",
"derived": null
},
{
"name": "PER_LOAN_NAME",
"table": "DIM_PER_LOAN",
"column": null,
"derived": [
"PER_LOAN_NAME"
]
},
{
"name": "PER_LOAN_LV",
"table": "DIM_PER_LOAN",
"column": null,
"derived": [
"PER_LOAN_LV"
]
},
{
"name": "PER_LOAN_CD_LV1",
"table": "DIM_PER_LOAN",
"column": null,
"derived": [
"PER_LOAN_CD_LV1"
]
},
{
"name": "PER_LOAN_NAME_LV1",
"table": "DIM_PER_LOAN",
"column": null,
"derived": [
"PER_LOAN_NAME_LV1"
]
},
{
"name": "APPR_CREXT_RENT",
"table": "DIM_APPR_CREXT",
"column": "APPR_CREXT_RENT",
"derived": null
},
{
"name": "APPR_CREXT_RENT_DES",
"table": "DIM_APPR_CREXT",
"column": null,
"derived": [
"APPR_CREXT_RENT_DES"
]
},
{
"name": "CUST_LON_TYPE",
"table": "DIM_CUST_LON",
"column": "CUST_LON_TYPE",
"derived": null
},
{
"name": "CUST_LON_NAME",
"table": "DIM_CUST_LON",
"column": null,
"derived": [
"CUST_LON_NAME"
]
},
{
"name": "CLASS_FIVE_CD",
"table": "DIM_CLASS_FIVE",
"column": "CLASS_FIVE_CD",
"derived": null
},
{
"name": "CLASS_FIVE_NAME",
"table": "DIM_CLASS_FIVE",
"column": null,
"derived": [
"CLASS_FIVE_NAME"
]
},
{
"name": "CLASS_FIVE_TYPE",
"table": "DIM_CLASS_FIVE",
"column": null,
"derived": [
"CLASS_FIVE_TYPE"
]
},
{
"name": "MAIN_GUAR_CD",
"table": "DIM_MAIN_GUAR",
"column": "MAIN_GUAR_CD",
"derived": null
},
{
"name": "MAIN_GUAR_NAME",
"table": "DIM_MAIN_GUAR",
"column": null,
"derived": [
"MAIN_GUAR_NAME"
]
},
{
"name": "ESTATE_CD",
"table": "DIM_ESTATE_LON",
"column": "ESTATE_CD",
"derived": null
},
{
"name": "ESTATE_DES",
"table": "DIM_ESTATE_LON",
"column": null,
"derived": [
"ESTATE_DES"
]
},
{
"name": "ESTATE_LV",
"table": "DIM_ESTATE_LON",
"column": null,
"derived": [
"ESTATE_LV"
]
},
{
"name": "ESTATE_CD_LV1",
"table": "DIM_ESTATE_LON",
"column": null,
"derived": [
"ESTATE_CD_LV1"
]
},
{
"name": "ESTATE_DES_LV1",
"table": "DIM_ESTATE_LON",
"column": null,
"derived": [
"ESTATE_DES_LV1"
]
},
{
"name": "GL_ACC_CD",
"table": "DIM_GL_ACC",
"column": "GL_ACC_CD",
"derived": null
},
{
"name": "GL_ACC_NAME",
"table": "DIM_GL_ACC",
"column": null,
"derived": [
"GL_ACC_NAME"
]
}
],
"measures": [
{
"name": "_COUNT_",
"function": {
"expression": "COUNT",
"parameter": {
"type": "constant",
"value": "1"
},
"returntype": "bigint"
}
},
{
"name": "1",
"function": {
"expression": "SUM",
"parameter": {
"type": "column",
"value": "LOAN_GROUP.BAL"
},
"returntype": "decimal(19,4)"
}
}
],
"dictionaries": [],
"rowkey": {
"rowkey_columns": [
{
"column": "LOAN_GROUP.DATE_CD",
"encoding": "date",
"isShardBy": false
},
{
"column": "LOAN_GROUP.ORG_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.CURR_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.GL_ACC_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.ENT_SCA_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.INDU_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.ENT_PSN_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.PER_LON_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.APPR_CREXT_RENT",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.PRD_NO",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.CUST_LON_TYPE",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.PRD_PINGZ",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.CLASS_FIVE_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.MAIN_GUAR_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.REG_ADMI_AREA",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.FORE_LON_TYPE",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.BC_FLAG",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.STRA_INDU_NAME",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.INDU_TRF_UPD_PRO",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.ATRE_TYPE_NAME",
"encoding": "dict",
"isShardBy": false
},
{
"column": "LOAN_GROUP.ESTATE_TYPE",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_DATE.DATE_CD",
"encoding": "date",
"isShardBy": false
},
{
"column": "DIM_ORG.ORG_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_CURR.CURR_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_ENT_SCA.ENT_SCA_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_INDU.INDU_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_ENT_PSN.ENT_PSN_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_PER_LOAN.PER_LOAN_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_APPR_CREXT.APPR_CREXT_RENT",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_CUST_LON.CUST_LON_TYPE",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_CLASS_FIVE.CLASS_FIVE_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_MAIN_GUAR.MAIN_GUAR_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_ESTATE_LON.ESTATE_CD",
"encoding": "dict",
"isShardBy": false
},
{
"column": "DIM_GL_ACC.GL_ACC_CD",
"encoding": "dict",
"isShardBy": false
}
]
},
"hbase_mapping": {
"column_family": [
{
"name": "F1",
"columns": [
{
"qualifier": "M",
"measure_refs": [
"_COUNT_",
"1"
]
}
]
}
]
},
"aggregation_groups": [
{
"includes": [
"LOAN_GROUP.DATE_CD",
"LOAN_GROUP.ORG_CD",
"LOAN_GROUP.ENT_SCA_CD",
"LOAN_GROUP.INDU_CD",
"LOAN_GROUP.ENT_PSN_CD",
"LOAN_GROUP.PER_LON_CD",
"LOAN_GROUP.APPR_CREXT_RENT",
"LOAN_GROUP.ESTATE_TYPE",
"DIM_APPR_CREXT.APPR_CREXT_RENT"
],
"select_rule": {
"hierarchy_dims": [],
"mandatory_dims": [],
"joint_dims": []
}
}
],
"signature": "QmuvExlbmaV2+dowq1fs3w==",
"notify_list": [],
"status_need_notify": [
"ERROR",
"DISCARDED",
"SUCCEED"
],
"partition_date_start": 145160640,
"partition_date_end": 31536,
"auto_merge_time_ranges": [
60480,
241920
],
"retention_range": 0,
"engine_type": 2,
"storage_type": 2,
"override_kylin_properties": {},
"cuboid_black_list": [],
"parent_forward": 3
}
apache_...@163.com
Hi,
How to look at SQL's execution plan in Kylin?
Thank you.
apache_...@163.com
(Subject.java:422) at
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1724)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:162)
What is the cause?
apache_...@163.com
INFO [pool-8-thread-1] threadpool.DefaultScheduler:123
: Job Fetcher: 0 should running, 0 actual running, 0 stopped, 0 ready, 136
already succeed, 21 error, 5 discarded, 0 others
apache_...@163.com
From: Li Yang
Date: 2017-09-17 15:17
To: dev
Subject: Re: How to clean up the invisible cube
Th
Hi:
Sometime,When I delete a model, Prompt is referenced by cube, But I can't
see the cube by webUI
So,how to delete this model?
apache_...@163.com
thanks.
apache_...@163.com
From: Billy Liu
Date: 2017-09-06 16:43
To: dev
Subject: Re: How to set time parameter on the kylin server
Kylin use GMT timezone to process the the segment timestamp. Check code
makeSegmentName in CubeSegment class.
2017-09-06 14:27 GMT+08:00 apache_...@163.com
s overlap: day1[2012020100_2012020700] and
day1[2012020616_20120208155959]",
The time difference between the request and the request is 8 hours.
How should set the time zone on the server to get the correct request time?
Thanks.
apache_...@163.com
Hi,
Does it delete HBase data on hdfs through api/kylin API(DELETE submit )?
Does it delete old HBase data on hdfs when refresh a cube?
apache_...@163.com
Thanks.
version: apache-kylin-2.1.0-bin-hbase1x.tar.gz
deployed: single .
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-09-03 22:31
To: dev
Subject: Re: Can't edit cube
You'd better provide Kylin version, and how it be deployed (single or
clustere
apache_...@163.com
();
info.put("timeZone", "GMT +08");
rs.getDate(part_dt) ,result is 20170731
rs.getString(part_dt),result is 20170330.
rs is java resultset
What's the reason?
How can I set the result right(getString(part_dt) is 20170331)?
apache_...@163.com
From: apache_...@16
Hi,
This problem has now been sorted, thanks
Properties info = new Properties();
info.put("timeZone", "GMT +08");
apache_...@163.com
From: apache_...@163.com
Date: 2017-08-29 11:57
To: dev
Subject: How to set timezone in JDBC parameter?
Hi,
when i query da
.
apache_...@163.com
.
apache_...@163.com
Hi,
I run a cube by api(/kylin/api/cubes/{cubeName}/build), It's daily task。
How do I get the job status information (job id, job name, job step, job
status, job result) for this task?
Can I get it from cube build task response body?
Thanks.
apache_...@163.com
Hi,
I run a cube by api(/kylin/api/cubes/{cubeName}/build), It's daily task。
How do I get the job status information (job id, job name, job step, job
status, job result) for this task?
Thanks.
apache_...@163.com
Hi,
e.g:
A cube task has been running for a week(from day 7 to day 13),but at day
13, found day 11 data is error, day 12 and day 13 is right.
So how to purge date for day 11 only?
apache_...@163.com
This problem have now been sorted, Thinks
apache_...@163.com
From: apache_...@163.com
Date: 2017-08-24 12:11
To: dev
Subject: kylin2.1 odbc not work
Hi,
Env:kylin2.1 HDP2.5.3. odbc2.1 32bit.
odbc2.1 setup succeeded, but can't get any Kylin table info from Excel.
ap
Hi,
Env:kylin2.1 HDP2.5.3. odbc2.1 32bit.
odbc2.1 setup succeeded, but can't get any Kylin table info from Excel.
apache_...@163.com
Thanks.
This issue not found in kylin v2.1.
apache_...@163.com
From: Li Yang
Date: 2017-08-20 14:31
To: dev
Subject: Re: Subquery cann't work
Seems a bug to me. Please log a JIRA.
On Thu, Aug 17, 2017 at 10:17 AM, apache_...@163.com
wrote:
> Hi,
>
> There seems to be a pr
Hi,
There are a lot of surprises in kylin 2.1 version.It's amazing.
Ask a question:
What functions can be used to calculate the date of the end of the month in
kylin?
Like oracle LAST_DAY function
apache_...@163.com
CAL_DT.MONTH_BEG_DT AND T8_0.SELLER_ID = T8_2.SELLER_ID ) GROUP BY
PART_DT LIMIT 50000": Can't find any realization. Please confirm with
providers. SQL digest: fact table DEFAULT.KYLIN_CAL_DT,group by
[DEFAULT.KYLIN_SALES.PART_DT],filter on [],with aggregates[FunctionDesc
[expression=SUM, parameter=-($2, $9), returnType=null]].
apache_...@163.com
Run :
select date '2011-03-31' - INTERVAL '1' month from KYLIN_CAL_DT
got same result :2011-03-01
The result is incorrect
apache_...@163.com
From: Alberto Ramón
Date: 2017-08-15 16:21
To: dev
Subject: Re: Leap Month calculate error
Try to use DATE statement
Hi,
when i run sql by kyline GUI,Right result is 2011-02-28,but i got
2011-03-01,is bug?
select cast('2011-03-31' as date) - INTERVAL '1' month from KYLIN_CAL_DT
apache_...@163.com
pdate HTTP/1.1" 500 8766
(rest load table log,failed)
192.168.224.4 - - [14/Aug/2017:16:27:56 +0800] "POST
/kylin/api/tables/metric.fact2,/api HTTP/1.1" 500 148
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-08-14 13:33
To: dev
Subject: Re: Re: kylin load hive table rest
4/Aug/2017:11:39:25 +0800] "POST
/kylin/api/tables/metric.date_dim/load HTTP/1.1" 500 9919
apache_...@163.com
From: Billy Liu
Date: 2017-08-13 23:35
To: dev
Subject: Re: Re: kylin load hive table rest is error
As the document says {tables} and {project} are path parameters,
Yes.
metric.date_dim is table name in hive
load is kylin project.
Executing it in two environments have same error.
Kylin version:2.0
apache_...@163.com
From: Billy Liu
Date: 2017-08-13 23:35
To: dev
Subject: Re: Re: kylin load hive table rest is error
As the document says {tables
be loaded into.
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-08-13 14:23
To: dev
Subject: Re: Re: kylin load hive table rest is error
Besides, can you sync the table from Kylin GUI? If GUI doesn't work either,
that should be env problem. Otherwise it would be API usage mistake.
20
tables/metric.date_dim/load
return error:
Server returned HTTP response code: 500 for URL:
http://xx.xx.xx.xx:7070/kylin/api/tables/metric.date_dim/load
but no relevant logs in kylin.log
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-08-12 14:59
To: dev
Subject: Re: kylin load hive table rest is er
/p>
</div>
<div class="callout callout-danger">
<h4>Streaming Schema</h4>
<pre>{{streamingSchema}}</pre>
</div>
<div class="callout callout-danger">
<h4>Kafka Schema</h4>
<pre>{{kfkSchema}}</pre>
</div>
<div class="callout callout-info">
<p>{{text}}</p>
</div>
apache_...@163.com
/p>
</div>
<div class="callout callout-danger">
<h4>Streaming Schema</h4>
<pre>{{streamingSchema}}</pre>
</div>
<div class="callout callout-danger">
<h4>Kafka Schema</h4>
<pre>{{kfkSchema}}</pre>
</div>
<div class="callout callout-info">
<p>{{text}}</p>
</div>
apache_...@163.com
ART_DT in('2012-01-01', '2012-01-02', '2012-01-03')
group by KYLIN_SALES.PART_DT, KYLIN_SALES.SELLER_ID
) T8
) T7
group by F16
apache_...@163.com
From: apache_...@163.com
Date: 2017-08-02 12:41
To: dev
Subject: Re: Re: query Sql run Error
I would like to expla
java:617)
at
org.apache.tomcat.util.threads.TaskThread$WrappingRunnable.run(TaskThread.java:61)
at java.lang.Thread.run(Thread.java:748)
apache_...@163.com
From: apache_...@163.com
Date: 2017-08-03 17:24
To: dev
Subject: ql often running error
Hi,
sql does often running error,but after restart
Hi,
Kylin2.0 on HDP2.5.3.
sql does often running error,but after restart kylin server,SQL can execute
properly
How do I find out the reason?
apache_...@163.com
Hi,
sql does often running error,but after restart kylin server,SQL can execute
properly
How do I find out the reason?
apache_...@163.com
These problems have now been sorted
thanks.
apache_...@163.com
发件人: apache_...@163.com
发送时间: 2017-08-02 12:14
收件人: dev
主题: How can I edit model & cube?
Hi,
How can I edit model & cube by rest interface? could you share some ways
to edit it by api(not kylin UI
I would like to explain one point ,first sql is Bi(smartbi) tools auto
created,This example has normal typical.
apache_...@163.com
From: apache_...@163.com
Date: 2017-08-01 18:36
To: dev
Subject: Re: Re: query Sql run Error
thinks,
Kylin2.0 on HDP2.5.3.
sql is not work.
select
Hi,
How can I edit model & cube by rest interface? could you share some ways
to edit it by api (not kylin UI), thanks.
apache_...@163.com
;, '2012-01-02', '2012-01-03')
group by KYLIN_SALES.PART_DT, KYLIN_SALES.SELLER_ID
) T8
on T8.PART_DT = T17.CAL_DT
) T7
group by F16,F0
) T7
group by T7.F16
What's the reason?
apache_...@163.com
Hi,
Is there any method to edit a model (scenario: add some dimensions or measures after
the base hive table is changed, but without creating a new model)? This model
already includes some cubes.
Only add some dimensions or measures in exist model.
apache_...@163.com
Yes ,3ks.
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-07-27 11:25
To: dev
Subject: Re: Re: sum function error
I know what's the problem now, after checking your document.
You defined A, B, C as dimensions, with no measure, and then query "SUM(B),
SUM(C) GROUP BY A". Alt
Sorry,
Kylin2.0 on HDP2.5.3
step printscreens are in a Word doc; download url, please view in mail subject.
apache_...@163.com
From: Billy Liu
Date: 2017-07-26 22:51
To: dev
Subject: Re: sum function error
When you describe something is wrong, could you show the reproduce steps,
versions, expected
sorry,
printScreen File(2.4M) is too large for mail server,could you download it in
attachments url, maybe attachments mail was intercepted ,please view mail
subject
apache_...@163.com
From: Billy Liu
Date: 2017-07-26 22:51
To: dev
Subject: Re: sum function error
When you describe
sorry,
printScreen File(2.4M) is too large for mail server,could you download it in
attachments url:
apache_...@163.com
From: Billy Liu
Date: 2017-07-26 22:51
To: dev
Subject: Re: sum function error
When you describe something is wrong, could you show the reproduce steps,
versions
The photo maybe can't be opened.
I run insight:
select sum(TEM.C) from TEM,result is wrong
but run:
select sum(TEM.B) from TEM,result is right
apache_...@163.com
From: apache_...@163.com
Date: 2017-07-26 11:47
To: dev
Subject: sum function error
Hi,
when i run sum function,
;B',3,1);
insert into tem values('B',1,1);
insert into tem values('C',4,1);
apache_...@163.com
Hi,
column level.
Similar to the create many view(one or many column) on one table(result data)
in rdbms.
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-07-25 21:24
To: dev
Subject: Re: Cube view
Do you mean more fine granularify access control (row level and column
level)? For
XCEPT [ ALL | DISTINCT ] query |
query MINUS [ ALL | DISTINCT ] query | query INTERSECT [ ALL | DISTINCT ] query
} [ ORDER BY orderItem [, orderItem ]* ] [ LIMIT { count | ALL } ] [ OFFSET
start { ROW | ROWS } ] [ FETCH { FIRST | NEXT } [ count ] { ROW | ROWS } ]
apache_...@163.com
Hi,
Sometime,the Cube result must share some departments,department can only access
part of cube columns, Have any plans to support the Cube view?
apache_...@163.com
Hi,
After I created model (m01) and cube (c001 based on m01), when I build the c001
cube, I get an error, "cube c001 doesn't contain any ready segment". What's the
reason, please?
apache_...@163.com
shaofeng,
thanks.
Is it(measures result) calculated during the cube running or calculated at
query time?
apache_...@163.com
From: ShaoFeng Shi
Date: 2017-07-18 17:31
To: dev
Subject: Re: about Measures result storage
One Cube may has 1 or multiple Cube segments; Each segment
Hi,
I have a question, when i create model(have many measures) ,and run a cube
base this model
what store about measures result in hbase , Other Single hbase table ?
apache_...@163.com
56 matches
Mail list logo