spark git commit: [SPARK-7226] [SPARKR] Support math functions in R DataFrame
Repository: spark Updated Branches: refs/heads/master 9b6cf285d - 50da9e891 [SPARK-7226] [SPARKR] Support math functions in R DataFrame Author: qhuang qian.hu...@intel.com Closes #6170 from hqzizania/master and squashes the following commits: f20c39f [qhuang] add tests units and fixes 2a7d121 [qhuang] use a function name more familiar to R users 07aa72e [qhuang] Support math functions in R DataFrame Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/50da9e89 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/50da9e89 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/50da9e89 Branch: refs/heads/master Commit: 50da9e89161faa0ecdc1feb3ffee6c822a742034 Parents: 9b6cf28 Author: qhuang qian.hu...@intel.com Authored: Fri May 15 14:06:16 2015 -0700 Committer: Shivaram Venkataraman shiva...@cs.berkeley.edu Committed: Fri May 15 14:06:16 2015 -0700 -- R/pkg/NAMESPACE | 23 ++ R/pkg/R/column.R | 36 --- R/pkg/R/generics.R | 20 +++ R/pkg/inst/tests/test_sparkSQL.R | 24 +++ 4 files changed, 100 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/50da9e89/R/pkg/NAMESPACE -- diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE index ba29614..64ffdcf 100644 --- a/R/pkg/NAMESPACE +++ b/R/pkg/NAMESPACE @@ -59,33 +59,56 @@ exportMethods(arrange, exportClasses(Column) exportMethods(abs, + acos, alias, approxCountDistinct, asc, + asin, + atan, + atan2, avg, cast, + cbrt, + ceiling, contains, + cos, + cosh, countDistinct, desc, endsWith, + exp, + expm1, + floor, getField, getItem, + hypot, isNotNull, isNull, last, like, + log, + log10, + log1p, lower, max, mean, min, n, n_distinct, + rint, rlike, + sign, + sin, + sinh, sqrt, startsWith, substr, sum, sumDistinct, + tan, + tanh, + toDegrees, + toRadians, upper) exportClasses(GroupedData) http://git-wip-us.apache.org/repos/asf/spark/blob/50da9e89/R/pkg/R/column.R -- diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R index 9a68445..80e92d3 
100644 --- a/R/pkg/R/column.R +++ b/R/pkg/R/column.R @@ -55,12 +55,17 @@ operators <- list( "+" = "plus", "-" = "minus", "*" = "multiply", "/" = "divide", "%%" = "mod", "==" = "equalTo", ">" = "gt", "<" = "lt", "!=" = "notEqual", "<=" = "leq", ">=" = "geq", # we can not override `&&` and `||`, so use `&` and `|` instead - "&" = "and", "|" = "or" #, "!" = "unary_$bang" + "&" = "and", "|" = "or", #, "!" = "unary_$bang" + "^" = "pow" ) column_functions1 <- c("asc", "desc", "isNull", "isNotNull") column_functions2 <- c("like", "rlike", "startsWith", "endsWith", "getField", "getItem", "contains") functions <- c("min", "max", "sum", "avg", "mean", "count", "abs", "sqrt", - "first", "last", "lower", "upper", "sumDistinct") + "first", "last", "lower", "upper", "sumDistinct", + "acos", "asin", "atan", "cbrt", "ceiling", "cos", "cosh", "exp", + "expm1", "floor", "log", "log10", "log1p", "rint", "sign", + "sin", "sinh", "tan", "tanh", "toDegrees", "toRadians") +binary_mathfunctions <- c("atan2", "hypot") createOperator <- function(op) { setMethod(op, @@ -76,7 +81,11 @@ createOperator <- function(op) { if (class(e2) == "Column") { e2 <- e2@jc } -callJMethod(e1@jc, operators[[op]], e2) +if (op == "^") { + jc <- callJStatic("org.apache.spark.sql.functions", operators[[op]], e1@jc, e2) +} else { + callJMethod(e1@jc, operators[[op]], e2) +} } column(jc) }) @@ -106,11 +115,29 @@ createStaticFunction <- function(name) { setMethod(name, signature(x = "Column"), function(x) { + if (name == "ceiling") { + name <- "ceil" + } + if (name == "sign") { + name <- "signum" + } jc <- callJStatic("org.apache.spark.sql.functions", name, x@jc)
spark git commit: [SPARK-7226] [SPARKR] Support math functions in R DataFrame
Repository: spark Updated Branches: refs/heads/branch-1.4 a5f7b3b9c - 9ef6d743a [SPARK-7226] [SPARKR] Support math functions in R DataFrame Author: qhuang qian.hu...@intel.com Closes #6170 from hqzizania/master and squashes the following commits: f20c39f [qhuang] add tests units and fixes 2a7d121 [qhuang] use a function name more familiar to R users 07aa72e [qhuang] Support math functions in R DataFrame (cherry picked from commit 50da9e89161faa0ecdc1feb3ffee6c822a742034) Signed-off-by: Shivaram Venkataraman shiva...@cs.berkeley.edu Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9ef6d743 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9ef6d743 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9ef6d743 Branch: refs/heads/branch-1.4 Commit: 9ef6d743a65cb3f962e4f2e0716f55dbe7efb084 Parents: a5f7b3b Author: qhuang qian.hu...@intel.com Authored: Fri May 15 14:06:16 2015 -0700 Committer: Shivaram Venkataraman shiva...@cs.berkeley.edu Committed: Fri May 15 14:06:39 2015 -0700 -- R/pkg/NAMESPACE | 23 ++ R/pkg/R/column.R | 36 --- R/pkg/R/generics.R | 20 +++ R/pkg/inst/tests/test_sparkSQL.R | 24 +++ 4 files changed, 100 insertions(+), 3 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/9ef6d743/R/pkg/NAMESPACE -- diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE index ba29614..64ffdcf 100644 --- a/R/pkg/NAMESPACE +++ b/R/pkg/NAMESPACE @@ -59,33 +59,56 @@ exportMethods(arrange, exportClasses(Column) exportMethods(abs, + acos, alias, approxCountDistinct, asc, + asin, + atan, + atan2, avg, cast, + cbrt, + ceiling, contains, + cos, + cosh, countDistinct, desc, endsWith, + exp, + expm1, + floor, getField, getItem, + hypot, isNotNull, isNull, last, like, + log, + log10, + log1p, lower, max, mean, min, n, n_distinct, + rint, rlike, + sign, + sin, + sinh, sqrt, startsWith, substr, sum, sumDistinct, + tan, + tanh, + toDegrees, + toRadians, upper) exportClasses(GroupedData) 
http://git-wip-us.apache.org/repos/asf/spark/blob/9ef6d743/R/pkg/R/column.R -- diff --git a/R/pkg/R/column.R b/R/pkg/R/column.R index 9a68445..80e92d3 100644 --- a/R/pkg/R/column.R +++ b/R/pkg/R/column.R @@ -55,12 +55,17 @@ operators <- list( "+" = "plus", "-" = "minus", "*" = "multiply", "/" = "divide", "%%" = "mod", "==" = "equalTo", ">" = "gt", "<" = "lt", "!=" = "notEqual", "<=" = "leq", ">=" = "geq", # we can not override `&&` and `||`, so use `&` and `|` instead - "&" = "and", "|" = "or" #, "!" = "unary_$bang" + "&" = "and", "|" = "or", #, "!" = "unary_$bang" + "^" = "pow" ) column_functions1 <- c("asc", "desc", "isNull", "isNotNull") column_functions2 <- c("like", "rlike", "startsWith", "endsWith", "getField", "getItem", "contains") functions <- c("min", "max", "sum", "avg", "mean", "count", "abs", "sqrt", - "first", "last", "lower", "upper", "sumDistinct") + "first", "last", "lower", "upper", "sumDistinct", + "acos", "asin", "atan", "cbrt", "ceiling", "cos", "cosh", "exp", + "expm1", "floor", "log", "log10", "log1p", "rint", "sign", + "sin", "sinh", "tan", "tanh", "toDegrees", "toRadians") +binary_mathfunctions <- c("atan2", "hypot") createOperator <- function(op) { setMethod(op, @@ -76,7 +81,11 @@ createOperator <- function(op) { if (class(e2) == "Column") { e2 <- e2@jc } -callJMethod(e1@jc, operators[[op]], e2) +if (op == "^") { + jc <- callJStatic("org.apache.spark.sql.functions", operators[[op]], e1@jc, e2) +} else { + callJMethod(e1@jc, operators[[op]], e2) +} } column(jc) }) @@ -106,11 +115,29 @@ createStaticFunction <- function(name) { setMethod(name, signature(x = "Column"), function(x) { + if (name == "ceiling") { + name <- "ceil" + } + if (name ==