Repository: spark
Updated Branches:
  refs/heads/master 48caec251 -> 736a7911c


[SPARK-16581][SPARKR] Make JVM backend calling functions public

## What changes were proposed in this pull request?

This change exposes a public API in SparkR to create objects and call methods on the Spark driver JVM.
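
For reference, a minimal sketch of how the new API is used (adapted from the examples documented in jvm.R below; assumes an active SparkR session):

```r
library(SparkR)

# A Spark JVM must be running before the JVM backend functions can be used
sparkR.session()

# Create a Java ArrayList on the driver JVM and populate it
jarray <- sparkR.newJObject("java.util.ArrayList")
sparkR.callJMethod(jarray, "add", 42L)
sparkR.callJMethod(jarray, "get", 0L)  # returns 42

# Call a static Java method
sparkR.callJStatic("java.lang.System", "getProperty", "java.home")

sparkR.session.stop()
```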

## How was this patch tested?

Unit tests, CRAN checks

Author: Shivaram Venkataraman <shiva...@cs.berkeley.edu>

Closes #14775 from shivaram/sparkr-java-api.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/736a7911
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/736a7911
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/736a7911

Branch: refs/heads/master
Commit: 736a7911cb0335cdb2b2f6c87f9e3c32047b5bbb
Parents: 48caec2
Author: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Authored: Mon Aug 29 12:55:32 2016 -0700
Committer: Shivaram Venkataraman <shiva...@cs.berkeley.edu>
Committed: Mon Aug 29 12:55:32 2016 -0700

----------------------------------------------------------------------
 R/pkg/DESCRIPTION                        |   5 +-
 R/pkg/NAMESPACE                          |   4 +
 R/pkg/R/jvm.R                            | 117 ++++++++++++++++++++++++++
 R/pkg/inst/tests/testthat/test_jvm_api.R |  43 ++++++++++
 4 files changed, 167 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/736a7911/R/pkg/DESCRIPTION
----------------------------------------------------------------------
diff --git a/R/pkg/DESCRIPTION b/R/pkg/DESCRIPTION
index e5afed2..5a83883 100644
--- a/R/pkg/DESCRIPTION
+++ b/R/pkg/DESCRIPTION
@@ -2,7 +2,7 @@ Package: SparkR
 Type: Package
 Title: R Frontend for Apache Spark
 Version: 2.0.0
-Date: 2016-07-07
+Date: 2016-08-27
 Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
                     email = "shiva...@cs.berkeley.edu"),
              person("Xiangrui", "Meng", role = "aut",
@@ -11,7 +11,7 @@ Authors@R: c(person("Shivaram", "Venkataraman", role = c("aut", "cre"),
                     email = "felixche...@apache.org"),
             person(family = "The Apache Software Foundation", role = c("aut", "cph")))
 URL: http://www.apache.org/ http://spark.apache.org/
-BugReports: https://issues.apache.org/jira/secure/CreateIssueDetails!init.jspa?pid=12315420&components=12325400&issuetype=4
+BugReports: https://cwiki.apache.org/confluence/display/SPARK/Contributing+to+Spark#ContributingtoSpark-ContributingBugReports
 Depends:
     R (>= 3.0),
     methods
@@ -39,6 +39,7 @@ Collate:
     'deserialize.R'
     'functions.R'
     'install.R'
+    'jvm.R'
     'mllib.R'
     'serialize.R'
     'sparkR.R'

http://git-wip-us.apache.org/repos/asf/spark/blob/736a7911/R/pkg/NAMESPACE
----------------------------------------------------------------------
diff --git a/R/pkg/NAMESPACE b/R/pkg/NAMESPACE
index ad587a6..5e625b2 100644
--- a/R/pkg/NAMESPACE
+++ b/R/pkg/NAMESPACE
@@ -364,4 +364,8 @@ S3method(structField, jobj)
 S3method(structType, jobj)
 S3method(structType, structField)
 
+export("sparkR.newJObject")
+export("sparkR.callJMethod")
+export("sparkR.callJStatic")
+
 export("install.spark")

http://git-wip-us.apache.org/repos/asf/spark/blob/736a7911/R/pkg/R/jvm.R
----------------------------------------------------------------------
diff --git a/R/pkg/R/jvm.R b/R/pkg/R/jvm.R
new file mode 100644
index 0000000..bb5c775
--- /dev/null
+++ b/R/pkg/R/jvm.R
@@ -0,0 +1,117 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Methods to directly access the JVM running the SparkR backend.
+
+#' Call Java Methods
+#'
+#' Call a Java method in the JVM running the Spark driver. The return
+#' values are automatically converted to R objects for simple objects. Other
+#' values are returned as "jobj" which are references to objects on JVM.
+#'
+#' @details
+#' This is a low level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x object to invoke the method on. Should be a "jobj" created by newJObject.
+#' @param methodName method name to call.
+#' @param ... parameters to pass to the Java method.
+#' @return the return value of the Java method. Either returned as a R object
+#'  if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJStatic}, \link{sparkR.newJObject}
+#' @rdname sparkR.callJMethod
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling newJObject
+#' # Create a Java ArrayList and populate it
+#' jarray <- sparkR.newJObject("java.util.ArrayList")
+#' sparkR.callJMethod(jarray, "add", 42L)
+#' sparkR.callJMethod(jarray, "get", 0L) # Will print 42
+#' }
+#' @note sparkR.callJMethod since 2.0.1
+sparkR.callJMethod <- function(x, methodName, ...) {
+  callJMethod(x, methodName, ...)
+}
+
+#' Call Static Java Methods
+#'
+#' Call a static method in the JVM running the Spark driver. The return
+#' value is automatically converted to R objects for simple objects. Other
+#' values are returned as "jobj" which are references to objects on JVM.
+#'
+#' @details
+#' This is a low level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x fully qualified Java class name that contains the static method to invoke.
+#' @param methodName name of static method to invoke.
+#' @param ... parameters to pass to the Java method.
+#' @return the return value of the Java method. Either returned as a R object
+#'  if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJMethod}, \link{sparkR.newJObject}
+#' @rdname sparkR.callJStatic
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling callJStatic
+#' sparkR.callJStatic("java.lang.System", "currentTimeMillis")
+#' sparkR.callJStatic("java.lang.System", "getProperty", "java.home")
+#' }
+#' @note sparkR.callJStatic since 2.0.1
+sparkR.callJStatic <- function(x, methodName, ...) {
+  callJStatic(x, methodName, ...)
+}
+
+#' Create Java Objects
+#'
+#' Create a new Java object in the JVM running the Spark driver. The return
+#' value is automatically converted to an R object for simple objects. Other
+#' values are returned as a "jobj" which is a reference to an object on JVM.
+#'
+#' @details
+#' This is a low level function to access the JVM directly and should only be used
+#' for advanced use cases. The arguments and return values that are primitive R
+#' types (like integer, numeric, character, lists) are automatically translated to/from
+#' Java types (like Integer, Double, String, Array). A full list can be found in
+#' serialize.R and deserialize.R in the Apache Spark code base.
+#'
+#' @param x fully qualified Java class name.
+#' @param ... arguments to be passed to the constructor.
+#' @return the object created. Either returned as a R object
+#'   if it can be deserialized or returned as a "jobj". See details section for more.
+#' @export
+#' @seealso \link{sparkR.callJMethod}, \link{sparkR.callJStatic}
+#' @rdname sparkR.newJObject
+#' @examples
+#' \dontrun{
+#' sparkR.session() # Need to have a Spark JVM running before calling newJObject
+#' # Create a Java ArrayList and populate it
+#' jarray <- sparkR.newJObject("java.util.ArrayList")
+#' sparkR.callJMethod(jarray, "add", 42L)
+#' sparkR.callJMethod(jarray, "get", 0L) # Will print 42
+#' }
+#' @note sparkR.newJObject since 2.0.1
+sparkR.newJObject <- function(x, ...) {
+  newJObject(x, ...)
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/736a7911/R/pkg/inst/tests/testthat/test_jvm_api.R
----------------------------------------------------------------------
diff --git a/R/pkg/inst/tests/testthat/test_jvm_api.R b/R/pkg/inst/tests/testthat/test_jvm_api.R
new file mode 100644
index 0000000..151c529
--- /dev/null
+++ b/R/pkg/inst/tests/testthat/test_jvm_api.R
@@ -0,0 +1,43 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+context("JVM API")
+
+sparkSession <- sparkR.session(enableHiveSupport = FALSE)
+
+test_that("Create and call methods on object", {
+  jarr <- newJObject("java.util.ArrayList")
+  # Add an element to the array
+  callJMethod(jarr, "add", 1L)
+  # Check if get returns the same element
+  expect_equal(callJMethod(jarr, "get", 0L), 1L)
+})
+
+test_that("Call static methods", {
+  # Convert a boolean to a string
+  strTrue <- callJStatic("java.lang.String", "valueOf", TRUE)
+  expect_equal(strTrue, "true")
+})
+
+test_that("Manually garbage collect objects", {
+  jarr <- newJObject("java.util.ArrayList")
+  cleanup.jobj(jarr)
+  # Using a jobj after GC should throw an error
+  expect_error(print(jarr), "Error in invokeJava.*")
+})
+
+sparkR.session.stop()

