Github user felixcheung commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19243#discussion_r139602239
  
    --- Diff: R/pkg/R/DataFrame.R ---
    @@ -998,33 +998,39 @@ setMethod("unique",
     #' sparkR.session()
     #' path <- "path/to/file.json"
     #' df <- read.json(path)
    +#' collect(sample(df, fraction=0.5))
     #' collect(sample(df, FALSE, 0.5))
    -#' collect(sample(df, TRUE, 0.5))
    +#' collect(sample(df, TRUE, 0.5, seed=3))
     #'}
     #' @note sample since 1.4.0
     setMethod("sample",
    -          signature(x = "SparkDataFrame", withReplacement = "logical",
    -                    fraction = "numeric"),
    -          function(x, withReplacement, fraction, seed) {
    -            if (fraction < 0.0) stop(cat("Negative fraction value:", fraction))
    +          signature(x = "SparkDataFrame"),
    +          function(x, withReplacement = FALSE, fraction, seed) {
    +            if (!is.numeric(fraction)) {
    +              stop(paste("fraction must be numeric; however, got", 
class(fraction)))
    +            }
    +            if (!is.logical(withReplacement)) {
    +              stop(paste("withReplacement must be logical; however, got", 
class(withReplacement)))
    +            }
                 if (!missing(seed)) {
                   # TODO : Figure out how to send integer as java.lang.Long to JVM so
                   # we can send seed as an argument through callJMethod
    -              sdf <- callJMethod(x@sdf, "sample", withReplacement, fraction, as.integer(seed))
    +              sdf <- handledCallJMethod(x@sdf, "sample", withReplacement,
    --- End diff --
    
    I'd then wrap `withReplacement` as `as.logical(withReplacement)` and `fraction` as `as.numeric(fraction)`, because the value might be coercible without being of the exact type (note the `L`):
    ```
    > is.numeric(1L)
    [1] TRUE
    ```
    but passing it as an integer could cause `callJMethod` to match a different method signature on the JVM.

