This is an automated email from the ASF dual-hosted git repository.
jiayu pushed a commit to branch viz-to-spark
in repository https://gitbox.apache.org/repos/asf/sedona.git
The following commit(s) were added to refs/heads/viz-to-spark by this push:
new f5b78bed4 Update CI and R dependencies
f5b78bed4 is described below
commit f5b78bed469135f4874c4929764bb3550ee7a7d8
Author: Jia Yu <[email protected]>
AuthorDate: Wed Sep 6 17:49:26 2023 -0700
Update CI and R dependencies
---
.github/workflows/java.yml | 1 -
.github/workflows/scripts/prepare_sparklyr_sedona_test_env.sh | 2 +-
R/R/dependencies.R | 2 +-
R/README.md | 4 ++--
R/vignettes/articles/apache-sedona.Rmd | 2 +-
R/vignettes/articles/raster.Rmd | 2 +-
6 files changed, 6 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/java.yml b/.github/workflows/java.yml
index 55e168def..18277f581 100644
--- a/.github/workflows/java.yml
+++ b/.github/workflows/java.yml
@@ -73,7 +73,6 @@ jobs:
fi
mvn -q clean install -Dspark=${SPARK_COMPAT_VERSION}
-Dscala=${SCALA_VERSION:0:4} -Dspark.version=${SPARK_VERSION} ${SKIP_TESTS}
- run: mkdir staging
- - run: cp viz/target/sedona-*.jar staging
- run: cp spark-shaded/target/sedona-*.jar staging
- run: |
[ -d "flink-shaded/target/" ] && cp flink-shaded/target/sedona-*.jar
staging 2>/dev/null || true
diff --git a/.github/workflows/scripts/prepare_sparklyr_sedona_test_env.sh
b/.github/workflows/scripts/prepare_sparklyr_sedona_test_env.sh
index 9907c0c85..0f98ef992 100644
--- a/.github/workflows/scripts/prepare_sparklyr_sedona_test_env.sh
+++ b/.github/workflows/scripts/prepare_sparklyr_sedona_test_env.sh
@@ -19,7 +19,7 @@
sedona_jar_files () {
local subdir
- for subdir in 'spark-shaded' 'viz'; do
+ for subdir in 'spark-shaded'; do
echo "$(pwd)/$(find ${subdir}/target -name 'sedona-*.jar' \! -name
'*javadoc*')"
done
}
diff --git a/R/R/dependencies.R b/R/R/dependencies.R
index ebf7a7369..83bff75b0 100644
--- a/R/R/dependencies.R
+++ b/R/R/dependencies.R
@@ -38,7 +38,7 @@ spark_dependencies <- function(spark_version, scala_version,
...) {
packages <- c(
paste0(
"org.apache.sedona:sedona-",
- c("spark-shaded", "viz"),
+ c("spark-shaded"),
sprintf("-%s_%s:1.4.1", spark_version, scala_version)
),
packages
diff --git a/R/README.md b/R/README.md
index 1deccb89b..c37b33985 100644
--- a/R/README.md
+++ b/R/README.md
@@ -31,7 +31,7 @@ To get the latest Sedona jars you can:
* **Compile the Sedona code yourself**, see [Compile the
code](https://sedona.apache.org/latest-snapshot/setup/compile/)
* **Get the latest generated jars** from the [GitHub 'Java build'
action](https://github.com/apache/sedona/actions/workflows/java.yml); click on
the latest run, the generated jars are at the bottom of the page
-The path to the sedona-spark-shaded and sedona-viz jars needs to be put in the
`SEDONA_JAR_FILES` environment variables (see below).
+The path to the sedona-spark-shaded jar needs to be put in the
`SEDONA_JAR_FILES` environment variable (see below).
## Usage
@@ -45,7 +45,7 @@ library(sparklyr)
library(apache.sedona)
## Only if using development version:
-Sys.setenv("SEDONA_JAR_FILES" = "<path to sedona-spark-shaded jar>:<path to
sedona-viz jar>")
+Sys.setenv("SEDONA_JAR_FILES" = "<path to sedona-spark-shaded jar>")
sc <- spark_connect(master = "local")
polygon_sdf <- spark_read_geojson(sc, location = "/tmp/polygon.json")
diff --git a/R/vignettes/articles/apache-sedona.Rmd
b/R/vignettes/articles/apache-sedona.Rmd
index 1755c9d19..0d28210b5 100644
--- a/R/vignettes/articles/apache-sedona.Rmd
+++ b/R/vignettes/articles/apache-sedona.Rmd
@@ -77,7 +77,7 @@ sc <- spark_connect(master = "yarn", spark_home = spark_home)
will create a Sedona-capable Spark connection in YARN client mode, and
```{r include=FALSE}
-Sys.setenv("SEDONA_JAR_FILES" =
"~/WORK/MISC_CODE/sedona/spark-shaded/target/sedona-spark-shaded-3.0_2.12-1.4.0-SNAPSHOT.jar:~/WORK/MISC_CODE/sedona/viz/target/sedona-viz-3.0_2.12-1.4.0-SNAPSHOT.jar")
+Sys.setenv("SEDONA_JAR_FILES" =
"~/WORK/MISC_CODE/sedona/spark-shaded/target/sedona-spark-shaded-3.0_2.12-1.4.0-SNAPSHOT.jar")
```
```{r message=FALSE, warning=FALSE}
diff --git a/R/vignettes/articles/raster.Rmd b/R/vignettes/articles/raster.Rmd
index e3cea040c..1c342012d 100644
--- a/R/vignettes/articles/raster.Rmd
+++ b/R/vignettes/articles/raster.Rmd
@@ -24,7 +24,7 @@ Raster data in GeoTiff and ArcInfoAsciiGrid formats can be
read into and written
Raster data in GeoTiff and ArcInfo Grid format can be loaded directly into
Spark using the `sparklyr::spark_read_binary` and Sedona constructors
`RS_FromGeoTiff` and `RS_FromArcInfoAsciiGrid`.
```{r include=FALSE}
-Sys.setenv("SEDONA_JAR_FILES" =
"~/WORK/MISC_CODE/sedona/spark-shaded/target/sedona-spark-shaded-3.0_2.12-1.4.0-SNAPSHOT.jar:~/WORK/MISC_CODE/sedona/viz/target/sedona-viz-3.0_2.12-1.4.0-SNAPSHOT.jar")
+Sys.setenv("SEDONA_JAR_FILES" =
"~/WORK/MISC_CODE/sedona/spark-shaded/target/sedona-spark-shaded-3.0_2.12-1.4.0-SNAPSHOT.jar")
```
```{r message=FALSE, warning=FALSE}