spark git commit: [SPARK-12000] Fix API doc generation issues

2015-11-30 Thread joshrosen
Repository: spark
Updated Branches:
  refs/heads/master edb26e7f4 -> d3ca8cfac


[SPARK-12000] Fix API doc generation issues

This pull request fixes multiple issues with API doc generation.

- Modify the Jekyll plugin so that the entire doc build fails if API docs 
cannot be generated. This will make it easy to detect when the doc build 
breaks, since this will now trigger Jenkins failures.
- Change how we handle the `-target` compiler option flag in order to fix 
`javadoc` generation.
- Incorporate doc changes from thunterdb (in #10048).

Closes #10048.

Author: Josh Rosen 
Author: Timothy Hunter 

Closes #10049 from JoshRosen/fix-doc-build.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/d3ca8cfa
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/d3ca8cfa
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/d3ca8cfa

Branch: refs/heads/master
Commit: d3ca8cfac286ae19f8bedc736877ea9d0a0a072c
Parents: edb26e7
Author: Josh Rosen 
Authored: Mon Nov 30 16:37:27 2015 -0800
Committer: Josh Rosen 
Committed: Mon Nov 30 16:37:27 2015 -0800

--
 docs/_plugins/copy_api_dirs.rb   |  6 +++---
 .../org/apache/spark/network/client/StreamCallback.java  |  4 ++--
 .../java/org/apache/spark/network/server/RpcHandler.java |  2 +-
 project/SparkBuild.scala | 11 ---
 4 files changed, 14 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/docs/_plugins/copy_api_dirs.rb
--
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 01718d9..f2f3e2e 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -27,7 +27,7 @@ if not (ENV['SKIP_API'] == '1')
 cd("..")
 
 puts "Running 'build/sbt -Pkinesis-asl clean compile unidoc' from " + pwd 
+ "; this may take a few minutes..."
-puts `build/sbt -Pkinesis-asl clean compile unidoc`
+system("build/sbt -Pkinesis-asl clean compile unidoc") || raise("Unidoc 
generation failed")
 
 puts "Moving back into docs dir."
 cd("docs")
@@ -117,7 +117,7 @@ if not (ENV['SKIP_API'] == '1')
 
   puts "Moving to python/docs directory and building sphinx."
   cd("../python/docs")
-  puts `make html`
  system("make html") || raise("Python doc generation failed")
 
   puts "Moving back into home dir."
   cd("../../")
@@ -131,7 +131,7 @@ if not (ENV['SKIP_API'] == '1')
   # Build SparkR API docs
   puts "Moving to R directory and building roxygen docs."
   cd("R")
-  puts `./create-docs.sh`
+  system("./create-docs.sh") || raise("R doc generation failed")
 
   puts "Moving back into home dir."
   cd("../")

http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
--
diff --git 
a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
 
b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 093fada..51d34ca 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
- * Callback for streaming data. Stream data will be offered to the {@link 
onData(ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link 
onComplete()} will be
+ * Callback for streaming data. Stream data will be offered to the {@link 
onData(String, ByteBuffer)}
+ * method as it arrives. Once all the stream data is received, {@link 
onComplete(String)} will be
  * called.
  * 
  * The network library guarantees that a single thread will call these methods 
at a time, but

http://git-wip-us.apache.org/repos/asf/spark/blob/d3ca8cfa/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
--
diff --git 
a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java 
b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
index 65109dd..1a11f7b 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
@@ -55,7 +55,7 @@ public abstract class RpcHandler {
 
   /**
* Receives an RPC message that does not expect a reply. The default 
implementation will
-   * call "{@link receive(TransportClient, byte[], 

spark git commit: [SPARK-12000] Fix API doc generation issues

2015-11-30 Thread joshrosen
Repository: spark
Updated Branches:
  refs/heads/branch-1.6 436151780 -> 43ffa0373


[SPARK-12000] Fix API doc generation issues

This pull request fixes multiple issues with API doc generation.

- Modify the Jekyll plugin so that the entire doc build fails if API docs 
cannot be generated. This will make it easy to detect when the doc build 
breaks, since this will now trigger Jenkins failures.
- Change how we handle the `-target` compiler option flag in order to fix 
`javadoc` generation.
- Incorporate doc changes from thunterdb (in #10048).

Closes #10048.

Author: Josh Rosen 
Author: Timothy Hunter 

Closes #10049 from JoshRosen/fix-doc-build.

(cherry picked from commit d3ca8cfac286ae19f8bedc736877ea9d0a0a072c)
Signed-off-by: Josh Rosen 


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/43ffa037
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/43ffa037
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/43ffa037

Branch: refs/heads/branch-1.6
Commit: 43ffa03738d1ffdf99604ac18de137c60c930550
Parents: 4361517
Author: Josh Rosen 
Authored: Mon Nov 30 16:37:27 2015 -0800
Committer: Josh Rosen 
Committed: Mon Nov 30 16:37:53 2015 -0800

--
 docs/_plugins/copy_api_dirs.rb   |  6 +++---
 .../org/apache/spark/network/client/StreamCallback.java  |  4 ++--
 .../java/org/apache/spark/network/server/RpcHandler.java |  2 +-
 project/SparkBuild.scala | 11 ---
 4 files changed, 14 insertions(+), 9 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/43ffa037/docs/_plugins/copy_api_dirs.rb
--
diff --git a/docs/_plugins/copy_api_dirs.rb b/docs/_plugins/copy_api_dirs.rb
index 01718d9..f2f3e2e 100644
--- a/docs/_plugins/copy_api_dirs.rb
+++ b/docs/_plugins/copy_api_dirs.rb
@@ -27,7 +27,7 @@ if not (ENV['SKIP_API'] == '1')
 cd("..")
 
 puts "Running 'build/sbt -Pkinesis-asl clean compile unidoc' from " + pwd 
+ "; this may take a few minutes..."
-puts `build/sbt -Pkinesis-asl clean compile unidoc`
+system("build/sbt -Pkinesis-asl clean compile unidoc") || raise("Unidoc 
generation failed")
 
 puts "Moving back into docs dir."
 cd("docs")
@@ -117,7 +117,7 @@ if not (ENV['SKIP_API'] == '1')
 
   puts "Moving to python/docs directory and building sphinx."
   cd("../python/docs")
-  puts `make html`
  system("make html") || raise("Python doc generation failed")
 
   puts "Moving back into home dir."
   cd("../../")
@@ -131,7 +131,7 @@ if not (ENV['SKIP_API'] == '1')
   # Build SparkR API docs
   puts "Moving to R directory and building roxygen docs."
   cd("R")
-  puts `./create-docs.sh`
+  system("./create-docs.sh") || raise("R doc generation failed")
 
   puts "Moving back into home dir."
   cd("../")

http://git-wip-us.apache.org/repos/asf/spark/blob/43ffa037/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
--
diff --git 
a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
 
b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
index 093fada..51d34ca 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/client/StreamCallback.java
@@ -21,8 +21,8 @@ import java.io.IOException;
 import java.nio.ByteBuffer;
 
 /**
- * Callback for streaming data. Stream data will be offered to the {@link 
onData(ByteBuffer)}
- * method as it arrives. Once all the stream data is received, {@link 
onComplete()} will be
+ * Callback for streaming data. Stream data will be offered to the {@link 
onData(String, ByteBuffer)}
+ * method as it arrives. Once all the stream data is received, {@link 
onComplete(String)} will be
  * called.
  * 
  * The network library guarantees that a single thread will call these methods 
at a time, but

http://git-wip-us.apache.org/repos/asf/spark/blob/43ffa037/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
--
diff --git 
a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java 
b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
index 65109dd..1a11f7b 100644
--- 
a/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
+++ 
b/network/common/src/main/java/org/apache/spark/network/server/RpcHandler.java
@@ -55,7 +55,7 @@ public abstract class RpcHandler {
 
   /**
* Receives an RPC