This is an automated email from the ASF dual-hosted git repository.

zabetak pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new d89f7c553d0 HIVE-29102: Replace deprecated cwiki links and point to the Website (#6031)
d89f7c553d0 is described below

commit d89f7c553d0e2324a5bd8ad4b728ed715d260538
Author: Raghav Aggarwal <[email protected]>
AuthorDate: Fri Sep 12 18:59:26 2025 +0530

    HIVE-29102: Replace deprecated cwiki links and point to the Website (#6031)
---
 README.md                                                    | 10 +++++-----
 dev-support/hive-personality.sh                              |  2 +-
 druid-handler/README.md                                      |  2 +-
 hbase-handler/README.md                                      |  2 +-
 hcatalog/README.txt                                          |  2 +-
 .../apache/hive/hcatalog/data/schema/HCatFieldSchema.java    |  2 +-
 hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf       |  2 +-
 hcatalog/src/test/e2e/templeton/README.txt                   | 12 ++++++------
 .../test/e2e/templeton/deployers/config/hive/hive-site.xml   |  2 +-
 .../main/java/org/apache/hive/hcatalog/templeton/Server.java |  2 +-
 .../hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java     |  2 +-
 .../hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java    |  2 +-
 ql/src/test/queries/clientpositive/char_udf1.q               |  2 +-
 .../hive/serde2/objectinspector/ObjectInspectorUtils.java    |  2 +-
 .../hadoop/hive/serde2/teradata/TeradataBinarySerde.java     |  2 +-
 15 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/README.md b/README.md
index 7a2f82c9fd8..b842aa6293c 100644
--- a/README.md
+++ b/README.md
@@ -63,17 +63,17 @@ Getting Started
 ===============
 
 - Installation Instructions and a quick tutorial:
-  https://cwiki.apache.org/confluence/display/Hive/GettingStarted
-  https://hive.apache.org/development/quickstart/
+  https://hive.apache.org/development/gettingstarted-latest
+  https://hive.apache.org/development/quickstart
 
 - Instructions to build Hive from source:
-  https://cwiki.apache.org/confluence/display/Hive/GettingStarted#GettingStarted-BuildingHivefromSource
+  https://hive.apache.org/development/gettingstarted-latest/#building-hive-from-source
 
 - A longer tutorial that covers more features of HiveQL:
-  https://cwiki.apache.org/confluence/display/Hive/Tutorial
+  https://hive.apache.org/docs/latest/user/tutorial
 
 - The HiveQL Language Manual:
-  https://cwiki.apache.org/confluence/display/Hive/LanguageManual
+  https://hive.apache.org/docs/latest/language/languagemanual
 
 
 Requirements
diff --git a/dev-support/hive-personality.sh b/dev-support/hive-personality.sh
index 574c0b01f35..8bce231e74e 100644
--- a/dev-support/hive-personality.sh
+++ b/dev-support/hive-personality.sh
@@ -28,7 +28,7 @@ function personality_globals
   #shellcheck disable=SC2034
   PATCH_BRANCH_DEFAULT=master
   #shellcheck disable=SC2034
-  PATCH_NAMING_RULE="http://cwiki.apache.org/confluence/display/Hive/HowToContribute";
+  PATCH_NAMING_RULE="https://hive.apache.org/community/resources/howtocontribute";
   #shellcheck disable=SC2034
   JIRA_ISSUE_RE='^HIVE-[0-9]+$'
   #shellcheck disable=SC2034
diff --git a/druid-handler/README.md b/druid-handler/README.md
index 4db8002102a..285f7ec162a 100644
--- a/druid-handler/README.md
+++ b/druid-handler/README.md
@@ -18,4 +18,4 @@ limitations under the License.
 -->
 # Druid Storage Handler
 
-[Link for documentation]( https://cwiki.apache.org/confluence/display/Hive/Druid+Integration)
+[Link for documentation](https://hive.apache.org/docs/latest/user/druid-integration)
diff --git a/hbase-handler/README.md b/hbase-handler/README.md
index 83a8896f5b5..2c20fea5afa 100644
--- a/hbase-handler/README.md
+++ b/hbase-handler/README.md
@@ -18,4 +18,4 @@ limitations under the License.
 -->
 # Hbase Storage Handler
 
-[Link for documentation]( https://cwiki.apache.org/confluence/display/Hive/HBaseIntegration)
+[Link for documentation](https://hive.apache.org/docs/latest/user/hbaseintegration)
diff --git a/hcatalog/README.txt b/hcatalog/README.txt
index 2146b0dac52..8599970547c 100644
--- a/hcatalog/README.txt
+++ b/hcatalog/README.txt
@@ -33,4 +33,4 @@ For the latest information about HCatalog, please visit our website at:
 
 and our wiki, at:
 
-   https://cwiki.apache.org/confluence/display/HCATALOG
+   https://hive.apache.org/docs/latest/hcatalog/hcatalog-base
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
index 30af54f1156..2a00c179fb2 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/schema/HCatFieldSchema.java
@@ -264,7 +264,7 @@ private void setName(String name) {
   public HCatFieldSchema(String fieldName, Type type, Type mapKeyType, HCatSchema mapValueSchema, String comment) throws HCatException {
     assertTypeInCategory(type, Category.MAP, fieldName);
     //Hive only supports primitive map keys: 
-    //https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Types#LanguageManualTypes-ComplexTypes
+    // https://hive.apache.org/docs/latest/language/languagemanual-types/#complex-types
     assertTypeInCategory(mapKeyType, Category.PRIMITIVE, fieldName);
     this.fieldName = fieldName;
     this.type = Type.MAP;
diff --git a/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf b/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf
index d026872f883..34da23ddc8f 100644
--- a/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf
+++ b/hcatalog/src/test/e2e/hcatalog/tests/hive_nightly.conf
@@ -1000,7 +1000,7 @@ $cfg = {
       # Need to test multiple insert  - Need harness enhancements
       # Need to test insert into directory - Need harness enhancements
       # Need to test casts
-      # Need to test all built in expressions and UDF (see https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF)
+      # Need to test all built in expressions and UDF (see https://hive.apache.org/docs/latest/language/languagemanual-udf)
       # Need to test xpath functionality
       # Need to test regular expression based projection
      # Need to test semi joins - Mysql doesn't support, how do I express semi-join?
diff --git a/hcatalog/src/test/e2e/templeton/README.txt b/hcatalog/src/test/e2e/templeton/README.txt
index c461a478187..456518ca793 100644
--- a/hcatalog/src/test/e2e/templeton/README.txt
+++ b/hcatalog/src/test/e2e/templeton/README.txt
@@ -21,9 +21,9 @@ End to end tests in templeton runs tests against an existing templeton server.
 It runs hcat, mapreduce, streaming, hive and pig tests.
 This requires Hadoop cluster and Hive metastore running.
 
-It's a good idea to look at current versions of
-https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat and 
-https://cwiki.apache.org/confluence/display/Hive/WebHCat+Configure
+It's a good idea to look at current versions of:
+https://hive.apache.org/docs/latest/webhcat/webhcat-installwebhcat
+https://hive.apache.org/docs/latest/webhcat/webhcat-configure
 
 See deployers/README.txt for help automating some of the steps in this document.
 
@@ -98,13 +98,13 @@ Tips:
 be obtained from Pig and the other two are obtained from your Hadoop distribution.
 For Hadoop 1.x you would need to upload hadoop-examples.jar twice to HDFS one as hclient.jar and other as hexamples.jar.
 For Hadoop 2.x you would need to upload hadoop-mapreduce-client-jobclient.jar to HDFS as hclient.jar and hadoop-mapreduce-examples.jar to HDFS as hexamples.jar.
-Also see https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-HadoopDistributedCache
+Also see https://hive.apache.org/docs/latest/webhcat/webhcat-installwebhcat/#hadoop-distributed-cache
  for notes on additional JAR files to copy to HDFS.
 
 5. Make sure TEMPLETON_HOME environment variable is set
 
 6. hadoop/conf/core-site.xml should have items described in
-https://cwiki.apache.org/confluence/display/Hive/WebHCat+InstallWebHCat#WebHCatInstallWebHCat-Permissions
+https://hive.apache.org/docs/latest/webhcat/webhcat-installwebhcat/#permissions
 
 7. Currently Pig tar file available on http://pig.apache.org/ contains jar files compiled to work with Hadoop 1.x.
 To run WebHCat tests on Hadoop 2.x you need to build your own Pig tar for Hadoop 2. To do that download the 
@@ -173,7 +173,7 @@ and webhcat.proxyuser.hue.hosts defined, i.e. 'hue' should be allowed to imperso
 [Of course, 'hcat' proxyuser should be configured in core-site.xml for the command to succeed.]
 
 Furthermore, metastore side file based security should be enabled. 
-(See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Authorization#LanguageManualAuthorization-MetastoreServerSecurity for more info)
+(See https://hive.apache.org/docs/latest/language/languagemanual-authorization/#hive-authorization-options for more info)
 
 To do this 3 properties in hive-site.xml should be configured:
 1) hive.security.metastore.authorization.manager set to 
diff --git a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
index cd5ac68fa33..924b44bb865 100644
--- a/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
+++ b/hcatalog/src/test/e2e/templeton/deployers/config/hive/hive-site.xml
@@ -54,7 +54,7 @@
     <!--
     enable file based auth for Hive on metastore side, i.e. enforce metadata 
     security as if it were stored together with data
-    https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Authorization
+    https://hive.apache.org/docs/latest/language/languagemanual-authorization
     <property>
         <name>hive.metastore.execute.setugi</name>
         <value>true</value>
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
index 8bf6f155daf..8e99901beb0 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Server.java
@@ -837,7 +837,7 @@ public EnqueueBean sqoop(@FormParam("command") String command,
   * @param srcFile    name of hive script file to run, equivalent to "-f" from hive
    *                   command line
   * @param hiveArgs   additional command line argument passed to the hive command line.
-   *                   Please check https://cwiki.apache.org/Hive/languagemanual-cli.html
+   *                   Please check https://hive.apache.org/docs/latest/language/languagemanual-cli
    *                   for detailed explanation of command line arguments
   * @param otherFiles additional files to be shipped to the launcher, such as the jars
    *                   used in "add jar" statement in hive script
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
index bed87c582ea..a9c7573f5aa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileInputFormat.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.mapred.Reporter;
 
 /**
- * https://cwiki.apache.org/confluence/display/Hive/TeradataBinarySerde.
+ * https://hive.apache.org/docs/latest/user/teradatabinaryserde
  * FileInputFormat for Teradata binary files.
  *
  * In the Teradata Binary File, each record constructs as below:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java
index a2b9f275a9c..f5cf4b9f9d9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/TeradataBinaryFileOutputFormat.java
@@ -38,7 +38,7 @@
 import static java.lang.String.format;
 
 /**
- * https://cwiki.apache.org/confluence/display/Hive/TeradataBinarySerde.
+ * https://hive.apache.org/docs/latest/user/teradatabinaryserde
  * FileOutputFormat for Teradata binary files.
  *
  * In the Teradata Binary File, each record constructs as below:
diff --git a/ql/src/test/queries/clientpositive/char_udf1.q b/ql/src/test/queries/clientpositive/char_udf1.q
index 4a6ac4020c3..7e96e0527c0 100644
--- a/ql/src/test/queries/clientpositive/char_udf1.q
+++ b/ql/src/test/queries/clientpositive/char_udf1.q
@@ -75,7 +75,7 @@ select
   ltrim(c2) = ltrim(c4)
 from char_udf_1 limit 1;
 
--- In hive wiki page https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF
+-- In hive wiki page https://hive.apache.org/docs/latest/language/languagemanual-udf
 -- we only allow A regexp B, not regexp (A,B).
 
 select
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
index c373f5e1e03..5169871259f 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/objectinspector/ObjectInspectorUtils.java
@@ -656,7 +656,7 @@ public static int getBucketNumberOld(Object[] bucketFields, ObjectInspector[] bu
   }
 
   /**
-   * https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL+BucketedTables
+   * https://hive.apache.org/docs/latest/language/languagemanual-ddl-bucketedtables
   * @param hashCode as produced by {@link #getBucketHashCode(Object[], ObjectInspector[])}
    */
   public static int getBucketNumber(int hashCode, int numberOfBuckets) {
diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java b/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
index 543ae14f9ea..22832e6fa72 100644
--- a/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
+++ b/serde/src/java/org/apache/hadoop/hive/serde2/teradata/TeradataBinarySerde.java
@@ -77,7 +77,7 @@
 import static java.lang.String.format;
 
 /**
- * https://cwiki.apache.org/confluence/display/Hive/TeradataBinarySerde.
+ * https://hive.apache.org/docs/latest/user/teradatabinaryserde
  * TeradataBinarySerde handles the serialization and deserialization of Teradata Binary Record
  * passed from TeradataBinaryRecordReader.
  *
