This is an automated email from the ASF dual-hosted git repository.
paksyd pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hbase.git
The following commit(s) were added to refs/heads/master by this push:
new 8af9d1c2473 HBASE-29878 Add missing footnotes (addendum) (#7878)
8af9d1c2473 is described below
commit 8af9d1c2473c124d9bf3d33722f6254b02c64541
Author: Yurii Palamarchuk <[email protected]>
AuthorDate: Mon Mar 9 15:15:17 2026 +0100
HBASE-29878 Add missing footnotes (addendum) (#7878)
Signed-off-by: Dávid Paksy <[email protected]>
Signed-off-by: Peng Lu <[email protected]>
Reviewed-by: Liu Xiao <[email protected]>
---
hbase-website/README.md | 5 ++
.../metrics-and-monitoring.mdx | 4 +-
.../_mdx/(multi-page)/upgrading/version-number.mdx | 14 ++--
hbase-website/unit-tests/validate-headings.test.ts | 10 ++-
.../unit-tests/validate-references.test.ts | 92 ++++++++++++++++++++++
5 files changed, 118 insertions(+), 7 deletions(-)
diff --git a/hbase-website/README.md b/hbase-website/README.md
index 400d4d8146f..a67a8569975 100644
--- a/hbase-website/README.md
+++ b/hbase-website/README.md
@@ -322,6 +322,11 @@ This starts a local development server with:
2. Add the new file to the relevant `meta.json` in the same section folder so
it appears in navigation.
3. Import the page into `app/pages/_docs/docs/_mdx/single-page/index.mdx` and
add an `#` header so it renders in the single-page docs.
+> **Important:** All heading IDs and all footnote reference IDs must be
**unique across every multi-page MDX file**. This is because all files are
combined into a single page (see `single-page/index.mdx`), so any duplicate ID
would collide in the combined document. There are automated unit tests that
enforce both constraints — `unit-tests/validate-headings.test.ts` for heading
IDs and `unit-tests/validate-references.test.ts` for footnote reference IDs.
+>
+> - **Headings**: Use an explicit `[#page-specific-id]` anchor suffix on any
heading whose auto-generated slug would clash with a heading in another file.
For example: `## Overview [#my-topic-overview]`.
+> - **Footnotes**: Use page-specific numeric or named identifiers so they
remain unique globally. For example, instead of `[^1]` in every file, simply
continue the global numbering (e.g. if existing files already use
`[^1]`–`[^5]`, start at `[^6]` in a new file).
+
**Update content:**
- Edit the appropriate `.md` or `.json` file
diff --git
a/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/operational-management/metrics-and-monitoring.mdx
b/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/operational-management/metrics-and-monitoring.mdx
index 4393795d82a..88f8957c91f 100644
---
a/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/operational-management/metrics-and-monitoring.mdx
+++
b/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/operational-management/metrics-and-monitoring.mdx
@@ -5,7 +5,7 @@ description: "HBase metrics system configuration, JMX
monitoring, master and Reg
## HBase Metrics
-HBase emits metrics which adhere to the [Hadoop
Metrics](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/Metrics.html)
API. Starting with HBase 0.95[^1], HBase is configured to emit a default set
of metrics with a default sampling period of every 10 seconds. You can use
HBase metrics in conjunction with Ganglia. You can also filter which metrics
are emitted and extend the metrics framework to capture custom metrics
appropriate for your environment.
+HBase emits metrics which adhere to the [Hadoop
Metrics](https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-common/Metrics.html)
API. Starting with HBase 0.95 [^5], HBase is configured to emit a default set
of metrics with a default sampling period of every 10 seconds. You can use
HBase metrics in conjunction with Ganglia. You can also filter which metrics
are emitted and extend the metrics framework to capture custom metrics
appropriate for your environment.
### Metric Setup
@@ -601,3 +601,5 @@ The format of Archived Storefile Size is NNN(MMM). NNN is
the total Storefile si
**_Empty Snapshot Storefile Stats Example_**

+
+[^5]: The Metrics system was redone in HBase 0.96. See Migration to the New
Metrics Hotness – Metrics2 by Elliott Clark for detail.
diff --git
a/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/upgrading/version-number.mdx
b/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/upgrading/version-number.mdx
index 21293d61f04..000143c2004 100644
---
a/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/upgrading/version-number.mdx
+++
b/hbase-website/app/pages/_docs/docs/_mdx/(multi-page)/upgrading/version-number.mdx
@@ -84,17 +84,17 @@ In addition to the usual API versioning considerations
HBase has other compatibi
#### Summary [!toc]
-- A patch upgrade is a drop-in replacement. Any change that is not Java binary
and source compatible would not be allowed. Downgrading versions within patch
releases may not be compatible. [^2]
+- A patch upgrade is a drop-in replacement. Any change that is not Java binary
and source compatible would not be allowed.[^2] Downgrading versions within
patch releases may not be compatible.
- A minor upgrade requires no application/client code modification. Ideally it
would be a drop-in replacement but client code, coprocessors, filters, etc
might have to be recompiled if new jars are used.
- A major upgrade allows the HBase community to make breaking changes.
-#### Compatibility Matrix: [!toc]
+#### Compatibility Matrix: [^3] [!toc]
| | Major | Minor | Patch |
| ----------------------------------------- | :----: | :---: | :---: |
| Client-Server wire Compatibility | N | Y | Y |
| Server-Server Compatibility | N | Y | Y |
-| File Format Compatibility | N [^1] | Y | Y |
+| File Format Compatibility | N [^4] | Y | Y |
| Client API Compatibility | N | Y | Y |
| Client Binary Compatibility | N | N | Y |
| **Server-Side Limited API Compatibility** | | | |
@@ -145,6 +145,10 @@ All classes annotated with InterfaceAudience.Private or
all classes that do not
When we say two HBase versions are compatible, we mean that the versions are
wire and binary compatible. Compatible HBase versions means that clients can
talk to compatible but differently versioned servers. It means too that you can
just swap out the jars of one version and replace them with the jars of
another, compatible version and all will just work. Unless otherwise specified,
HBase point versions are (mostly) binary compatible. You can safely do rolling
upgrades between binary com [...]
-[^1]: Running an offline upgrade tool without downgrade might be needed. We
will typically only support migrating data from major version X to major
version X+1.
+[^1]: See 'Source Compatibility'
https://wiki.openjdk.org/spaces/csr/pages/32342052/Kinds+of+Compatibility
-[^2]: See http://docs.oracle.com/javase/specs/jls/se7/html/jls-13.html.
+[^2]: See http://docs.oracle.com/javase/specs/jls/se8/html/jls-13.html.
+
+[^3]: Note that this indicates what could break, not that it will break. We
will/should add specifics in our release notes.
+
+[^4]: Running an offline upgrade tool without downgrade might be needed. We
will typically only support migrating data from major version X to major
version X+1.
diff --git a/hbase-website/unit-tests/validate-headings.test.ts
b/hbase-website/unit-tests/validate-headings.test.ts
index 6fd39dc782f..c8e6955bf6a 100644
--- a/hbase-website/unit-tests/validate-headings.test.ts
+++ b/hbase-website/unit-tests/validate-headings.test.ts
@@ -99,9 +99,17 @@ describe("MDX Heading ID Uniqueness Validation", () => {
});
}
+ // IDs that are auto-generated by the markdown processor and cannot be
+ // overridden by authors. "footnote-label" is emitted by the footnotes
+ // plugin for every page that contains footnotes; it is expected to appear
+ // in multiple pages and does not cause a collision problem in practice
+ // because the single-page build suppresses the per-page footnote sections.
+ const IGNORED_IDS = new Set(["footnote-label"]);
+
// Find IDs that appear in multiple different pages
const duplicates = Array.from(idToPages.entries())
- .filter(([, occurrences]) => {
+ .filter(([id, occurrences]) => {
+ if (IGNORED_IDS.has(id)) return false;
// Check if this ID appears in multiple DIFFERENT pages
const uniquePages = new Set(occurrences.map((o) => o.url));
return uniquePages.size > 1;
diff --git a/hbase-website/unit-tests/validate-references.test.ts
b/hbase-website/unit-tests/validate-references.test.ts
new file mode 100644
index 00000000000..bf66d79e85c
--- /dev/null
+++ b/hbase-website/unit-tests/validate-references.test.ts
@@ -0,0 +1,92 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+import { describe, it, expect } from "vitest";
+import fs from "node:fs";
+import path from "node:path";
+
+const MULTI_PAGE_DIR = path.join(process.cwd(),
"app/pages/_docs/docs/_mdx/(multi-page)");
+
+function collectMdxFiles(dir: string): string[] {
+ const results: string[] = [];
+ for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
+ const fullPath = path.join(dir, entry.name);
+ if (entry.isDirectory()) {
+ results.push(...collectMdxFiles(fullPath));
+ } else if (entry.isFile() && entry.name.endsWith(".mdx")) {
+ results.push(fullPath);
+ }
+ }
+ return results;
+}
+
+/** Extract all footnote definition IDs (e.g. [^1]:, [^note]:) from a file. */
+function parseFootnoteIds(content: string): string[] {
+ const re = /^\[\^([^\]]+)\]:/gm;
+ const ids: string[] = [];
+ let m: RegExpExecArray | null;
+ while ((m = re.exec(content)) !== null) {
+ ids.push(m[1]);
+ }
+ return ids;
+}
+
+describe("MDX Footnote Reference Uniqueness Validation", () => {
+ it("should not have duplicate footnote reference IDs across all multi-page
documentation files", () => {
+ const files = collectMdxFiles(MULTI_PAGE_DIR);
+
+ // Map of footnote ID -> array of relative file paths that define it
+ const idToFiles = new Map<string, string[]>();
+
+ for (const filePath of files) {
+ const content = fs.readFileSync(filePath, "utf-8");
+ const ids = parseFootnoteIds(content);
+ const relPath = path.relative(process.cwd(), filePath);
+
+ for (const id of ids) {
+ if (!idToFiles.has(id)) {
+ idToFiles.set(id, []);
+ }
+ idToFiles.get(id)!.push(relPath);
+ }
+ }
+
+ const duplicates = Array.from(idToFiles.entries())
+ .filter(([, filePaths]) => filePaths.length > 1)
+ .map(([id, filePaths]) => ({ id, filePaths }));
+
+ if (duplicates.length > 0) {
+ console.error("\n❌ Duplicate footnote reference IDs found across
different files:\n");
+
+ duplicates.forEach(({ id, filePaths }) => {
+ console.error(` Reference ID: "[^${id}]"`);
+ console.error(` Defined in ${filePaths.length} files:\n`);
+ filePaths.forEach((f) => console.error(` • ${f}`));
+ console.error();
+ });
+
+ console.error(
+ "💡 To fix: Footnote IDs must be unique across all multi-page docs
because\n" +
+ " they are combined into a single page. Rename conflicting
references to\n" +
+ " use unique identifiers, e.g. [^1] → [^version-number-1].\n"
+ );
+ }
+
+ expect(duplicates.length).toBe(0);
+ });
+});