This is an automated email from the ASF dual-hosted git repository.

yuxia pushed a commit to branch stagreadme-tool
in repository https://gitbox.apache.org/repos/asf/fluss-rust.git

commit ba112b4d36e3f10224641fe0918a9dfd6d50a402
Author: luoyuxia <[email protected]>
AuthorDate: Tue Mar 3 15:28:44 2026 +0800

    stage stagreadme-tool
---
 Cargo.toml                                   |   2 +-
 crates/staging-readme-preview/Cargo.toml     |  21 ++++++
 crates/staging-readme-preview/README.md      | 104 +++++++++++++++++++++++++++
 crates/staging-readme-preview/README.tool.md |  24 +++++++
 crates/staging-readme-preview/src/lib.rs     |   1 +
 5 files changed, 151 insertions(+), 1 deletion(-)

diff --git a/Cargo.toml b/Cargo.toml
index 4099d9a..d0f8348 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -29,7 +29,7 @@ keywords = ["fluss", "streaming-storage", "datalake"]
 
 [workspace]
 resolver = "2"
-members = ["crates/fluss", "crates/examples", "bindings/python", 
"bindings/cpp"]
+members = ["crates/fluss", "crates/examples", "bindings/python", 
"bindings/cpp", "crates/staging-readme-preview"]
 
 [workspace.dependencies]
 fluss = { package = "fluss-rs", version = "0.2.0", path = "crates/fluss", 
features = ["storage-all"] }
diff --git a/crates/staging-readme-preview/Cargo.toml 
b/crates/staging-readme-preview/Cargo.toml
new file mode 100644
index 0000000..c342eab
--- /dev/null
+++ b/crates/staging-readme-preview/Cargo.toml
@@ -0,0 +1,21 @@
+# Minimal crate to publish to staging.crates.io and preview the fluss-rs 
README.
+# No dependencies — copy README from ../fluss/README.md and publish.
+# Reuses workspace metadata so package info stays in sync with fluss-rs.
+
+[package]
+name = "fluss-rs-readme-preview"
+edition = { workspace = true }
+license.workspace = true
+rust-version = { workspace = true }
+version = "0.1.4"
+authors = { workspace = true }
+description = "The official Rust client of Apache Fluss (Incubating)"
+homepage = "https://clients.fluss.apache.org/"
+repository = { workspace = true }
+keywords = { workspace = true }
+categories = { workspace = true }
+documentation = "https://docs.rs/fluss-rs"
+publish = ["staging"]
+
+[lib]
+path = "src/lib.rs"
diff --git a/crates/staging-readme-preview/README.md 
b/crates/staging-readme-preview/README.md
new file mode 100644
index 0000000..169ed3a
--- /dev/null
+++ b/crates/staging-readme-preview/README.md
@@ -0,0 +1,104 @@
+# Apache Fluss (Incubating) Official Rust Client
+
+Official Rust client library for [Apache Fluss 
(Incubating)](https://fluss.apache.org/).
+
+[![crates.io](https://img.shields.io/crates/v/fluss-rs.svg)](https://crates.io/crates/fluss-rs)
+[![docs.rs](https://img.shields.io/docsrs/fluss-rs)](https://docs.rs/fluss-rs/)
+
+## Usage
+
+The following example shows both **primary key (KV) tables** and **log 
tables** in one flow: connect, create a KV table (upsert + lookup), then create 
a log table (append + scan).
+
+```rust
+use fluss::client::EARLIEST_OFFSET;
+use fluss::client::FlussConnection;
+use fluss::config::Config;
+use fluss::error::Result;
+use fluss::metadata::{DataTypes, Schema, TableDescriptor, TablePath};
+use fluss::row::{GenericRow, InternalRow};
+use std::time::Duration;
+
+#[tokio::main]
+async fn main() -> Result<()> {
+    let mut config = Config::default();
+    config.bootstrap_servers = "127.0.0.1:9123".to_string();
+    let connection = FlussConnection::new(config).await?;
+    let admin = connection.get_admin().await?;
+
+    // ---- Primary key (KV) table: upsert and lookup ----
+    let kv_path = TablePath::new("fluss", "users");
+    let mut kv_schema = Schema::builder()
+        .column("id", DataTypes::int())
+        .column("name", DataTypes::string())
+        .column("age", DataTypes::bigint())
+        .primary_key(vec!["id"]);
+    let kv_descriptor = TableDescriptor::builder()
+        .schema(kv_schema.build()?)
+        .build()?;
+    admin.create_table(&kv_path, &kv_descriptor, false).await?;
+
+    let kv_table = connection.get_table(&kv_path).await?;
+    let upsert_writer = kv_table.new_upsert()?.create_writer()?;
+    let mut row = GenericRow::new(3);
+    row.set_field(0, 1i32);
+    row.set_field(1, "Alice");
+    row.set_field(2, 30i64);
+    upsert_writer.upsert(&row)?;
+    upsert_writer.flush().await?;
+
+    let mut lookuper = kv_table.new_lookup()?.create_lookuper()?;
+    let mut key = GenericRow::new(1);
+    key.set_field(0, 1i32);
+    let result = lookuper.lookup(&key).await?;
+    if let Some(r) = result.get_single_row()? {
+        println!("KV lookup: id={}, name={}, age={}",
+                 r.get_int(0)?, r.get_string(1)?, r.get_long(2)?);
+    }
+
+    // ---- Log table: append and scan ----
+    let log_path = TablePath::new("fluss", "events");
+    let mut log_schema_builder = Schema::builder()
+        .column("ts", DataTypes::bigint())
+        .column("message", DataTypes::string());
+    let log_descriptor = TableDescriptor::builder()
+        .schema(log_schema_builder.build()?)
+        .build()?;
+    admin.create_table(&log_path, &log_descriptor, false).await?;
+
+    let log_table = connection.get_table(&log_path).await?;
+    let append_writer = log_table.new_append()?.create_writer()?;
+    let mut event = GenericRow::new(2);
+    event.set_field(0, 1700000000i64);
+    event.set_field(1, "hello");
+    append_writer.append(&event)?;
+    append_writer.flush().await?;
+
+    let scanner = log_table.new_scan().create_log_scanner()?;
+    scanner.subscribe(0, EARLIEST_OFFSET).await?;
+    let scan_records = scanner.poll(Duration::from_secs(1)).await?;
+    for record in scan_records {
+        let r = record.row();
+        println!("Log scan: ts={}, message={}", r.get_long(0)?, 
r.get_string(1)?);
+    }
+
+    Ok(())
+}
+```
+
+## Storage Support
+
+The Fluss client reads remote data by accessing Fluss’s **remote files** (e.g. 
log segments and snapshots) directly. The following **remote file systems** are 
supported; enable the matching feature(s) for your deployment:
+
+| Storage Backend | Feature Flag | Status | Description |
+|----------------|--------------|--------|-------------|
+| Local Filesystem | `storage-fs` | ✅ Stable | Local filesystem storage |
+| Amazon S3 | `storage-s3` | ✅ Stable | Amazon S3 storage |
+| Alibaba Cloud OSS | `storage-oss` | ✅ Stable | Alibaba Cloud Object Storage 
Service |
+
+You can enable all storage backends at once using the `storage-all` feature 
flag.
+
+Example usage in Cargo.toml:
+```toml
+[dependencies]
+fluss-rs = { version = "0.1.0", features = ["storage-s3", "storage-fs"] }
+```
diff --git a/crates/staging-readme-preview/README.tool.md 
b/crates/staging-readme-preview/README.tool.md
new file mode 100644
index 0000000..14afe86
--- /dev/null
+++ b/crates/staging-readme-preview/README.tool.md
@@ -0,0 +1,24 @@
+# Staging README preview
+
+This crate has **no dependencies**. It exists only to publish to 
[staging.crates.io](https://staging.crates.io) and preview how the fluss-rs 
README will look on a crate page. It reuses workspace metadata (edition, 
license, version, etc.) so package info stays in sync with fluss-rs.
+
+## Usage
+
+1. **Update README** (when you change the real one):
+   ```bash
+   cp crates/fluss/README.md crates/staging-readme-preview/README.md
+   ```
+
+2. **Login to staging** (once; get token from 
https://staging.crates.io/settings/tokens):
+   ```bash
+   cargo login --registry staging
+   ```
+
+3. **Publish to staging** (from repo root):
+   ```bash
+   cargo publish -p fluss-rs-readme-preview --registry staging
+   ```
+
+4. Open https://staging.crates.io/crates/fluss-rs-readme-preview to view the 
README.
+
+Bump `version` in the root `[workspace.package]` (or override in this crate's 
Cargo.toml) if you publish again; staging requires version bumps.
diff --git a/crates/staging-readme-preview/src/lib.rs 
b/crates/staging-readme-preview/src/lib.rs
new file mode 100644
index 0000000..b0d33a4
--- /dev/null
+++ b/crates/staging-readme-preview/src/lib.rs
@@ -0,0 +1 @@
+// Dummy lib so the crate builds. Only used for README preview on staging.

Reply via email to