This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 5b60553da3 [python] Prepare to support Python 3.6 (#6174)
5b60553da3 is described below

commit 5b60553da35a060663cf8bf96d3fb5bb13ffbc29
Author: jerry <[email protected]>
AuthorDate: Fri Aug 29 14:14:45 2025 +0800

    [python] Prepare to support Python 3.6 (#6174)
---
 paimon-python/pypaimon/api/api_response.py         |   4 +-
 paimon-python/pypaimon/api/client.py               |  10 +--
 paimon-python/pypaimon/api/resource_paths.py       |  33 +++----
 paimon-python/pypaimon/api/rest_api.py             |   2 +-
 paimon-python/pypaimon/api/token_loader.py         |  16 ++--
 paimon-python/pypaimon/catalog/catalog_factory.py  |   7 +-
 paimon-python/pypaimon/catalog/database.py         |   4 +-
 paimon-python/pypaimon/common/identifier.py        |   6 +-
 paimon-python/pypaimon/common/predicate.py         |   6 +-
 paimon-python/pypaimon/filesystem/pvfs.py          | 100 ++++++++-------------
 .../pypaimon/manifest/schema/data_file_meta.py     |  14 +--
 .../pypaimon/manifest/schema/simple_stats.py       |   4 +-
 paimon-python/pypaimon/read/split_read.py          |   4 +-
 paimon-python/pypaimon/schema/data_types.py        |  34 +++----
 paimon-python/pypaimon/schema/schema_manager.py    |   4 +-
 15 files changed, 115 insertions(+), 133 deletions(-)
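
The changes below follow a handful of recurring substitutions so the package can run on older interpreters: built-in generic annotations such as dict[str, str] (Python 3.9+) become typing.Dict[str, str], PEP 604 unions such as str | None (Python 3.10+) become Optional[str], time.time_ns() (Python 3.7+) is replaced by scaling time.time(), and f-strings are rewritten to str.format(), which runs on any Python 3 interpreter. The sketch below is illustrative only (the names are not Paimon APIs) and shows each pattern in its Python 3.6-compatible form:

    # Illustrative sketch of the substitution patterns in this diff; the
    # function names here are hypothetical, not from the Paimon codebase.
    import time
    from typing import Dict, Optional

    def get_options() -> Dict[str, str]:      # instead of dict[str, str] (3.9+)
        return {"metastore": "filesystem"}

    def find_branch(name: Optional[str] = None) -> Optional[str]:  # instead of str | None (3.10+)
        return name

    def build_path(base: str, table: str) -> str:  # str.format() instead of an f-string
        return "{}/{}".format(base, table)

    def now_ns() -> int:                      # time.time_ns() exists only on 3.7+
        return int(time.time() * 1_000_000_000)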

diff --git a/paimon-python/pypaimon/api/api_response.py b/paimon-python/pypaimon/api/api_response.py
index 0cc82f31b5..12f0bb39ea 100644
--- a/paimon-python/pypaimon/api/api_response.py
+++ b/paimon-python/pypaimon/api/api_response.py
@@ -83,7 +83,7 @@ class AuditRESTResponse(RESTResponse):
     def get_updated_by(self) -> Optional[str]:
         return self.updated_by
 
-    def put_audit_options_to(self, options: dict[str, str]) -> None:
+    def put_audit_options_to(self, options: Dict[str, str]) -> None:
         """Puts audit-related options into the provided dictionary."""
         options[self.FIELD_OWNER] = self.get_owner()
         options[self.FIELD_CREATED_BY] = str(self.get_created_by())
@@ -204,7 +204,7 @@ class GetDatabaseResponse(AuditRESTResponse):
     name: Optional[str] = json_field(FIELD_NAME, default=None)
     location: Optional[str] = json_field(FIELD_LOCATION, default=None)
     options: Optional[Dict[str, str]] = json_field(
-        FIELD_OPTIONS, default_factory=dict)
+        FIELD_OPTIONS, default_factory=Dict)
 
     def __init__(
             self,
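
A side note on the default_factory change in the hunk above: dataclasses resolve default_factory by calling it with no arguments, so the factory must be a plain callable. The builtin dict works uniformly across Python versions, whereas instantiating typing.Dict is version-dependent. A minimal standard-library sketch (Paimon's json_field helper is not modeled here):

    # Minimal sketch, standard library only; json_field is not modeled.
    from dataclasses import dataclass, field
    from typing import Dict, Optional

    @dataclass
    class DatabaseInfo:
        name: Optional[str] = None
        # The builtin dict is a portable zero-argument factory on every
        # Python 3 version.
        options: Dict[str, str] = field(default_factory=dict)

    info = DatabaseInfo(name="db1")
    assert info.options == {}
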
diff --git a/paimon-python/pypaimon/api/client.py b/paimon-python/pypaimon/api/client.py
index 9e30df598b..cb946b87be 100644
--- a/paimon-python/pypaimon/api/client.py
+++ b/paimon-python/pypaimon/api/client.py
@@ -96,7 +96,7 @@ class DefaultErrorHandler(ErrorHandler):
             message = error.message
         else:
             # If we have a requestId, append it to the message
-            message = f"{error.message} requestId:{request_id}"
+            message = "{} requestId:{}".format(error.message, request_id)
 
         # Handle different error codes
         if code == 400:
@@ -217,7 +217,7 @@ def _normalize_uri(uri: str) -> str:
         server_uri = server_uri[:-1]
 
     if not server_uri.startswith("http://") and not server_uri.startswith("https://"):
-        server_uri = f"http://{server_uri}"
+        server_uri = "http://{}".format(server_uri)
 
     return server_uri
 
@@ -344,7 +344,7 @@ class HttpClient(RESTClient):
 
         if query_params:
             query_string = urllib.parse.urlencode(query_params)
-            full_path = f"{full_path}?{query_string}"
+            full_path = "{}?{}".format(full_path, query_string)
 
         return full_path
 
@@ -356,14 +356,14 @@ class HttpClient(RESTClient):
                          headers: Optional[Dict[str, str]] = None,
                          response_type: Optional[Type[T]] = None) -> T:
         try:
-            start_time = time.time_ns()
+            start_time = int(time.time() * 1_000_000_000)
             response = self.session.request(
                 method=method,
                 url=url,
                 data=data.encode('utf-8') if data else None,
                 headers=headers
             )
-            duration_ms = (time.time_ns() - start_time) // 1_000_000
+            duration_ms = (int(time.time() * 1_000_000_000) - start_time) // 1_000_000
             response_request_id = response.headers.get(self.REQUEST_ID_KEY, self.DEFAULT_REQUEST_ID)
 
             self.logger.info(
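
The timing change above works around time.time_ns(), which only exists on Python 3.7+. A rough standalone sketch of the same measurement (the HTTP request itself is stubbed with a sleep):

    # Rough sketch of the duration measurement; the request is stubbed.
    import time

    def measure_ms():
        start_ns = int(time.time() * 1_000_000_000)  # 3.6-compatible stand-in for time.time_ns()
        time.sleep(0.05)                             # placeholder for session.request(...)
        return (int(time.time() * 1_000_000_000) - start_ns) // 1_000_000

    print(measure_ms())  # roughly 50

For pure durations, time.monotonic() (available since Python 3.3) would also avoid wall-clock adjustments; the sketch simply mirrors the wall-clock approach used in the diff.
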
diff --git a/paimon-python/pypaimon/api/resource_paths.py b/paimon-python/pypaimon/api/resource_paths.py
index 0214ab0529..259ed57057 100644
--- a/paimon-python/pypaimon/api/resource_paths.py
+++ b/paimon-python/pypaimon/api/resource_paths.py
@@ -15,7 +15,7 @@
 #  specific language governing permissions and limitations
 #  under the License.
 
-from typing import Optional
+from typing import Optional, Dict
 
 from pypaimon.api.rest_util import RESTUtil
 from pypaimon.common.config import CatalogOptions
@@ -28,43 +28,44 @@ class ResourcePaths:
     TABLE_DETAILS = "table-details"
 
     def __init__(self, prefix: str):
-        self.base_path = f"/{self.V1}/{prefix}".rstrip("/")
+        self.base_path = "/{}/{}".format(self.V1, prefix).rstrip("/")
 
     @classmethod
     def for_catalog_properties(
-            cls, options: dict[str, str]) -> "ResourcePaths":
+            cls, options: Dict[str, str]) -> "ResourcePaths":
         prefix = options.get(CatalogOptions.PREFIX, "")
         return cls(prefix)
 
     @staticmethod
     def config() -> str:
-        return f"/{ResourcePaths.V1}/config"
+        return "/{}/config".format(ResourcePaths.V1)
 
     def databases(self) -> str:
-        return f"{self.base_path}/{self.DATABASES}"
+        return "{}/{}".format(self.base_path, self.DATABASES)
 
     def database(self, name: str) -> str:
-        return f"{self.base_path}/{self.DATABASES}/{RESTUtil.encode_string(name)}"
+        return "{}/{}/{}".format(self.base_path, self.DATABASES, RESTUtil.encode_string(name))
 
     def tables(self, database_name: Optional[str] = None) -> str:
         if database_name:
-            return f"{self.base_path}/{self.DATABASES}/{RESTUtil.encode_string(database_name)}/{self.TABLES}"
-        return f"{self.base_path}/{self.TABLES}"
+            return "{}/{}/{}/{}".format(self.base_path, self.DATABASES,
+                                        RESTUtil.encode_string(database_name), self.TABLES)
+        return "{}/{}".format(self.base_path, self.TABLES)
 
     def table(self, database_name: str, table_name: str) -> str:
-        return (f"{self.base_path}/{self.DATABASES}/{RESTUtil.encode_string(database_name)}"
-                f"/{self.TABLES}/{RESTUtil.encode_string(table_name)}")
+        return ("{}/{}/{}/{}/{}".format(self.base_path, self.DATABASES, RESTUtil.encode_string(database_name),
+                self.TABLES, RESTUtil.encode_string(table_name)))
 
     def table_details(self, database_name: str) -> str:
-        return f"{self.base_path}/{self.DATABASES}/{database_name}/{self.TABLE_DETAILS}"
+        return "{}/{}/{}/{}".format(self.base_path, self.DATABASES, database_name, self.TABLE_DETAILS)
 
     def table_token(self, database_name: str, table_name: str) -> str:
-        return (f"{self.base_path}/{self.DATABASES}/{RESTUtil.encode_string(database_name)}"
-                f"/{self.TABLES}/{RESTUtil.encode_string(table_name)}/token")
+        return ("{}/{}/{}/{}/{}/token".format(self.base_path, self.DATABASES, RESTUtil.encode_string(database_name),
+                self.TABLES, RESTUtil.encode_string(table_name)))
 
     def rename_table(self) -> str:
-        return f"{self.base_path}/{self.TABLES}/rename"
+        return "{}/{}/rename".format(self.base_path, self.TABLES)
 
     def commit_table(self, database_name: str, table_name: str) -> str:
-        return (f"{self.base_path}/{self.DATABASES}/{RESTUtil.encode_string(database_name)}"
-                f"/{self.TABLES}/{RESTUtil.encode_string(table_name)}/commit")
+        return ("{}/{}/{}/{}/{}/commit".format(self.base_path, self.DATABASES, RESTUtil.encode_string(database_name),
+                self.TABLES, RESTUtil.encode_string(table_name)))
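
The helpers above assemble REST resource paths from encoded identifiers. A rough standalone equivalent is sketched below; urllib.parse.quote stands in for RESTUtil.encode_string, and the literal "databases"/"tables" segments are assumed values, since the DATABASES and TABLES constants are not shown in this diff:

    # Standalone sketch; quote() stands in for RESTUtil.encode_string and the
    # "databases"/"tables" segment names are assumptions.
    from urllib.parse import quote

    def table_path(base_path, database_name, table_name):
        return "{}/databases/{}/tables/{}".format(
            base_path, quote(database_name, safe=""), quote(table_name, safe=""))

    print(table_path("/v1/prefix", "my db", "orders"))
    # /v1/prefix/databases/my%20db/tables/orders
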
diff --git a/paimon-python/pypaimon/api/rest_api.py b/paimon-python/pypaimon/api/rest_api.py
index 2e341733bd..037a531e58 100644
--- a/paimon-python/pypaimon/api/rest_api.py
+++ b/paimon-python/pypaimon/api/rest_api.py
@@ -118,7 +118,7 @@ class RESTApi:
 
         return results
 
-    def get_options(self) -> dict[str, str]:
+    def get_options(self) -> Dict[str, str]:
         return self.options
 
     def list_databases(self) -> List[str]:
diff --git a/paimon-python/pypaimon/api/token_loader.py b/paimon-python/pypaimon/api/token_loader.py
index 8e65846bf8..6ee25c8777 100644
--- a/paimon-python/pypaimon/api/token_loader.py
+++ b/paimon-python/pypaimon/api/token_loader.py
@@ -152,7 +152,7 @@ class DLFECSTokenLoader(DLFTokenLoader):
             return self._get_token(token_url)
 
         except Exception as e:
-            raise RuntimeError(f"Token loading failed: {e}") from e
+            raise RuntimeError("Token loading failed: {}".format(e)) from e
 
     def description(self) -> str:
         return self.ecs_metadata_url
@@ -161,7 +161,7 @@ class DLFECSTokenLoader(DLFTokenLoader):
         try:
             return self._get_response_body(url)
         except Exception as e:
-            raise RuntimeError(f"Get role failed, error: {e}") from e
+            raise RuntimeError("Get role failed, error: {}".format(e)) from e
 
     def _get_token(self, url: str) -> DLFToken:
         try:
@@ -169,9 +169,9 @@ class DLFECSTokenLoader(DLFTokenLoader):
             return JSON.from_json(token_json, DLFToken)
         except OSError as e:
             # Python equivalent of UncheckedIOException
-            raise OSError(f"IO error while getting token: {e}") from e
+            raise OSError("IO error while getting token: {}".format(e)) from e
         except Exception as e:
-            raise RuntimeError(f"Get token failed, error: {e}") from e
+            raise RuntimeError("Get token failed, error: {}".format(e)) from e
 
     def _get_response_body(self, url: str) -> str:
         try:
@@ -182,7 +182,9 @@ class DLFECSTokenLoader(DLFTokenLoader):
                 raise RuntimeError("Get response failed, response is None")
 
             if not response.ok:
-                raise RuntimeError(f"Get response failed, response: {response.status_code} {response.reason}")
+                raise RuntimeError("Get response failed, response: {} {}".format(
+                    response.status_code, response.reason
+                ))
 
             response_body = response.text
             if response_body is None:
@@ -193,9 +195,9 @@ class DLFECSTokenLoader(DLFTokenLoader):
             # Re-raise RuntimeError as-is
             raise
         except RequestException as e:
-            raise RuntimeError(f"Request failed: {e}") from e
+            raise RuntimeError("Request failed: {}".format(e)) from e
         except Exception as e:
-            raise RuntimeError(f"Get response failed, error: {e}") from e
+            raise RuntimeError("Get response failed, error: {}".format(e)) from e
 
 
 # Factory and utility functions
diff --git a/paimon-python/pypaimon/catalog/catalog_factory.py b/paimon-python/pypaimon/catalog/catalog_factory.py
index 865ffe4766..8986f3f810 100644
--- a/paimon-python/pypaimon/catalog/catalog_factory.py
+++ b/paimon-python/pypaimon/catalog/catalog_factory.py
@@ -15,6 +15,7 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 ################################################################################
+from typing import Dict
 
 from pypaimon.api.options import Options
 from pypaimon.catalog.catalog import Catalog
@@ -32,12 +33,12 @@ class CatalogFactory:
     }
 
     @staticmethod
-    def create(catalog_options: dict) -> Catalog:
+    def create(catalog_options: Dict) -> Catalog:
         identifier = catalog_options.get(CatalogOptions.METASTORE, "filesystem")
         catalog_class = CatalogFactory.CATALOG_REGISTRY.get(identifier)
         if catalog_class is None:
-            raise ValueError(f"Unknown catalog identifier: {identifier}. "
-                             f"Available types: {list(CatalogFactory.CATALOG_REGISTRY.keys())}")
+            raise ValueError("Unknown catalog identifier: {}. "
+                             "Available types: {}".format(identifier, list(CatalogFactory.CATALOG_REGISTRY.keys())))
         return catalog_class(
             CatalogContext.create_from_options(Options(catalog_options))) if identifier == "rest" else catalog_class(
             catalog_options)
diff --git a/paimon-python/pypaimon/catalog/database.py b/paimon-python/pypaimon/catalog/database.py
index 35da9ed2e7..74d0ea6522 100644
--- a/paimon-python/pypaimon/catalog/database.py
+++ b/paimon-python/pypaimon/catalog/database.py
@@ -16,13 +16,13 @@
 # limitations under the License.
 ################################################################################
 
-from typing import Optional
+from typing import Optional, Dict
 
 
 class Database:
     """Structure of a Database."""
 
-    def __init__(self, name: str, options: dict, comment: Optional[str] = None):
+    def __init__(self, name: str, options: Dict, comment: Optional[str] = None):
         self.name = name
         self.options = options
         self.comment = comment
diff --git a/paimon-python/pypaimon/common/identifier.py b/paimon-python/pypaimon/common/identifier.py
index d3a4fcda7f..0731db50db 100644
--- a/paimon-python/pypaimon/common/identifier.py
+++ b/paimon-python/pypaimon/common/identifier.py
@@ -43,12 +43,12 @@ class Identifier:
         elif len(parts) == 3:
             return cls(parts[0], parts[1], parts[2])
         else:
-            raise ValueError(f"Invalid identifier format: {full_name}")
+            raise ValueError("Invalid identifier format: {}".format(full_name))
 
     def get_full_name(self) -> str:
         if self.branch_name:
-            return f"{self.database_name}.{self.object_name}.{self.branch_name}"
-        return f"{self.database_name}.{self.object_name}"
+            return "{}.{}.{}".format(self.database_name, self.object_name, self.branch_name)
+        return "{}.{}".format(self.database_name, self.object_name)
 
     def get_database_name(self) -> str:
         return self.database_name
diff --git a/paimon-python/pypaimon/common/predicate.py b/paimon-python/pypaimon/common/predicate.py
index a9fefcdef6..ee13aca99b 100644
--- a/paimon-python/pypaimon/common/predicate.py
+++ b/paimon-python/pypaimon/common/predicate.py
@@ -32,7 +32,7 @@ from pypaimon.table.row.internal_row import InternalRow
 class Predicate:
     method: str
     index: Optional[int]
-    field: str | None
+    field: Optional[str]
     literals: Optional[List[Any]] = None
 
     def test(self, record: InternalRow) -> bool:
@@ -80,7 +80,7 @@ class Predicate:
             t = any(p.test(record) for p in self.literals)
             return t
         else:
-            raise ValueError(f"Unsupported predicate method: {self.method}")
+            raise ValueError("Unsupported predicate method: {}".format(self.method))
 
     def to_arrow(self) -> pyarrow_compute.Expression | bool:
         if self.method == 'equal':
@@ -122,4 +122,4 @@ class Predicate:
             return reduce(lambda x, y: x | y,
                           [p.to_arrow() for p in self.literals])
         else:
-            raise ValueError(f"Unsupported predicate method: {self.method}")
+            raise ValueError("Unsupported predicate method: {}".format(self.method))
diff --git a/paimon-python/pypaimon/filesystem/pvfs.py b/paimon-python/pypaimon/filesystem/pvfs.py
index 7e60a558d1..d7dabbbcaa 100644
--- a/paimon-python/pypaimon/filesystem/pvfs.py
+++ b/paimon-python/pypaimon/filesystem/pvfs.py
@@ -49,7 +49,7 @@ class PVFSIdentifier(ABC):
     endpoint: str
 
     def get_cache_key(self) -> str:
-        return f"{self.catalog}.{self.__remove_endpoint_schema(self.endpoint)}"
+        return "{}.{}".format(self.catalog, self.__remove_endpoint_schema(self.endpoint))
 
     @staticmethod
     def __remove_endpoint_schema(url):
@@ -91,18 +91,18 @@ class PVFSTableIdentifier(PVFSIdentifier):
 
     def get_actual_path(self, storage_location: str):
         if self.sub_path:
-            return f'{storage_location.rstrip("/")}/{self.sub_path.lstrip("/")}'
+            return '{}/{}'.format(storage_location.rstrip("/"), self.sub_path.lstrip("/"))
         return storage_location
 
     def get_virtual_location(self):
-        return (f'{PROTOCOL_NAME}://{self.catalog}'
-                f'/{self.database}/{self.table}')
+        return ('{}://{}'.format(PROTOCOL_NAME, self.catalog) +
+                '/{}/{}'.format(self.database, self.table))
 
     def get_identifier(self):
         return Identifier.create(self.database, self.table)
 
     def name(self):
-        return f'{self.catalog}.{self.database}.{self.table}'
+        return '{}.{}.{}'.format(self.catalog, self.database, self.table)
 
 
 @dataclass
@@ -218,7 +218,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
     def info(self, path, **kwargs):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            return self._create_dir_detail(f'{PROTOCOL_NAME}://{pvfs_identifier.catalog}')
+            return self._create_dir_detail('{}://{}'.format(PROTOCOL_NAME, pvfs_identifier.catalog))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             return self._create_dir_detail(
                 self._convert_database_virtual_path(pvfs_identifier.catalog, pvfs_identifier.database)
@@ -279,9 +279,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                 self._strip_storage_protocol(storage_type, target_actual_path),
             )
             return None
-        raise Exception(
-            f"cp is not supported for path: {path1} to path: {path2}"
-        )
+        raise Exception("cp is not supported for path: {} to path: {}".format(path1, path2))
 
     def mv(self, path1, path2, recursive=False, maxdepth=None, **kwargs):
         source = self._extract_pvfs_identifier(path1)
@@ -315,9 +313,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                         self._strip_storage_protocol(storage_type, target_actual_path),
                     )
                 return None
-        raise Exception(
-            f"Mv is not supported for path: {path1} to path: {path2}"
-        )
+        raise Exception("Mv is not supported for path: {} to path: {}".format(path1, path2))
 
     def rm(self, path, recursive=False, maxdepth=None):
         pvfs_identifier = self._extract_pvfs_identifier(path)
@@ -351,7 +347,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                 maxdepth,
             )
         raise Exception(
-            f"Rm is not supported for path: {path}."
+            "Rm is not supported for path: {}.".format(path)
         )
 
     def rm_file(self, path):
@@ -367,9 +363,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                 return fs.rm_file(
                     self._strip_storage_protocol(storage_type, actual_path),
                 )
-        raise Exception(
-            f"Rm file is not supported for path: {path}."
-        )
+        raise Exception("Rm is not supported for path: {}.".format(path))
 
     def rmdir(self, path):
         files = self.ls(path)
@@ -394,13 +388,9 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                 return fs.rmdir(
                     self._strip_storage_protocol(storage_type, actual_path)
                 )
-            raise Exception(
-                f"Rm dir is not supported for path: {path}."
-            )
+            raise Exception("Rm dir is not supported for path: {}.".format(path))
         else:
-            raise Exception(
-                f"Rm dir is not supported for path: {path} as it is not empty."
-            )
+            raise Exception("Rm dir is not supported for path: {} as it is not empty.".format(path))
 
     def open(
             self,
@@ -413,20 +403,16 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
     ):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            raise Exception(
-                f"open is not supported for path: {path}"
-            )
+            raise Exception("open is not supported for path: {}".format(path))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             raise Exception(
-                f"open is not supported for path: {path}"
+                "open is not supported for path: {}".format(path)
             )
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
             rest_api = self.__rest_api(pvfs_identifier)
             table_path = self._get_table_store(rest_api, pvfs_identifier).path
             if pvfs_identifier.sub_path is None:
-                raise Exception(
-                    f"open is not supported for path: {path}"
-                )
+                raise Exception("open is not supported for path: {}".format(path))
             else:
                 storage_type = self._get_storage_type(table_path)
                 storage_location = table_path
@@ -445,9 +431,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         rest_api = self.__rest_api(pvfs_identifier)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            raise Exception(
-                f"mkdir is not supported for path: {path}"
-            )
+            raise Exception("mkdir is not supported for path: {}".format(path))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             rest_api.create_database(pvfs_identifier.database, {})
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
@@ -488,9 +472,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         rest_api = self.__rest_api(pvfs_identifier)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            raise Exception(
-                f"makedirs is not supported for path: {path}"
-            )
+            raise Exception("makedirs is not supported for path: {}".format(path))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             try:
                 rest_api.create_database(pvfs_identifier.catalog, {})
@@ -528,9 +510,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         rest_api = self.__rest_api(pvfs_identifier)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            raise Exception(
-                f"created is not supported for path: {path}"
-            )
+            raise Exception("created is not supported for path: {}".format(path))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             return self.__converse_ts_to_datatime(rest_api.get_database(pvfs_identifier.database).created_at)
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
@@ -550,9 +530,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         rest_api = self.__rest_api(pvfs_identifier)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
-            raise Exception(
-                f"modified is not supported for path: {path}"
-            )
+            raise Exception("modified is not supported for path: {}".format(path))
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             return self.__converse_ts_to_datatime(rest_api.get_database(pvfs_identifier.database).updated_at)
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
@@ -573,16 +551,16 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(path)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
             raise Exception(
-                f"cat file is not supported for path: {path}"
+                "cat file is not supported for path: {}".format(path)
             )
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             raise Exception(
-                f"cat file is not supported for path: {path}"
+                "cat file is not supported for path: {}".format(path)
             )
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
             if pvfs_identifier.sub_path is None:
                 raise Exception(
-                    f"cat file is not supported for path: {path}"
+                    "cat file is not supported for path: {}".format(path)
                 )
             else:
                 rest_api = self.__rest_api(pvfs_identifier)
@@ -602,17 +580,17 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         pvfs_identifier = self._extract_pvfs_identifier(rpath)
         if isinstance(pvfs_identifier, PVFSCatalogIdentifier):
             raise Exception(
-                f"get file is not supported for path: {rpath}"
+                "get file is not supported for path: {}".format(rpath)
             )
         elif isinstance(pvfs_identifier, PVFSDatabaseIdentifier):
             raise Exception(
-                f"get file is not supported for path: {rpath}"
+                "get file is not supported for path: {}".format(rpath)
             )
         elif isinstance(pvfs_identifier, PVFSTableIdentifier):
             rest_api = self.__rest_api(pvfs_identifier)
             if pvfs_identifier.sub_path is None:
                 raise Exception(
-                    f"get file is not supported for path: {rpath}"
+                    "get file is not supported for path: {}".format(rpath)
                 )
             else:
                 table = self._get_table_store(rest_api, pvfs_identifier)
@@ -644,17 +622,17 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
     @staticmethod
     def _strip_storage_protocol(storage_type: StorageType, path: str):
         if storage_type == StorageType.LOCAL:
-            return path[len(f"{StorageType.LOCAL.value}:"):]
+            return path[len("{}:".format(StorageType.LOCAL.value)):]
 
         # OSS has different behavior than S3 and GCS, if we do not remove the
         # protocol, it will always return an empty array.
         if storage_type == StorageType.OSS:
-            if path.startswith(f"{StorageType.OSS.value}://"):
-                return path[len(f"{StorageType.OSS.value}://"):]
+            if path.startswith("{}://".format(StorageType.OSS.value)):
+                return path[len("{}://".format(StorageType.OSS.value)):]
             return path
 
         raise Exception(
-            f"Storage type:{storage_type} doesn't support now."
+            "Storage type:{} doesn't support now.".format(storage_type)
         )
 
     @staticmethod
@@ -695,7 +673,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
             catalog_name: str,
             database_name: str
     ):
-        return f'{PROTOCOL_NAME}://{catalog_name}/{database_name}'
+        return '{}://{}/{}'.format(PROTOCOL_NAME, catalog_name, database_name)
 
     @staticmethod
     def _convert_table_virtual_path(
@@ -703,7 +681,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
             database_name: str,
             table_name: str
     ):
-        return f'{PROTOCOL_NAME}://{catalog_name}/{database_name}/{table_name}'
+        return '{}://{}/{}/{}'.format(PROTOCOL_NAME, catalog_name, database_name, table_name)
 
     @staticmethod
     def _convert_actual_path(
@@ -719,21 +697,21 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
         if len(sub_location) == 0:
             return normalized_pvfs
         else:
-            return f'{normalized_pvfs}/{sub_location}'
+            return '{}/{}'.format(normalized_pvfs, sub_location)
 
     @staticmethod
     def _get_path_without_schema(storage_type: StorageType, path: str) -> str:
         if storage_type == StorageType.LOCAL and path.startswith(StorageType.LOCAL.value):
-            return path[len(f"{StorageType.LOCAL.value}://"):]
+            return path[len("{}://".format(StorageType.LOCAL.value)):]
         elif storage_type == StorageType.OSS and path.startswith(StorageType.OSS.value):
-            return path[len(f"{StorageType.OSS.value}://"):]
+            return path[len("{}://".format(StorageType.OSS.value)):]
         return path
 
     def _extract_pvfs_identifier(self, path: str) -> Optional['PVFSIdentifier']:
         if not isinstance(path, str):
             raise Exception("path is not a string")
         path_without_protocol = path
-        if path.startswith(f'{PROTOCOL_NAME}://'):
+        if path.startswith('{}://'.format(PROTOCOL_NAME)):
             path_without_protocol = path[7:]
 
         if not path_without_protocol:
@@ -856,7 +834,7 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
                 self._fs_cache[pvfs_table_identifier] = paimon_real_storage
             else:
                 raise Exception(
-                    f"Storage type: `{storage_type}` doesn't support now."
+                    "Storage type: `{}` doesn't support 
now.".format(storage_type)
                 )
             return fs
         finally:
@@ -864,12 +842,12 @@ class PaimonVirtualFileSystem(fsspec.AbstractFileSystem):
 
     @staticmethod
     def _get_storage_type(path: str):
-        if path.startswith(f"{StorageType.LOCAL.value}:/"):
+        if path.startswith("{}:/".format(StorageType.LOCAL.value)):
             return StorageType.LOCAL
-        elif path.startswith(f"{StorageType.OSS.value}://"):
+        elif path.startswith("{}://".format(StorageType.OSS.value)):
             return StorageType.OSS
         raise Exception(
-            f"Storage type doesn't support now. Path:{path}"
+            "Storage type doesn't support now. Path:{}".format(path)
         )
 
     @staticmethod
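
The storage helpers above strip a scheme prefix before handing paths to the underlying filesystem. A loose sketch of that logic is shown below, assuming the StorageType values are the strings "file" and "oss" (the enum itself is not part of this diff):

    # Loose sketch; "file" and "oss" are assumed StorageType values.
    def strip_storage_protocol(storage_type, path):
        if storage_type == "file":
            return path[len("{}:".format(storage_type)):]
        if storage_type == "oss":
            prefix = "{}://".format(storage_type)
            return path[len(prefix):] if path.startswith(prefix) else path
        raise ValueError("Unsupported storage type: {}".format(storage_type))

    print(strip_storage_protocol("oss", "oss://bucket/warehouse/db/tbl"))
    # bucket/warehouse/db/tbl
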
diff --git a/paimon-python/pypaimon/manifest/schema/data_file_meta.py b/paimon-python/pypaimon/manifest/schema/data_file_meta.py
index e1f60bf2e1..cd1034ead9 100644
--- a/paimon-python/pypaimon/manifest/schema/data_file_meta.py
+++ b/paimon-python/pypaimon/manifest/schema/data_file_meta.py
@@ -19,7 +19,7 @@
 from dataclasses import dataclass
 from datetime import datetime
 from pathlib import Path
-from typing import List
+from typing import List, Optional
 
 from pypaimon.manifest.schema.simple_stats import (SIMPLE_STATS_SCHEMA,
                                                    SimpleStats)
@@ -41,12 +41,12 @@ class DataFileMeta:
     level: int
     extra_files: List[str]
 
-    creation_time: datetime | None = None
-    delete_row_count: int | None = None
-    embedded_index: bytes | None = None
-    file_source: str | None = None
-    value_stats_cols: List[str] | None = None
-    external_path: str | None = None
+    creation_time: Optional[datetime] = None
+    delete_row_count: Optional[int] = None
+    embedded_index: Optional[bytes] = None
+    file_source: Optional[str] = None
+    value_stats_cols: Optional[List[str]] = None
+    external_path: Optional[str] = None
 
     # not a schema field, just for internal usage
     file_path: str = None
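
The dataclass fields above drop PEP 604 unions (X | None, Python 3.10+) in favour of Optional[X], which typing provides on 3.6. A trimmed sketch of the same idea:

    # Trimmed sketch; field names are illustrative, not the full DataFileMeta.
    from dataclasses import dataclass
    from datetime import datetime
    from typing import List, Optional

    @dataclass
    class FileMetaSketch:
        file_name: str
        row_count: int
        creation_time: Optional[datetime] = None      # instead of datetime | None
        value_stats_cols: Optional[List[str]] = None  # instead of List[str] | None

    meta = FileMetaSketch(file_name="data-0.parquet", row_count=42)
    assert meta.creation_time is None
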
diff --git a/paimon-python/pypaimon/manifest/schema/simple_stats.py b/paimon-python/pypaimon/manifest/schema/simple_stats.py
index dd6924fb2e..4a73d3eee4 100644
--- a/paimon-python/pypaimon/manifest/schema/simple_stats.py
+++ b/paimon-python/pypaimon/manifest/schema/simple_stats.py
@@ -17,7 +17,7 @@
 ################################################################################
 
 from dataclasses import dataclass
-from typing import List
+from typing import List, Optional
 
 from pypaimon.table.row.binary_row import BinaryRow
 
@@ -26,7 +26,7 @@ from pypaimon.table.row.binary_row import BinaryRow
 class SimpleStats:
     min_value: BinaryRow
     max_value: BinaryRow
-    null_count: List[int] | None
+    null_count: Optional[List[int]]
 
 
 SIMPLE_STATS_SCHEMA = {
diff --git a/paimon-python/pypaimon/read/split_read.py b/paimon-python/pypaimon/read/split_read.py
index f085bac444..99f8a4da21 100644
--- a/paimon-python/pypaimon/read/split_read.py
+++ b/paimon-python/pypaimon/read/split_read.py
@@ -19,7 +19,7 @@
 import os
 from abc import ABC, abstractmethod
 from functools import partial
-from typing import List, Optional
+from typing import List, Optional, Tuple
 
 from pypaimon.common.predicate import Predicate
 from pypaimon.read.interval_partition import IntervalPartition, SortedRun
@@ -182,7 +182,7 @@ class SplitRead(ABC):
         return [field.name for field in fields_without_partition]
 
     def _get_trimmed_fields(self, read_data_fields: List[DataField],
-                            all_data_fields: List[DataField]) -> tuple[List[int], List[DataField]]:
+                            all_data_fields: List[DataField]) -> Tuple[List[int], List[DataField]]:
         trimmed_mapping = [0] * len(read_data_fields)
         trimmed_fields = []
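
Similarly, the return annotation above switches from the built-in tuple[...] (subscriptable only on Python 3.9+) to typing.Tuple, which the added import provides. A small illustrative example:

    # Illustrative only; not the actual trimming logic from split_read.py.
    from typing import List, Tuple

    def split_even_odd(values: List[int]) -> Tuple[List[int], List[int]]:
        evens = [v for v in values if v % 2 == 0]
        odds = [v for v in values if v % 2 != 0]
        return evens, odds

    print(split_even_odd([1, 2, 3, 4]))  # ([2, 4], [1, 3])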
 
diff --git a/paimon-python/pypaimon/schema/data_types.py b/paimon-python/pypaimon/schema/data_types.py
index a5186cc56c..b53a779b41 100644
--- a/paimon-python/pypaimon/schema/data_types.py
+++ b/paimon-python/pypaimon/schema/data_types.py
@@ -83,7 +83,7 @@ class AtomicType(DataType):
 
     def __str__(self) -> str:
         null_suffix = "" if self.nullable else " NOT NULL"
-        return f"{self.type}{null_suffix}"
+        return "{}{}".format(self.type, null_suffix)
 
 
 @dataclass
@@ -107,7 +107,7 @@ class ArrayType(DataType):
 
     def __str__(self) -> str:
         null_suffix = "" if self.nullable else " NOT NULL"
-        return f"ARRAY<{self.element}>{null_suffix}"
+        return "ARRAY<{}>{}".format(self.element, null_suffix)
 
 
 @dataclass
@@ -120,7 +120,7 @@ class MultisetType(DataType):
 
     def to_dict(self) -> Dict[str, Any]:
         return {
-            "type": f"MULTISET{'<' + str(self.element) + '>' if self.element else ''}",
+            "type": "MULTISET{}".format('<' + str(self.element) + '>' if self.element else ''),
             "element": self.element.to_dict() if self.element else None,
             "nullable": self.nullable,
         }
@@ -131,7 +131,7 @@ class MultisetType(DataType):
 
     def __str__(self) -> str:
         null_suffix = "" if self.nullable else " NOT NULL"
-        return f"MULTISET<{self.element}>{null_suffix}"
+        return "MULTISET<{}>{}".format(self.element, null_suffix)
 
 
 @dataclass
@@ -150,7 +150,7 @@ class MapType(DataType):
 
     def to_dict(self) -> Dict[str, Any]:
         return {
-            "type": f"MAP<{self.key}, {self.value}>",
+            "type": "MAP<{}, {}>".format(self.key, self.value),
             "key": self.key.to_dict() if self.key else None,
             "value": self.value.to_dict() if self.value else None,
             "nullable": self.nullable,
@@ -162,7 +162,7 @@ class MapType(DataType):
 
     def __str__(self) -> str:
         null_suffix = "" if self.nullable else " NOT NULL"
-        return f"MAP<{self.key}, {self.value}>{null_suffix}"
+        return "MAP<{}, {}>{}".format(self.key, self.value, null_suffix)
 
 
 @dataclass
@@ -233,9 +233,9 @@ class RowType(DataType):
         return DataTypeParser.parse_data_type(data)
 
     def __str__(self) -> str:
-        field_strs = [f"{field.name}: {field.type}" for field in self.fields]
+        field_strs = ["{}: {}".format(field.name, field.type) for field in self.fields]
         null_suffix = "" if self.nullable else " NOT NULL"
-        return f"ROW<{', '.join(field_strs)}>{null_suffix}"
+        return "ROW<{}>{}".format(', '.join(field_strs), null_suffix)
 
 
 class Keyword(Enum):
@@ -291,7 +291,7 @@ class DataTypeParser:
                 type_upper, DataTypeParser.parse_nullability(type_string)
             )
         except ValueError:
-            raise Exception(f"Unknown type: {base_type}")
+            raise Exception("Unknown type: {}".format(base_type))
 
     @staticmethod
     def parse_data_type(
@@ -303,7 +303,7 @@ class DataTypeParser:
 
         if isinstance(json_data, dict):
             if "type" not in json_data:
-                raise ValueError(f"Missing 'type' field in JSON: {json_data}")
+                raise ValueError("Missing 'type' field in JSON: {}".format(json_data))
 
             type_string = json_data["type"]
 
@@ -342,7 +342,7 @@ class DataTypeParser:
             else:
                 return DataTypeParser.parse_atomic_type_sql_string(type_string)
 
-        raise ValueError(f"Cannot parse data type: {json_data}")
+        raise ValueError("Cannot parse data type: {}".format(json_data))
 
     @staticmethod
     def parse_data_field(
@@ -417,7 +417,7 @@ class PyarrowFieldParser:
                 else:
                     return pyarrow.decimal128(38, 18)
             else:
-                raise ValueError(f"Unsupported data type: {type_name}")
+                raise ValueError("Unsupported data type: {}".format(type_name))
         elif isinstance(data_type, ArrayType):
             return pyarrow.list_(PyarrowFieldParser.from_paimon_type(data_type.element))
         elif isinstance(data_type, MapType):
@@ -425,7 +425,7 @@ class PyarrowFieldParser:
             value_type = PyarrowFieldParser.from_paimon_type(data_type.value)
             return pyarrow.map_(key_type, value_type)
         else:
-            raise ValueError(f"Unsupported data type: {data_type}")
+            raise ValueError("Unsupported data type: {}".format(data_type))
 
     @staticmethod
     def from_paimon_field(data_field: DataField) -> pyarrow.Field:
@@ -471,7 +471,7 @@ class PyarrowFieldParser:
             match = re.match(r'decimal\((\d+),\s*(\d+)\)', type_name)
             if match:
                 precision, scale = map(int, match.groups())
-                type_name = f'DECIMAL({precision},{scale})'
+                type_name = 'DECIMAL({},{})'.format(precision, scale)
             else:
                 type_name = 'DECIMAL(38,18)'
         elif type_name.startswith('list'):
@@ -484,7 +484,7 @@ class PyarrowFieldParser:
             value_type = PyarrowFieldParser.to_paimon_type(pa_type.item_type, nullable)
             return MapType(nullable, key_type, value_type)
         else:
-            raise ValueError(f"Unknown type: {type_name}")
+            raise ValueError("Unknown type: {}".format(type_name))
         return AtomicType(type_name, nullable)
 
     @staticmethod
@@ -550,9 +550,9 @@ class PyarrowFieldParser:
                 "items": PyarrowFieldParser.to_avro_type(value_field.type, 
value_field.name)
             }
         elif pyarrow.types.is_struct(field_type):
-            return PyarrowFieldParser.to_avro_schema(field_type, name=f"{field_name}_record")
+            return PyarrowFieldParser.to_avro_schema(field_type, name="{}_record".format(field_name))
 
-        raise ValueError(f"Unsupported pyarrow type for Avro conversion: {field_type}")
+        raise ValueError("Unsupported pyarrow type for Avro conversion: {}".format(field_type))
 
     @staticmethod
     def to_avro_schema(pyarrow_schema: Union[pyarrow.Schema, pyarrow.StructType],
diff --git a/paimon-python/pypaimon/schema/schema_manager.py b/paimon-python/pypaimon/schema/schema_manager.py
index 2af8481b49..31297cc2b3 100644
--- a/paimon-python/pypaimon/schema/schema_manager.py
+++ b/paimon-python/pypaimon/schema/schema_manager.py
@@ -16,7 +16,7 @@
 # limitations under the License.
 ################################################################################
 from pathlib import Path
-from typing import Optional
+from typing import Optional, List
 
 from pypaimon.common.file_io import FileIO
 from pypaimon.common.json_util import JSON
@@ -71,7 +71,7 @@ class SchemaManager:
 
         return TableSchema.from_path(self.file_io, schema_path)
 
-    def _list_versioned_files(self) -> list[int]:
+    def _list_versioned_files(self) -> List[int]:
         if not self.file_io.exists(self.schema_path):
             return []
 

