amitgilad3 commented on code in PR #1036:
URL: https://github.com/apache/iceberg-python/pull/1036#discussion_r1720268289


##########
tests/integration/test_add_files.py:
##########
@@ -732,3 +732,76 @@ def test_add_files_subset_of_schema(spark: SparkSession, session_catalog: Catalog
     for column in written_arrow_table.column_names:
         for left, right in zip(lhs[column].to_list(), rhs[column].to_list()):
             assert left == right
+
+
+@pytest.mark.integration
+def test_add_files_with_duplicate_files_in_file_paths(spark: SparkSession, session_catalog: Catalog, format_version: int) -> None:
+    identifier = f"default.test_table_duplicate_add_files_v{format_version}"
+    tbl = _create_table(session_catalog, identifier, format_version)
+
+    file_paths = [f"s3://warehouse/default/unpartitioned/v{format_version}/test-{i}.parquet" for i in range(5)]
+    file_paths.append(f"s3://warehouse/default/unpartitioned/v{format_version}/test-1.parquet")
+    # write parquet files
+    for file_path in file_paths:
+        fo = tbl.io.new_output(file_path)
+        with fo.create(overwrite=True) as fos:
+            with pq.ParquetWriter(fos, schema=ARROW_SCHEMA) as writer:
+                writer.write_table(ARROW_TABLE)
+
+    # add the parquet files as data files
+    with pytest.raises(ValueError) as exc_info:
+        tbl.add_files(file_paths=file_paths)
+    assert "File paths must be unique" in str(exc_info.value)
+
+
+@pytest.mark.integration
+def test_add_files_that_referenced_by_current_snapshot(
+    spark: SparkSession, session_catalog: Catalog, format_version: int
+) -> None:
+    identifier = f"default.test_table_add_referenced_file_v{format_version}"
+    tbl = _create_table(session_catalog, identifier, format_version)
+
+    file_paths = [f"s3://warehouse/default/unpartitioned/v{format_version}/test-{i}.parquet" for i in range(5)]
+    referenced_file = f"s3://warehouse/default/unpartitioned/v{format_version}/test-1.parquet"
+    # write parquet files
+    for file_path in file_paths:
+        fo = tbl.io.new_output(file_path)
+        with fo.create(overwrite=True) as fos:
+            with pq.ParquetWriter(fos, schema=ARROW_SCHEMA) as writer:
+                writer.write_table(ARROW_TABLE)
+
+    # add the parquet files as data files
+    tbl.add_files(file_paths=file_paths)
+
+    with pytest.raises(ValueError) as exc_info:
+        tbl.add_files(file_paths=[referenced_file])
+    assert f"Cannot add files that are already referenced by table, files: 
{referenced_file}" in str(exc_info.value)
+
+
+@pytest.mark.integration
+def test_add_files_that_referenced_by_current_snapshot_with_check_duplicate_files_false(

Review Comment:
   So when you set `check_duplicate_files` to `False`, you are essentially taking responsibility for scenarios where duplicate files can be added; the default is to validate and reject them.
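   To make the contract concrete, here is a minimal sketch of both paths (the catalog name, table identifier, and file path are illustrative; `check_duplicate_files` is the flag this PR introduces):

   ```python
   from pyiceberg.catalog import load_catalog

   # Illustrative setup; names and paths are not taken from this PR's tests.
   catalog = load_catalog("default")
   tbl = catalog.load_table("default.my_table")
   file_paths = ["s3://warehouse/default/unpartitioned/test-1.parquet"]

   # Default (check_duplicate_files=True): re-adding an already referenced
   # file raises a ValueError instead of silently duplicating data.
   tbl.add_files(file_paths=file_paths)
   try:
       tbl.add_files(file_paths=file_paths)
   except ValueError as err:
       # "Cannot add files that are already referenced by table, files: ..."
       print(err)

   # Opting out: the caller accepts responsibility for duplicate data files.
   tbl.add_files(file_paths=file_paths, check_duplicate_files=False)
   ```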


