pabloem commented on a change in pull request #12151: URL: https://github.com/apache/beam/pull/12151#discussion_r451294464
########## File path: sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/SnowflakeIO.java ########## @@ -684,14 +819,61 @@ private void checkArguments() { (getDataSourceProviderFn() != null), "withDataSourceConfiguration() or withDataSourceProviderFn() is required"); - checkArgument(getTable() != null, "withTable() is required"); + if (input.isBounded() == PCollection.IsBounded.UNBOUNDED) { + checkArgument(getSnowPipe() != null, "withSnowPipe() is required"); + } else { + checkArgument(getTable() != null, "to() is required"); + } } - private PCollection<String> write(PCollection<T> input, String stagingBucketDir) { + private PCollection<T> writeStream(PCollection<T> input, String stagingBucketDir) { SnowflakeService snowflakeService = - getSnowflakeService() != null ? getSnowflakeService() : new SnowflakeServiceImpl(); + getSnowflakeService() != null + ? getSnowflakeService() + : new SnowflakeStreamingServiceImpl(); + + /* Ensure that files will be created after specific record count or duration specified */ + PCollection<T> inputInGlobalWindow = + input.apply( + "rewindowIntoGlobal", + Window.<T>into(new GlobalWindows()) + .triggering( + Repeatedly.forever( + AfterFirst.of( + AfterProcessingTime.pastFirstElementInPane() + .plusDelayOf(getFlushTimeLimit()), + AfterPane.elementCountAtLeast(getFlushRowLimit())))) + .discardingFiredPanes()); + + int shards = (getShardsNumber() > 0) ? getShardsNumber() : DEFAULT_STREAMING_SHARDS_NUMBER; + PCollection files = writeFiles(inputInGlobalWindow, stagingBucketDir, shards); + + /* Ensuring that files will be ingested after flush time */ + files = + (PCollection) + files.apply( + "applyUserTrigger", + Window.<T>into(new GlobalWindows()) + .triggering( + Repeatedly.forever( + AfterProcessingTime.pastFirstElementInPane() + .plusDelayOf(getFlushTimeLimit()))) + .discardingFiredPanes()); Review comment: A PCollection produced by a Combine / GroupByKey / etc. keeps the trigger configuration from upstream. 
Did you find that you needed to restate the trigger like this? ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: us...@infra.apache.org