KoviAnusha commented on code in PR #56910:
URL: https://github.com/apache/airflow/pull/56910#discussion_r2449105108
##########
providers/amazon/src/airflow/providers/amazon/aws/triggers/s3.py:
##########
@@ -108,11 +115,25 @@ async def run(self) -> AsyncIterator[TriggerEvent]:
client, self.bucket_name, self.bucket_key,
self.wildcard_match, self.use_regex
):
if self.should_check_fn:
- s3_objects = await self.hook.get_files_async(
+ raw_objects = await self.hook.get_files_async(
client, self.bucket_name, self.bucket_key,
self.wildcard_match
)
- await asyncio.sleep(self.poke_interval)
- yield TriggerEvent({"status": "running", "files": s3_objects})
+ files = []
+ for f in raw_objects:
+ metadata = {}
+ if "*" in self.metadata_keys:
+ metadata = await self.hook.head_object_async(f, self.bucket_name)
+ else:
+ for mk in self.metadata_keys:
+ try:
+ metadata[mk] = f[mk]
+ except KeyError:
+ self.log.info("Key %s not found, performing head_object", mk)
+ obj = await self.hook.head_object_async(f, self.bucket_name)
+ metadata[mk] = obj.get(mk, None)
+ files.append(metadata)
+
+ yield TriggerEvent({"status": "running", "files": files})
Review Comment:
I just saw that you added the tests. That completes the fix. Everything
looks clean and consistent now. Great job getting this all wrapped up quickly.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]