santhoshct commented on code in PR #17938:
URL: https://github.com/apache/kafka/pull/17938#discussion_r1880082439
##########
.github/scripts/develocity_reports.py:
##########
@@ -0,0 +1,863 @@
+import os
+import requests
+import json
+from dataclasses import dataclass, field, asdict
+from typing import Dict, List, Tuple, Optional
+from datetime import datetime, timedelta
+import pytz # Add this import for timezone handling
+from collections import defaultdict
+import time
+import logging
+import concurrent.futures # Add this import at the top
+import pathlib
+import pickle
+from abc import ABC, abstractmethod
+
+logger = logging.getLogger(__name__)
+
+@dataclass
+class TestOutcome:
+    passed: int
+    failed: int
+    skipped: int
+    flaky: int
+    not_selected: int = field(metadata={'name': 'notSelected'})
+    total: int
+
+@dataclass
+class BuildInfo:
+    id: str
+    timestamp: datetime
+    duration: int
+    has_failed: bool
+
+@dataclass
+class TestTimelineEntry:
+    build_id: str
+    timestamp: datetime
+    outcome: str # "passed", "failed", "flaky", etc.
+
+@dataclass
+class TestResult:
+    name: str
+    outcome_distribution: TestOutcome
+    first_seen: datetime
+    timeline: List[TestTimelineEntry] = field(default_factory=list)
+    recent_failure_rate: float = 0.0 # Added to track recent failure trends
+
+@dataclass
+class TestContainerResult:
+    build_id: str
+    outcome: str
+    timestamp: Optional[datetime] = None
+
+@dataclass
+class TestCaseResult(TestResult):
+    """Extends TestResult to include container-specific information"""
+    container_name: str = ""
+
+@dataclass
+class BuildCache:
+    last_update: datetime
+    builds: Dict[str, 'BuildInfo']
+
+    def to_dict(self):
+        return {
+            'last_update': self.last_update.isoformat(),
+            'builds': {k: asdict(v) for k, v in self.builds.items()}
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict) -> 'BuildCache':
+        return cls(
+            last_update=datetime.fromisoformat(data['last_update']),
+            builds={k: BuildInfo(**v) for k, v in data['builds'].items()}
+        )
+
+class CacheProvider(ABC):
+    @abstractmethod
+    def get_cache(self) -> Optional[BuildCache]:
+        pass
+
+    @abstractmethod
+    def save_cache(self, cache: BuildCache):
+        pass
+
+class LocalCacheProvider(CacheProvider):
+    def __init__(self, cache_dir: str = None):
+        if cache_dir is None:
+            cache_dir = os.path.join(os.path.expanduser("~"), ".develocity_cache")
+        self.cache_file = os.path.join(cache_dir, "build_cache.pkl")
+        os.makedirs(cache_dir, exist_ok=True)
+
+    def get_cache(self) -> Optional[BuildCache]:
+        try:
+            if os.path.exists(self.cache_file):
+                with open(self.cache_file, 'rb') as f:
+                    return pickle.load(f)
+        except Exception as e:
+            logger.warning(f"Failed to load local cache: {e}")
+        return None
+
+    def save_cache(self, cache: BuildCache):
+        try:
+            with open(self.cache_file, 'wb') as f:
+                pickle.dump(cache, f)
+        except Exception as e:
+            logger.warning(f"Failed to save local cache: {e}")
+
+class GitHubActionsCacheProvider(CacheProvider):
+    def __init__(self):
+        self.cache_key = "develocity-build-cache"
+
+    def get_cache(self) -> Optional[BuildCache]:
+        try:
+            # Check if running in GitHub Actions
+            if not os.environ.get('GITHUB_ACTIONS'):
+                return None
+
+            cache_path = os.environ.get('GITHUB_WORKSPACE', '')
+            cache_file = os.path.join(cache_path, self.cache_key + '.json')
+
+            if os.path.exists(cache_file):
+                with open(cache_file, 'r') as f:
+                    data = json.load(f)
+                    return BuildCache.from_dict(data)
+        except Exception as e:
+            logger.warning(f"Failed to load GitHub Actions cache: {e}")
+        return None
+
+    def save_cache(self, cache: BuildCache):
+        try:
+            if not os.environ.get('GITHUB_ACTIONS'):
+                return
+
+            cache_path = os.environ.get('GITHUB_WORKSPACE', '')
+            cache_file = os.path.join(cache_path, self.cache_key + '.json')
+
+            with open(cache_file, 'w') as f:
+                json.dump(cache.to_dict(), f)
+        except Exception as e:
+            logger.warning(f"Failed to save GitHub Actions cache: {e}")
+
+class TestAnalyzer:
+    def __init__(self, base_url: str, auth_token: str):
+        self.base_url = base_url
+        self.headers = {
+            'Authorization': f'Bearer {auth_token}',
+            'Accept': 'application/json'
+        }
+        self.default_chunk_size = timedelta(days=14)
+        self.api_retry_delay = 2 # seconds
+        self.max_api_retries = 3
+
+        # Initialize cache providers
+        self.cache_providers = [
+            GitHubActionsCacheProvider(),
+            LocalCacheProvider()
+        ]
+        self.build_cache = None
+        self._load_cache()
+
+    def _load_cache(self):
+        """Load cache from the first available provider"""
+        for provider in self.cache_providers:
+            cache = provider.get_cache()
+            if cache is not None:
+                self.build_cache = cache
+                logger.info(f"Loaded cache from {provider.__class__.__name__}")
+                return
+        logger.info("No existing cache found")
+
+    def _save_cache(self):
+        """Save cache to all providers"""
+        if self.build_cache:
+            for provider in self.cache_providers:
+                provider.save_cache(self.build_cache)
+                logger.info(f"Saved cache to {provider.__class__.__name__}")
+
+    def build_query(self, project: str, chunk_start: datetime, chunk_end: datetime, test_type: str) -> str:
+        """
+        Constructs the query string to be used in both build info and test containers API calls.
+
+        Args:
+            project: The project name.
+            chunk_start: The start datetime for the chunk.
+            chunk_end: The end datetime for the chunk.
+            test_type: The type of tests to query.
+
+        Returns:
+            A formatted query string.
+        """
+        return f'project:{project} buildStartTime:[{chunk_start.isoformat()} TO {chunk_end.isoformat()}] gradle.requestedTasks:{test_type}'
+
+    def process_chunk(self, chunk_start: datetime, chunk_end: datetime, project: str,
Review Comment:
Corrected this.
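
For reference, a minimal sketch of how the corrected query construction behaves, assuming the script above is importable as a module named develocity_reports; the base URL, token, project name, and requested task below are placeholder values, not taken from this PR:

from datetime import datetime, timezone

from develocity_reports import TestAnalyzer  # hypothetical import of the script above

# Constructing TestAnalyzer only sets up headers and cache providers (it may create
# ~/.develocity_cache locally); build_query itself makes no Develocity API call.
analyzer = TestAnalyzer(base_url="https://develocity.example.org", auth_token="placeholder-token")

# One 14-day chunk, matching the default_chunk_size set in __init__.
chunk_end = datetime.now(timezone.utc)
chunk_start = chunk_end - analyzer.default_chunk_size

# The same query string is reused by both the build-info and test-containers calls.
print(analyzer.build_query("kafka", chunk_start, chunk_end, "test"))
# e.g. project:kafka buildStartTime:[2024-12-01T00:00:00+00:00 TO 2024-12-15T00:00:00+00:00] gradle.requestedTasks:test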
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]