kbendick commented on code in PR #5427:
URL: https://github.com/apache/iceberg/pull/5427#discussion_r941585889
##########
api/src/main/java/org/apache/iceberg/metrics/ScanReport.java:
##########
@@ -117,12 +149,285 @@ public Builder fromScanMetrics(ScanMetrics
newScanMetrics) {
return this;
}
+ public Builder fromScanMetricsResult(ScanMetricsResult
newScanMetricsResult) {
+ this.scanMetricsResult = newScanMetricsResult;
+ return this;
+ }
+
public ScanReport build() {
Preconditions.checkArgument(null != tableName, "Invalid table name:
null");
Preconditions.checkArgument(null != filter, "Invalid expression filter:
null");
Preconditions.checkArgument(null != projection, "Invalid schema
projection: null");
+ Preconditions.checkArgument(
+ null != scanMetrics || null != scanMetricsResult, "Invalid scan
metrics: null");
+ return new ScanReport(
+ tableName,
+ snapshotId,
+ filter,
+ projection,
+ null != scanMetrics ? ScanMetricsResult.fromScanMetrics(scanMetrics)
: scanMetricsResult);
+ }
+ }
+
+ /** A serializable version of a {@link Timer} that carries its result. */
+ public static class TimerResult implements Serializable {
+ private final String name;
+ private final TimeUnit timeUnit;
+ private final Duration totalDuration;
+ private final long count;
+
+ public TimerResult(String name, TimeUnit timeUnit, Duration totalDuration,
long count) {
+ Preconditions.checkArgument(null != name, "Invalid timer name: null");
+ Preconditions.checkArgument(null != timeUnit, "Invalid time unit: null");
+ Preconditions.checkArgument(null != totalDuration, "Invalid duration:
null");
+ this.name = name;
+ this.timeUnit = timeUnit;
+ this.totalDuration = totalDuration;
+ this.count = count;
+ }
+
+ public String name() {
+ return name;
+ }
+
+ public TimeUnit timeUnit() {
+ return timeUnit;
+ }
+
+ public Duration totalDuration() {
+ return totalDuration;
+ }
+
+ public long count() {
+ return count;
+ }
+
+ @Override
+ public String toString() {
+ return MoreObjects.toStringHelper(name())
+ .add("duration", totalDuration())
+ .add("count", count)
+ .add("timeUnit", timeUnit)
+ .toString();
+ }
+
+ public static TimerResult fromTimer(Timer timer) {
+ Preconditions.checkArgument(null != timer, "Invalid timer: null");
+ return new TimerResult(timer.name(), timer.unit(),
timer.totalDuration(), timer.count());
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+ TimerResult that = (TimerResult) o;
+ return count == that.count
+ && Objects.equal(name, that.name)
+ && timeUnit == that.timeUnit
+ && Objects.equal(totalDuration, that.totalDuration);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(name, timeUnit, totalDuration, count);
+ }
+ }
+
+ /**
+ * A serializable version of a {@link Counter} that carries its result.
+ *
+ * @param <T> The type of the {@link CounterResult}.
+ */
+ public static class CounterResult<T extends Number> implements Serializable {
Review Comment:
I also agree with this and I've found that in places where some of the
Hadoop metrics (or any Hadoop tracking) use an integer, they really have
no way of dealing with overflow.
So we'd still be adhering to the Hadoop APIs if one _needed_ to send them an
integer -- we could continue with the undefined behavior or return INT_MAX at
overflow or just rely on the `Number` semantics.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]