Remove merge times from command line report #767

Merged
esrally/reporter.py (41 changes: 0 additions & 41 deletions)
@@ -306,13 +306,6 @@ def __init__(self, d=None):
         self.merge_throttle_time_per_shard = self.v(d, "merge_throttle_time_per_shard", default={})
         self.ml_processing_time = self.v(d, "ml_processing_time", default=[])

-        self.merge_part_time_postings = self.v(d, "merge_part_time_postings")
-        self.merge_part_time_stored_fields = self.v(d, "merge_part_time_stored_fields")
-        self.merge_part_time_doc_values = self.v(d, "merge_part_time_doc_values")
-        self.merge_part_time_norms = self.v(d, "merge_part_time_norms")
-        self.merge_part_time_vectors = self.v(d, "merge_part_time_vectors")
-        self.merge_part_time_points = self.v(d, "merge_part_time_points")
-
         self.young_gc_time = self.v(d, "young_gc_time")
         self.old_gc_time = self.v(d, "old_gc_time")

@@ -432,7 +425,6 @@ def report(self):
         warnings = []
         metrics_table = []
         metrics_table.extend(self.report_totals(stats))
-        metrics_table.extend(self.report_merge_part_times(stats))
         metrics_table.extend(self.report_ml_processing_times(stats))

         metrics_table.extend(self.report_gc_times(stats))
@@ -538,18 +530,6 @@ def report_total_count(self, name, total_count):
             self.line("Cumulative {} of primary shards".format(name), "", total_count, ""),
         )

-    def report_merge_part_times(self, stats):
-        # note that these times are not(!) wall clock time results but total times summed up over multiple threads
-        unit = "min"
-        return self.join(
-            self.line("Merge time (postings)", "", stats.merge_part_time_postings, unit, convert.ms_to_minutes),
-            self.line("Merge time (stored fields)", "", stats.merge_part_time_stored_fields, unit, convert.ms_to_minutes),
-            self.line("Merge time (doc values)", "", stats.merge_part_time_doc_values, unit, convert.ms_to_minutes),
-            self.line("Merge time (norms)", "", stats.merge_part_time_norms, unit, convert.ms_to_minutes),
-            self.line("Merge time (vectors)", "", stats.merge_part_time_vectors, unit, convert.ms_to_minutes),
-            self.line("Merge time (points)", "", stats.merge_part_time_points, unit, convert.ms_to_minutes)
-        )
-
     def report_ml_processing_times(self, stats):
         lines = []
         for processing_time in stats.ml_processing_time:
@@ -652,8 +632,6 @@ def metrics_table(self, baseline_stats, contender_stats, plain):
         self.plain = plain
         metrics_table = []
         metrics_table.extend(self.report_total_times(baseline_stats, contender_stats))
-        metrics_table.extend(self.report_merge_part_times(baseline_stats, contender_stats))
-        metrics_table.extend(self.report_merge_part_times(baseline_stats, contender_stats))
         metrics_table.extend(self.report_ml_processing_times(baseline_stats, contender_stats))
         metrics_table.extend(self.report_gc_times(baseline_stats, contender_stats))
         metrics_table.extend(self.report_disk_usage(baseline_stats, contender_stats))
@@ -716,25 +694,6 @@ def report_error_rate(self, baseline_stats, contender_stats, task):
                       treat_increase_as_improvement=False, formatter=convert.factor(100.0))
         )

-    def report_merge_part_times(self, baseline_stats, contender_stats):
-        return self.join(
-            self.line("Merge time (postings)", baseline_stats.merge_part_time_postings,
-                      contender_stats.merge_part_time_postings,
-                      "", "min", treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
-            self.line("Merge time (stored fields)", baseline_stats.merge_part_time_stored_fields,
-                      contender_stats.merge_part_time_stored_fields,
-                      "", "min", treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
-            self.line("Merge time (doc values)", baseline_stats.merge_part_time_doc_values,
-                      contender_stats.merge_part_time_doc_values,
-                      "", "min", treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
-            self.line("Merge time (norms)", baseline_stats.merge_part_time_norms,
-                      contender_stats.merge_part_time_norms,
-                      "", "min", treat_increase_as_improvement=False, formatter=convert.ms_to_minutes),
-            self.line("Merge time (vectors)", baseline_stats.merge_part_time_vectors,
-                      contender_stats.merge_part_time_vectors,
-                      "", "min", treat_increase_as_improvement=False, formatter=convert.ms_to_minutes)
-        )
-
     def report_ml_processing_times(self, baseline_stats, contender_stats):
         lines = []
         for baseline in baseline_stats.ml_processing_time: