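"""Grafana dashboard definitions for the pystencils benchmarks (dashboard_pystencils.py).

Two builder functions are provided: dashboard_pystencils_cpu() for the
LIKWID-based CPU measurements and dashboard_pystencils_gpu() for the
NCU-based GPU measurements.
"""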
from dashboards.dashboard_base import (DashboardOptions,
                                       build_dashboard,
                                       get_commit_annotation,
                                       pack_in_row,
                                       Repeat)
from dashboards.panels import PanelInfos, get_time_series_panel, is_regex
from dashboards.variables import get_dashboard_variable, Filter, get_measurement_filter
from dashboards.influx_queries import Query, join_variable_and
from dashboards.legends import Units
import cbutil.ncu_keys as ncu_keys
import cbutil.likwid_keys as likwid_keys

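# Line styles used to distinguish the Intel and GCC compiler series; not
# referenced in this module, presumably consumed by the shared panel/legend helpers.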
INTEL_LINESTYLE = "solid"
GCC_LINESTYLE = "dashed"


def dashboard_pystencils_cpu():
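    """Build the Grafana dashboard for the pystencils CPU (LIKWID) benchmarks.

    One time-series panel is created per LIKWID metric; all panels are packed
    into a single row that is repeated per host.
    """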
    data_source = "pystencils"
    row_repeat = "host"

    options = DashboardOptions(
        title="pystencils CPU Benchmarks",
        description="Benchmarks for pystencils",
        tags=['benchmark', 'pystencils', 'CPU'],
        timezone="browser",
    )
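
    # Each filter becomes a dashboard template variable and an entry in the
    # WHERE clause of every panel query.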
    filters = [
        Filter("host", default_value="icx32"),
        Filter("PYSTENCILS_PROJECT_ID", default_value="pycodegen/pystencils"),
        Filter("PYSTENCILS_BRANCH", default_value="master"),
        Filter("compiler"),
    ]
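
    # One panel per LIKWID metric; the combined entry divides the AVX DP rate
    # by the total DP rate, i.e. the vectorised share of the FLOP rate.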
    fields = [
        PanelInfos(likwid_keys.runtime_key, Units.seconds),
        PanelInfos(likwid_keys.dp_key, Units.mflop_sec),
        PanelInfos(likwid_keys.oi_key, Units.flop_per_byte),
        PanelInfos(f'{likwid_keys.avx_dp_key}"/"{likwid_keys.dp_key}', Units.percent),
        PanelInfos(likwid_keys.memory_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(likwid_keys.memory_write_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(likwid_keys.memory_read_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(likwid_keys.memory_data_key, Units.gigabyte),
        PanelInfos(likwid_keys.memory_read_key, Units.gigabyte),
        PanelInfos(likwid_keys.memory_write_key, Units.gigabyte),
        PanelInfos(likwid_keys.energy_key, Units.joule),
        PanelInfos(likwid_keys.power_key, Units.watt),
        PanelInfos(likwid_keys.clock_key, Units.megahertz),
    ]
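
    # Template variables: one per filter, plus a measurement variable restricted
    # to benchmark names not ending in "u", presumably to exclude the *_gpu
    # measurements shown on the GPU dashboard.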
    filter_vars = [get_dashboard_variable(filter, "", data_source) for filter in filters]
    benchmark = get_measurement_filter("benchmark", data_source, filter_pattern="[^u]$")
    row_repeat_var = [fv for fv in filter_vars if fv.name == row_repeat][0]

    where = join_variable_and([f.name for f in filters])
    annotations = get_commit_annotation(data_source, "red", "commits", "vadd",
                                        commit_key="pystencils-commit")

    group_by = [f.name for f in filters]
    group_by.append("array_shape")
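
    # One time-series panel per metric; the selected benchmark variable is
    # interpolated as a regular expression into the FROM clause of each query.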
    panels = [
        get_time_series_panel(
            field,
            data_source,
            [Query(select_=field.name,
                   from_=f"/^${benchmark.name}$/",
                   where_=where,
                   group_by=group_by,
                   from_string=not is_regex(f"/^${benchmark.name}$/"),
                   select_string=not is_regex(field.name))],
        )
        for field in fields
    ]

    row = pack_in_row(
        title=f"{row_repeat}: ${row_repeat_var.name}",
        panels=[*panels],
        repeat=Repeat('v', row_repeat_var.name),
    )

    return build_dashboard(options,
                           rows=[row],
                           templating=[*filter_vars, benchmark],
                           annotations=annotations)


def dashboard_pystencils_gpu():
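    """Build the Grafana dashboard for the pystencils GPU benchmarks.

    Mirrors the CPU dashboard but uses the NCU metric keys, a GPU filter
    instead of a compiler filter, and the *_gpu measurements.
    """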
    data_source = "pystencils"
    row_repeat = "host"

    options = DashboardOptions(
        title="pystencils GPU Benchmarks",
        description="Benchmarks for pystencils",
        tags=['benchmark', 'pystencils', 'GPU'],
        timezone="browser",
    )

    filters = [
        Filter("host", default_value="medusa"),
        Filter("PYSTENCILS_PROJECT_ID", default_value="pycodegen/pystencils"),
        Filter("PYSTENCILS_BRANCH", default_value="master"),
        Filter("GPU"),
    ]

    fields = [
        PanelInfos(ncu_keys.runtime_key, Units.seconds),
        PanelInfos(ncu_keys.dp_key, Units.mflop_sec),
        PanelInfos(ncu_keys.p_max_key, Units.mflop_sec),
        PanelInfos(f'{ncu_keys.dp_key}"/"{ncu_keys.p_max_key}', Units.percent),
        PanelInfos(ncu_keys.operational_intensity_key, Units.flop_per_byte),
        PanelInfos(ncu_keys.memory_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(ncu_keys.memory_write_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(ncu_keys.memory_read_bandwidth_key, Units.mbytes_per_second),
        PanelInfos(ncu_keys.memory_data_key, Units.gigabyte),
        PanelInfos(ncu_keys.memory_write_data_key, Units.gigabyte),
        PanelInfos(ncu_keys.memory_read_data_key, Units.gigabyte),
    ]
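
    # Template variables as above, but the measurement variable only offers the
    # *_gpu benchmark series.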
    filter_vars = [get_dashboard_variable(filter, "", data_source) for filter in filters]
    benchmark = get_measurement_filter("benchmark", data_source, filter_pattern="_gpu$")
    row_repeat_var = [fv for fv in filter_vars if fv.name == row_repeat][0]

    where = join_variable_and([f.name for f in filters])
    annotations = get_commit_annotation(data_source, "red", "commits", "vadd_gpu",
                                        commit_key="pystencils-commit")

    group_by = [f.name for f in filters]
    group_by.append("array_shape")

    panels = [
        get_time_series_panel(
            field,
            data_source,
            [Query(select_=field.name,
                   from_=f"/^${benchmark.name}$/",
                   where_=where,
                   group_by=group_by,
                   from_string=not is_regex(f"/^${benchmark.name}$/"),
                   select_string=not is_regex(field.name))],
        )
        for field in fields
    ]

    row = pack_in_row(
        title=f"{row_repeat}: ${row_repeat_var.name}",
        panels=[*panels],
        repeat=Repeat('v', row_repeat_var.name),
    )

    return build_dashboard(options,
                           rows=[row],
                           templating=[*filter_vars, benchmark],
                           annotations=annotations)
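

if __name__ == "__main__":
    # Minimal usage sketch (assumption): build both dashboards; exporting or
    # uploading the returned objects is handled by the surrounding tooling.
    cpu_dashboard = dashboard_pystencils_cpu()
    gpu_dashboard = dashboard_pystencils_gpu()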