diff --git a/lisa/microsoft/testsuites/dpdk/dpdkperf.py b/lisa/microsoft/testsuites/dpdk/dpdkperf.py
index 335463e2d7..d68eac4b93 100644
--- a/lisa/microsoft/testsuites/dpdk/dpdkperf.py
+++ b/lisa/microsoft/testsuites/dpdk/dpdkperf.py
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Tuple
+from typing import Any, Dict, List, Tuple, cast
 
 from assertpy import assert_that
 from microsoft.testsuites.dpdk.common import Pmd, force_dpdk_default_source
@@ -25,9 +25,11 @@
 )
 from lisa.features import Gpu, Infiniband, Sriov
 from lisa.messages import (
+    MetricRelativity,
     NetworkPPSPerformanceMessage,
     TransportProtocol,
     create_perf_message,
+    send_unified_perf_message,
 )
 from lisa.testsuite import TestResult
 from lisa.tools import Lscpu
@@ -87,6 +89,12 @@ def perf_dpdk_send_only_failsafe_pmd(
             test_case_name,
             sender_fields,
         )
+
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, node, test_case_name, result
+        )
+
         notifier.notify(send_results)
 
     @TestCaseMetadata(
@@ -133,6 +141,12 @@ def perf_dpdk_send_only_netvsc_pmd(
             test_case_name,
             sender_fields,
         )
+
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, node, test_case_name, result
+        )
+
         notifier.notify(send_results)
 
     @TestCaseMetadata(
@@ -354,8 +368,118 @@ def _create_pps_performance_results(
             receiver_fields,
         )
 
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, send_kit.node, test_case_name, test_result
+        )
+        self._send_pps_unified_perf_messages(
+            receiver_fields, receive_kit.node, test_case_name, test_result
+        )
+
         return send_results, receive_results
 
+    def _send_pps_unified_perf_messages(
+        self,
+        result_fields: Dict[str, Any],
+        node: Node,
+        test_case_name: str,
+        test_result: TestResult,
+    ) -> None:
+        """Send unified performance messages for PPS metrics."""
+        tool = constants.NETWORK_PERFORMANCE_TOOL_DPDK_TESTPMD
+
+        metrics: List[Dict[str, Any]] = []
+
+        # Add rx metrics if they exist
+        if "rx_pps_maximum" in result_fields:
+            metrics.extend(
+                [
+                    {
+                        "name": "rx_pps_maximum",
+                        "value": float(result_fields["rx_pps_maximum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "rx_pps_average",
+                        "value": float(result_fields["rx_pps_average"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "rx_pps_minimum",
+                        "value": float(result_fields["rx_pps_minimum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                ]
+            )
+
+        # Add tx metrics if they exist
+        if "tx_pps_maximum" in result_fields:
+            metrics.extend(
+                [
+                    {
+                        "name": "tx_pps_maximum",
+                        "value": float(result_fields["tx_pps_maximum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "tx_pps_average",
+                        "value": float(result_fields["tx_pps_average"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "tx_pps_minimum",
+                        "value": float(result_fields["tx_pps_minimum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                ]
+            )
+
+        # Add parameter metrics
+        test_type = result_fields.get("test_type", "")
+        if test_type:
+            metrics.append(
+                {
+                    "name": "test_type",
+                    "str_value": test_type,
+                    "relativity": MetricRelativity.Parameter,
+                    "unit": "",
+                }
+            )
+
+        role = result_fields.get("role", "")
+        if role:
+            metrics.append(
+                {
+                    "name": "role",
+                    "str_value": role,
+                    "relativity": MetricRelativity.Parameter,
+                    "unit": "",
+                }
+            )
+
+        # Get protocol_type from result_fields if it exists
+        protocol_type = result_fields.get("protocol_type")
+
+        for metric in metrics:
+            send_unified_perf_message(
+                node=node,
+                test_result=test_result,
+                test_case_name=test_case_name,
+                tool=tool,
+                metric_name=cast(str, metric["name"]),
+                metric_value=cast(float, metric.get("value", 0.0)),
+                metric_unit=cast(str, metric["unit"]),
+                metric_str_value=cast(str, metric.get("str_value", "")),
+                metric_relativity=cast(MetricRelativity, metric["relativity"]),
+                protocol_type=protocol_type,
+            )
+
     def _validate_core_counts_are_equal(self, test_result: TestResult) -> None:
         environment = test_result.environment
         assert environment, "fail to get environment from testresult"
diff --git a/lisa/tools/sar.py b/lisa/tools/sar.py
index df9cea87a9..a5ae043708 100644
--- a/lisa/tools/sar.py
+++ b/lisa/tools/sar.py
@@ -7,7 +7,12 @@
 from assertpy.assertpy import assert_that
 
 from lisa.executable import Tool
-from lisa.messages import NetworkPPSPerformanceMessage, create_perf_message
+from lisa.messages import (
+    MetricRelativity,
+    NetworkPPSPerformanceMessage,
+    create_perf_message,
+    send_unified_perf_message,
+)
 from lisa.operating_system import Posix
 from lisa.util import constants, find_groups_in_lines
 from lisa.util.process import ExecutableResult, Process
@@ -147,6 +152,15 @@ def create_pps_performance_messages(
         result_fields["rx_tx_pps_maximum"] = max(tx_rx_pps)
         result_fields["rx_tx_pps_average"] = Decimal(sum(tx_rx_pps) / len(tx_rx_pps))
         result_fields["rx_tx_pps_minimum"] = min(tx_rx_pps)
+
+        # Send unified performance messages
+        self.send_pps_unified_perf_messages(
+            result_fields,
+            test_case_name,
+            test_type,
+            test_result,
+        )
+
         message = create_perf_message(
             NetworkPPSPerformanceMessage,
             self.node,
@@ -156,6 +170,92 @@
         )
         return message
 
+    def send_pps_unified_perf_messages(
+        self,
+        result_fields: Dict[str, Any],
+        test_case_name: str,
+        test_type: str,
+        test_result: "TestResult",
+    ) -> None:
+        """Send unified performance messages for PPS metrics."""
+        tool = constants.NETWORK_PERFORMANCE_TOOL_SAR
+
+        metrics: List[Dict[str, Any]] = [
+            {
+                "name": "rx_pps_maximum",
+                "value": float(result_fields["rx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_pps_average",
+                "value": float(result_fields["rx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_pps_minimum",
+                "value": float(result_fields["rx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_maximum",
+                "value": float(result_fields["tx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_average",
+                "value": float(result_fields["tx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_minimum",
+                "value": float(result_fields["tx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_maximum",
+                "value": float(result_fields["rx_tx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_average",
+                "value": float(result_fields["rx_tx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_minimum",
+                "value": float(result_fields["rx_tx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "test_type",
+                "str_value": test_type,
+                "relativity": MetricRelativity.Parameter,
+                "unit": "",
+            },
+        ]
+
+        for metric in metrics:
+            send_unified_perf_message(
+                node=self.node,
+                test_result=test_result,
+                test_case_name=test_case_name,
+                tool=tool,
+                metric_name=metric["name"],
+                metric_value=metric.get("value", 0.0),
+                metric_unit=metric["unit"],
+                metric_str_value=metric.get("str_value", ""),
+                metric_relativity=metric["relativity"],
+            )
+
     def _initialize(self, *args: Any, **kwargs: Any) -> None:
         firewall = self.node.tools[Firewall]
         firewall.stop()
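
Both helpers follow the same pattern: build a list of metric dictionaries, then fan each one out through send_unified_perf_message. Below is a minimal, self-contained sketch of that pattern, not the patch itself: MetricRelativity and send_unified_perf_message are stubbed out for illustration (the real definitions live in lisa.messages), and the sample values are made up. Note the .get() calls with defaults: parameter metrics such as test_type carry only str_value, so indexing metric["value"] directly would raise KeyError.

    from enum import Enum
    from typing import Any, Dict, List


    class MetricRelativity(Enum):
        # Stand-in for lisa.messages.MetricRelativity (illustration only).
        HigherIsBetter = "higher_is_better"
        Parameter = "parameter"


    def send_unified_perf_message(**fields: Any) -> None:
        # Stub: the real function builds and posts a unified perf message
        # through the notifier; here we just show what would be sent.
        print(fields)


    def fan_out(metrics: List[Dict[str, Any]]) -> None:
        # One unified message per metric dictionary.
        for metric in metrics:
            send_unified_perf_message(
                metric_name=metric["name"],
                # Parameter metrics have no numeric "value"; default to 0.0.
                metric_value=metric.get("value", 0.0),
                metric_unit=metric["unit"],
                metric_str_value=metric.get("str_value", ""),
                metric_relativity=metric["relativity"],
            )


    fan_out(
        [
            {
                "name": "rx_pps_average",
                "value": 1.5e6,  # made-up sample value
                "relativity": MetricRelativity.HigherIsBetter,
                "unit": "packets/second",
            },
            {
                "name": "test_type",
                "str_value": "SINGLE_QUEUE",  # made-up sample value
                "relativity": MetricRelativity.Parameter,
                "unit": "",
            },
        ]
    )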