lisa/microsoft/testsuites/dpdk/dpdkperf.py: 125 additions & 1 deletion

@@ -1,4 +1,4 @@
-from typing import Any, Dict, Tuple
+from typing import Any, Dict, Tuple, cast
 
 from assertpy import assert_that
 from microsoft.testsuites.dpdk.common import Pmd, force_dpdk_default_source
@@ -25,9 +25,11 @@
 )
 from lisa.features import Gpu, Infiniband, Sriov
 from lisa.messages import (
+    MetricRelativity,
     NetworkPPSPerformanceMessage,
     TransportProtocol,
     create_perf_message,
+    send_unified_perf_message,
 )
 from lisa.testsuite import TestResult
 from lisa.tools import Lscpu
@@ -87,6 +89,12 @@ def perf_dpdk_send_only_failsafe_pmd(
             test_case_name,
             sender_fields,
         )
+
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, node, test_case_name, result
+        )
+
         notifier.notify(send_results)
 
     @TestCaseMetadata(
@@ -133,6 +141,12 @@ def perf_dpdk_send_only_netvsc_pmd(
             test_case_name,
             sender_fields,
         )
+
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, node, test_case_name, result
+        )
+
         notifier.notify(send_results)
 
     @TestCaseMetadata(
@@ -354,8 +368,118 @@ def _create_pps_performance_results(
             receiver_fields,
         )
 
+        # Send unified performance messages
+        self._send_pps_unified_perf_messages(
+            sender_fields, send_kit.node, test_case_name, test_result
+        )
+        self._send_pps_unified_perf_messages(
+            receiver_fields, receive_kit.node, test_case_name, test_result
+        )
+
         return send_results, receive_results
 
+    def _send_pps_unified_perf_messages(
+        self,
+        result_fields: Dict[str, Any],
+        node: Node,
+        test_case_name: str,
+        test_result: TestResult,
+    ) -> None:
+        """Send unified performance messages for PPS metrics."""
+        tool = constants.NETWORK_PERFORMANCE_TOOL_DPDK_TESTPMD
+
+        metrics = []
+
+        # Add rx metrics if they exist
+        if "rx_pps_maximum" in result_fields:
+            metrics.extend(
+                [
+                    {
+                        "name": "rx_pps_maximum",
+                        "value": float(result_fields["rx_pps_maximum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "rx_pps_average",
+                        "value": float(result_fields["rx_pps_average"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "rx_pps_minimum",
+                        "value": float(result_fields["rx_pps_minimum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                ]
+            )
+
+        # Add tx metrics if they exist
+        if "tx_pps_maximum" in result_fields:
+            metrics.extend(
+                [
+                    {
+                        "name": "tx_pps_maximum",
+                        "value": float(result_fields["tx_pps_maximum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "tx_pps_average",
+                        "value": float(result_fields["tx_pps_average"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                    {
+                        "name": "tx_pps_minimum",
+                        "value": float(result_fields["tx_pps_minimum"]),
+                        "relativity": MetricRelativity.HigherIsBetter,
+                        "unit": "packets/second",
+                    },
+                ]
+            )
+
+        # Add parameter metrics
+        test_type = result_fields.get("test_type", "")
+        if test_type:
+            metrics.append(
+                {
+                    "name": "test_type",
+                    "str_value": test_type,
+                    "relativity": MetricRelativity.Parameter,
+                    "unit": "",
+                }
+            )
+
+        role = result_fields.get("role", "")
+        if role:
+            metrics.append(
+                {
+                    "name": "role",
+                    "str_value": role,
+                    "relativity": MetricRelativity.Parameter,
+                    "unit": "",
+                }
+            )
+
+        # Get protocol_type from result_fields if it exists
+        protocol_type = result_fields.get("protocol_type")
+
+        for metric in metrics:
+            send_unified_perf_message(
+                node=node,
+                test_result=test_result,
+                test_case_name=test_case_name,
+                tool=tool,
+                metric_name=cast(str, metric["name"]),
+                metric_value=cast(float, metric.get("value", 0.0)),
+                metric_unit=cast(str, metric["unit"]),
+                metric_str_value=cast(str, metric.get("str_value", "")),
+                metric_relativity=cast(MetricRelativity, metric["relativity"]),
+                protocol_type=protocol_type,
+            )
+
     def _validate_core_counts_are_equal(self, test_result: TestResult) -> None:
         environment = test_result.environment
         assert environment, "fail to get environment from testresult"
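A note on the new helper above: the rx and tx branches apply the same three-statistic pattern per direction, guarded so a run that measured only one direction skips the other block, and every resulting entry becomes one unified message. Below is a minimal standalone sketch of that fan-out, not part of the PR; build_pps_metrics and the stub MetricRelativity enum are hypothetical stand-ins for the lisa.messages types.

from enum import Enum
from typing import Any, Dict, List


class MetricRelativity(Enum):
    # Hypothetical stand-in for lisa.messages.MetricRelativity.
    HigherIsBetter = "higher_is_better"
    Parameter = "parameter"


def build_pps_metrics(result_fields: Dict[str, Any]) -> List[Dict[str, Any]]:
    metrics: List[Dict[str, Any]] = []
    for direction in ("rx", "tx"):
        # Same guard as the diff: emit a direction only if it was measured.
        if f"{direction}_pps_maximum" not in result_fields:
            continue
        for stat in ("maximum", "average", "minimum"):
            name = f"{direction}_pps_{stat}"
            metrics.append(
                {
                    "name": name,
                    "value": float(result_fields[name]),
                    "relativity": MetricRelativity.HigherIsBetter,
                    "unit": "packets/second",
                }
            )
    return metrics


if __name__ == "__main__":
    # A send-only run: tx statistics present, rx absent.
    fields = {
        "tx_pps_maximum": 14_800_000,
        "tx_pps_average": 14_200_000,
        "tx_pps_minimum": 13_900_000,
    }
    for metric in build_pps_metrics(fields):
        print(metric["name"], metric["value"], metric["unit"])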
lisa/tools/sar.py: 101 additions & 1 deletion

@@ -7,7 +7,12 @@
 from assertpy.assertpy import assert_that
 
 from lisa.executable import Tool
-from lisa.messages import NetworkPPSPerformanceMessage, create_perf_message
+from lisa.messages import (
+    MetricRelativity,
+    NetworkPPSPerformanceMessage,
+    create_perf_message,
+    send_unified_perf_message,
+)
 from lisa.operating_system import Posix
 from lisa.util import constants, find_groups_in_lines
 from lisa.util.process import ExecutableResult, Process
@@ -147,6 +152,15 @@ def create_pps_performance_messages(
         result_fields["rx_tx_pps_maximum"] = max(tx_rx_pps)
         result_fields["rx_tx_pps_average"] = Decimal(sum(tx_rx_pps) / len(tx_rx_pps))
         result_fields["rx_tx_pps_minimum"] = min(tx_rx_pps)
+
+        # Send unified performance messages
+        self.send_pps_unified_perf_messages(
+            result_fields,
+            test_case_name,
+            test_type,
+            test_result,
+        )
+
         message = create_perf_message(
             NetworkPPSPerformanceMessage,
             self.node,
@@ -156,6 +170,92 @@
         )
         return message
 
+    def send_pps_unified_perf_messages(
+        self,
+        result_fields: Dict[str, Any],
+        test_case_name: str,
+        test_type: str,
+        test_result: "TestResult",
+    ) -> None:
+        """Send unified performance messages for PPS metrics."""
+        tool = constants.NETWORK_PERFORMANCE_TOOL_SAR
+
+        metrics: List[Dict[str, Any]] = [
+            {
+                "name": "rx_pps_maximum",
+                "value": float(result_fields["rx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_pps_average",
+                "value": float(result_fields["rx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_pps_minimum",
+                "value": float(result_fields["rx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_maximum",
+                "value": float(result_fields["tx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_average",
+                "value": float(result_fields["tx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "tx_pps_minimum",
+                "value": float(result_fields["tx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_maximum",
+                "value": float(result_fields["rx_tx_pps_maximum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_average",
+                "value": float(result_fields["rx_tx_pps_average"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "rx_tx_pps_minimum",
+                "value": float(result_fields["rx_tx_pps_minimum"]),
+                "relativity": MetricRelativity.HigherIsBetter,
+                "unit": "packets/second",
+            },
+            {
+                "name": "test_type",
+                "str_value": test_type,
+                "relativity": MetricRelativity.Parameter,
+                "unit": "",
+            },
+        ]
+
+        for metric in metrics:
+            send_unified_perf_message(
+                node=self.node,
+                test_result=test_result,
+                test_case_name=test_case_name,
+                tool=tool,
+                metric_name=metric["name"],
+                metric_value=metric.get("value", 0.0),
+                metric_unit=metric["unit"],
+                metric_str_value=metric.get("str_value", ""),
+                metric_relativity=metric["relativity"],
+            )
+
     def _initialize(self, *args: Any, **kwargs: Any) -> None:
         firewall = self.node.tools[Firewall]
         firewall.stop()
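The dispatch loop above deliberately uses metric.get(...) so that a single call site serves both numeric metrics and string-valued parameters: whichever field a row lacks falls back to a neutral default (0.0 or ""). Below is a minimal standalone sketch of that pattern, not part of the PR; emit is a hypothetical stand-in for lisa.messages.send_unified_perf_message, and the sample values are illustrative.

from typing import Any, Dict, List


def emit(name: str, value: float, unit: str, str_value: str) -> None:
    # Hypothetical stand-in for lisa.messages.send_unified_perf_message.
    print(f"{name}: value={value}, unit={unit!r}, str_value={str_value!r}")


def dispatch(metrics: List[Dict[str, Any]]) -> None:
    for metric in metrics:
        emit(
            name=metric["name"],
            # Numeric rows carry "value"; parameter rows carry "str_value".
            # The absent field falls back to a neutral default, as in the diff.
            value=metric.get("value", 0.0),
            unit=metric["unit"],
            str_value=metric.get("str_value", ""),
        )


dispatch(
    [
        {"name": "rx_pps_maximum", "value": 1_500_000.0, "unit": "packets/second"},
        {"name": "test_type", "str_value": "example-run", "unit": ""},
    ]
)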