Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions jobs/competitive-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ jobs:
retry_attempt_count: ${{ parameters.retry_attempt_count }}
credential_type: ${{ parameters.credential_type }}
ssh_key_enabled: ${{ parameters.ssh_key_enabled }}
- template: /steps/extract-job-parameters.yml
parameters:
cloud: ${{ parameters.cloud }}
matrix: ${{ parameters.matrix }}
- template: /steps/provision-resources.yml
parameters:
cloud: ${{ parameters.cloud }}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,26 @@ def parse_module_path(full_path):

return module_name, submodule_path, resource_name

def get_job_tags():
    """Read job tags from the JOB_TAGS environment variable.

    The JOB_TAGS variable is set by the set-job-tags.yml pipeline step as a
    JSON string containing the current matrix entry's key-value pairs.

    Returns:
        str or None: The job tags as a JSON string, or None when the variable
        is unset, empty, not valid JSON, or parses to an empty/null value.
    """
    job_tags_str = os.getenv("JOB_TAGS", "")
    if not job_tags_str:
        return None

    try:
        parsed = json.loads(job_tags_str)
    except json.JSONDecodeError as e:
        print(f"[WARNING] Failed to parse JOB_TAGS: {e}")
        return None

    # Decide emptiness on the parsed value, not the raw string: this treats
    # "{}", "{ }", "null", etc. uniformly as "no tags", so callers can rely
    # on a simple None check instead of re-parsing the string themselves.
    if not parsed:
        return None

    return job_tags_str

def process_terraform_logs(log_path, _command_type, _scenario_type, _scenario_name):
log_file = os.path.join(log_path, f"terraform_{_command_type}.log")
run_id = os.getenv("RUN_ID", "")
Expand All @@ -42,6 +62,8 @@ def process_terraform_logs(log_path, _command_type, _scenario_type, _scenario_na
print(f"[WARNING] Log file not found: {log_file}")
return results

job_tags = get_job_tags()

try:
with open(log_file, "r", encoding='utf-8') as f:
for line in f:
Expand All @@ -51,7 +73,7 @@ def process_terraform_logs(log_path, _command_type, _scenario_type, _scenario_na
seconds = time_to_seconds(time_str)
module, submodule, resource = parse_module_path(full_path)

results.append({
result = {
"timestamp": datetime.datetime.now().isoformat(),
"run_id": run_id,
"scenario_type": _scenario_type,
Expand All @@ -61,7 +83,12 @@ def process_terraform_logs(log_path, _command_type, _scenario_type, _scenario_na
"resource_name": resource,
"action": _command_type,
"time_taken_seconds": seconds
})
}

if job_tags is not None:
result["job_tags"] = job_tags

results.append(result)
except Exception as e:
print(f"[ERROR] Failed to process log file '{log_file}': {e}")

Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
import unittest
import os
import json
from unittest.mock import patch, mock_open

from terraform.extract_terraform_operation_metadata import (
time_to_seconds,
parse_module_path,
process_terraform_logs,
get_job_tags,
)

class TestExtractTerraformOperationMetadata(unittest.TestCase):
Expand Down Expand Up @@ -169,5 +171,95 @@ def test_process_terraform_logs_with_invalid_log_line(self, mock_open_file, mock
mock_open_file.assert_called_once_with('/fake/path/terraform_apply.log', 'r', encoding='utf-8')
mock_isfile.assert_called_once_with("/fake/path/terraform_apply.log")

# --- get_job_tags tests ---

@patch.dict(os.environ, {
    "JOB_TAGS": '{"node_count": 10, "max_pods": 30, "kubernetes_version": "1.31"}'
}, clear=False)
def test_get_job_tags_with_valid_json(self):
    # Valid JSON in JOB_TAGS is returned verbatim as a string; parsing the
    # returned string must recover the original matrix key-value pairs.
    job_tags = get_job_tags()
    self.assertIsNotNone(job_tags)
    parsed = json.loads(job_tags)
    self.assertEqual(parsed["node_count"], 10)
    self.assertEqual(parsed["max_pods"], 30)
    self.assertEqual(parsed["kubernetes_version"], "1.31")

@patch.dict(os.environ, {
    "JOB_TAGS": "invalid json"
}, clear=False)
def test_get_job_tags_with_invalid_json(self):
    # Malformed JSON must not propagate: get_job_tags swallows the
    # JSONDecodeError and signals "no tags" with None.
    job_tags = get_job_tags()
    self.assertIsNone(job_tags)

@patch.dict(os.environ, {
    "JOB_TAGS": "{}"
}, clear=False)
def test_get_job_tags_with_empty_object(self):
    # An empty JSON object carries no tag information, so it is reported
    # as None rather than the literal "{}" string.
    job_tags = get_job_tags()
    self.assertIsNone(job_tags)

@patch.dict(os.environ, {
    "JOB_TAGS": ""
}, clear=False)
def test_get_job_tags_with_empty_string(self):
    # An empty JOB_TAGS value behaves the same as an unset variable.
    job_tags = get_job_tags()
    self.assertIsNone(job_tags)

def test_get_job_tags_with_no_env_var(self):
    # Remove JOB_TAGS entirely (patch.dict can only set, not unset keys)
    # and restore it in finally so other tests in the run are unaffected.
    saved = os.environ.pop("JOB_TAGS", None)
    try:
        job_tags = get_job_tags()
        self.assertIsNone(job_tags)
    finally:
        if saved is not None:
            os.environ["JOB_TAGS"] = saved

# --- process_terraform_logs with job_tags integration tests ---

@patch("os.path.isfile", return_value=True)
@patch("builtins.open", new_callable=mock_open, read_data="module.aks.cluster: Creation complete after 1m30s\n")
@patch.dict(os.environ, {
    "RUN_ID": "test-run-123",
    "JOB_TAGS": '{"node_count": 10, "max_pods": 30}'
}, clear=False)
def test_process_terraform_logs_with_job_tags(self, mock_open_file, mock_isfile):
    # When JOB_TAGS is set, every parsed log record gains a "job_tags"
    # field holding the raw JSON string from the environment.
    results = process_terraform_logs(
        log_path="/fake/path",
        _command_type="apply",
        _scenario_type="perf-eval",
        _scenario_name="cri-resource-consume",
    )

    # One log line -> one result; "1m30s" converts to 90 seconds.
    self.assertEqual(len(results), 1)
    self.assertEqual(results[0]["run_id"], "test-run-123")
    self.assertEqual(results[0]["module_name"], "aks")
    self.assertEqual(results[0]["resource_name"], "cluster")
    self.assertEqual(results[0]["action"], "apply")
    self.assertEqual(results[0]["time_taken_seconds"], 90)
    # job_tags is stored as a JSON string, so it must round-trip via json.loads.
    self.assertIn("job_tags", results[0])
    parsed_tags = json.loads(results[0]["job_tags"])
    self.assertEqual(parsed_tags["node_count"], 10)
    self.assertEqual(parsed_tags["max_pods"], 30)

@patch("os.path.isfile", return_value=True)
@patch("builtins.open", new_callable=mock_open, read_data="module.aks.cluster: Creation complete after 1m30s\n")
def test_process_terraform_logs_without_job_tags(self, mock_open_file, mock_isfile):
    # Remove JOB_TAGS entirely (patch.dict can only set, not unset keys)
    # and restore it in finally so other tests in the run are unaffected.
    saved = os.environ.pop("JOB_TAGS", None)
    try:
        os.environ["RUN_ID"] = "test-run-456"
        results = process_terraform_logs(
            log_path="/fake/path",
            _command_type="apply",
            _scenario_type="perf-eval",
            _scenario_name="test-scenario",
        )

        # Without JOB_TAGS the record is emitted normally but must NOT
        # contain a "job_tags" key at all (not even an empty value).
        self.assertEqual(len(results), 1)
        self.assertEqual(results[0]["run_id"], "test-run-456")
        self.assertNotIn("job_tags", results[0])
    finally:
        if saved is not None:
            os.environ["JOB_TAGS"] = saved

if __name__ == "__main__":
unittest.main()
1 change: 1 addition & 0 deletions steps/collect-terraform-operation-metadata.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ steps:
workingDirectory: modules/python/terraform
env:
PYTHON_SCRIPT_FILE: $(Pipeline.Workspace)/s/modules/python/terraform/extract_terraform_operation_metadata.py
JOB_TAGS: $(JOB_TAGS)

- template: /steps/cloud/azure/upload-storage-account.yml
parameters:
Expand Down
30 changes: 30 additions & 0 deletions steps/extract-job-parameters.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Extracts the current job's matrix entry into the JOB_TAGS pipeline variable
# (a compact JSON object) so later steps in the job can tag emitted metadata.
parameters:
- name: cloud
  type: string
- name: matrix
  # Full job matrix: { matrix_key: { param: value, ... }, ... }
  type: object
  default: {}

steps:
- script: |
    set -eo pipefail

    FULL_MATRIX='${{ convertToJson(parameters.matrix) }}'
    CLOUD="${{ parameters.cloud }}"
    JOB_NAME="$(Agent.JobName)"

    # Agent.JobName format: "{cloud} {matrix_key}" e.g. "azure n10-p300-memory"
    PREFIX="${CLOUD} "
    if [[ "$JOB_NAME" == "${PREFIX}"* ]]; then
      # Strip the cloud prefix to recover the matrix key, then look up that
      # entry in the matrix JSON; jq emits nothing ("empty") when absent.
      MATRIX_KEY="${JOB_NAME#${PREFIX}}"
      JOB_TAGS=$(echo "$FULL_MATRIX" | jq --arg key "$MATRIX_KEY" '.[$key] // empty' -c)
    fi

    # Fall back to "{}" when the job name didn't match or the key is absent,
    # so downstream consumers always receive valid JSON.
    if [ -z "$JOB_TAGS" ]; then
      echo "No job tags found, setting empty object"
      JOB_TAGS="{}"
    fi

    echo "Job Tags: $JOB_TAGS"
    # Expose JOB_TAGS to subsequent steps in this job via a logging command.
    echo "##vso[task.setvariable variable=JOB_TAGS]$JOB_TAGS"
  displayName: "Extract Job Parameters"
Loading