From 22ddcd0deffd90fd3e3a519ec542f6140378b232 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Wed, 8 May 2024 14:39:19 +0200 Subject: [PATCH 01/29] fix(79): Ensure test supposed to fail also fail, when monitoring is disabled (--no-monitor cmd flag) changelog updated in this commit too The issue was an exception not being raised when calling wrapped_function when monitoring is disabled in line 216. The raise is being handled now in the following lines 218ff. --- docs/sources/changelog.rst | 2 +- pytest_monitor/pytest_monitor.py | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/sources/changelog.rst b/docs/sources/changelog.rst index 0b77e9e..5d2616d 100644 --- a/docs/sources/changelog.rst +++ b/docs/sources/changelog.rst @@ -1,7 +1,7 @@ ========= Changelog ========= - +* :bug: `#79` Fix a bug concerning commandline flag `--no-monitor` causing tests that are supposed to fail to pass instead * :release:`1.6.6 <2023-05-06>` * :bug:`#64` Prepare version 1.7.0 of pytest-monitor. Last version to support Python <= 3.7 and all pytest <= 5.* * :bug:`#0` Improve and fix some CI issues, notably one that may cause python to not be the requested one but a more recent one. diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 3e9c12c..7eb5a2d 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -213,7 +213,9 @@ def prof(): setattr(pyfuncitem, "monitor_results", True) if not PYTEST_MONITORING_ENABLED: - wrapped_function() + e = wrapped_function() + if isinstance(e, BaseException): + raise e else: if not pyfuncitem.session.config.option.mtr_disable_gc: gc.collect() From 4a8c5d9be3da6d4e2d409db60e84f1459e86d0bc Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 16:00:04 +0200 Subject: [PATCH 02/29] Add vscode config to debug tests --- .vscode/settings.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..9b38853 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} \ No newline at end of file From b56ffb71392070df20ceff8b4e2230c2f2d0d080 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 17:08:46 +0200 Subject: [PATCH 03/29] Refactor: Move profiling logic from unmaintained memory-profiler module to own internal module --- pytest_monitor/profiler.py | 195 +++++++++++++++++++++++++++++++ pytest_monitor/pytest_monitor.py | 6 +- pytest_monitor/session.py | 4 +- 3 files changed, 201 insertions(+), 4 deletions(-) create mode 100644 pytest_monitor/profiler.py diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py new file mode 100644 index 0000000..f6c9c7c --- /dev/null +++ b/pytest_monitor/profiler.py @@ -0,0 +1,195 @@ +import os +import time +from signal import SIGKILL +from typing import Any, Callable + +import psutil + +_TWO_20 = float(2**20) + +try: + from multiprocessing import Pipe, Process +except ImportError: + # from multiprocessing.dummy import Pipe + raise + + +def memory_usage(proc: tuple[Callable, Any, Any], retval=False): + """ + Return the memory usage of a process or piece of code + + Parameters + ---------- + proc : {int, string, tuple}, optional + The process to monitor. Is a tuple + representing a Python function. 
The tuple contains three + values (f, args, kw) and specifies to run the function + f(*args, **kw). + Set to -1 (default) for current process. + + Returns + ------- + mem_usage : list of floating-point values + memory usage, in MiB. It's length is always < timeout / interval + if max_usage is given, returns the two elements maximum memory and + number of measurements effectuated + ret : return value of the profiled function + Only returned if retval is set to True + """ + + ret = -1 + max_iter = 1 + interval = 0.1 + + if callable(proc): + proc = (proc, (), {}) + + if isinstance(proc, (list, tuple)): + if len(proc) == 1: + f, args, kw = (proc[0], (), {}) + elif len(proc) == 2: + f, args, kw = (proc[0], proc[1], {}) + elif len(proc) == 3: + f, args, kw = (proc[0], proc[1], proc[2]) + else: + raise ValueError + + current_iter = 0 + while True: + current_iter += 1 + child_conn, parent_conn = Pipe() # this will store MemTimer's results + p = MemTimer(os.getpid(), interval, child_conn, timestamps=False, max_usage=True, include_children=False) + p.start() + parent_conn.recv() # wait until we start getting memory + + # When there is an exception in the "proc" - the (spawned) monitoring processes don't get killed. + # Therefore, the whole process hangs indefinitely. Here, we are ensuring that the process gets killed! + try: + returned = f(*args, **kw) + parent_conn.send(0) # finish timing + ret = parent_conn.recv() + n_measurements = parent_conn.recv() + # Convert the one element list produced by MemTimer to a singular value + ret = ret[0] + if retval: + ret = ret, returned + except Exception: + parent = psutil.Process(os.getpid()) + for child in parent.children(recursive=True): + os.kill(child.pid, SIGKILL) + p.join(0) + raise + + p.join(5 * interval) + + if (n_measurements > 4) or (current_iter == max_iter) or (interval < 1e-6): + break + interval /= 10.0 + else: + raise ValueError("proc is no valid function") + + return ret + + +class MemTimer(Process): + """ + Fetch memory consumption from over a time interval + """ + + def __init__(self, monitor_pid, interval, pipe, max_usage=False, *args, **kw): + self.monitor_pid = monitor_pid + self.interval = interval + self.pipe = pipe + self.cont = True + self.max_usage = max_usage + self.n_measurements = 1 + + self.timestamps = kw.pop("timestamps", False) + self.include_children = kw.pop("include_children", False) + + # get baseline memory usage + self.mem_usage = [ + _get_memory(self.monitor_pid, timestamps=self.timestamps, include_children=self.include_children) + ] + super(MemTimer, self).__init__(*args, **kw) + + def run(self): + self.pipe.send(0) # we're ready + stop = False + while True: + cur_mem = _get_memory( + self.monitor_pid, + timestamps=self.timestamps, + include_children=self.include_children, + ) + if not self.max_usage: + self.mem_usage.append(cur_mem) + else: + self.mem_usage[0] = max(cur_mem, self.mem_usage[0]) + self.n_measurements += 1 + if stop: + break + stop = self.pipe.poll(self.interval) + # do one more iteration + + self.pipe.send(self.mem_usage) + self.pipe.send(self.n_measurements) + + +def _get_memory(pid, timestamps=False, include_children=False): + + def ps_util_tool(): + # .. cross-platform but but requires psutil .. + process = psutil.Process(pid) + try: + # avoid using get_memory_info since it does not exists + # in psutil > 2.0 and accessing it will cause exception. 
+ meminfo_attr = "memory_info" if hasattr(process, "memory_info") else "get_memory_info" + mem = getattr(process, meminfo_attr)()[0] / _TWO_20 + if include_children: + mem += sum([mem for (_, mem) in _get_child_memory(process, meminfo_attr)]) + + if timestamps: + return mem, time.time() + + return mem + except psutil.AccessDenied: + pass + # continue and try to get this from ps + + # .. low function to get memory consumption .. + if pid == -1: + pid = os.getpid() + + return ps_util_tool() + + +def _get_child_memory(process, meminfo_attr=None, memory_metric=0): + """ + Returns a generator that yields memory for all child processes. + """ + # Convert a pid to a process + if isinstance(process, int): + if process == -1: + process = os.getpid() + process = psutil.Process(process) + + if not meminfo_attr: + # Use the psutil 2.0 attr if the older version isn't passed in. + meminfo_attr = "memory_info" if hasattr(process, "memory_info") else "get_memory_info" + + # Select the psutil function get the children similar to how we selected + # the memory_info attr (a change from excepting the AttributeError). + children_attr = "children" if hasattr(process, "children") else "get_children" + + # Loop over the child processes and yield their memory + try: + for child in getattr(process, children_attr)(recursive=True): + if isinstance(memory_metric, str): + meminfo = getattr(child, meminfo_attr)() + yield child.pid, getattr(meminfo, memory_metric) / _TWO_20 + else: + yield child.pid, getattr(child, meminfo_attr)()[memory_metric] / _TWO_20 + except (psutil.NoSuchProcess, psutil.AccessDenied): + # https://github.com/fabianp/memory_profiler/issues/71 + yield (0, 0.0) diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 7eb5a2d..ee4c391 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -3,11 +3,12 @@ import time import warnings -import memory_profiler import pytest from pytest_monitor.session import PyTestMonitorSession +from .profiler import memory_usage + # These dictionaries are used to compute members set on each items. # KEY is the marker set on a test function # value is a tuple: @@ -205,9 +206,10 @@ def wrapped_function(): return e def prof(): - m = memory_profiler.memory_usage((wrapped_function, ()), max_iterations=1, max_usage=True, retval=True) + m = memory_usage((wrapped_function, ()), retval=True) if isinstance(m[1], BaseException): # Do we have any outcome? 
raise m[1] + memuse = m[0][0] if type(m[0]) is list else m[0] setattr(pyfuncitem, "mem_usage", memuse) setattr(pyfuncitem, "monitor_results", True) diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index 677362c..4ab0f8e 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ -5,11 +5,11 @@ import warnings from http import HTTPStatus -import memory_profiler import psutil import requests from pytest_monitor.handler import DBHandler +from pytest_monitor.profiler import memory_usage from pytest_monitor.sys_utils import ( ExecutionContext, collect_ci_info, @@ -124,7 +124,7 @@ def prepare(self): def dummy(): return True - memuse = memory_profiler.memory_usage((dummy,), max_iterations=1, max_usage=True) + memuse = memory_usage((dummy,)) self.__mem_usage_base = memuse[0] if type(memuse) is list else memuse def add_test_info( From d74320b1990172dba3d86c819c962d085811ff24 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 17:12:13 +0200 Subject: [PATCH 04/29] Refactor: Add docstring for retval in memory_usage func --- pytest_monitor/profiler.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index f6c9c7c..5433230 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -27,6 +27,11 @@ def memory_usage(proc: tuple[Callable, Any, Any], retval=False): f(*args, **kw). Set to -1 (default) for current process. + retval : bool, optional + For profiling python functions. Save the return value of the profiled + function. Return value of memory_usage becomes a tuple: + (mem_usage, retval) + Returns ------- mem_usage : list of floating-point values From e63ca6c8cc0baf6bc5c57d40c1e675e5e755c5b4 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 17:27:03 +0200 Subject: [PATCH 05/29] Refactor profiler.py: Simplify functions and MemTimer Class to only use needed parameters --- pytest_monitor/profiler.py | 86 +++++++------------------------------- 1 file changed, 16 insertions(+), 70 deletions(-) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index 5433230..0e7985d 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -1,5 +1,4 @@ import os -import time from signal import SIGKILL from typing import Any, Callable @@ -63,7 +62,7 @@ def memory_usage(proc: tuple[Callable, Any, Any], retval=False): while True: current_iter += 1 child_conn, parent_conn = Pipe() # this will store MemTimer's results - p = MemTimer(os.getpid(), interval, child_conn, timestamps=False, max_usage=True, include_children=False) + p = MemTimer(os.getpid(), interval, child_conn) p.start() parent_conn.recv() # wait until we start getting memory @@ -101,36 +100,23 @@ class MemTimer(Process): Fetch memory consumption from over a time interval """ - def __init__(self, monitor_pid, interval, pipe, max_usage=False, *args, **kw): + def __init__(self, monitor_pid, interval, pipe, *args, **kw): self.monitor_pid = monitor_pid self.interval = interval self.pipe = pipe self.cont = True - self.max_usage = max_usage self.n_measurements = 1 - self.timestamps = kw.pop("timestamps", False) - self.include_children = kw.pop("include_children", False) - # get baseline memory usage - self.mem_usage = [ - _get_memory(self.monitor_pid, timestamps=self.timestamps, include_children=self.include_children) - ] + self.mem_usage = [_get_memory(self.monitor_pid)] super(MemTimer, self).__init__(*args, **kw) def run(self): self.pipe.send(0) # we're ready stop = False while True: - cur_mem 
= _get_memory( - self.monitor_pid, - timestamps=self.timestamps, - include_children=self.include_children, - ) - if not self.max_usage: - self.mem_usage.append(cur_mem) - else: - self.mem_usage[0] = max(cur_mem, self.mem_usage[0]) + cur_mem = _get_memory(self.monitor_pid) + self.mem_usage[0] = max(cur_mem, self.mem_usage[0]) self.n_measurements += 1 if stop: break @@ -141,60 +127,20 @@ def run(self): self.pipe.send(self.n_measurements) -def _get_memory(pid, timestamps=False, include_children=False): - - def ps_util_tool(): - # .. cross-platform but but requires psutil .. - process = psutil.Process(pid) - try: - # avoid using get_memory_info since it does not exists - # in psutil > 2.0 and accessing it will cause exception. - meminfo_attr = "memory_info" if hasattr(process, "memory_info") else "get_memory_info" - mem = getattr(process, meminfo_attr)()[0] / _TWO_20 - if include_children: - mem += sum([mem for (_, mem) in _get_child_memory(process, meminfo_attr)]) - - if timestamps: - return mem, time.time() - - return mem - except psutil.AccessDenied: - pass - # continue and try to get this from ps - +def _get_memory(pid): # .. low function to get memory consumption .. if pid == -1: pid = os.getpid() - return ps_util_tool() - - -def _get_child_memory(process, meminfo_attr=None, memory_metric=0): - """ - Returns a generator that yields memory for all child processes. - """ - # Convert a pid to a process - if isinstance(process, int): - if process == -1: - process = os.getpid() - process = psutil.Process(process) - - if not meminfo_attr: - # Use the psutil 2.0 attr if the older version isn't passed in. + # .. cross-platform but but requires psutil .. + process = psutil.Process(pid) + try: + # avoid using get_memory_info since it does not exists + # in psutil > 2.0 and accessing it will cause exception. meminfo_attr = "memory_info" if hasattr(process, "memory_info") else "get_memory_info" + mem = getattr(process, meminfo_attr)()[0] / _TWO_20 + return mem - # Select the psutil function get the children similar to how we selected - # the memory_info attr (a change from excepting the AttributeError). - children_attr = "children" if hasattr(process, "children") else "get_children" - - # Loop over the child processes and yield their memory - try: - for child in getattr(process, children_attr)(recursive=True): - if isinstance(memory_metric, str): - meminfo = getattr(child, meminfo_attr)() - yield child.pid, getattr(meminfo, memory_metric) / _TWO_20 - else: - yield child.pid, getattr(child, meminfo_attr)()[memory_metric] / _TWO_20 - except (psutil.NoSuchProcess, psutil.AccessDenied): - # https://github.com/fabianp/memory_profiler/issues/71 - yield (0, 0.0) + except psutil.AccessDenied: + pass + # continue and try to get this from ps From 50c3f3afcf3c0d11baa091b50522f9b0ab238c22 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 17:37:50 +0200 Subject: [PATCH 06/29] License and Copyright notice profiler.py: Add memory_profiler copyright notice --- pytest_monitor/profiler.py | 37 +++++++++++++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index 0e7985d..d6bc114 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -1,3 +1,40 @@ +# The following code has been copied from the memory_profiler project and +# modified to fit the new usecase. 
+# Homepage of memory_profiler: https://github.com/pythonprofilers/memory_profiler +# +# Memory_Profiler License: +# New BSD License + +# Copyright (c) 2007–2014 Fabian Pedregosa. +# All rights reserved. + + +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are met: + +# a. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# b. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# c. Neither the name of the memory_profiler developers nor the names of +# its contributors may be used to endorse or promote products +# derived from this software without specific prior written +# permission. + + +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR +# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH +# DAMAGE. + import os from signal import SIGKILL from typing import Any, Callable From ca5b209cdd454075d6f0f3cdc4aa2a56607f9b94 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 18:14:45 +0200 Subject: [PATCH 07/29] Refactor (profiler, pytest_monitor, session): Adjust memory_usage function to always return memory usage no matter if function throws exception or not --- pytest_monitor/profiler.py | 11 ++++++++--- pytest_monitor/pytest_monitor.py | 12 +++++++----- pytest_monitor/session.py | 6 ++++-- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index d6bc114..667cf29 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -111,15 +111,20 @@ def memory_usage(proc: tuple[Callable, Any, Any], retval=False): ret = parent_conn.recv() n_measurements = parent_conn.recv() # Convert the one element list produced by MemTimer to a singular value - ret = ret[0] + ret = ret[0], None if retval: ret = ret, returned - except Exception: + except Exception as e: + parent_conn.send(0) # finish timing + ret = parent_conn.recv() + n_measurements = parent_conn.recv() + # Convert the one element list produced by MemTimer to a singular value + ret = ret[0], e parent = psutil.Process(os.getpid()) for child in parent.children(recursive=True): os.kill(child.pid, SIGKILL) p.join(0) - raise + break p.join(5 * interval) diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index ee4c391..578129c 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -206,14 +206,16 @@ def wrapped_function(): return e def prof(): - m = memory_usage((wrapped_function, ()), retval=True) - if isinstance(m[1], BaseException): # Do we have any outcome? 
- raise m[1] - - memuse = m[0][0] if type(m[0]) is list else m[0] + (memuse, exception) = memory_usage((wrapped_function, ())) setattr(pyfuncitem, "mem_usage", memuse) setattr(pyfuncitem, "monitor_results", True) + if isinstance(exception, BaseException): # Do we have any outcome? + setattr(pyfuncitem, "passed", False) + raise exception + + setattr(pyfuncitem, "passed", True) + if not PYTEST_MONITORING_ENABLED: e = wrapped_function() if isinstance(e, BaseException): diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index 4ab0f8e..c4d02d8 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ -124,8 +124,10 @@ def prepare(self): def dummy(): return True - memuse = memory_usage((dummy,)) - self.__mem_usage_base = memuse[0] if type(memuse) is list else memuse + (memuse, exception) = memory_usage((dummy,)) + self.__mem_usage_base = memuse + if isinstance(exception, BaseException): + raise def add_test_info( self, From ba71d581393639b4f2e100cbbbecf40d97200431 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 19:33:24 +0200 Subject: [PATCH 08/29] Refactor (handler.py, pytest_monitor.py, session.py): Add table column TEST_PASSED (type boolean) to TEST_METRICS table to indicate if the test being logged passed. --- pytest_monitor/handler.py | 7 +++++-- pytest_monitor/pytest_monitor.py | 2 ++ pytest_monitor/session.py | 3 +++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/pytest_monitor/handler.py b/pytest_monitor/handler.py index 6aaa208..1cd0c2d 100644 --- a/pytest_monitor/handler.py +++ b/pytest_monitor/handler.py @@ -35,13 +35,14 @@ def insert_metric( kernel_time, cpu_usage, mem_usage, + passed: bool, ): with self.__cnx: self.__cnx.execute( "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," - "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " - "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE,TEST_PASSED) " + "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", ( session_id, env_id, @@ -57,6 +58,7 @@ def insert_metric( kernel_time, cpu_usage, mem_usage, + passed, ), ) @@ -109,6 +111,7 @@ def prepare(self): KERNEL_TIME float, -- time spent in kernel space CPU_USAGE float, -- cpu usage MEM_USAGE float, -- Max resident memory used. 
+ TEST_PASSED boolean, -- boolean indicating if test passed FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) );""" diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 578129c..832a016 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -290,6 +290,7 @@ def _prf_module_tracer(request): ptimes_b.user - ptimes_a.user, ptimes_b.system - ptimes_a.system, rss, + True, ) @@ -316,4 +317,5 @@ def _prf_tracer(request): ptimes_b.user - ptimes_a.user, ptimes_b.system - ptimes_a.system, request.node.mem_usage, + getattr(request.node, "passed", False), ) diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index c4d02d8..a196916 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ -142,6 +142,7 @@ def add_test_info( user_time, kernel_time, mem_usage, + passed: bool, ): if kind not in self.__scope: return @@ -168,6 +169,7 @@ def add_test_info( kernel_time, cpu_usage, mem_usage, + passed, ) if self.__remote and self.remote_env_id is not None: r = requests.post( @@ -187,6 +189,7 @@ def add_test_info( "kernel_time": kernel_time, "cpu_usage": cpu_usage, "mem_usage": mem_usage, + "test_passed": passed, }, ) if r.status_code != HTTPStatus.CREATED: From c5974c918c858912d7fe52578e9e62393f63a617 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 14 May 2024 19:33:24 +0200 Subject: [PATCH 09/29] Refactor (handler.py, pytest_monitor.py, session.py): Add table column TEST_PASSED (type boolean) to TEST_METRICS table to indicate if the test being logged passed. Testing: Add proper test to test successful database migration --- pytest_monitor/handler.py | 19 ++++- pytest_monitor/pytest_monitor.py | 2 + pytest_monitor/session.py | 3 + tests/test_monitor.py | 133 +++++++++++++++++++++++++++++++ 4 files changed, 155 insertions(+), 2 deletions(-) diff --git a/pytest_monitor/handler.py b/pytest_monitor/handler.py index 6aaa208..e1dfe6e 100644 --- a/pytest_monitor/handler.py +++ b/pytest_monitor/handler.py @@ -6,6 +6,18 @@ def __init__(self, db_path): self.__db = db_path self.__cnx = sqlite3.connect(self.__db) if db_path else None self.prepare() + # check if new table column is existent, if not create it + self.check_create_test_passed_column() + + def check_create_test_passed_column(self): + cursor = self.__cnx.cursor() + # check for test_passed column, + # table exists bc call happens after prepare() + cursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any(column[1] == "TEST_PASSED" for column in cursor.fetchall()) + if not has_test_column: + cursor.execute("ALTER TABLE TEST_METRICS ADD COLUMN TEST_PASSED BOOLEAN DEFAULT TRUE;") + self.__cnx.commit() def query(self, what, bind_to, many=False): cursor = self.__cnx.cursor() @@ -35,13 +47,14 @@ def insert_metric( kernel_time, cpu_usage, mem_usage, + passed: bool, ): with self.__cnx: self.__cnx.execute( "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," - "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " - "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE,TEST_PASSED) " + "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", ( session_id, env_id, @@ -57,6 +70,7 @@ def insert_metric( kernel_time, cpu_usage, mem_usage, + passed, ), ) @@ -109,6 +123,7 @@ def prepare(self): KERNEL_TIME float, -- time spent in kernel space CPU_USAGE float, -- cpu usage MEM_USAGE float, -- Max resident memory 
used. + TEST_PASSED boolean, -- boolean indicating if test passed FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) );""" diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 578129c..832a016 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -290,6 +290,7 @@ def _prf_module_tracer(request): ptimes_b.user - ptimes_a.user, ptimes_b.system - ptimes_a.system, rss, + True, ) @@ -316,4 +317,5 @@ def _prf_tracer(request): ptimes_b.user - ptimes_a.user, ptimes_b.system - ptimes_a.system, request.node.mem_usage, + getattr(request.node, "passed", False), ) diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index c4d02d8..a196916 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ -142,6 +142,7 @@ def add_test_info( user_time, kernel_time, mem_usage, + passed: bool, ): if kind not in self.__scope: return @@ -168,6 +169,7 @@ def add_test_info( kernel_time, cpu_usage, mem_usage, + passed, ) if self.__remote and self.remote_env_id is not None: r = requests.post( @@ -187,6 +189,7 @@ def add_test_info( "kernel_time": kernel_time, "cpu_usage": cpu_usage, "mem_usage": mem_usage, + "test_passed": passed, }, ) if r.status_code != HTTPStatus.CREATED: diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 85b4ff6..11ab17f 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -406,3 +406,136 @@ def run(a, b): # make sure that that we get a '0' exit code for the testsuite result.assert_outcomes(passed=1) assert not pymon_path.exists() + + +def test_monitor_DBHandler_check_create_test_passed_column(pytester): + import datetime + + from pytest_monitor.handler import DBHandler + from pytest_monitor.sys_utils import determine_scm_revision + + # import os + + def prepare_mock_db(conn: sqlite3.Connection): + cursor = conn.cursor() + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS TEST_SESSIONS( + SESSION_H varchar(64) primary key not null unique, -- Session identifier + RUN_DATE varchar(64), -- Date of test run + SCM_ID varchar(128), -- SCM change id + RUN_DESCRIPTION json +);""" + ) + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS TEST_METRICS ( + SESSION_H varchar(64), -- Session identifier + ENV_H varchar(64), -- Environment description identifier + ITEM_START_TIME varchar(64), -- Effective start time of the test + ITEM_PATH varchar(4096), -- Path of the item, following Python import specification + ITEM varchar(2048), -- Name of the item + ITEM_VARIANT varchar(2048), -- Optional parametrization of an item. + ITEM_FS_LOC varchar(2048), -- Relative path from pytest invocation directory to the item's module. + KIND varchar(64), -- Package, Module or function + COMPONENT varchar(512) NULL, -- Tested component if any + TOTAL_TIME float, -- Total time spent running the item + USER_TIME float, -- time spent in user space + KERNEL_TIME float, -- time spent in kernel space + CPU_USAGE float, -- cpu usage + MEM_USAGE float, -- Max resident memory used. 
+ FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), + FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) +);""" + ) + + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS ( + ENV_H varchar(64) primary key not null unique, + CPU_COUNT integer, + CPU_FREQUENCY_MHZ integer, + CPU_TYPE varchar(64), + CPU_VENDOR varchar(256), + RAM_TOTAL_MB integer, + MACHINE_NODE varchar(512), + MACHINE_TYPE varchar(32), + MACHINE_ARCH varchar(16), + SYSTEM_INFO varchar(256), + PYTHON_INFO varchar(512) +); +""" + ) + conn.commit() + return conn + + dbpath = "mockdb" + # open database in memory + mockdb = sqlite3.connect(dbpath) + # prepare old database format + mockdb = prepare_mock_db(mockdb) + + # attach to DBHandler object + db = DBHandler(":memory:") + db.__cnx = mockdb + db._DBHandler__cnx = mockdb + db._DBHandler__db = "mockdb" + + # insert old style entry + # TODO + run_date = datetime.datetime.now().isoformat() + db.insert_session("1", run_date, determine_scm_revision(), "Test Session") + db.__cnx.cursor().execute( + "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," + "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," + "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " + "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + ( + "1", + "Environment", + "Startdate", + "name of item", + "Item path", + "Optional Param", + "relative path", + "NULL", + "NULL", + 42, + 42, + 42, + 42, + 42, + ), + ) + db.__cnx.commit() + + mcursor = db.__cnx.cursor() + mcursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) + mcursor = None + + try: + assert not has_test_column + + # run function to test + db.check_create_test_passed_column() + + # check for new column + mcursor = db.__cnx.cursor() + mcursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) + assert has_test_column + + # check for default value TRUE in existing entry + mcursor.execute("SELECT TEST_PASSED FROM TEST_METRICS LIMIT 1") + default_is_passed = mcursor.fetchone() + + # default value true(1) for entries after migration + assert default_is_passed[0] == 1 + + except Exception: + raise + finally: + import os + + os.remove(dbpath) From 6fbcfb27d7b5ad6331e570930c9f9267a553b036 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 21 May 2024 16:07:41 +0200 Subject: [PATCH 10/29] Refactor check_create_test_passed_column to use a in memory db --- tests/test_monitor.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 11ab17f..7f9d4ec 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -469,20 +469,18 @@ def prepare_mock_db(conn: sqlite3.Connection): conn.commit() return conn - dbpath = "mockdb" # open database in memory - mockdb = sqlite3.connect(dbpath) + mockdb = sqlite3.connect(":memory:") # prepare old database format mockdb = prepare_mock_db(mockdb) - # attach to DBHandler object + # attach mocked db to DBHandler object db = DBHandler(":memory:") db.__cnx = mockdb db._DBHandler__cnx = mockdb db._DBHandler__db = "mockdb" # insert old style entry - # TODO run_date = datetime.datetime.now().isoformat() db.insert_session("1", run_date, determine_scm_revision(), "Test Session") db.__cnx.cursor().execute( @@ -535,7 +533,3 @@ def prepare_mock_db(conn: sqlite3.Connection): except Exception: raise - finally: - import os - - os.remove(dbpath) From 
a0990be3dadfb45f41cc4696181521751c6f475d Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 21 May 2024 17:07:05 +0200 Subject: [PATCH 11/29] test_monitor.py: Add test to check for proper setup of new database config on Initialization of DBHandler (test if new column existent) --- tests/test_monitor.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 7f9d4ec..18ad7ee 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -533,3 +533,12 @@ def prepare_mock_db(conn: sqlite3.Connection): except Exception: raise + + +def test_monitor_DBHandler_check_new_db_setup(pytester): + from pytest_monitor.handler import DBHandler + + # db handler + db = DBHandler(":memory:") + table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) + assert any(column[1] == "TEST_PASSED" for column in table_cols) From 4d57393b62a0fa254266a5823f1c6e59cdb580aa Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 28 May 2024 15:29:31 +0200 Subject: [PATCH 12/29] Refactor test_monitor.py: Move db handler tests into own test source file test_monitor_handler.py --- tests/test_monitor.py | 136 --------------------------------- tests/test_monitor_handler.py | 138 ++++++++++++++++++++++++++++++++++ 2 files changed, 138 insertions(+), 136 deletions(-) create mode 100644 tests/test_monitor_handler.py diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 18ad7ee..85b4ff6 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -406,139 +406,3 @@ def run(a, b): # make sure that that we get a '0' exit code for the testsuite result.assert_outcomes(passed=1) assert not pymon_path.exists() - - -def test_monitor_DBHandler_check_create_test_passed_column(pytester): - import datetime - - from pytest_monitor.handler import DBHandler - from pytest_monitor.sys_utils import determine_scm_revision - - # import os - - def prepare_mock_db(conn: sqlite3.Connection): - cursor = conn.cursor() - cursor.execute( - """ -CREATE TABLE IF NOT EXISTS TEST_SESSIONS( - SESSION_H varchar(64) primary key not null unique, -- Session identifier - RUN_DATE varchar(64), -- Date of test run - SCM_ID varchar(128), -- SCM change id - RUN_DESCRIPTION json -);""" - ) - cursor.execute( - """ -CREATE TABLE IF NOT EXISTS TEST_METRICS ( - SESSION_H varchar(64), -- Session identifier - ENV_H varchar(64), -- Environment description identifier - ITEM_START_TIME varchar(64), -- Effective start time of the test - ITEM_PATH varchar(4096), -- Path of the item, following Python import specification - ITEM varchar(2048), -- Name of the item - ITEM_VARIANT varchar(2048), -- Optional parametrization of an item. - ITEM_FS_LOC varchar(2048), -- Relative path from pytest invocation directory to the item's module. - KIND varchar(64), -- Package, Module or function - COMPONENT varchar(512) NULL, -- Tested component if any - TOTAL_TIME float, -- Total time spent running the item - USER_TIME float, -- time spent in user space - KERNEL_TIME float, -- time spent in kernel space - CPU_USAGE float, -- cpu usage - MEM_USAGE float, -- Max resident memory used. 
- FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), - FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) -);""" - ) - - cursor.execute( - """ -CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS ( - ENV_H varchar(64) primary key not null unique, - CPU_COUNT integer, - CPU_FREQUENCY_MHZ integer, - CPU_TYPE varchar(64), - CPU_VENDOR varchar(256), - RAM_TOTAL_MB integer, - MACHINE_NODE varchar(512), - MACHINE_TYPE varchar(32), - MACHINE_ARCH varchar(16), - SYSTEM_INFO varchar(256), - PYTHON_INFO varchar(512) -); -""" - ) - conn.commit() - return conn - - # open database in memory - mockdb = sqlite3.connect(":memory:") - # prepare old database format - mockdb = prepare_mock_db(mockdb) - - # attach mocked db to DBHandler object - db = DBHandler(":memory:") - db.__cnx = mockdb - db._DBHandler__cnx = mockdb - db._DBHandler__db = "mockdb" - - # insert old style entry - run_date = datetime.datetime.now().isoformat() - db.insert_session("1", run_date, determine_scm_revision(), "Test Session") - db.__cnx.cursor().execute( - "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," - "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," - "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " - "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - ( - "1", - "Environment", - "Startdate", - "name of item", - "Item path", - "Optional Param", - "relative path", - "NULL", - "NULL", - 42, - 42, - 42, - 42, - 42, - ), - ) - db.__cnx.commit() - - mcursor = db.__cnx.cursor() - mcursor.execute("PRAGMA table_info(TEST_METRICS)") - has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) - mcursor = None - - try: - assert not has_test_column - - # run function to test - db.check_create_test_passed_column() - - # check for new column - mcursor = db.__cnx.cursor() - mcursor.execute("PRAGMA table_info(TEST_METRICS)") - has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) - assert has_test_column - - # check for default value TRUE in existing entry - mcursor.execute("SELECT TEST_PASSED FROM TEST_METRICS LIMIT 1") - default_is_passed = mcursor.fetchone() - - # default value true(1) for entries after migration - assert default_is_passed[0] == 1 - - except Exception: - raise - - -def test_monitor_DBHandler_check_new_db_setup(pytester): - from pytest_monitor.handler import DBHandler - - # db handler - db = DBHandler(":memory:") - table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) - assert any(column[1] == "TEST_PASSED" for column in table_cols) diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py new file mode 100644 index 0000000..4e890dc --- /dev/null +++ b/tests/test_monitor_handler.py @@ -0,0 +1,138 @@ +# -*- coding: utf-8 -*- +import sqlite3 + + +def test_monitor_DBHandler_check_create_test_passed_column(pytester): + import datetime + + from pytest_monitor.handler import DBHandler + from pytest_monitor.sys_utils import determine_scm_revision + + # import os + + def prepare_mock_db(conn: sqlite3.Connection): + cursor = conn.cursor() + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS TEST_SESSIONS( + SESSION_H varchar(64) primary key not null unique, -- Session identifier + RUN_DATE varchar(64), -- Date of test run + SCM_ID varchar(128), -- SCM change id + RUN_DESCRIPTION json +);""" + ) + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS TEST_METRICS ( + SESSION_H varchar(64), -- Session identifier + ENV_H varchar(64), -- Environment description identifier + ITEM_START_TIME varchar(64), -- Effective start time 
of the test + ITEM_PATH varchar(4096), -- Path of the item, following Python import specification + ITEM varchar(2048), -- Name of the item + ITEM_VARIANT varchar(2048), -- Optional parametrization of an item. + ITEM_FS_LOC varchar(2048), -- Relative path from pytest invocation directory to the item's module. + KIND varchar(64), -- Package, Module or function + COMPONENT varchar(512) NULL, -- Tested component if any + TOTAL_TIME float, -- Total time spent running the item + USER_TIME float, -- time spent in user space + KERNEL_TIME float, -- time spent in kernel space + CPU_USAGE float, -- cpu usage + MEM_USAGE float, -- Max resident memory used. + FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), + FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) +);""" + ) + + cursor.execute( + """ +CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS ( + ENV_H varchar(64) primary key not null unique, + CPU_COUNT integer, + CPU_FREQUENCY_MHZ integer, + CPU_TYPE varchar(64), + CPU_VENDOR varchar(256), + RAM_TOTAL_MB integer, + MACHINE_NODE varchar(512), + MACHINE_TYPE varchar(32), + MACHINE_ARCH varchar(16), + SYSTEM_INFO varchar(256), + PYTHON_INFO varchar(512) +); +""" + ) + conn.commit() + return conn + + # open database in memory + mockdb = sqlite3.connect(":memory:") + # prepare old database format + mockdb = prepare_mock_db(mockdb) + + # attach mocked db to DBHandler object + db = DBHandler(":memory:") + db.__cnx = mockdb + db._DBHandler__cnx = mockdb + db._DBHandler__db = "mockdb" + + # insert old style entry + run_date = datetime.datetime.now().isoformat() + db.insert_session("1", run_date, determine_scm_revision(), "Test Session") + db.__cnx.cursor().execute( + "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," + "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," + "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " + "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + ( + "1", + "Environment", + "Startdate", + "name of item", + "Item path", + "Optional Param", + "relative path", + "NULL", + "NULL", + 42, + 42, + 42, + 42, + 42, + ), + ) + db.__cnx.commit() + + mcursor = db.__cnx.cursor() + mcursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) + mcursor = None + + try: + assert not has_test_column + + # run function to test + db.check_create_test_passed_column() + + # check for new column + mcursor = db.__cnx.cursor() + mcursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) + assert has_test_column + + # check for default value TRUE in existing entry + mcursor.execute("SELECT TEST_PASSED FROM TEST_METRICS LIMIT 1") + default_is_passed = mcursor.fetchone() + + # default value true(1) for entries after migration + assert default_is_passed[0] == 1 + + except Exception: + raise + + +def test_monitor_DBHandler_check_new_db_setup(pytester): + from pytest_monitor.handler import DBHandler + + # db handler + db = DBHandler(":memory:") + table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) + assert any(column[1] == "TEST_PASSED" for column in table_cols) From 6985c72c4e0564c7d3c182605a40ab87d1b28168 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 28 May 2024 15:33:59 +0200 Subject: [PATCH 13/29] Refactor test_monitor_handler.py: Rename handler test functions to specify the sql backend. 
--- tests/test_monitor_handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py index 4e890dc..cbbf705 100644 --- a/tests/test_monitor_handler.py +++ b/tests/test_monitor_handler.py @@ -2,7 +2,7 @@ import sqlite3 -def test_monitor_DBHandler_check_create_test_passed_column(pytester): +def test_sqlite_handler_check_create_test_passed_column(pytester): import datetime from pytest_monitor.handler import DBHandler @@ -129,7 +129,7 @@ def prepare_mock_db(conn: sqlite3.Connection): raise -def test_monitor_DBHandler_check_new_db_setup(pytester): +def test_sqlite_handler_check_new_db_setup(pytester): from pytest_monitor.handler import DBHandler # db handler From 57d56b0e01112ecb44101b0b17ea2315ef72c026 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Fri, 7 Jun 2024 11:20:51 +0200 Subject: [PATCH 14/29] Add support for Bitbucket CI: Includes function to generate description for Bitbucket CI. (Part of session or execution context table) --- pytest_monitor/sys_utils.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/pytest_monitor/sys_utils.py b/pytest_monitor/sys_utils.py index b07db51..2ed66dd 100644 --- a/pytest_monitor/sys_utils.py +++ b/pytest_monitor/sys_utils.py @@ -47,6 +47,13 @@ def collect_ci_info(): "pipeline_build_no": os.environ["CI_PIPELINE_ID"], "__ci__": "gitlabci", } + # Test for Bitbucket CI + if "BITBUCKET_BRANCH" in os.environ and "BITBUCKET_BUILD_NUMBER" in os.environ: + return { + "pipeline_branch": os.environ["BITBUCKET_BRANCH"], + "pipeline_build_no": os.environ["BITBUCKET_BUILD_NUMBER"], + "__ci__": "bitbucketci", + } return {} From b83b5f329b9d12fc5813bfd42798f6fc51778c1d Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Mon, 10 Jun 2024 16:49:49 +0200 Subject: [PATCH 15/29] Changes: Update the sqlite test functions to the newest versions Updates the sqlite test functions to the newest versions that are also used in the PostgresDBHandler branch. 
--- pytest_monitor/handler.py | 117 +++++++++++--------- pytest_monitor/pytest_monitor.py | 80 +++++++++++--- pytest_monitor/session.py | 18 ++- tests/test_monitor_handler.py | 184 ++++++++++++++++++++----------- 4 files changed, 265 insertions(+), 134 deletions(-) diff --git a/pytest_monitor/handler.py b/pytest_monitor/handler.py index e1dfe6e..cfcba05 100644 --- a/pytest_monitor/handler.py +++ b/pytest_monitor/handler.py @@ -9,14 +9,24 @@ def __init__(self, db_path): # check if new table column is existent, if not create it self.check_create_test_passed_column() + def close(self): + self.__cnx.close() + + def __del__(self): + self.__cnx.close() + def check_create_test_passed_column(self): cursor = self.__cnx.cursor() # check for test_passed column, # table exists bc call happens after prepare() cursor.execute("PRAGMA table_info(TEST_METRICS)") - has_test_column = any(column[1] == "TEST_PASSED" for column in cursor.fetchall()) + has_test_column = any( + column[1] == "TEST_PASSED" for column in cursor.fetchall() + ) if not has_test_column: - cursor.execute("ALTER TABLE TEST_METRICS ADD COLUMN TEST_PASSED BOOLEAN DEFAULT TRUE;") + cursor.execute( + "ALTER TABLE TEST_METRICS ADD COLUMN TEST_PASSED BOOLEAN DEFAULT TRUE;" + ) self.__cnx.commit() def query(self, what, bind_to, many=False): @@ -25,11 +35,12 @@ def query(self, what, bind_to, many=False): return cursor.fetchall() if many else cursor.fetchone() def insert_session(self, h, run_date, scm_id, description): - with self.__cnx: - self.__cnx.execute( - "insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)" " values (?,?,?,?)", - (h, run_date, scm_id, description), - ) + self.__cnx.execute( + "insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)" + " values (?,?,?,?)", + (h, run_date, scm_id, description), + ) + self.__cnx.commit() def insert_metric( self, @@ -49,51 +60,51 @@ def insert_metric( mem_usage, passed: bool, ): - with self.__cnx: - self.__cnx.execute( - "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," - "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," - "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE,TEST_PASSED) " - "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", - ( - session_id, - env_id, - item_start_date, - item, - item_path, - item_variant, - item_loc, - kind, - component, - total_time, - user_time, - kernel_time, - cpu_usage, - mem_usage, - passed, - ), - ) + self.__cnx.execute( + "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," + "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," + "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE,TEST_PASSED) " + "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)", + ( + session_id, + env_id, + item_start_date, + item, + item_path, + item_variant, + item_loc, + kind, + component, + total_time, + user_time, + kernel_time, + cpu_usage, + mem_usage, + passed, + ), + ) + self.__cnx.commit() def insert_execution_context(self, exc_context): - with self.__cnx: - self.__cnx.execute( - "insert into EXECUTION_CONTEXTS(CPU_COUNT,CPU_FREQUENCY_MHZ,CPU_TYPE,CPU_VENDOR," - "RAM_TOTAL_MB,MACHINE_NODE,MACHINE_TYPE,MACHINE_ARCH,SYSTEM_INFO," - "PYTHON_INFO,ENV_H) values (?,?,?,?,?,?,?,?,?,?,?)", - ( - exc_context.cpu_count, - exc_context.cpu_frequency, - exc_context.cpu_type, - exc_context.cpu_vendor, - exc_context.ram_total, - exc_context.fqdn, - exc_context.machine, - exc_context.architecture, - exc_context.system_info, - exc_context.python_info, - exc_context.compute_hash(), - ), - ) + self.__cnx.execute( + "insert into 
EXECUTION_CONTEXTS(CPU_COUNT,CPU_FREQUENCY_MHZ,CPU_TYPE,CPU_VENDOR," + "RAM_TOTAL_MB,MACHINE_NODE,MACHINE_TYPE,MACHINE_ARCH,SYSTEM_INFO," + "PYTHON_INFO,ENV_H) values (?,?,?,?,?,?,?,?,?,?,?)", + ( + exc_context.cpu_count, + exc_context.cpu_frequency, + exc_context.cpu_type, + exc_context.cpu_vendor, + exc_context.ram_total, + exc_context.fqdn, + exc_context.machine, + exc_context.architecture, + exc_context.system_info, + exc_context.python_info, + exc_context.compute_hash(), + ), + ) + self.__cnx.commit() def prepare(self): cursor = self.__cnx.cursor() @@ -146,3 +157,9 @@ def prepare(self): """ ) self.__cnx.commit() + + def get_env_id(self, env_hash): + query_result = self.query( + "SELECT ENV_H FROM EXECUTION_CONTEXTS WHERE ENV_H= ?", (env_hash,) + ) + return query_result[0] if query_result else None diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 832a016..0dbc6d1 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -23,7 +23,9 @@ "monitor_test_if": (True, "monitor_force_test", lambda x: bool(x), False), } PYTEST_MONITOR_DEPRECATED_MARKERS = {} -PYTEST_MONITOR_ITEM_LOC_MEMBER = "_location" if tuple(pytest.__version__.split(".")) < ("5", "3") else "location" +PYTEST_MONITOR_ITEM_LOC_MEMBER = ( + "_location" if tuple(pytest.__version__.split(".")) < ("5", "3") else "location" +) PYTEST_MONITORING_ENABLED = True @@ -45,7 +47,9 @@ def pytest_addoption(parser): help="Set this option to distinguish parametrized tests given their values." " This requires the parameters to be stringifiable.", ) - group.addoption("--no-monitor", action="store_true", dest="mtr_none", help="Disable all traces") + group.addoption( + "--no-monitor", action="store_true", dest="mtr_none", help="Disable all traces" + ) group.addoption( "--remote-server", action="store", @@ -69,13 +73,15 @@ def pytest_addoption(parser): "--force-component", action="store", dest="mtr_force_component", - help="Force the component to be set at the given value for the all tests run" " in this session.", + help="Force the component to be set at the given value for the all tests run" + " in this session.", ) group.addoption( "--component-prefix", action="store", dest="mtr_component_prefix", - help="Prefix each found components with the given value (applies to all tests" " run in this session).", + help="Prefix each found components with the given value (applies to all tests" + " run in this session).", ) group.addoption( "--no-gc", @@ -100,10 +106,13 @@ def pytest_addoption(parser): def pytest_configure(config): - config.addinivalue_line("markers", "monitor_skip_test: mark test to be executed but not monitored.") + config.addinivalue_line( + "markers", "monitor_skip_test: mark test to be executed but not monitored." + ) config.addinivalue_line( "markers", - "monitor_skip_test_if(cond): mark test to be executed but " "not monitored if cond is verified.", + "monitor_skip_test_if(cond): mark test to be executed but " + "not monitored if cond is verified.", ) config.addinivalue_line( "markers", @@ -127,14 +136,24 @@ def pytest_runtest_setup(item): """ if not PYTEST_MONITORING_ENABLED: return - item_markers = {mark.name: mark for mark in item.iter_markers() if mark and mark.name.startswith("monitor_")} + item_markers = { + mark.name: mark + for mark in item.iter_markers() + if mark and mark.name.startswith("monitor_") + } mark_to_del = [] for set_marker in item_markers.keys(): if set_marker not in PYTEST_MONITOR_VALID_MARKERS: - warnings.warn("Nothing known about marker {}. 
Marker will be dropped.".format(set_marker)) + warnings.warn( + "Nothing known about marker {}. Marker will be dropped.".format( + set_marker + ) + ) mark_to_del.append(set_marker) if set_marker in PYTEST_MONITOR_DEPRECATED_MARKERS: - warnings.warn(f"Marker {set_marker} is deprecated. Consider upgrading your tests") + warnings.warn( + f"Marker {set_marker} is deprecated. Consider upgrading your tests" + ) for marker in mark_to_del: del item_markers[marker] @@ -239,12 +258,26 @@ def pytest_sessionstart(session): Instantiate a monitor session to save collected metrics. We yield at the end to let pytest pursue the execution. """ - if session.config.option.mtr_force_component and session.config.option.mtr_component_prefix: - raise pytest.UsageError("Invalid usage: --force-component and --component-prefix are incompatible options!") - if session.config.option.mtr_no_db and not session.config.option.mtr_remote and not session.config.option.mtr_none: - warnings.warn("pytest-monitor: No storage specified but monitoring is requested. Disabling monitoring.") + if ( + session.config.option.mtr_force_component + and session.config.option.mtr_component_prefix + ): + raise pytest.UsageError( + "Invalid usage: --force-component and --component-prefix are incompatible options!" + ) + if ( + session.config.option.mtr_no_db + and not session.config.option.mtr_remote + and not session.config.option.mtr_none + ): + warnings.warn( + "pytest-monitor: No storage specified but monitoring is requested. Disabling monitoring." + ) session.config.option.mtr_none = True - component = session.config.option.mtr_force_component or session.config.option.mtr_component_prefix + component = ( + session.config.option.mtr_force_component + or session.config.option.mtr_component_prefix + ) if session.config.option.mtr_component_prefix: component += ".{user_component}" if not component: @@ -254,13 +287,24 @@ def pytest_sessionstart(session): if (session.config.option.mtr_none or session.config.option.mtr_no_db) else session.config.option.mtr_db_out ) - remote = None if session.config.option.mtr_none else session.config.option.mtr_remote + remote = ( + None if session.config.option.mtr_none else session.config.option.mtr_remote + ) session.pytest_monitor = PyTestMonitorSession( db=db, remote=remote, component=component, scope=session.config.option.mtr_scope ) global PYTEST_MONITORING_ENABLED PYTEST_MONITORING_ENABLED = not session.config.option.mtr_none - session.pytest_monitor.compute_info(session.config.option.mtr_description, session.config.option.mtr_tags) + session.pytest_monitor.compute_info( + session.config.option.mtr_description, session.config.option.mtr_tags + ) + yield + + +@pytest.hookimpl(hookwrapper=True) +def pytest_sessionfinish(session): + if session.pytest_monitor is not None: + session.pytest_monitor.close() yield @@ -302,7 +346,9 @@ def _prf_tracer(request): ptimes_a = request.session.pytest_monitor.process.cpu_times() yield ptimes_b = request.session.pytest_monitor.process.cpu_times() - if not request.node.monitor_skip_test and getattr(request.node, "monitor_results", False): + if not request.node.monitor_skip_test and getattr( + request.node, "monitor_results", False + ): item_name = request.node.originalname or request.node.name item_loc = getattr(request.node, PYTEST_MONITOR_ITEM_LOC_MEMBER)[0] request.session.pytest_monitor.add_test_info( diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index a196916..acb3b02 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ 
-31,6 +31,10 @@ def __init__(self, db=None, remote=None, component="", scope=None, tracing=True) self.__mem_usage_base = None self.__process = psutil.Process(os.getpid()) + def close(self): + if self.__db is not None: + self.__db.close() + @property def monitoring_enabled(self): return self.__monitor_enabled @@ -50,7 +54,10 @@ def process(self): def get_env_id(self, env): db, remote = None, None if self.__db: - row = self.__db.query("SELECT ENV_H FROM EXECUTION_CONTEXTS WHERE ENV_H= ?", (env.compute_hash(),)) + row = self.__db.query( + "SELECT ENV_H FROM EXECUTION_CONTEXTS WHERE ENV_H= ?", + (env.compute_hash(),), + ) db = row[0] if row else None if self.__remote: r = requests.get(f"{self.__remote}/contexts/{env.compute_hash()}") @@ -109,12 +116,17 @@ def set_environment_info(self, env): db_id, remote_id = self.__eid if self.__db and db_id is None: self.__db.insert_execution_context(env) - db_id = self.__db.query("select ENV_H from EXECUTION_CONTEXTS where ENV_H = ?", (env.compute_hash(),))[0] + db_id = self.__db.query( + "select ENV_H from EXECUTION_CONTEXTS where ENV_H = ?", + (env.compute_hash(),), + )[0] if self.__remote and remote_id is None: # We must postpone that to be run at the end of the pytest session. r = requests.post(f"{self.__remote}/contexts/", json=env.to_dict()) if r.status_code != HTTPStatus.CREATED: - warnings.warn(f"Cannot insert execution context in remote server (rc={r.status_code}! Deactivating...") + warnings.warn( + f"Cannot insert execution context in remote server (rc={r.status_code}! Deactivating..." + ) self.__remote = "" else: remote_id = json.loads(r.text)["h"] diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py index cbbf705..e7ac734 100644 --- a/tests/test_monitor_handler.py +++ b/tests/test_monitor_handler.py @@ -1,28 +1,74 @@ # -*- coding: utf-8 -*- +import datetime import sqlite3 +import pytest -def test_sqlite_handler_check_create_test_passed_column(pytester): - import datetime +from pytest_monitor.handler import DBHandler +from pytest_monitor.sys_utils import determine_scm_revision - from pytest_monitor.handler import DBHandler - from pytest_monitor.sys_utils import determine_scm_revision +DB_Context = sqlite3.Connection - # import os - def prepare_mock_db(conn: sqlite3.Connection): - cursor = conn.cursor() - cursor.execute( - """ +# helper function +def reset_db(db_context: DB_Context): + # cleanup_cursor.execute("DROP DATABASE postgres") + # cleanup_cursor.execute("CREATE DATABASE postgres") + cleanup_cursor = db_context.cursor() + cleanup_cursor.execute("DROP TABLE IF EXISTS TEST_METRICS") + cleanup_cursor.execute("DROP TABLE IF EXISTS TEST_SESSIONS") + cleanup_cursor.execute("DROP TABLE IF EXISTS EXECUTION_CONTEXTS") + db_context.commit() + cleanup_cursor.close() + + # cleanup_cursor.execute("CREATE SCHEMA public;") + # cleanup_cursor.execute("ALTER DATABASE postgres SET search_path TO public;") + # cleanup_cursor.execute("ALTER ROLE postgres SET search_path TO public;") + # cleanup_cursor.execute("ALTER SCHEMA public OWNER to postgres;") + # cleanup_cursor.execute("GRANT ALL ON SCHEMA public TO postgres;") + # cleanup_cursor.execute("GRANT ALL ON SCHEMA public TO public;") + + +@pytest.fixture +def sqlite_empty_mock_db() -> sqlite3.Connection: + mockdb = sqlite3.connect(":memory:") + yield mockdb + mockdb.close() + + +@pytest.fixture +def prepared_mocked_SqliteDBHandler(sqlite_empty_mock_db) -> DBHandler: + mockdb = sqlite_empty_mock_db + db_cursor = mockdb.cursor() + db_cursor.execute( + """ CREATE TABLE IF NOT EXISTS 
TEST_SESSIONS( SESSION_H varchar(64) primary key not null unique, -- Session identifier RUN_DATE varchar(64), -- Date of test run SCM_ID varchar(128), -- SCM change id RUN_DESCRIPTION json );""" - ) - cursor.execute( - """ + ) + db_cursor.execute( + """ +CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS ( + ENV_H varchar(64) primary key not null unique, + CPU_COUNT integer, + CPU_FREQUENCY_MHZ integer, + CPU_TYPE varchar(64), + CPU_VENDOR varchar(256), + RAM_TOTAL_MB integer, + MACHINE_NODE varchar(512), + MACHINE_TYPE varchar(32), + MACHINE_ARCH varchar(16), + SYSTEM_INFO varchar(256), + PYTHON_INFO varchar(512) +); +""" + ) + + db_cursor.execute( + """ CREATE TABLE IF NOT EXISTS TEST_METRICS ( SESSION_H varchar(64), -- Session identifier ENV_H varchar(64), -- Environment description identifier @@ -41,57 +87,54 @@ def prepare_mock_db(conn: sqlite3.Connection): FOREIGN KEY (ENV_H) REFERENCES EXECUTION_CONTEXTS(ENV_H), FOREIGN KEY (SESSION_H) REFERENCES TEST_SESSIONS(SESSION_H) );""" - ) - - cursor.execute( - """ -CREATE TABLE IF NOT EXISTS EXECUTION_CONTEXTS ( - ENV_H varchar(64) primary key not null unique, - CPU_COUNT integer, - CPU_FREQUENCY_MHZ integer, - CPU_TYPE varchar(64), - CPU_VENDOR varchar(256), - RAM_TOTAL_MB integer, - MACHINE_NODE varchar(512), - MACHINE_TYPE varchar(32), - MACHINE_ARCH varchar(16), - SYSTEM_INFO varchar(256), - PYTHON_INFO varchar(512) -); -""" - ) - conn.commit() - return conn + ) - # open database in memory - mockdb = sqlite3.connect(":memory:") - # prepare old database format - mockdb = prepare_mock_db(mockdb) + db_cursor.execute( + "insert into TEST_SESSIONS(SESSION_H, RUN_DATE, SCM_ID, RUN_DESCRIPTION)" + " values (?,?,?,?)", + ( + "1", + datetime.datetime.now().isoformat(), + determine_scm_revision(), + '{ "descr": "Test Session" }', + ), + ) - # attach mocked db to DBHandler object - db = DBHandler(":memory:") - db.__cnx = mockdb - db._DBHandler__cnx = mockdb - db._DBHandler__db = "mockdb" + db_cursor.execute( + "insert into EXECUTION_CONTEXTS(CPU_COUNT,CPU_FREQUENCY_MHZ,CPU_TYPE,CPU_VENDOR," + "RAM_TOTAL_MB,MACHINE_NODE,MACHINE_TYPE,MACHINE_ARCH,SYSTEM_INFO," + "PYTHON_INFO,ENV_H) values (?,?,?,?,?,?,?,?,?,?,?)", + ( + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + ), + ) # insert old style entry - run_date = datetime.datetime.now().isoformat() - db.insert_session("1", run_date, determine_scm_revision(), "Test Session") - db.__cnx.cursor().execute( + db_cursor.execute( "insert into TEST_METRICS(SESSION_H,ENV_H,ITEM_START_TIME,ITEM," "ITEM_PATH,ITEM_VARIANT,ITEM_FS_LOC,KIND,COMPONENT,TOTAL_TIME," "USER_TIME,KERNEL_TIME,CPU_USAGE,MEM_USAGE) " "values (?,?,?,?,?,?,?,?,?,?,?,?,?,?)", ( "1", - "Environment", + "1", "Startdate", "name of item", "Item path", "Optional Param", "relative path", - "NULL", - "NULL", + None, + None, 42, 42, 42, @@ -99,28 +142,43 @@ def prepare_mock_db(conn: sqlite3.Connection): 42, ), ) - db.__cnx.commit() + db = DBHandler(":memory:") + db.__cnx = mockdb + db._DBHandler__cnx = mockdb + db._DBHandler__db = "mockdb" - mcursor = db.__cnx.cursor() - mcursor.execute("PRAGMA table_info(TEST_METRICS)") - has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) - mcursor = None + return db - try: - assert not has_test_column - # run function to test - db.check_create_test_passed_column() +def test_sqlite_handler_check_create_test_passed_column( + pytester, prepared_mocked_SqliteDBHandler +): + # mockedDBHandler with old style database attached + mockedHandler = prepared_mocked_SqliteDBHandler + mock_cursor 
= mockedHandler.__cnx.cursor() + + # test for old style db + mock_cursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any( + column[1] == "TEST_PASSED" for column in mock_cursor.fetchall() + ) + assert not has_test_column + + try: + # run function to test (migration) + mockedHandler.check_create_test_passed_column() # check for new column - mcursor = db.__cnx.cursor() - mcursor.execute("PRAGMA table_info(TEST_METRICS)") - has_test_column = any(column[1] == "TEST_PASSED" for column in mcursor.fetchall()) + mock_cursor = mockedHandler.__cnx.cursor() + mock_cursor.execute("PRAGMA table_info(TEST_METRICS)") + has_test_column = any( + column[1] == "TEST_PASSED" for column in mock_cursor.fetchall() + ) assert has_test_column # check for default value TRUE in existing entry - mcursor.execute("SELECT TEST_PASSED FROM TEST_METRICS LIMIT 1") - default_is_passed = mcursor.fetchone() + mock_cursor.execute("SELECT TEST_PASSED FROM TEST_METRICS LIMIT 1") + default_is_passed = mock_cursor.fetchone() # default value true(1) for entries after migration assert default_is_passed[0] == 1 @@ -130,8 +188,6 @@ def prepare_mock_db(conn: sqlite3.Connection): def test_sqlite_handler_check_new_db_setup(pytester): - from pytest_monitor.handler import DBHandler - # db handler db = DBHandler(":memory:") table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) From 17d1c1ec19f585553a837d179bd179a6c5981050 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Thu, 13 Jun 2024 15:47:30 +0200 Subject: [PATCH 16/29] Remove memory_profiler dependency (profiler now inside own module) --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5182b1c..ebe0b8d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ psutil>=5.1.0 -memory_profiler>=0.58 pytest requests From 69f50206f59995b949946bca0dceea1489aa234b Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 10:38:31 +0200 Subject: [PATCH 17/29] Fix: Let profiler.py only kill the recently created MemTimer when a test fails --- pytest_monitor/profiler.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index 667cf29..1a87b0d 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -120,7 +120,9 @@ def memory_usage(proc: tuple[Callable, Any, Any], retval=False): n_measurements = parent_conn.recv() # Convert the one element list produced by MemTimer to a singular value ret = ret[0], e - parent = psutil.Process(os.getpid()) + # parent = psutil.Process(os.getpid()) + # kill only the just spawned MemTimer process and its potential children + parent = psutil.Process(p.pid) for child in parent.children(recursive=True): os.kill(child.pid, SIGKILL) p.join(0) @@ -179,7 +181,9 @@ def _get_memory(pid): try: # avoid using get_memory_info since it does not exists # in psutil > 2.0 and accessing it will cause exception. - meminfo_attr = "memory_info" if hasattr(process, "memory_info") else "get_memory_info" + meminfo_attr = ( + "memory_info" if hasattr(process, "memory_info") else "get_memory_info" + ) mem = getattr(process, meminfo_attr)()[0] / _TWO_20 return mem From a0a55b0e9a648bf34187444334882f87d2bfacbc Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 10:56:45 +0200 Subject: [PATCH 18/29] Add cmd flag to disabling monitoring for failed tests. 
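
By default, failing tests are now recorded as well; passing the new
--no-failed flag restores the previous behaviour of monitoring only
successful tests. As a rough, illustrative sketch (not part of this
patch; it simply mirrors what the tests below do with the local .pymon
database and the TEST_METRICS table), the effect of the flag can be
checked by querying the result database after a run that contains a
failing test:

    # after running: pytest --no-failed
    import sqlite3

    db = sqlite3.connect(".pymon")        # default local result database
    cursor = db.cursor()
    cursor.execute("SELECT COUNT(*) FROM TEST_METRICS")
    print(cursor.fetchone()[0])           # failed tests are not counted
    db.close()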
--- pytest_monitor/pytest_monitor.py | 8 +++ tests/test_monitor.py | 104 +++++++++++++++++++++++++++++-- 2 files changed, 107 insertions(+), 5 deletions(-) diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 0dbc6d1..8fc50a2 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -83,6 +83,12 @@ def pytest_addoption(parser): help="Prefix each found components with the given value (applies to all tests" " run in this session).", ) + group.addoption( + "--no-failed", + action="store_true", + dest="mtr_disable_monitoring_failed", + help="Disable monitoring of failed tests and only monitor successful tests", + ) group.addoption( "--no-gc", action="store_true", @@ -230,6 +236,8 @@ def prof(): setattr(pyfuncitem, "monitor_results", True) if isinstance(exception, BaseException): # Do we have any outcome? + if pyfuncitem.session.config.option.mtr_disable_monitoring_failed: + setattr(pyfuncitem, "monitor_results", False) setattr(pyfuncitem, "passed", False) raise exception diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 85b4ff6..4150bd9 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -39,12 +39,39 @@ def test_ok(): cursor.execute("SELECT ITEM FROM TEST_METRICS;") assert len(cursor.fetchall()) == 1 cursor = db.cursor() - tags = json.loads(cursor.execute("SELECT RUN_DESCRIPTION FROM TEST_SESSIONS;").fetchone()[0]) + tags = json.loads( + cursor.execute("SELECT RUN_DESCRIPTION FROM TEST_SESSIONS;").fetchone()[0] + ) assert "description" not in tags assert "version" in tags assert tags["version"] == "12.3.5" +def test_monitor_basic_test_failing(testdir): + """Make sure that pytest-monitor does the job without impacting user tests.""" + # create a temporary pytest test module + testdir.makepyfile( + """ + import time + + def test_fail(): + time.sleep(0.5) + x = [ "hello" ] + assert len(x) == 2 + +""" + ) + + # run pytest with the following cmd args + result = testdir.runpytest("") + + pymon_path = pathlib.Path(str(testdir)) / ".pymon" + assert pymon_path.exists() + + # make sure that that we get a '0' exit code for the test suite + result.assert_outcomes(failed=1) + + def test_monitor_basic_test_description(testdir): """Make sure that pytest-monitor does the job without impacting user tests.""" # create a temporary pytest test module @@ -62,7 +89,9 @@ def test_ok(): ) # run pytest with the following cmd args - result = testdir.runpytest("-vv", "--description", '"Test"', "--tag", "version=12.3.5") + result = testdir.runpytest( + "-vv", "--description", '"Test"', "--tag", "version=12.3.5" + ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines(["*::test_ok PASSED*"]) @@ -78,7 +107,9 @@ def test_ok(): cursor.execute("SELECT ITEM FROM TEST_METRICS;") assert len(cursor.fetchall()) == 1 cursor = db.cursor() - tags = json.loads(cursor.execute("SELECT RUN_DESCRIPTION FROM TEST_SESSIONS;").fetchone()[0]) + tags = json.loads( + cursor.execute("SELECT RUN_DESCRIPTION FROM TEST_SESSIONS;").fetchone()[0] + ) assert "description" in tags assert tags["description"] == '"Test"' assert "version" in tags @@ -176,7 +207,9 @@ def test_ok(): result = testdir.runpytest("-v") # fnmatch_lines does an assertion internally - result.stdout.fnmatch_lines(["*::test_ok PASSED*", "*Nothing known about marker monitor_bad_marker*"]) + result.stdout.fnmatch_lines( + ["*::test_ok PASSED*", "*Nothing known about marker monitor_bad_marker*"] + ) pymon_path = pathlib.Path(str(testdir)) / ".pymon" assert 
pymon_path.exists() @@ -298,7 +331,9 @@ def test_monitored(): result = testdir.runpytest("-v") # fnmatch_lines does an assertion internally - result.stdout.fnmatch_lines(["*::test_not_monitored PASSED*", "*::test_monitored PASSED*"]) + result.stdout.fnmatch_lines( + ["*::test_not_monitored PASSED*", "*::test_monitored PASSED*"] + ) pymon_path = pathlib.Path(str(testdir)) / ".pymon" assert pymon_path.exists() @@ -406,3 +441,62 @@ def run(a, b): # make sure that that we get a '0' exit code for the testsuite result.assert_outcomes(passed=1) assert not pymon_path.exists() + + +def test_monitor_monitor_failed_tests(testdir): + testdir.makepyfile( + """ + def test_failing_test(): + assert False + """ + ) + + result = testdir.runpytest("") + # # make sure that that we get a '0' exit code for the testsuite + result.assert_outcomes(failed=1) + + pymon_path = pathlib.Path(str(testdir)) / ".pymon" + assert pymon_path.exists() + db = sqlite3.connect(str(pymon_path)) + cursor = db.cursor() + + # TEST_METRICS table is supposed to be have 1 entry (1 failed test) + cursor.execute("SELECT * FROM TEST_METRICS") + test_metrics = cursor.fetchall() + assert len(test_metrics) == 1 + + +def test_monitor_no_monitor_failed(testdir): + testdir.makepyfile( + """ + def test_failing_test(): + assert False + """ + ) + pymon_path = pathlib.Path(str(testdir)) / ".pymon" + db = sqlite3.connect(str(pymon_path)) + cursor = db.cursor() + + result = testdir.runpytest("--no-failed") + result.assert_outcomes(failed=1) + + # TEST_METRICS table is supposed to be empty (only one failing test) + cursor.execute("SELECT * FROM TEST_METRICS") + ncursor = cursor.fetchall() + print(ncursor) + assert not len(ncursor) + + testdir.makepyfile( + """ + def test_successful_test(): + assert True + """ + ) + + result = testdir.runpytest("--no-failed") + # make sure that that we get a '0' exit code for the testsuite + result.assert_outcomes(passed=1) + + # TEST_METRICS table is supposed to have 1 entry (2 tests, 1 successful) + cursor.execute("SELECT * FROM TEST_METRICS") + assert len(cursor.fetchall()) == 1 From e4ed1e4dc5c9ad5f303fdec760fdedd926c9c16a Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 11:42:20 +0200 Subject: [PATCH 19/29] Add docstrings to tests and add a basic sqlite handler test. 
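
The new basic handler test checks the created schema through SQLite's
built-in sqlite_master catalogue: DBHandler(":memory:") is expected to
create the TEST_SESSIONS, TEST_METRICS and EXECUTION_CONTEXTS tables.
A minimal, self-contained sketch of that verification pattern (plain
sqlite3 only, independent of the handler itself; the throwaway CREATE
TABLE is just for illustration):

    # sketch: assert that an expected table exists in an in-memory db
    import sqlite3

    cnx = sqlite3.connect(":memory:")
    cnx.execute("CREATE TABLE IF NOT EXISTS TEST_SESSIONS (SESSION_H TEXT)")
    tables = [row[0] for row in
              cnx.execute("SELECT name FROM sqlite_master WHERE type='table'")]
    assert "TEST_SESSIONS" in tables
    cnx.close()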
--- tests/test_monitor.py | 4 +++- tests/test_monitor_handler.py | 25 ++++++++++++++++++++----- 2 files changed, 23 insertions(+), 6 deletions(-) diff --git a/tests/test_monitor.py b/tests/test_monitor.py index 4150bd9..cead8e6 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -48,7 +48,7 @@ def test_ok(): def test_monitor_basic_test_failing(testdir): - """Make sure that pytest-monitor does the job without impacting user tests.""" + """Make sure that pytest-monitor handles failing tests properly (without ending in deadlock when nested)""" # create a temporary pytest test module testdir.makepyfile( """ @@ -444,6 +444,7 @@ def run(a, b): def test_monitor_monitor_failed_tests(testdir): + """Check new standard behavior that monitors failed tests in database""" testdir.makepyfile( """ def test_failing_test(): @@ -467,6 +468,7 @@ def test_failing_test(): def test_monitor_no_monitor_failed(testdir): + """Ensure cmd flag --no-failed works and turns of monitoring failed tests""" testdir.makepyfile( """ def test_failing_test(): diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py index e7ac734..9007a3f 100644 --- a/tests/test_monitor_handler.py +++ b/tests/test_monitor_handler.py @@ -31,6 +31,7 @@ def reset_db(db_context: DB_Context): @pytest.fixture def sqlite_empty_mock_db() -> sqlite3.Connection: + """Initialize empty sqlite3 db""" mockdb = sqlite3.connect(":memory:") yield mockdb mockdb.close() @@ -38,6 +39,7 @@ def sqlite_empty_mock_db() -> sqlite3.Connection: @pytest.fixture def prepared_mocked_SqliteDBHandler(sqlite_empty_mock_db) -> DBHandler: + """Pepare a sqlite db handler with the old style database table (without passed column)""" mockdb = sqlite_empty_mock_db db_cursor = mockdb.cursor() db_cursor.execute( @@ -149,10 +151,28 @@ def prepared_mocked_SqliteDBHandler(sqlite_empty_mock_db) -> DBHandler: return db +def test_sqlite_handler(pytester): + """Ensure the Sqlite DB Handler works as expected""" + # db handler + db = DBHandler(":memory:") + session, metrics, exc_context = db.query( + "SELECT name FROM sqlite_master where type='table'", (), many=True + ) + assert session[0] == "TEST_SESSIONS" + assert metrics[0] == "TEST_METRICS" + assert exc_context[0] == "EXECUTION_CONTEXTS" + +def test_sqlite_handler_check_new_db_setup(pytester): + """Check the Sqlite Handler initializes the new Test_Metrics table configuration""" + # db handler + db = DBHandler(":memory:") + table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) + assert any(column[1] == "TEST_PASSED" for column in table_cols) def test_sqlite_handler_check_create_test_passed_column( pytester, prepared_mocked_SqliteDBHandler ): + """Check automatic migration from existing old database to new database style (passed column in TEST_METRICS)""" # mockedDBHandler with old style database attached mockedHandler = prepared_mocked_SqliteDBHandler mock_cursor = mockedHandler.__cnx.cursor() @@ -187,8 +207,3 @@ def test_sqlite_handler_check_create_test_passed_column( raise -def test_sqlite_handler_check_new_db_setup(pytester): - # db handler - db = DBHandler(":memory:") - table_cols = db.query("PRAGMA table_info(TEST_METRICS)", (), many=True) - assert any(column[1] == "TEST_PASSED" for column in table_cols) From ba45b42036a239c0403010338853b3be64478b6c Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 11:52:50 +0200 Subject: [PATCH 20/29] Profiler.py: Add hint explaining kill of only recent MemTimer Add hint in profiler.py to explain why only the recently created MemTimer is 
being killed on failed tests throwing an exception. --- pytest_monitor/profiler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index 1a87b0d..9a2b2ff 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -122,6 +122,7 @@ def memory_usage(proc: tuple[Callable, Any, Any], retval=False): ret = ret[0], e # parent = psutil.Process(os.getpid()) # kill only the just spawned MemTimer process and its potential children + # instead of all children of the main process (could lead to issues when using testdir fixture) parent = psutil.Process(p.pid) for child in parent.children(recursive=True): os.kill(child.pid, SIGKILL) From 39d573dab8238d89eb46541b2cb413e5da1815fc Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 11:55:03 +0200 Subject: [PATCH 21/29] Update changelog for feature #65 --- docs/sources/changelog.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docs/sources/changelog.rst b/docs/sources/changelog.rst index 6549ecd..e0c3446 100644 --- a/docs/sources/changelog.rst +++ b/docs/sources/changelog.rst @@ -3,7 +3,8 @@ Changelog ========= * :release:`to be discussed` -* :bug: `#79` Fix a bug concerning commandline flag `--no-monitor` causing tests that are supposed to fail to pass instead +* :feature: `#65` Also monitor failed test as default and add flag ``--no-failed`` to turn monitoring failed tests off. +* :bug: `#79` Fix a bug concerning commandline flag ``--no-monitor`` causing tests that are supposed to fail to pass instead * :feature:`#75` Automatically gather CI build information for Bitbucket CI. * :release:`1.6.6 <2023-05-06>` From e7e35881558f7391ebd2c1f9caa4ed7fd42c6984 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 12:06:58 +0200 Subject: [PATCH 22/29] Update documentation to explain new feature/changes. The documentation files are updated to include the newest changes. --- docs/sources/configuration.rst | 8 ++++++++ docs/sources/operating.rst | 2 ++ docs/sources/remote.rst | 3 ++- 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/sources/configuration.rst b/docs/sources/configuration.rst index 90c29a6..a9acfa9 100644 --- a/docs/sources/configuration.rst +++ b/docs/sources/configuration.rst @@ -79,6 +79,14 @@ Disable monitoring If you need for some reason to disable the monitoring, pass the *\-\-no-monitor* option. +Disable failed tests +-------------------- + +By default failing tests are monitored in the database. The database has an additional column that +indicates if a test passed (boolean value). If you only need to monitor successful tests, pass +the *\-\-no-failed* option. + + Describing a run ---------------- diff --git a/docs/sources/operating.rst b/docs/sources/operating.rst index 8de5b7d..68b2ab7 100644 --- a/docs/sources/operating.rst +++ b/docs/sources/operating.rst @@ -129,5 +129,7 @@ CPU_USAGE (FLOAT) System-wide CPU usage as a percentage (100 % is equivalent to one core). MEM_USAGE (FLOAT) Maximum resident memory used during the test execution (in megabytes). +TEST_PASSED (BOOLEAN) + Boolean Value indicating if a test passed. In the local database, these Metrics are stored in table `TEST_METRICS`. 
diff --git a/docs/sources/remote.rst b/docs/sources/remote.rst index 340f508..4067d7b 100644 --- a/docs/sources/remote.rst +++ b/docs/sources/remote.rst @@ -115,7 +115,8 @@ POST /metrics/ user_time: float, kernel_time: float, cpu_usage: float, - mem_usage: float + mem_usage: float, + passed: bool, } **Return Codes**: Must return *201* (*CREATED*) if the **Metrics** has been created From 59461b1f5c3d7a3decc11c358362b6cd04732bbb Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 12:51:55 +0200 Subject: [PATCH 23/29] Fix issues reported by Flake8 and Ruff --- pytest_monitor/session.py | 2 +- tests/test_monitor.py | 2 +- tests/test_monitor_handler.py | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pytest_monitor/session.py b/pytest_monitor/session.py index acb3b02..ad1ce47 100644 --- a/pytest_monitor/session.py +++ b/pytest_monitor/session.py @@ -83,7 +83,7 @@ def compute_info(self, description, tags): if description: d["description"] = description for tag in tags: - if type(tag) is str: + if isinstance(tag, str): _tag_info = tag.split("=", 1) d[_tag_info[0]] = _tag_info[1] else: diff --git a/tests/test_monitor.py b/tests/test_monitor.py index cead8e6..f5506ee 100644 --- a/tests/test_monitor.py +++ b/tests/test_monitor.py @@ -57,7 +57,7 @@ def test_monitor_basic_test_failing(testdir): def test_fail(): time.sleep(0.5) x = [ "hello" ] - assert len(x) == 2 + assert len(x) == 2 """ ) diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py index 9007a3f..0da62ac 100644 --- a/tests/test_monitor_handler.py +++ b/tests/test_monitor_handler.py @@ -29,7 +29,7 @@ def reset_db(db_context: DB_Context): # cleanup_cursor.execute("GRANT ALL ON SCHEMA public TO public;") -@pytest.fixture +@pytest.fixture() def sqlite_empty_mock_db() -> sqlite3.Connection: """Initialize empty sqlite3 db""" mockdb = sqlite3.connect(":memory:") @@ -37,7 +37,7 @@ def sqlite_empty_mock_db() -> sqlite3.Connection: mockdb.close() -@pytest.fixture +@pytest.fixture() def prepared_mocked_SqliteDBHandler(sqlite_empty_mock_db) -> DBHandler: """Pepare a sqlite db handler with the old style database table (without passed column)""" mockdb = sqlite_empty_mock_db From 64cd8438518618699814986066d3b10a23277f3c Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 12:56:24 +0200 Subject: [PATCH 24/29] Fix: Add backwards compatible type annotation in profiler.py --- pytest_monitor/profiler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index 9a2b2ff..c20441f 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -38,6 +38,7 @@ import os from signal import SIGKILL from typing import Any, Callable +from typing import Tuple import psutil @@ -50,7 +51,7 @@ raise -def memory_usage(proc: tuple[Callable, Any, Any], retval=False): +def memory_usage(proc: Tuple[Callable, Any, Any], retval=False): """ Return the memory usage of a process or piece of code From aec9da90abc08686873a0014ffea3b0608868cc6 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 13:03:12 +0200 Subject: [PATCH 25/29] Fix: Remove unneeded pytester fixture in test_monitor_handler.py --- tests/test_monitor_handler.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_monitor_handler.py b/tests/test_monitor_handler.py index 0da62ac..ad998af 100644 --- a/tests/test_monitor_handler.py +++ b/tests/test_monitor_handler.py @@ -151,7 +151,7 @@ def 
prepared_mocked_SqliteDBHandler(sqlite_empty_mock_db) -> DBHandler: return db -def test_sqlite_handler(pytester): +def test_sqlite_handler(): """Ensure the Sqlite DB Handler works as expected""" # db handler db = DBHandler(":memory:") @@ -162,7 +162,7 @@ def test_sqlite_handler(pytester): assert metrics[0] == "TEST_METRICS" assert exc_context[0] == "EXECUTION_CONTEXTS" -def test_sqlite_handler_check_new_db_setup(pytester): +def test_sqlite_handler_check_new_db_setup(): """Check the Sqlite Handler initializes the new Test_Metrics table configuration""" # db handler db = DBHandler(":memory:") @@ -170,7 +170,7 @@ def test_sqlite_handler_check_new_db_setup(pytester): assert any(column[1] == "TEST_PASSED" for column in table_cols) def test_sqlite_handler_check_create_test_passed_column( - pytester, prepared_mocked_SqliteDBHandler + prepared_mocked_SqliteDBHandler ): """Check automatic migration from existing old database to new database style (passed column in TEST_METRICS)""" # mockedDBHandler with old style database attached From aeadf18487768b5efbd9e16edb41e28ff6f9df0c Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 18 Jun 2024 13:11:41 +0200 Subject: [PATCH 26/29] Update AUTHORS file. --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 3a4ff25..8f52141 100644 --- a/AUTHORS +++ b/AUTHORS @@ -4,3 +4,4 @@ Contributors include: - Raymond Gauthier (jraygauthier) added Python 3.5 support. - Kyle Altendorf (altendky) fixed bugs on session teardown - Hannes Engelhardt (veritogen) added Bitbucket CI support. + - Lucas Haupt (lhpt2) added profiler.py and track failed test support. From b2b437ab643b0f03b8adfabe427065a8b81405ac Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Wed, 3 Jul 2024 15:49:00 +0200 Subject: [PATCH 27/29] Changes to fix, raise exception inside wrapped_function(), avoid issues Returning exceptions inside the wrapped_function() can lead to assertions being ignored, therefore they need to be raised instantly and parent calls need to wrap their call in a try except block. 
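
A minimal, standalone sketch of the difference (the helper names
wrapped_return/wrapped_raise are made up for illustration and do not
appear in the plugin): when the wrapper returns the caught exception,
a caller that ignores the return value silently turns a failing
assertion into a passing test, whereas re-raising always propagates
the failure.

    def wrapped_return():
        try:
            assert False, "test failure"
        except BaseException as exc:
            return exc              # caller must remember to re-raise this

    def wrapped_raise():
        try:
            assert False, "test failure"
        except BaseException:
            raise                   # failure always propagates

    wrapped_return()                # failure is lost: nothing raised here
    try:
        wrapped_raise()             # AssertionError propagates as expected
    except AssertionError:
        pass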
--- pytest_monitor/pytest_monitor.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 7eb5a2d..7eb14d4 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -202,7 +202,7 @@ def wrapped_function(): except Exception: raise except BaseException as e: - return e + raise e def prof(): m = memory_profiler.memory_usage((wrapped_function, ()), max_iterations=1, max_usage=True, retval=True) @@ -213,9 +213,10 @@ def prof(): setattr(pyfuncitem, "monitor_results", True) if not PYTEST_MONITORING_ENABLED: - e = wrapped_function() - if isinstance(e, BaseException): - raise e + try: + wrapped_function() + except BaseException: + raise else: if not pyfuncitem.session.config.option.mtr_disable_gc: gc.collect() From d831b4f2ea3e11211612b7d8aff51ebc08b663bc Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Wed, 3 Jul 2024 18:11:45 +0200 Subject: [PATCH 28/29] Minor fix: Don't raise e, just use raise instead (no var needed here) --- pytest_monitor/pytest_monitor.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pytest_monitor/pytest_monitor.py b/pytest_monitor/pytest_monitor.py index 7eb14d4..55eb7ae 100644 --- a/pytest_monitor/pytest_monitor.py +++ b/pytest_monitor/pytest_monitor.py @@ -201,8 +201,8 @@ def wrapped_function(): pyfuncitem.obj(**testargs) except Exception: raise - except BaseException as e: - raise e + except BaseException: + raise def prof(): m = memory_profiler.memory_usage((wrapped_function, ()), max_iterations=1, max_usage=True, retval=True) From 3a0632eb58458ab5f72f1034504a012013e74038 Mon Sep 17 00:00:00 2001 From: Lucas Haupt Date: Tue, 9 Jul 2024 17:39:44 +0200 Subject: [PATCH 29/29] Fix: Catch BaseException instead of Exception in profiler.py In order to handle all exceptions of a failing test, catch exceptions with BaseException in profiler.py. --- pytest_monitor/profiler.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/pytest_monitor/profiler.py b/pytest_monitor/profiler.py index c20441f..b295e47 100644 --- a/pytest_monitor/profiler.py +++ b/pytest_monitor/profiler.py @@ -37,8 +37,7 @@ import os from signal import SIGKILL -from typing import Any, Callable -from typing import Tuple +from typing import Any, Callable, Tuple import psutil @@ -115,7 +114,7 @@ def memory_usage(proc: Tuple[Callable, Any, Any], retval=False): ret = ret[0], None if retval: ret = ret, returned - except Exception as e: + except BaseException as e: parent_conn.send(0) # finish timing ret = parent_conn.recv() n_measurements = parent_conn.recv()