From d905882b6e78809f91012898caac8f33f709dc69 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Tue, 18 Apr 2023 17:39:11 -0400 Subject: [PATCH 01/49] grass.experimental: Add object to access modules as functions This adds a Tools class which allows to access GRASS tools (modules) to be accessed using methods. Once an instance is created, calling a tool is calling a function (method) similarly to grass.jupyter.Map. Unlike grass.script, this does not require generic function name and unlike grass.pygrass module shortcuts, this does not require special objects to mimic the module families. Outputs are handled through a returned object which is result of automatic capture of outputs and can do conversions from known formats using properties. Usage example is in the _test() function in the file. The code is included under new grass.experimental package which allows merging the code even when further breaking changes are anticipated. --- python/grass/experimental/__init__.py | 0 python/grass/experimental/tools.py | 104 ++++++++++++++++++++++++++ 2 files changed, 104 insertions(+) create mode 100644 python/grass/experimental/__init__.py create mode 100644 python/grass/experimental/tools.py diff --git a/python/grass/experimental/__init__.py b/python/grass/experimental/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py new file mode 100644 index 00000000000..9ae922221bb --- /dev/null +++ b/python/grass/experimental/tools.py @@ -0,0 +1,104 @@ +import sys +import shutil + +import grass.script as gs + + +class ExecutedTool: + def __init__(self, name, kwargs, stdout, stderr): + self._name = name + self._stdout = stdout + + @property + def text(self): + return self._stdout + + @property + def json(self): + import json + + return json.loads(self._stdout) + + @property + def keyval(self): + return gs.parse_key_val(self._stdout) + + +class SubExecutor: + """use as tools().params(a="x", b="y").g_region()""" + + # Can support other envs or all PIPE and encoding read command supports + + +class Tools: + def __init__(self): + # TODO: fix region, so that external g.region call in the middle + # is not a problem + # i.e. region is independent/internal/fixed + pass + + def run(self, name, /, **kwargs): + """Run modules from the GRASS display family (modules starting with "d."). + + This function passes arguments directly to grass.script.run_command() + so the syntax is the same. + + :param str module: name of GRASS module + :param `**kwargs`: named arguments passed to run_command()""" + # alternatively use dev null as default or provide it as convenient settings + kwargs["stdout"] = gs.PIPE + kwargs["stderr"] = gs.PIPE + process = gs.pipe_command(name, **kwargs) + stdout, stderr = process.communicate() + stderr = gs.utils.decode(stderr) + returncode = process.poll() + # TODO: instead of printing, do exception right away + if returncode: + # Print only when we are capturing it and there was some output. + # (User can request ignoring the subprocess stderr and then + # we get only None.) + if stderr: + sys.stderr.write(stderr) + gs.handle_errors(returncode, stdout, [name], kwargs) + return ExecutedTool(name=name, kwargs=kwargs, stdout=stdout, stderr=stderr) + + def __getattr__(self, name): + """Parse attribute to GRASS display module. Attribute should be in + the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. 
+ """ + # Reformat string + grass_module = name.replace("_", ".") + # Assert module exists + if not shutil.which(grass_module): + raise AttributeError( + _( + "Cannot find GRASS tool {}. " + "Is the session set up and the tool on path?" + ).format(grass_module) + ) + + def wrapper(**kwargs): + # Run module + return self.run(grass_module, **kwargs) + + return wrapper + + +def _test(): + gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") + + tools = Tools() + tools.g_region(raster="elevation") + tools.r_slope_aspect(elevation="elevation", slope="slope", overwrite=True) + print(tools.r_univar(map="slope", flags="g").keyval) + + print(tools.v_info(map="bridges", flags="c").text) + print( + tools.v_db_univar(map="bridges", column="YEAR_BUILT", format="json").json[ + "statistics" + ]["mean"] + ) + + +if __name__ == "__main__": + _test() From aaef183a2dc7226c032036bac9f92f136b3feb99 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 21 Apr 2023 17:40:29 -0400 Subject: [PATCH 02/49] Support verbosity, overwrite and region freezing --- python/grass/experimental/tools.py | 97 ++++++++++++++++++++++++++++-- 1 file changed, 92 insertions(+), 5 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 9ae922221bb..64b103b28fc 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -1,3 +1,4 @@ +import os import sys import shutil @@ -8,10 +9,11 @@ class ExecutedTool: def __init__(self, name, kwargs, stdout, stderr): self._name = name self._stdout = stdout + self._decoded_stdout = gs.decode(self._stdout) @property def text(self): - return self._stdout + return self._decoded_stdout.strip() @property def json(self): @@ -21,21 +23,80 @@ def json(self): @property def keyval(self): + # TODO: possibly use or add _text_to_key_value_dict + # which converts int and float automatically return gs.parse_key_val(self._stdout) + @property + def comma_items(self): + return self.text_split(",") + + @property + def space_items(self): + return self.text_split(None) + + def text_split(self, separator=None): + # The use of strip is assuming that the output is one line which + # ends with a newline character which is for display only. + return self._decoded_stdout.strip("\n").split(separator) + class SubExecutor: """use as tools().params(a="x", b="y").g_region()""" + # a and b would be overwrite or stdin + # Can support other envs or all PIPE and encoding read command supports class Tools: - def __init__(self): + def __init__( + self, + *, + session=None, + env=None, + overwrite=True, + quiet=False, + verbose=False, + superquiet=False, + freeze_region=False, + ): # TODO: fix region, so that external g.region call in the middle # is not a problem # i.e. region is independent/internal/fixed - pass + if env: + self._env = env.copy() + elif session and hasattr(session, "env"): + self._env = session.env.copy() + else: + self._env = os.environ.copy() + self._region_is_frozen = False + if freeze_region: + self._freeze_region() + if overwrite: + self._overwrite() + # This hopefully sets the numbers directly. An alternative implementation would + # be to pass the parameter every time. + # Does not check for multiple set at the same time, but the most versbose wins + # for safety. + if superquiet: + self._env["GRASS_VERBOSE"] = "0" + if quiet: + self._env["GRASS_VERBOSE"] = "1" + if verbose: + self._env["GRASS_VERBOSE"] = "3" + + # These could be public, not protected. 
+ def _freeze_region(self): + self._env["GRASS_REGION"] = gs.region_env(env=self._env) + self._region_is_frozen = True + + def _overwrite(self): + self._env["GRASS_OVERWRITE"] = "1" + + @property + def env(self): + return self._env def run(self, name, /, **kwargs): """Run modules from the GRASS display family (modules starting with "d."). @@ -48,7 +109,7 @@ def run(self, name, /, **kwargs): # alternatively use dev null as default or provide it as convenient settings kwargs["stdout"] = gs.PIPE kwargs["stderr"] = gs.PIPE - process = gs.pipe_command(name, **kwargs) + process = gs.pipe_command(name, env=self._env, **kwargs) stdout, stderr = process.communicate() stderr = gs.utils.decode(stderr) returncode = process.poll() @@ -85,7 +146,7 @@ def wrapper(**kwargs): def _test(): - gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") + session = gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") tools = Tools() tools.g_region(raster="elevation") @@ -99,6 +160,32 @@ def _test(): ]["mean"] ) + print(tools.g_mapset(flags="p").text) + print(tools.g_mapsets(flags="l").text_split()) + print(tools.g_mapsets(flags="l").space_items) + print(tools.g_gisenv(get="GISDBASE,LOCATION_NAME,MAPSET", sep="comma").comma_items) + + print(tools.g_region(flags="g").keyval) + + env = os.environ.copy() + env["GRASS_REGION"] = gs.region_env(res=250) + coarse_computation = Tools(env=env) + current_region = coarse_computation.g_region(flags="g").keyval + print( + current_region["ewres"], current_region["nsres"] + ) # TODO: should keyval convert? + coarse_computation.r_slope_aspect( + elevation="elevation", slope="slope", flags="a", overwrite=True + ) + print(coarse_computation.r_info(map="slope", flags="g").keyval) + + independent_computation = Tools(session=session, freeze_region=True) + tools.g_region(res=500) # we would do this for another computation elsewhere + print(independent_computation.g_region(flags="g").keyval["ewres"]) + + tools_pro = Tools(session=session, freeze_region=True, superquiet=True) + tools_pro.r_slope_aspect(elevation="elevation", slope="slope") + if __name__ == "__main__": _test() From 54db575a33db12bc3d7e0665da3358c8e9776fba Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sat, 22 Apr 2023 14:31:14 -0400 Subject: [PATCH 03/49] Raise exception instead of calling handle_errors --- python/grass/experimental/tools.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 64b103b28fc..ebf5e16bf91 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -114,7 +114,16 @@ def run(self, name, /, **kwargs): stderr = gs.utils.decode(stderr) returncode = process.poll() # TODO: instead of printing, do exception right away + # but right now, handle errors does not accept stderr + # or don't use handle errors and raise instead if returncode: + raise gs.CalledModuleError( + name, + code=" ".join([f"{key}={value}" for key, value in kwargs.items()]), + returncode=returncode, + errors=stderr, + ) + # Print only when we are capturing it and there was some output. # (User can request ignoring the subprocess stderr and then # we get only None.) 
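A minimal usage sketch of the Tools class introduced by the patches above, based on the
_test() examples in tools.py (the sample project path and the map names are
illustrative only):

    import grass.script as gs
    from grass.experimental.tools import Tools

    with gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") as session:
        tools = Tools(session=session, overwrite=True)
        # Each tool is a method; underscores map to dots (g_region -> g.region).
        tools.g_region(raster="elevation")
        tools.r_slope_aspect(elevation="elevation", slope="slope")
        # The returned object converts captured output from known formats.
        print(tools.r_univar(map="slope", flags="g").keyval)
        print(tools.v_info(map="bridges", flags="c").text)
        # A failing tool raises CalledModuleError.
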
From 82f5894cf324700e9e7642f4e49cfaedd587d8e8 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sat, 22 Apr 2023 14:33:29 -0400 Subject: [PATCH 04/49] Allow to specify stdin and use a new instance of Tools itself to execute with that stdin --- python/grass/experimental/tools.py | 66 ++++++++++++++++++++++++++---- 1 file changed, 59 insertions(+), 7 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index ebf5e16bf91..7bab6c2a4c5 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -9,7 +9,10 @@ class ExecutedTool: def __init__(self, name, kwargs, stdout, stderr): self._name = name self._stdout = stdout - self._decoded_stdout = gs.decode(self._stdout) + if self._stdout: + self._decoded_stdout = gs.decode(self._stdout) + else: + self._decoded_stdout = "" @property def text(self): @@ -45,8 +48,14 @@ class SubExecutor: """use as tools().params(a="x", b="y").g_region()""" # a and b would be overwrite or stdin - # Can support other envs or all PIPE and encoding read command supports + def __init__(self, *, tools, env, stdin=None): + self._tools = tools + self._env = env + self._stdin = stdin + + def run(self, name, /, **kwargs): + pass class Tools: @@ -60,6 +69,7 @@ def __init__( verbose=False, superquiet=False, freeze_region=False, + stdin=None, ): # TODO: fix region, so that external g.region call in the middle # is not a problem @@ -85,6 +95,7 @@ def __init__( self._env["GRASS_VERBOSE"] = "1" if verbose: self._env["GRASS_VERBOSE"] = "3" + self._set_stdin(stdin) # These could be public, not protected. def _freeze_region(self): @@ -94,6 +105,10 @@ def _freeze_region(self): def _overwrite(self): self._env["GRASS_OVERWRITE"] = "1" + def _set_stdin(self, stdin, /): + print("_set_stdin", stdin) + self._stdin = stdin + @property def env(self): return self._env @@ -107,10 +122,23 @@ def run(self, name, /, **kwargs): :param str module: name of GRASS module :param `**kwargs`: named arguments passed to run_command()""" # alternatively use dev null as default or provide it as convenient settings - kwargs["stdout"] = gs.PIPE - kwargs["stderr"] = gs.PIPE - process = gs.pipe_command(name, env=self._env, **kwargs) - stdout, stderr = process.communicate() + stdout_pipe = gs.PIPE + stderr_pipe = gs.PIPE + if self._stdin: + stdin_pipe = gs.PIPE + stdin = gs.utils.encode(self._stdin) + else: + stdin_pipe = None + stdin = None + process = gs.start_command( + name, + env=self._env, + **kwargs, + stdin=stdin_pipe, + stdout=stdout_pipe, + stderr=stderr_pipe, + ) + stdout, stderr = process.communicate(input=stdin) stderr = gs.utils.decode(stderr) returncode = process.poll() # TODO: instead of printing, do exception right away @@ -131,6 +159,11 @@ def run(self, name, /, **kwargs): sys.stderr.write(stderr) gs.handle_errors(returncode, stdout, [name], kwargs) return ExecutedTool(name=name, kwargs=kwargs, stdout=stdout, stderr=stderr) + # executor = SubExecutor(tools=self, env=self._env, stdin=self._stdin) + # return executor.run(name, **kwargs) + + def feed_input_to(self, stdin, /): + return Tools(env=self._env, stdin=stdin) def __getattr__(self, name): """Parse attribute to GRASS display module. 
Attribute should be in @@ -192,8 +225,27 @@ def _test(): tools.g_region(res=500) # we would do this for another computation elsewhere print(independent_computation.g_region(flags="g").keyval["ewres"]) - tools_pro = Tools(session=session, freeze_region=True, superquiet=True) + tools_pro = Tools( + session=session, freeze_region=True, overwrite=True, superquiet=True + ) + # gs.feed_command("v.in.ascii", + # input="-", output="point", separator=",", + # stdin="13.45,29.96,200", overwrite=True) tools_pro.r_slope_aspect(elevation="elevation", slope="slope") + tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( + input="-", output="point", separator="," + ) + print(tools_pro.v_info(map="point", flags="t").keyval["points"]) + + # try: + tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( + input="-", + output="point", + format="xstandard", + ) + # except gs.CalledModuleError as error: + # print("Exception text:") + # print(error) if __name__ == "__main__": From 0f1e210a9148bb7ffe5f9411712ca20d71f64efc Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sat, 22 Apr 2023 17:56:57 -0400 Subject: [PATCH 05/49] Add ignore errors, r_mapcalc example, draft tests --- .../experimental/tests/grass_tools_test.py | 26 +++++++++++++++++++ python/grass/experimental/tools.py | 14 +++++++++- 2 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 python/grass/experimental/tests/grass_tools_test.py diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py new file mode 100644 index 00000000000..7a39c187e40 --- /dev/null +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -0,0 +1,26 @@ +from grass.experimental.tools import Tools + + +def test_key_value_parser(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + assert tools.g_region(flags="g").keyval["nsres"] == 1 + + +# def test_json_parser(xy_dataset_session): +# print( +# tools.v_db_univar(map="bridges", column="YEAR_BUILT", format="json").json[ +# "statistics" +# ]["mean"] +# ) + +# def test_direct_overwrite(xy_dataset_session): +# tools = Tools(session=xy_dataset_session) +# tools.r_slope_aspect(elevation="elevation", slope="slope") +# tools.r_slope_aspect(elevation="elevation", slope="slope", overwrite=True) + + +def test_stdin(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + tools.feed_input_to("13.45,29.96,200").v_in_ascii( + input="-", output="point", separator="," + ) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 7bab6c2a4c5..964194e8ac4 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -70,6 +70,7 @@ def __init__( superquiet=False, freeze_region=False, stdin=None, + errors=None, ): # TODO: fix region, so that external g.region call in the middle # is not a problem @@ -96,6 +97,7 @@ def __init__( if verbose: self._env["GRASS_VERBOSE"] = "3" self._set_stdin(stdin) + self._errors = errors # These could be public, not protected. 
def _freeze_region(self): @@ -144,7 +146,7 @@ def run(self, name, /, **kwargs): # TODO: instead of printing, do exception right away # but right now, handle errors does not accept stderr # or don't use handle errors and raise instead - if returncode: + if returncode and self._errors != "ignore": raise gs.CalledModuleError( name, code=" ".join([f"{key}={value}" for key, value in kwargs.items()]), @@ -165,6 +167,9 @@ def run(self, name, /, **kwargs): def feed_input_to(self, stdin, /): return Tools(env=self._env, stdin=stdin) + def ignore_errors_of(self): + return Tools(env=self._env, errors="ignore") + def __getattr__(self, name): """Parse attribute to GRASS display module. Attribute should be in the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. @@ -237,6 +242,13 @@ def _test(): ) print(tools_pro.v_info(map="point", flags="t").keyval["points"]) + print(tools_pro.ignore_errors_of().g_version(flags="rge").keyval) + + elevation = "elevation" + exaggerated = "exaggerated" + tools_pro.r_mapcalc(expression=f"{exaggerated} = 5 * {elevation}") + tools_pro.feed_input_to(f"{exaggerated} = 5 * {elevation}").r_mapcalc(file="-") + # try: tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", From f4e3fede7578c74d7710f1997995afa501d8ff7b Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Mon, 24 Apr 2023 15:58:35 -0400 Subject: [PATCH 06/49] Add test for exceptions --- python/grass/experimental/tests/grass_tools_test.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 7a39c187e40..3d471373176 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -1,3 +1,6 @@ +import pytest + +import grass.script as gs from grass.experimental.tools import Tools @@ -24,3 +27,13 @@ def test_stdin(xy_dataset_session): tools.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", output="point", separator="," ) + + +def test_raises(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + with pytest.raises(gs.CalledModuleError, match="xstandard"): + tools.feed_input_to("13.45,29.96,200").v_in_ascii( + input="-", + output="point", + format="xstandard", + ) From 04087e827c51baffef88e20bb4d4751d29d9d0be Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 4 May 2023 11:53:11 -0400 Subject: [PATCH 07/49] Add tests and Makefile --- python/grass/experimental/Makefile | 20 +++ python/grass/experimental/tests/conftest.py | 11 ++ .../experimental/tests/grass_tools_test.py | 115 ++++++++++++++++-- 3 files changed, 133 insertions(+), 13 deletions(-) create mode 100644 python/grass/experimental/Makefile create mode 100644 python/grass/experimental/tests/conftest.py diff --git a/python/grass/experimental/Makefile b/python/grass/experimental/Makefile new file mode 100644 index 00000000000..2ce55963c3c --- /dev/null +++ b/python/grass/experimental/Makefile @@ -0,0 +1,20 @@ +MODULE_TOPDIR = ../../.. 
+ +include $(MODULE_TOPDIR)/include/Make/Other.make +include $(MODULE_TOPDIR)/include/Make/Python.make + +DSTDIR = $(ETC)/python/grass/experimental + +MODULES = \ + tools + +PYFILES := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__) +PYCFILES := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__) + +default: $(PYFILES) $(PYCFILES) + +$(DSTDIR): + $(MKDIR) $@ + +$(DSTDIR)/%: % | $(DSTDIR) + $(INSTALL_DATA) $< $@ diff --git a/python/grass/experimental/tests/conftest.py b/python/grass/experimental/tests/conftest.py new file mode 100644 index 00000000000..9f8ad95b436 --- /dev/null +++ b/python/grass/experimental/tests/conftest.py @@ -0,0 +1,11 @@ +import pytest + +import grass.script as gs + + +@pytest.fixture +def xy_dataset_session(tmp_path): + """Creates a session with a mapset which has vector with a float column""" + gs.core._create_location_xy(tmp_path, "test") # pylint: disable=protected-access + with gs.setup.init(tmp_path / "test") as session: + yield session diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 3d471373176..4088a3ce0a8 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -1,25 +1,113 @@ +import os import pytest import grass.script as gs from grass.experimental.tools import Tools -def test_key_value_parser(xy_dataset_session): +def test_key_value_parser_number(xy_dataset_session): + """Check that numbers are parsed as numbers""" tools = Tools(session=xy_dataset_session) assert tools.g_region(flags="g").keyval["nsres"] == 1 -# def test_json_parser(xy_dataset_session): -# print( -# tools.v_db_univar(map="bridges", column="YEAR_BUILT", format="json").json[ -# "statistics" -# ]["mean"] -# ) +@pytest.mark.fails +def test_key_value_parser_multiple_values(xy_dataset_session): + """Check that strings and floats are parsed""" + tools = Tools(session=xy_dataset_session) + name = "surface" + tools.r_surf_gauss(output=name) # needs seed + result = tools.r_info(map=name, flags="g").keyval + assert result["datatype"] == "DCELL" + assert result["nsres"] == 1 + result = tools.r_univar(map=name, flags="g").keyval + assert result["mean"] == pytest.approx(-0.756762744552762) + + +def test_json_parser(xy_dataset_session): + """Check that JSON is parsed""" + tools = Tools(session=xy_dataset_session) + assert ( + tools.g_search_modules(keyword="random", flags="j").json[0]["name"] + == "r.random" + ) + + +def test_stdout_as_text(xy_dataset_session): + """Check that simple text is parsed and has no whitespace""" + tools = Tools(session=xy_dataset_session) + assert tools.g_mapset(flags="p").text == "PERMANENT" -# def test_direct_overwrite(xy_dataset_session): -# tools = Tools(session=xy_dataset_session) -# tools.r_slope_aspect(elevation="elevation", slope="slope") -# tools.r_slope_aspect(elevation="elevation", slope="slope", overwrite=True) + +def test_stdout_as_space_items(xy_dataset_session): + """Check that whitespace-separated items are parsed""" + tools = Tools(session=xy_dataset_session) + assert tools.g_mapset(flags="l").space_items == ["PERMANENT"] + + +def test_stdout_split_whitespace(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + assert tools.g_mapset(flags="l").text_split() == ["PERMANENT"] + + +def test_stdout_split_space(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + # Not a good example usage, but it tests the functionality. 
+ assert tools.g_mapset(flags="l").text_split(" ") == ["PERMANENT", ""] + + +def test_direct_overwrite(xy_dataset_session): + """Check overwrite as a parameter""" + tools = Tools(session=xy_dataset_session) + tools.r_random_surface(output="surface", seed=42) + tools.r_random_surface(output="surface", seed=42, overwrite=True) + + +def test_object_overwrite(xy_dataset_session): + """Check overwrite as parameter of the tools object""" + tools = Tools(session=xy_dataset_session, overwrite=True) + tools.r_random_surface(output="surface", seed=42) + tools.r_random_surface(output="surface", seed=42) + + +def test_no_overwrite(xy_dataset_session): + """Check that it fails without overwrite""" + tools = Tools(session=xy_dataset_session) + tools.r_random_surface(output="surface", seed=42) + with pytest.raises(gs.CalledModuleError, match="overwrite"): + tools.r_random_surface(output="surface", seed=42) + + +def test_env_overwrite(xy_dataset_session): + """Check that overwrite from env parameter is used""" + # env = xy_dataset_session.env.copy() # ideally + env = os.environ.copy() # for now + env["GRASS_OVERWRITE"] = "1" + tools = Tools(session=xy_dataset_session, env=env) + tools.r_random_surface(output="surface", seed=42) + tools.r_random_surface(output="surface", seed=42) + + +def test_global_overwrite_vs_env(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + # env = xy_dataset_session.env.copy() # ideally + env = os.environ.copy() # for now + os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env + tools = Tools(session=xy_dataset_session, env=env) + tools.r_random_surface(output="surface", seed=42) + with pytest.raises(gs.CalledModuleError, match="overwrite"): + tools.r_random_surface(output="surface", seed=42) + del os.environ["GRASS_OVERWRITE"] # check or ideally remove this + + +def test_global_overwrite_vs_init(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env + tools.r_random_surface(output="surface", seed=42) + with pytest.raises(gs.CalledModuleError, match="overwrite"): + tools.r_random_surface(output="surface", seed=42) + del os.environ["GRASS_OVERWRITE"] # check or ideally remove this def test_stdin(xy_dataset_session): @@ -31,9 +119,10 @@ def test_stdin(xy_dataset_session): def test_raises(xy_dataset_session): tools = Tools(session=xy_dataset_session) - with pytest.raises(gs.CalledModuleError, match="xstandard"): + wrong_name = "wrong_standard" + with pytest.raises(gs.CalledModuleError, match=wrong_name): tools.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", output="point", - format="xstandard", + format=wrong_name, ) From 6ab8e40d68e211d44666279c1a51aaa0cf792b6b Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 4 May 2023 11:54:21 -0400 Subject: [PATCH 08/49] Convert values to ints and floats in keyval --- python/grass/experimental/tools.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 964194e8ac4..9ffdea873fa 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -26,9 +26,18 @@ def json(self): @property def keyval(self): - # TODO: possibly use or add _text_to_key_value_dict - # which converts int and float automatically - return gs.parse_key_val(self._stdout) + def conversion(value): + try: + return 
int(value) + except ValueError: + pass + try: + return float(value) + except ValueError: + pass + return value + + return gs.parse_key_val(self._stdout, val_type=conversion) @property def comma_items(self): From 744cfacce9c11ad830226c9741f5cb3516539eac Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 4 May 2023 11:54:53 -0400 Subject: [PATCH 09/49] Do not overwrite by default to follow default behavior in GRASS GIS --- python/grass/experimental/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 9ffdea873fa..54287bd91cb 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -73,7 +73,7 @@ def __init__( *, session=None, env=None, - overwrite=True, + overwrite=False, quiet=False, verbose=False, superquiet=False, From 24c27e62a61461f357a04034bf1378ef88505a1d Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sun, 4 Jun 2023 00:55:34 +0200 Subject: [PATCH 10/49] Add doc, remove old code and todos --- python/grass/experimental/tools.py | 79 +++++++++++++++++------------- 1 file changed, 45 insertions(+), 34 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 54287bd91cb..e7641792d2c 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -1,32 +1,59 @@ +#!/usr/bin/env python + +############################################################################## +# AUTHOR(S): Vaclav Petras +# +# PURPOSE: API to call GRASS tools (modules) as Python functions +# +# COPYRIGHT: (C) 2023 Vaclav Petras and the GRASS Development Team +# +# This program is free software under the GNU General Public +# License (>=v2). Read the file COPYING that comes with GRASS +# for details. +############################################################################## + +"""API to call GRASS tools (modules) as Python functions""" + +import json import os -import sys import shutil import grass.script as gs class ExecutedTool: + """Result returned after executing a tool""" + def __init__(self, name, kwargs, stdout, stderr): self._name = name + self._kwargs = kwargs self._stdout = stdout + self._stderr = stderr if self._stdout: self._decoded_stdout = gs.decode(self._stdout) else: self._decoded_stdout = "" @property - def text(self): + def text(self) -> str: + """Text output as decoded string""" return self._decoded_stdout.strip() @property def json(self): - import json + """Text output read as JSON + This returns the nested structure of dictionaries and lists or fails when + the output is not JSON. + """ return json.loads(self._stdout) @property def keyval(self): + """Text output read as key-value pairs separated by equal signs""" + def conversion(value): + """Convert text to int or float if possible, otherwise return it as is""" try: return int(value) except ValueError: @@ -41,33 +68,30 @@ def conversion(value): @property def comma_items(self): + """Text output read as comma-separated list""" return self.text_split(",") @property def space_items(self): + """Text output read as whitespace-separated list""" return self.text_split(None) def text_split(self, separator=None): + """Parse text output read as list separated by separators + + Any leading or trailing newlines are removed prior to parsing. + """ # The use of strip is assuming that the output is one line which # ends with a newline character which is for display only. 
return self._decoded_stdout.strip("\n").split(separator) -class SubExecutor: - """use as tools().params(a="x", b="y").g_region()""" - - # a and b would be overwrite or stdin - # Can support other envs or all PIPE and encoding read command supports - def __init__(self, *, tools, env, stdin=None): - self._tools = tools - self._env = env - self._stdin = stdin - - def run(self, name, /, **kwargs): - pass +class Tools: + """Call GRASS tools as methods + GRASS tools (modules) can be executed as methods of this class. + """ -class Tools: def __init__( self, *, @@ -81,9 +105,6 @@ def __init__( stdin=None, errors=None, ): - # TODO: fix region, so that external g.region call in the middle - # is not a problem - # i.e. region is independent/internal/fixed if env: self._env = env.copy() elif session and hasattr(session, "env"): @@ -122,6 +143,7 @@ def _set_stdin(self, stdin, /): @property def env(self): + """Internally used environment (reference to it, not a copy)""" return self._env def run(self, name, /, **kwargs): @@ -152,9 +174,6 @@ def run(self, name, /, **kwargs): stdout, stderr = process.communicate(input=stdin) stderr = gs.utils.decode(stderr) returncode = process.poll() - # TODO: instead of printing, do exception right away - # but right now, handle errors does not accept stderr - # or don't use handle errors and raise instead if returncode and self._errors != "ignore": raise gs.CalledModuleError( name, @@ -162,21 +181,14 @@ def run(self, name, /, **kwargs): returncode=returncode, errors=stderr, ) - - # Print only when we are capturing it and there was some output. - # (User can request ignoring the subprocess stderr and then - # we get only None.) - if stderr: - sys.stderr.write(stderr) - gs.handle_errors(returncode, stdout, [name], kwargs) return ExecutedTool(name=name, kwargs=kwargs, stdout=stdout, stderr=stderr) - # executor = SubExecutor(tools=self, env=self._env, stdin=self._stdin) - # return executor.run(name, **kwargs) def feed_input_to(self, stdin, /): + """Get a new object which will feed text input to a tool or tools""" return Tools(env=self._env, stdin=stdin) def ignore_errors_of(self): + """Get a new object which will ignore errors of the called tools""" return Tools(env=self._env, errors="ignore") def __getattr__(self, name): @@ -202,6 +214,7 @@ def wrapper(**kwargs): def _test(): + """Ad-hoc tests and examples of the Tools class""" session = gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") tools = Tools() @@ -227,9 +240,7 @@ def _test(): env["GRASS_REGION"] = gs.region_env(res=250) coarse_computation = Tools(env=env) current_region = coarse_computation.g_region(flags="g").keyval - print( - current_region["ewres"], current_region["nsres"] - ) # TODO: should keyval convert? 
+ print(current_region["ewres"], current_region["nsres"]) coarse_computation.r_slope_aspect( elevation="elevation", slope="slope", flags="a", overwrite=True ) From ff187a6e3fe052b01a751ebf759778fe088f8df9 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sun, 4 Jun 2023 01:21:47 +0200 Subject: [PATCH 11/49] Add to top Makefile --- python/grass/Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/python/grass/Makefile b/python/grass/Makefile index 9e34f1281bf..cc54ca583b6 100644 --- a/python/grass/Makefile +++ b/python/grass/Makefile @@ -9,6 +9,7 @@ SUBDIRS = \ app \ benchmark \ exceptions \ + experimental \ grassdb \ gunittest \ imaging \ From 22773c89b696c3f1a4d10cb74bf9b022b197d8d3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sun, 4 Jun 2023 01:25:19 +0200 Subject: [PATCH 12/49] Add docs for tests --- python/grass/experimental/tests/grass_tools_test.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 4088a3ce0a8..6c2a2950067 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -1,3 +1,5 @@ +"""Test grass.experimental.Tools class""" + import os import pytest @@ -46,11 +48,13 @@ def test_stdout_as_space_items(xy_dataset_session): def test_stdout_split_whitespace(xy_dataset_session): + """Check that whitespace-based split function works""" tools = Tools(session=xy_dataset_session) assert tools.g_mapset(flags="l").text_split() == ["PERMANENT"] def test_stdout_split_space(xy_dataset_session): + """Check that the split function works with space""" tools = Tools(session=xy_dataset_session) # Not a good example usage, but it tests the functionality. assert tools.g_mapset(flags="l").text_split(" ") == ["PERMANENT", ""] @@ -111,6 +115,7 @@ def test_global_overwrite_vs_init(xy_dataset_session): def test_stdin(xy_dataset_session): + """Test that stdin is accepted""" tools = Tools(session=xy_dataset_session) tools.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", output="point", separator="," @@ -118,6 +123,7 @@ def test_stdin(xy_dataset_session): def test_raises(xy_dataset_session): + """Test that exception is raised for wrong parameter value""" tools = Tools(session=xy_dataset_session) wrong_name = "wrong_standard" with pytest.raises(gs.CalledModuleError, match=wrong_name): From 29110652bba1519ab43e95e7506db3ea398e5f2a Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sun, 4 Jun 2023 10:32:55 +0200 Subject: [PATCH 13/49] Allow test to fail because of the missing seed parameter (so results are different now) --- python/grass/experimental/tests/grass_tools_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 6c2a2950067..c311c5f5855 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -13,7 +13,7 @@ def test_key_value_parser_number(xy_dataset_session): assert tools.g_region(flags="g").keyval["nsres"] == 1 -@pytest.mark.fails +@pytest.mark.xfail def test_key_value_parser_multiple_values(xy_dataset_session): """Check that strings and floats are parsed""" tools = Tools(session=xy_dataset_session) From 437d46e24f4d2f8224401ce7bc44abb5e1a10e3e Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 23 Apr 2025 16:40:10 -0400 Subject: [PATCH 14/49] Allow for optional output capture (error 
handling and printing still needs to be improved there). Allow usage through attributes, run with run_command syntax, and subprocess-like execution. --- .../experimental/tests/grass_tools_test.py | 10 ++--- python/grass/experimental/tools.py | 38 +++++++++++++------ 2 files changed, 32 insertions(+), 16 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index c311c5f5855..498b2073332 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -3,8 +3,8 @@ import os import pytest -import grass.script as gs from grass.experimental.tools import Tools +from grass.exceptions import CalledModuleError def test_key_value_parser_number(xy_dataset_session): @@ -78,7 +78,7 @@ def test_no_overwrite(xy_dataset_session): """Check that it fails without overwrite""" tools = Tools(session=xy_dataset_session) tools.r_random_surface(output="surface", seed=42) - with pytest.raises(gs.CalledModuleError, match="overwrite"): + with pytest.raises(CalledModuleError, match="overwrite"): tools.r_random_surface(output="surface", seed=42) @@ -99,7 +99,7 @@ def test_global_overwrite_vs_env(xy_dataset_session): os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env tools = Tools(session=xy_dataset_session, env=env) tools.r_random_surface(output="surface", seed=42) - with pytest.raises(gs.CalledModuleError, match="overwrite"): + with pytest.raises(CalledModuleError, match="overwrite"): tools.r_random_surface(output="surface", seed=42) del os.environ["GRASS_OVERWRITE"] # check or ideally remove this @@ -109,7 +109,7 @@ def test_global_overwrite_vs_init(xy_dataset_session): tools = Tools(session=xy_dataset_session) os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env tools.r_random_surface(output="surface", seed=42) - with pytest.raises(gs.CalledModuleError, match="overwrite"): + with pytest.raises(CalledModuleError, match="overwrite"): tools.r_random_surface(output="surface", seed=42) del os.environ["GRASS_OVERWRITE"] # check or ideally remove this @@ -126,7 +126,7 @@ def test_raises(xy_dataset_session): """Test that exception is raised for wrong parameter value""" tools = Tools(session=xy_dataset_session) wrong_name = "wrong_standard" - with pytest.raises(gs.CalledModuleError, match=wrong_name): + with pytest.raises(CalledModuleError, match=wrong_name): tools.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", output="point", diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index e7641792d2c..898a92f0695 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -19,6 +19,7 @@ import shutil import grass.script as gs +from grass.exceptions import CalledModuleError class ExecutedTool: @@ -104,6 +105,7 @@ def __init__( freeze_region=False, stdin=None, errors=None, + capture_output=True, ): if env: self._env = env.copy() @@ -128,6 +130,7 @@ def __init__( self._env["GRASS_VERBOSE"] = "3" self._set_stdin(stdin) self._errors = errors + self._capture_output = capture_output # These could be public, not protected. 
def _freeze_region(self): @@ -154,34 +157,47 @@ def run(self, name, /, **kwargs): :param str module: name of GRASS module :param `**kwargs`: named arguments passed to run_command()""" + args, popen_options = gs.popen_args_command(name, **kwargs) + return self._execute_tool(args, **popen_options) + + def _execute_tool(self, command, **popen_options): # alternatively use dev null as default or provide it as convenient settings - stdout_pipe = gs.PIPE - stderr_pipe = gs.PIPE + if self._capture_output: + stdout_pipe = gs.PIPE + stderr_pipe = gs.PIPE + else: + stdout_pipe = None + stderr_pipe = None if self._stdin: stdin_pipe = gs.PIPE stdin = gs.utils.encode(self._stdin) else: stdin_pipe = None stdin = None - process = gs.start_command( - name, - env=self._env, - **kwargs, + # Allowing to overwrite env, but that's just to have maximum flexibility when + # the session is actually set up, but it may be confusing. + if "env" not in popen_options: + popen_options["env"] = self._env + process = gs.Popen( + command, stdin=stdin_pipe, stdout=stdout_pipe, stderr=stderr_pipe, + **popen_options, ) stdout, stderr = process.communicate(input=stdin) - stderr = gs.utils.decode(stderr) + if stderr: + stderr = gs.utils.decode(stderr) returncode = process.poll() if returncode and self._errors != "ignore": - raise gs.CalledModuleError( - name, - code=" ".join([f"{key}={value}" for key, value in kwargs.items()]), + raise CalledModuleError( + command[0], + code=" ".join(command), returncode=returncode, errors=stderr, ) - return ExecutedTool(name=name, kwargs=kwargs, stdout=stdout, stderr=stderr) + # We don't have the keyword arguments to pass to the resulting object. + return ExecutedTool(name=command[0], kwargs=None, stdout=stdout, stderr=stderr) def feed_input_to(self, stdin, /): """Get a new object which will feed text input to a tool or tools""" From 61972d4ce5b79ffe748c7787ca8b839690652f18 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 25 Apr 2025 00:14:31 -0400 Subject: [PATCH 15/49] Access JSON as dict directly without an attribute using getitem. Suggest a tool when there is a close match for the name. 
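For example (a sketch based on the tests added below; the search keyword and the
misspelled name are illustrative):

    from grass.experimental.tools import Tools

    tools = Tools()  # assumes an already active GRASS session
    # Index JSON output directly instead of going through the json property:
    name = tools.g_search_modules(keyword="random", flags="j")[0]["name"]
    # A misspelled call such as tools.r_sloppy_respect() now raises
    # AttributeError suggesting close matches like r.slope.aspect.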
--- .../experimental/tests/grass_tools_test.py | 81 +++++++++++++++++ python/grass/experimental/tools.py | 90 ++++++++++++++----- 2 files changed, 149 insertions(+), 22 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 498b2073332..2f3a8f202a0 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -1,8 +1,13 @@ """Test grass.experimental.Tools class""" import os +import json + import pytest + +import grass.script as gs +from grass.experimental.mapset import TemporaryMapsetSession from grass.experimental.tools import Tools from grass.exceptions import CalledModuleError @@ -34,6 +39,32 @@ def test_json_parser(xy_dataset_session): == "r.random" ) +def test_json_direct_access(xy_dataset_session): + """Check that JSON is parsed""" + tools = Tools(session=xy_dataset_session) + assert ( + tools.g_search_modules(keyword="random", flags="j")[0]["name"] + == "r.random" + ) + +def test_json_direct_access_bad_key_type(xy_dataset_session): + """Check that JSON is parsed""" + tools = Tools(session=xy_dataset_session) + with pytest.raises(TypeError): + tools.g_search_modules(keyword="random", flags="j")["name"] + +def test_json_direct_access_bad_key_value(xy_dataset_session): + """Check that JSON is parsed""" + tools = Tools(session=xy_dataset_session) + high_number = 100_000_000 + with pytest.raises(IndexError): + tools.g_search_modules(keyword="random", flags="j")[high_number] + +def test_json_direct_access_not_json(xy_dataset_session): + """Check that JSON is parsed""" + tools = Tools(session=xy_dataset_session) + with pytest.raises(json.JSONDecodeError): + tools.g_search_modules(keyword="random")[0]["name"] def test_stdout_as_text(xy_dataset_session): """Check that simple text is parsed and has no whitespace""" @@ -132,3 +163,53 @@ def test_raises(xy_dataset_session): output="point", format=wrong_name, ) + +def test_run_command(xy_dataset_session): + """Check run_command and its overwrite parameter""" + tools = Tools(session=xy_dataset_session) + tools.run_command("r.random.surface", output="surface", seed=42) + tools.run_command("r.random.surface", output="surface", seed=42, overwrite=True) + + +def test_parse_command_key_value(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + assert tools.parse_command("g.region", flags="g")["nsres"] == "1" + + +def test_parse_command_json(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + assert tools.parse_command("g.region", flags="g", format="json")["region"]["ns-res"] == 1 + + +def test_with_context_managers(tmpdir): + project = tmpdir / "project" + gs.create_project(project) + with gs.setup.init(project) as session: + tools = Tools(session=session) + tools.r_random_surface(output="surface", seed=42) + with TemporaryMapsetSession(env=tools.env) as mapset: + tools.r_random_surface(output="surface", seed=42, env=mapset.env) + with gs.MaskManager(env=mapset.env) as mask: + # TODO: Do actual test + tools.r_univar(map="surface", env=mask.env, format="json")[0]["mean"] + +def test_misspelling(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + with pytest.raises(AttributeError, match="r.slope.aspect"): + tools.r_sloppy_respect() + +def test_multiple_suggestions(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + with pytest.raises(AttributeError, match="v.db.univar|db.univar"): + tools.db_v_uni_var() + + +def 
test_tool_group_vs_model_name(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + with pytest.raises(AttributeError, match="r.sim.water"): + tools.rSIMWEwater() + +def test_wrong_attribute(xy_dataset_session): + tools = Tools(session=xy_dataset_session) + with pytest.raises(AttributeError, match="execute_big_command"): + tools.execute_big_command() diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 898a92f0695..a31c8e1e2dd 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -5,7 +5,7 @@ # # PURPOSE: API to call GRASS tools (modules) as Python functions # -# COPYRIGHT: (C) 2023 Vaclav Petras and the GRASS Development Team +# COPYRIGHT: (C) 2023-2025 Vaclav Petras and the GRASS Development Team # # This program is free software under the GNU General Public # License (>=v2). Read the file COPYING that comes with GRASS @@ -86,6 +86,16 @@ def text_split(self, separator=None): # ends with a newline character which is for display only. return self._decoded_stdout.strip("\n").split(separator) + def __getitem__(self, name): + # TODO: cache parsed JSON + if self._stdout: + # We are testing just std out and letting rest to the parse and the user. + # This makes no assumption about how JSON is produced by the tool. + print(self.json, name) + return self.json[name] + msg = f"Output of the tool {self._name} is not JSON" + raise ValueError(msg) + class Tools: """Call GRASS tools as methods @@ -158,9 +168,17 @@ def run(self, name, /, **kwargs): :param str module: name of GRASS module :param `**kwargs`: named arguments passed to run_command()""" args, popen_options = gs.popen_args_command(name, **kwargs) - return self._execute_tool(args, **popen_options) + # We approximate tool_kwargs as original kwargs. + return self._execute_tool(args, tool_kwargs=kwargs, **popen_options) + + def run_command(self, name, /, **kwargs): + # Adjust error handling or provide custom implementation for full control? + return gs.run_command(name, **kwargs, env=self._env) - def _execute_tool(self, command, **popen_options): + def parse_command(self, name, /, **kwargs): + return gs.parse_command(name, **kwargs, env=self._env) + + def _execute_tool(self, command, tool_kwargs=None, **popen_options): # alternatively use dev null as default or provide it as convenient settings if self._capture_output: stdout_pipe = gs.PIPE @@ -196,8 +214,11 @@ def _execute_tool(self, command, **popen_options): returncode=returncode, errors=stderr, ) + # TODO: solve tool_kwargs is None # We don't have the keyword arguments to pass to the resulting object. 
- return ExecutedTool(name=command[0], kwargs=None, stdout=stdout, stderr=stderr) + return ExecutedTool( + name=command[0], kwargs=tool_kwargs, stdout=stdout, stderr=stderr + ) def feed_input_to(self, stdin, /): """Get a new object which will feed text input to a tool or tools""" @@ -207,6 +228,37 @@ def ignore_errors_of(self): """Get a new object which will ignore errors of the called tools""" return Tools(env=self._env, errors="ignore") + def levenshtein_distance(self, text1: str, text2: str) -> int: + if len(text1) < len(text2): + return self.levenshtein_distance(text2, text1) + + if len(text2) == 0: + return len(text1) + + previous_row = list(range(len(text2) + 1)) + for i, char1 in enumerate(text1): + current_row = [i + 1] + for j, char2 in enumerate(text2): + insertions = previous_row[j + 1] + 1 + deletions = current_row[j] + 1 + substitutions = previous_row[j] + (char1 != char2) + current_row.append(min(insertions, deletions, substitutions)) + previous_row = current_row + + return previous_row[-1] + + def suggest_tools(self, tool): + # TODO: cache commands also for dir + all_names = list(gs.get_commands()[0]) + result = [] + max_suggestions = 10 + for name in all_names: + if self.levenshtein_distance(tool, name) < len(tool) / 2: + result.append(name) + if len(result) >= max_suggestions: + break + return result + def __getattr__(self, name): """Parse attribute to GRASS display module. Attribute should be in the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. @@ -215,12 +267,19 @@ def __getattr__(self, name): grass_module = name.replace("_", ".") # Assert module exists if not shutil.which(grass_module): - raise AttributeError( - _( - "Cannot find GRASS tool {}. " - "Is the session set up and the tool on path?" - ).format(grass_module) + suggesions = self.suggest_tools(grass_module) + if suggesions: + msg = ( + f"Tool {grass_module} not found. " + f"Did you mean: {', '.join(suggesions)}?" + ) + raise AttributeError(msg) + msg = ( + f"Tool or attribute {name} not found. " + "If you are executing a tool, is the session set up and the tool on path? " + "If you are looking for an attribute, is it in the documentation?" 
) + raise AttributeError(msg) def wrapper(**kwargs): # Run module @@ -269,9 +328,6 @@ def _test(): tools_pro = Tools( session=session, freeze_region=True, overwrite=True, superquiet=True ) - # gs.feed_command("v.in.ascii", - # input="-", output="point", separator=",", - # stdin="13.45,29.96,200", overwrite=True) tools_pro.r_slope_aspect(elevation="elevation", slope="slope") tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( input="-", output="point", separator="," @@ -285,16 +341,6 @@ def _test(): tools_pro.r_mapcalc(expression=f"{exaggerated} = 5 * {elevation}") tools_pro.feed_input_to(f"{exaggerated} = 5 * {elevation}").r_mapcalc(file="-") - # try: - tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( - input="-", - output="point", - format="xstandard", - ) - # except gs.CalledModuleError as error: - # print("Exception text:") - # print(error) - if __name__ == "__main__": _test() From c86d8ffadfb9935543820ade251c13231a3cbe77 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 25 Apr 2025 15:10:06 -0400 Subject: [PATCH 16/49] Fix whitespace and regexp --- .../experimental/tests/grass_tools_test.py | 29 ++++++++++++------- 1 file changed, 19 insertions(+), 10 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 2f3a8f202a0..182ab344196 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -39,13 +39,12 @@ def test_json_parser(xy_dataset_session): == "r.random" ) + def test_json_direct_access(xy_dataset_session): """Check that JSON is parsed""" tools = Tools(session=xy_dataset_session) - assert ( - tools.g_search_modules(keyword="random", flags="j")[0]["name"] - == "r.random" - ) + assert tools.g_search_modules(keyword="random", flags="j")[0]["name"] == "r.random" + def test_json_direct_access_bad_key_type(xy_dataset_session): """Check that JSON is parsed""" @@ -53,6 +52,7 @@ def test_json_direct_access_bad_key_type(xy_dataset_session): with pytest.raises(TypeError): tools.g_search_modules(keyword="random", flags="j")["name"] + def test_json_direct_access_bad_key_value(xy_dataset_session): """Check that JSON is parsed""" tools = Tools(session=xy_dataset_session) @@ -60,12 +60,14 @@ def test_json_direct_access_bad_key_value(xy_dataset_session): with pytest.raises(IndexError): tools.g_search_modules(keyword="random", flags="j")[high_number] + def test_json_direct_access_not_json(xy_dataset_session): """Check that JSON is parsed""" tools = Tools(session=xy_dataset_session) with pytest.raises(json.JSONDecodeError): tools.g_search_modules(keyword="random")[0]["name"] + def test_stdout_as_text(xy_dataset_session): """Check that simple text is parsed and has no whitespace""" tools = Tools(session=xy_dataset_session) @@ -164,6 +166,7 @@ def test_raises(xy_dataset_session): format=wrong_name, ) + def test_run_command(xy_dataset_session): """Check run_command and its overwrite parameter""" tools = Tools(session=xy_dataset_session) @@ -178,7 +181,10 @@ def test_parse_command_key_value(xy_dataset_session): def test_parse_command_json(xy_dataset_session): tools = Tools(session=xy_dataset_session) - assert tools.parse_command("g.region", flags="g", format="json")["region"]["ns-res"] == 1 + assert ( + tools.parse_command("g.region", flags="g", format="json")["region"]["ns-res"] + == 1 + ) def test_with_context_managers(tmpdir): @@ -193,22 +199,25 @@ def test_with_context_managers(tmpdir): # TODO: Do actual test tools.r_univar(map="surface", 
env=mask.env, format="json")[0]["mean"] + def test_misspelling(xy_dataset_session): tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match="r.slope.aspect"): + with pytest.raises(AttributeError, match=r"r\.slope\.aspect"): tools.r_sloppy_respect() - + + def test_multiple_suggestions(xy_dataset_session): tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match="v.db.univar|db.univar"): - tools.db_v_uni_var() + with pytest.raises(AttributeError, match=r"v\.db\.univar|db\.univar"): + tools.db_v_uni_var() def test_tool_group_vs_model_name(xy_dataset_session): tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match="r.sim.water"): + with pytest.raises(AttributeError, match=r"r\.sim\.water"): tools.rSIMWEwater() + def test_wrong_attribute(xy_dataset_session): tools = Tools(session=xy_dataset_session) with pytest.raises(AttributeError, match="execute_big_command"): From 3b995c984dc32bd5edc6a4113c76ecb2a4746789 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 25 Apr 2025 15:55:35 -0400 Subject: [PATCH 17/49] Represent not captured stdout as None, not empty string. --- .../experimental/tests/grass_tools_test.py | 7 ++++++ python/grass/experimental/tools.py | 23 +++++++++++-------- 2 files changed, 20 insertions(+), 10 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 182ab344196..d02b5337fa3 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -93,6 +93,13 @@ def test_stdout_split_space(xy_dataset_session): assert tools.g_mapset(flags="l").text_split(" ") == ["PERMANENT", ""] +def test_stdout_without_capturing(xy_dataset_session): + """Check that text is not present when not capturing it""" + tools = Tools(session=xy_dataset_session, capture_output=False) + assert not tools.g_mapset(flags="p").text + assert tools.g_mapset(flags="p").text is None + + def test_direct_overwrite(xy_dataset_session): """Check overwrite as a parameter""" tools = Tools(session=xy_dataset_session) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index a31c8e1e2dd..6c4d8fb4a0c 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -30,14 +30,17 @@ def __init__(self, name, kwargs, stdout, stderr): self._kwargs = kwargs self._stdout = stdout self._stderr = stderr - if self._stdout: + if self._stdout is not None: self._decoded_stdout = gs.decode(self._stdout) else: - self._decoded_stdout = "" + self._decoded_stdout = None + self._cached_json = None @property def text(self) -> str: """Text output as decoded string""" + if self._decoded_stdout is None: + return None return self._decoded_stdout.strip() @property @@ -47,7 +50,9 @@ def json(self): This returns the nested structure of dictionaries and lists or fails when the output is not JSON. """ - return json.loads(self._stdout) + if self._cached_json is None: + self._cached_json = json.loads(self._stdout) + return self._cached_json @property def keyval(self): @@ -91,7 +96,6 @@ def __getitem__(self, name): if self._stdout: # We are testing just std out and letting rest to the parse and the user. # This makes no assumption about how JSON is produced by the tool. 
- print(self.json, name) return self.json[name] msg = f"Output of the tool {self._name} is not JSON" raise ValueError(msg) @@ -151,7 +155,6 @@ def _overwrite(self): self._env["GRASS_OVERWRITE"] = "1" def _set_stdin(self, stdin, /): - print("_set_stdin", stdin) self._stdin = stdin @property @@ -264,13 +267,13 @@ def __getattr__(self, name): the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. """ # Reformat string - grass_module = name.replace("_", ".") + tool_name = name.replace("_", ".") # Assert module exists - if not shutil.which(grass_module): - suggesions = self.suggest_tools(grass_module) + if not shutil.which(tool_name): + suggesions = self.suggest_tools(tool_name) if suggesions: msg = ( - f"Tool {grass_module} not found. " + f"Tool {tool_name} not found. " f"Did you mean: {', '.join(suggesions)}?" ) raise AttributeError(msg) @@ -283,7 +286,7 @@ def __getattr__(self, name): def wrapper(**kwargs): # Run module - return self.run(grass_module, **kwargs) + return self.run(tool_name, **kwargs) return wrapper From 4cc5a325c49abdcd7679982a0dd1f16bbb100901 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Tue, 29 Apr 2025 13:32:01 -0400 Subject: [PATCH 18/49] Add run subcommand to have a CLI use case for the tools. It runs one tool in XY project, so useful only for things like g.extension or m.proj, but, with a significant workaround for argparse --help, it can do --help for a tool. --- python/grass/app/cli.py | 98 +++++++++++++++++++++++++++++- python/grass/experimental/tools.py | 3 +- 2 files changed, 99 insertions(+), 2 deletions(-) diff --git a/python/grass/app/cli.py b/python/grass/app/cli.py index c2bc8f40d0b..5f55f414caf 100644 --- a/python/grass/app/cli.py +++ b/python/grass/app/cli.py @@ -20,9 +20,48 @@ import tempfile import os import sys +import subprocess from pathlib import Path + import grass.script as gs +from grass.app.data import lock_mapset, unlock_mapset, MapsetLockingException +from grass.experimental.tools import Tools + + +def subcommand_run_tool(args, tool_args: list, help: bool): + command = [args.tool_name, *tool_args] + with tempfile.TemporaryDirectory() as tmp_dir_name: + project_name = "project" + project_path = Path(tmp_dir_name) / project_name + gs.create_project(project_path) + with gs.setup.init(project_path) as session: + if help: + result = subprocess.run(command, env=session.env) + return result.returncode + tools = Tools(capture_output=False) + try: + tools.run_from_list(command) + except subprocess.CalledProcessError as error: + return error.returncode + + +def subcommand_lock_mapset(args): + gs.setup.setup_runtime_env() + try: + lock_mapset( + args.mapset_path, + force_lock_removal=args.force_remove_lock, + timeout=args.timeout, + message_callback=print, + process_id=args.process_id, + ) + except MapsetLockingException as e: + print(str(e), file=sys.stderr) + + +def subcommand_unlock_mapset(args): + unlock_mapset(args.mapset_path) def call_g_manual(**kwargs): @@ -60,6 +99,41 @@ def main(args=None, program=None): # Subcommand parsers + subparser = subparsers.add_parser("run", help="run a tool") + subparser.add_argument("tool_name", type=str) + subparser.set_defaults(func=subcommand_run_tool) + + subparser = subparsers.add_parser("lock", help="lock a mapset") + subparser.add_argument("mapset_path", type=str) + subparser.add_argument( + "--process-id", + metavar="PID", + type=int, + default=1, + help=_( + "process ID of the process locking the mapset (a mapset can be " + "automatically unlocked if there is no process with this PID)" + 
), + ) + subparser.add_argument( + "--timeout", + metavar="TIMEOUT", + type=float, + default=30, + help=_("mapset locking timeout in seconds"), + ) + subparser.add_argument( + "-f", + "--force-remove-lock", + action="store_true", + help=_("remove lock if present"), + ) + subparser.set_defaults(func=subcommand_lock_mapset) + + subparser = subparsers.add_parser("unlock", help="unlock a mapset") + subparser.add_argument("mapset_path", type=str) + subparser.set_defaults(func=subcommand_unlock_mapset) + subparser = subparsers.add_parser( "help", help="show HTML documentation for a tool or topic" ) @@ -72,5 +146,27 @@ def main(args=None, program=None): subparser.add_argument("page", type=str) subparser.set_defaults(func=subcommand_show_man) - parsed_args = parser.parse_args(args) + # Parsing + + if not args: + args = sys.argv[1:] + raw_args = args.copy() + add_back = None + if len(raw_args) > 2 and raw_args[0] == "run": + # Getting the --help of tools needs to work around the standard help mechanism + # of argparse. + # Maybe a better workaround is to use custom --help, action="help", print_help, + # and dedicated tool help function complimentary with g.manual subcommand + # interface. + if "--help" in raw_args[2:]: + raw_args.remove("--help") + add_back = "--help" + elif "--h" in raw_args[2:]: + raw_args.remove("--h") + add_back = "--h" + parsed_args, other_args = parser.parse_known_args(raw_args) + if parsed_args.subcommand == "run": + if add_back: + other_args.append(add_back) + return parsed_args.func(parsed_args, other_args, help=bool(add_back)) return parsed_args.func(parsed_args) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 6c4d8fb4a0c..a88a3fd93f2 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -181,7 +181,8 @@ def run_command(self, name, /, **kwargs): def parse_command(self, name, /, **kwargs): return gs.parse_command(name, **kwargs, env=self._env) - def _execute_tool(self, command, tool_kwargs=None, **popen_options): + # Make this an overload of run. + def run_from_list(self, command, tool_kwargs=None, **popen_options): # alternatively use dev null as default or provide it as convenient settings if self._capture_output: stdout_pipe = gs.PIPE From 459b2ad1c4c388313b0a51fccdfad496143e764f Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 30 Apr 2025 18:20:28 -0400 Subject: [PATCH 19/49] Update function name --- python/grass/experimental/tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index a88a3fd93f2..56ec27725a7 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -172,7 +172,7 @@ def run(self, name, /, **kwargs): :param `**kwargs`: named arguments passed to run_command()""" args, popen_options = gs.popen_args_command(name, **kwargs) # We approximate tool_kwargs as original kwargs. - return self._execute_tool(args, tool_kwargs=kwargs, **popen_options) + return self.run_from_list(args, tool_kwargs=kwargs, **popen_options) def run_command(self, name, /, **kwargs): # Adjust error handling or provide custom implementation for full control? 
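A minimal standalone sketch (not part of the patches) of what the new run subcommand does under the hood, mirroring subcommand_run_tool above: create a throwaway XY project, set up a session, and hand a subprocess.run-style argument list to Tools.run_from_list. The temporary project location and the g.version call are illustrative assumptions.

    import tempfile
    from pathlib import Path

    import grass.script as gs
    from grass.experimental.tools import Tools

    # Throwaway XY project; tool output goes straight to the terminal
    # because capture_output=False, as in the CLI subcommand.
    with tempfile.TemporaryDirectory() as tmp_dir_name:
        project_path = Path(tmp_dir_name) / "project"
        gs.create_project(project_path)
        with gs.setup.init(project_path):
            tools = Tools(capture_output=False)
            # Tool name and parameters form one list, subprocess.run style.
            tools.run_from_list(["g.version", "-e"])
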
From 513c9f8c92baff716db8c19c581bd5d80f624e2e Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Mon, 2 Jun 2025 08:49:40 -0400 Subject: [PATCH 20/49] Add prototype code for numpy support --- .../experimental/tests/grass_tools_test.py | 35 ++++++++++ python/grass/experimental/tools.py | 64 ++++++++++++++++++- 2 files changed, 98 insertions(+), 1 deletion(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index d02b5337fa3..c92b8dd65e4 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -229,3 +229,38 @@ def test_wrong_attribute(xy_dataset_session): tools = Tools(session=xy_dataset_session) with pytest.raises(AttributeError, match="execute_big_command"): tools.execute_big_command() + +import numpy as np + +def test_numpy_one_input(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope") + assert tools.r_info(map="slope", format="json")["datatype"] == "FCELL" + +# Other possible ways how to handle the syntax: + +# class ToNumpy: +# pass + +# class AsInput: +# pass + +# def test_numpy_one_input(xy_dataset_session): +# """Check that global overwrite is not used when separate env is used""" +# tools = Tools(session=xy_dataset_session) +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect", force_numpy_for_output=True) +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls(0,0), aspect="aspect") +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=ToNumpy(), aspect="aspect") +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.ndarray, aspect="aspect") +# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True, overwrite=True) # (np.array, np.array) +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=AsInput, aspect=AsInput) # {"slope": np.array(...), "aspect": np.array(...) } +# assert tools.r_info(map="slope", format="json")["datatype"] == "FCELL" + +def test_numpy_one_input_one_output(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=2, cols=3) + slope = tools.r_slope_aspect(elevation=np.ones((2, 3)), slope=np.ndarray) + assert slope.shape == (2, 3) + assert slope[0] == 0 diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 56ec27725a7..ec315a734d5 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -162,6 +162,29 @@ def env(self): """Internally used environment (reference to it, not a copy)""" return self._env + def _digest_data_parameters(self, parameters, command): + # Uses parameters, but modifies the command. 
+ input_rasters = [] + if "inputs" in parameters: + for item in parameters["inputs"]: + if item["value"].endswith(".grass_raster"): + input_rasters.append(Path(item["value"])) + for i, arg in enumerate(command): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], Path(item["value"]).stem) + command[i] = arg + output_rasters = [] + if "outputs" in parameters: + for item in parameters["outputs"]: + if item["value"].endswith(".grass_raster"): + output_rasters.append(Path(item["value"])) + for i, arg in enumerate(command): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], Path(item["value"]).stem) + command[i] = arg + return input_rasters, output_rasters + + def run(self, name, /, **kwargs): """Run modules from the GRASS display family (modules starting with "d."). @@ -170,9 +193,48 @@ def run(self, name, /, **kwargs): :param str module: name of GRASS module :param `**kwargs`: named arguments passed to run_command()""" + original = {} + original_outputs = {} + import grass.script.array as garray + import numpy as np + for key, value in kwargs.items(): + if isinstance(value, np.ndarray): + kwargs[key] = "tmp_serialized_array" + original[key] = value + elif value == np.ndarray: + kwargs[key] = "tmp_future_serialized_array" + original_outputs[key] = value + args, popen_options = gs.popen_args_command(name, **kwargs) + + env = popen_options.get("env", self._env) + + import subprocess + parameters = json.loads( + subprocess.check_output( + [*args, "--json"], text=True, env=env + ) + ) + if "inputs" in parameters: + for param in parameters["inputs"]: + if param["param"] not in original: + continue + map2d = garray.array() + print(param) + map2d[:] = original[param["param"]] + map2d.write("tmp_serialized_array", overwrite=True) + # We approximate tool_kwargs as original kwargs. - return self.run_from_list(args, tool_kwargs=kwargs, **popen_options) + result = self.run_from_list(args, tool_kwargs=kwargs, **popen_options) + + if "outputs" in parameters: + for param in parameters["outputs"]: + if param["param"] not in original_outputs: + continue + output_array = garray.array("tmp_future_serialized_array") + result = output_array + + return result def run_command(self, name, /, **kwargs): # Adjust error handling or provide custom implementation for full control? 
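A rough sketch (not part of the patch) of the prototype NumPy support added above, based on its accompanying test: a NumPy array passed as a raster input is written to a temporary raster behind the scenes, and passing the np.ndarray class for an output parameter asks for that output back as an array. An already initialized session is assumed here.

    import numpy as np

    from grass.experimental.tools import Tools

    # Assumes a GRASS session has already been set up (e.g., with gs.setup.init).
    tools = Tools()
    tools.g_region(rows=2, cols=3)
    # elevation comes from the array; slope=np.ndarray requests the slope
    # output to be read back and returned as a NumPy array.
    slope = tools.r_slope_aspect(elevation=np.ones((2, 3)), slope=np.ndarray)
    print(slope.shape)  # (2, 3)
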
From 4a1e3745bc3dcccd5c19be5b6bdaaef97c59aff3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 11 Jun 2025 08:35:38 -0400 Subject: [PATCH 21/49] Make the special features standalone objects used by composition --- python/grass/experimental/tests/conftest.py | 38 ++ .../experimental/tests/grass_tools_test.py | 230 +++++++++- python/grass/experimental/tools.py | 425 +++++++++++++----- python/grass/script/core.py | 9 +- 4 files changed, 562 insertions(+), 140 deletions(-) diff --git a/python/grass/experimental/tests/conftest.py b/python/grass/experimental/tests/conftest.py index b33ae757f8e..910036b3717 100644 --- a/python/grass/experimental/tests/conftest.py +++ b/python/grass/experimental/tests/conftest.py @@ -77,3 +77,41 @@ def xy_mapset_non_permament(xy_session): # pylint: disable=redefined-outer-name "test1", create=True, env=xy_session.env ) as session: yield session + + +@pytest.fixture +def rows_raster_file3x3(tmp_path): + project = tmp_path / "xy_test3x3" + gs.create_project(project) + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=3, cols=3, env=session.env) + gs.mapcalc("rows = row()", env=session.env) + output_file = tmp_path / "rows3x3.grass_raster" + gs.run_command( + "r.pack", + input="rows", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file + + +@pytest.fixture +def rows_raster_file4x5(tmp_path): + project = tmp_path / "xy_test4x5" + gs.create_project(project) + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=4, cols=5, env=session.env) + gs.mapcalc("rows = row()", env=session.env) + output_file = tmp_path / "rows4x5.grass_raster" + gs.run_command( + "r.pack", + input="rows", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index c92b8dd65e4..1cba3e9e1c9 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -1,11 +1,11 @@ """Test grass.experimental.Tools class""" import os -import json +import io +import numpy as np import pytest - import grass.script as gs from grass.experimental.mapset import TemporaryMapsetSession from grass.experimental.tools import Tools @@ -40,6 +40,24 @@ def test_json_parser(xy_dataset_session): ) +def test_json_with_name_and_parameter_call(xy_dataset_session): + """Check that JSON is parsed with a name-and-parameters style call""" + tools = Tools(session=xy_dataset_session) + assert ( + tools.run("g.search.modules", keyword="random", flags="j")[0]["name"] + == "r.random" + ) + + +def test_json_with_subprocess_run_like_call(xy_dataset_session): + """Check that JSON is parsed with a name-and-parameters style call""" + tools = Tools(session=xy_dataset_session) + assert ( + tools.run_from_list(["g.search.modules", "keyword=random", "-j"])[0]["name"] + == "r.random" + ) + + def test_json_direct_access(xy_dataset_session): """Check that JSON is parsed""" tools = Tools(session=xy_dataset_session) @@ -62,9 +80,12 @@ def test_json_direct_access_bad_key_value(xy_dataset_session): def test_json_direct_access_not_json(xy_dataset_session): - """Check that JSON is parsed""" + """Check that JSON parsing creates an ValueError + + Specifically, this tests the case when format="json" is not set. 
+ """ tools = Tools(session=xy_dataset_session) - with pytest.raises(json.JSONDecodeError): + with pytest.raises(ValueError, match=r"format.*json"): tools.g_search_modules(keyword="random")[0]["name"] @@ -204,7 +225,7 @@ def test_with_context_managers(tmpdir): tools.r_random_surface(output="surface", seed=42, env=mapset.env) with gs.MaskManager(env=mapset.env) as mask: # TODO: Do actual test - tools.r_univar(map="surface", env=mask.env, format="json")[0]["mean"] + tools.r_univar(map="surface", env=mask.env, format="json")["mean"] def test_misspelling(xy_dataset_session): @@ -230,7 +251,6 @@ def test_wrong_attribute(xy_dataset_session): with pytest.raises(AttributeError, match="execute_big_command"): tools.execute_big_command() -import numpy as np def test_numpy_one_input(xy_dataset_session): """Check that global overwrite is not used when separate env is used""" @@ -238,29 +258,191 @@ def test_numpy_one_input(xy_dataset_session): tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope") assert tools.r_info(map="slope", format="json")["datatype"] == "FCELL" -# Other possible ways how to handle the syntax: - -# class ToNumpy: -# pass -# class AsInput: -# pass +# NumPy syntax for outputs +# While inputs are straightforward, there is several possible ways how to handle +# syntax for outputs. +# Output is the type of function for creating NumPy arrays, return value is now the arrays: +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.ndarray, aspect=np.array) +# Output is explicitly requested: +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect", force_numpy_for_output=True) +# Output is explicitly requested at the object level: +# Tools(force_numpy_for_output=True).r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect") +# Output is always array or arrays when at least on input is an array: +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect") +# An empty array is passed to signal the desired output: +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls((0, 0))) +# An array to be filled with data is passed, the return value is kept as is: +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls((1, 1))) +# NumPy universal function concept can be used explicitly to indicate, +# possibly more easily allowing for nameless args as opposed to keyword arguments, +# but outputs still need to be explicitly requested: +# Returns by value (tuple: (np.array, np.array)): +# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True) +# Modifies its arguments in-place: +# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True, out=(np.array((1, 1)), np.array((1, 1)))) +# Custom signaling classes or objects are passed (assuming empty classes AsNumpy and AsInput): +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=ToNumpy(), aspect=ToNumpy()) +# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=AsInput, aspect=AsInput) +# NumPy functions usually return a tuple, for multiple outputs. Universal function does +# unless the output is written to out parameter which is also provided as a tuple. We +# have names, so generally, we can return a dictionary: +# {"slope": np.array(...), "aspect": np.array(...) }. 
-# def test_numpy_one_input(xy_dataset_session): -# """Check that global overwrite is not used when separate env is used""" -# tools = Tools(session=xy_dataset_session) -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect", force_numpy_for_output=True) -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls(0,0), aspect="aspect") -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=ToNumpy(), aspect="aspect") -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.ndarray, aspect="aspect") -# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True, overwrite=True) # (np.array, np.array) -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=AsInput, aspect=AsInput) # {"slope": np.array(...), "aspect": np.array(...) } -# assert tools.r_info(map="slope", format="json")["datatype"] == "FCELL" def test_numpy_one_input_one_output(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" + """Check that a NumPy array works as input and for signaling output + + It tests that the np.ndarray class is supported to signal output. + Return type is not strictly defined, so we are not testing for it explicitly + (only by actually using it as an NumPy array). + """ tools = Tools(session=xy_dataset_session) tools.g_region(rows=2, cols=3) slope = tools.r_slope_aspect(elevation=np.ones((2, 3)), slope=np.ndarray) assert slope.shape == (2, 3) - assert slope[0] == 0 + assert np.all(slope == np.full((2, 3), 0)) + + +def test_numpy_with_name_and_parameter(xy_dataset_session): + """Check that a NumPy array works as input and for signaling output + + It tests that the np.ndarray class is supported to signal output. + Return type is not strictly defined, so we are not testing for it explicitly + (only by actually using it as an NumPy array). + """ + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=2, cols=3) + slope = tools.run("r.slope.aspect", elevation=np.ones((2, 3)), slope=np.ndarray) + assert slope.shape == (2, 3) + assert np.all(slope == np.full((2, 3), 0)) + + +def test_numpy_one_input_multiple_outputs(xy_dataset_session): + """Check that a NumPy array function works for signaling multiple outputs + + Besides multiple outputs it tests that np.array is supported to signal output. + """ + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=2, cols=3) + (slope, aspect) = tools.r_slope_aspect( + elevation=np.ones((2, 3)), slope=np.array, aspect=np.array + ) + assert slope.shape == (2, 3) + assert np.all(slope == np.full((2, 3), 0)) + assert aspect.shape == (2, 3) + assert np.all(aspect == np.full((2, 3), 0)) + + +def test_numpy_multiple_inputs_one_output(xy_dataset_session): + """Check that a NumPy array works for multiple inputs""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=2, cols=3) + result = tools.r_mapcalc_simple( + expression="A + B", a=np.full((2, 3), 2), b=np.full((2, 3), 5), output=np.array + ) + assert result.shape == (2, 3) + assert np.all(result == np.full((2, 3), 7)) + + +def test_numpy_grass_array_input_output(xy_dataset_session): + """Check that global overwrite is not used when separate env is used + + When grass array output is requested, we explicitly test the return value type. 
+ """ + tools = Tools(session=xy_dataset_session) + rows = 2 + cols = 3 + tools.g_region(rows=rows, cols=cols) + tools.r_mapcalc_simple(expression="5", output="const_5") + const_5 = gs.array.array("const_5") + result = tools.r_mapcalc_simple( + expression="2 * A", a=const_5, output=gs.array.array + ) + assert result.shape == (rows, cols) + assert np.all(result == np.full((rows, cols), 10)) + assert isinstance(result, gs.array.array) + + +def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + + +def test_pack_input_output_with_name_and_parameter_call( + xy_dataset_session, rows_raster_file3x3 +): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + tools.run( + "r.slope.aspect", elevation=rows_raster_file3x3, slope="file.grass_raster" + ) + assert os.path.exists("file.grass_raster") + + +def test_pack_input_output_with_subprocess_run_like_call( + xy_dataset_session, rows_raster_file3x3 +): + tools = Tools(session=xy_dataset_session) + assert os.path.exists(rows_raster_file3x3) + tools.run_from_list( + [ + "r.slope.aspect", + f"elevation={rows_raster_file3x3}", + "aspect=file.grass_raster", + ] + ) + assert os.path.exists("file.grass_raster") + + +def test_tool_groups_raster(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + raster = Tools(session=xy_dataset_session, prefix="r") + raster.mapcalc(expression="streams = if(row() > 1, 1, null())") + raster.buffer(input="streams", output="buffer", distance=1) + assert raster.info(map="streams", format="json")["datatype"] == "CELL" + + +def test_tool_groups_vector(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + vector = Tools(prefix="v") + vector.edit(map="points", type="point", tool="create", env=xy_dataset_session.env) + # Here, the feed_input_to style does not make sense, but we are not using StringIO + # here to test the feed_input_to functionality and avoid dependence on the StringIO + # functionality. + # The ASCII format is for one point with no categories. 
+ vector.feed_input_to("P 1 0\n 10 20").edit( + map="points", + type="point", + tool="add", + input="-", + flags="n", + env=xy_dataset_session.env, + ) + vector.buffer( + input="points", output="buffer", distance=1, env=xy_dataset_session.env + ) + assert ( + vector.info(map="buffer", format="json", env=xy_dataset_session.env)["areas"] + == 1 + ) + + +def test_stdin_as_stringio_object(xy_dataset_session): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.v_edit(map="points", type="point", tool="create") + tools.v_edit( + map="points", + type="point", + tool="add", + input=io.StringIO("P 1 0\n 10 20"), + flags="n", + ) + assert tools.v_info(map="points", format="json")["points"] == 1 diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index ec315a734d5..b6a4a2f5539 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -17,11 +17,211 @@ import json import os import shutil +import subprocess +from pathlib import Path +from io import StringIO + +import numpy as np import grass.script as gs +import grass.script.array as garray from grass.exceptions import CalledModuleError +class PackImporterExporter: + def __init__(self, *, run_function, env=None): + self._run_function = run_function + self._env = env + + @classmethod + def is_raster_pack_file(cls, value): + return value.endswith((".grass_raster", ".pack", ".rpack", ".grr")) + + def modify_and_ingest_argument_list(self, args, parameters): + # Uses parameters, but modifies the command, generates list of rasters and vectors. + self.input_rasters = [] + if "inputs" in parameters: + for item in parameters["inputs"]: + if self.is_raster_pack_file(item["value"]): + self.input_rasters.append(Path(item["value"])) + # No need to change that for the original kwargs. + # kwargs[item["param"]] = Path(item["value"]).stem + # Actual parameters to execute are now a list. + for i, arg in enumerate(args): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], Path(item["value"]).stem) + args[i] = arg + self.output_rasters = [] + if "outputs" in parameters: + for item in parameters["outputs"]: + if self.is_raster_pack_file(item["value"]): + self.output_rasters.append(Path(item["value"])) + # kwargs[item["param"]] = Path(item["value"]).stem + for i, arg in enumerate(args): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], Path(item["value"]).stem) + args[i] = arg + + def import_rasters(self): + for raster_file in self.input_rasters: + # Currently we override the projection check. + self._run_function( + "r.unpack", + input=raster_file, + output=raster_file.stem, + overwrite=True, + superquiet=True, + # flags="o", + env=self._env, + ) + + def export_rasters(self): + # Pack the output raster + for raster in self.output_rasters: + # Overwriting a file is a warning, so to avoid it, we delete the file first. 
+ Path(raster).unlink(missing_ok=True) + + self._run_function( + "r.pack", + input=raster.stem, + output=raster, + flags="c", + overwrite=True, + superquiet=True, + ) + + def import_data(self): + self.import_rasters() + + def export_data(self): + self.export_rasters() + + +class ObjectParameterHandler: + def __init__(self): + self._numpy_inputs = {} + self._numpy_outputs = {} + self._numpy_inputs_ordered = [] + self.stdin = None + + def process_parameters(self, kwargs): + for key, value in kwargs.items(): + if isinstance(value, np.ndarray): + kwargs[key] = gs.append_uuid("tmp_serialized_input_array") + self._numpy_inputs[key] = value + self._numpy_inputs_ordered.append(value) + elif value in (np.ndarray, np.array, garray.array): + # We test for class or the function. + kwargs[key] = gs.append_uuid("tmp_serialized_output_array") + self._numpy_outputs[key] = value + elif isinstance(value, StringIO): + kwargs[key] = "-" + self.stdin = value.getvalue() + + def translate_objects_to_data(self, kwargs, parameters, env): + if "inputs" in parameters: + for param in parameters["inputs"]: + if param["param"] in self._numpy_inputs: + map2d = garray.array(env=env) + map2d[:] = self._numpy_inputs[param["param"]] + map2d.write(kwargs[param["param"]]) + + def input_rows_columns(self): + if not len(self._numpy_inputs_ordered): + return None + return self._numpy_inputs_ordered[0].shape + + def translate_data_to_objects(self, kwargs, parameters, env): + output_arrays = [] + if "outputs" in parameters: + for param in parameters["outputs"]: + if param["param"] not in self._numpy_outputs: + continue + output_array = garray.array(kwargs[param["param"]], env=env) + output_arrays.append(output_array) + if len(output_arrays) == 1: + self.result = output_arrays[0] + return True + if len(output_arrays) > 1: + self.result = tuple(output_arrays) + return True + self.result = None + return False + + +class ToolFunctionNameHelper: + def __init__(self, *, run_function, env, prefix=None): + self._run_function = run_function + self._env = env + self._prefix = prefix + + # def __getattr__(self, name): + # self.get_function(name, exception_type=AttributeError) + + def get_function(self, name, exception_type): + """Parse attribute to GRASS display module. Attribute should be in + the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. + """ + if self._prefix: + name = f"{self._prefix}.{name}" + # Reformat string + tool_name = name.replace("_", ".") + # Assert module exists + if not shutil.which(tool_name, path=self._env["PATH"]): + suggestions = self.suggest_tools(tool_name) + if suggestions: + msg = ( + f"Tool {tool_name} not found. " + f"Did you mean: {', '.join(suggestions)}?" + ) + raise AttributeError(msg) + msg = ( + f"Tool or attribute {name} not found. " + "If you are executing a tool, is the session set up and the tool on path? " + "If you are looking for an attribute, is it in the documentation?" 
+ ) + raise AttributeError(msg) + + def wrapper(**kwargs): + # Run module + return self._run_function(tool_name, **kwargs) + + return wrapper + + @staticmethod + def levenshtein_distance(text1: str, text2: str) -> int: + if len(text1) < len(text2): + return ToolFunctionNameHelper.levenshtein_distance(text2, text1) + + if len(text2) == 0: + return len(text1) + + previous_row = list(range(len(text2) + 1)) + for i, char1 in enumerate(text1): + current_row = [i + 1] + for j, char2 in enumerate(text2): + insertions = previous_row[j + 1] + 1 + deletions = current_row[j] + 1 + substitutions = previous_row[j] + (char1 != char2) + current_row.append(min(insertions, deletions, substitutions)) + previous_row = current_row + + return previous_row[-1] + + @staticmethod + def suggest_tools(tool): + # TODO: cache commands also for dir + all_names = list(gs.get_commands()[0]) + result = [] + max_suggestions = 10 + for name in all_names: + if ToolFunctionNameHelper.levenshtein_distance(tool, name) < len(tool) / 2: + result.append(name) + if len(result) >= max_suggestions: + break + return result + + class ExecutedTool: """Result returned after executing a tool""" @@ -92,12 +292,20 @@ def text_split(self, separator=None): return self._decoded_stdout.strip("\n").split(separator) def __getitem__(self, name): - # TODO: cache parsed JSON if self._stdout: # We are testing just std out and letting rest to the parse and the user. # This makes no assumption about how JSON is produced by the tool. - return self.json[name] - msg = f"Output of the tool {self._name} is not JSON" + try: + return self.json[name] + except json.JSONDecodeError as error: + if self._kwargs.get("format") == "json": + raise + msg = ( + f"Output of {self._name} cannot be parsed as JSON. " + 'Did you use format="json"?' + ) + raise ValueError(msg) from error + msg = f"No text output for {self._name} to be parsed as JSON" raise ValueError(msg) @@ -120,6 +328,7 @@ def __init__( stdin=None, errors=None, capture_output=True, + prefix=None, ): if env: self._env = env.copy() @@ -134,7 +343,7 @@ def __init__( self._overwrite() # This hopefully sets the numbers directly. An alternative implementation would # be to pass the parameter every time. - # Does not check for multiple set at the same time, but the most versbose wins + # Does not check for multiple set at the same time, but the most verbose wins # for safety. if superquiet: self._env["GRASS_VERBOSE"] = "0" @@ -145,6 +354,8 @@ def __init__( self._set_stdin(stdin) self._errors = errors self._capture_output = capture_output + self._prefix = prefix + self._name_helper = None # These could be public, not protected. def _freeze_region(self): @@ -162,28 +373,12 @@ def env(self): """Internally used environment (reference to it, not a copy)""" return self._env - def _digest_data_parameters(self, parameters, command): - # Uses parameters, but modifies the command. 
- input_rasters = [] - if "inputs" in parameters: - for item in parameters["inputs"]: - if item["value"].endswith(".grass_raster"): - input_rasters.append(Path(item["value"])) - for i, arg in enumerate(command): - if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], Path(item["value"]).stem) - command[i] = arg - output_rasters = [] - if "outputs" in parameters: - for item in parameters["outputs"]: - if item["value"].endswith(".grass_raster"): - output_rasters.append(Path(item["value"])) - for i, arg in enumerate(command): - if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], Path(item["value"]).stem) - command[i] = arg - return input_rasters, output_rasters + def _process_parameters(self, command, popen_options): + env = popen_options.get("env", self._env) + return subprocess.run( + [*command, "--json"], text=True, capture_output=True, env=env + ) def run(self, name, /, **kwargs): """Run modules from the GRASS display family (modules starting with "d."). @@ -193,58 +388,99 @@ def run(self, name, /, **kwargs): :param str module: name of GRASS module :param `**kwargs`: named arguments passed to run_command()""" - original = {} - original_outputs = {} - import grass.script.array as garray - import numpy as np - for key, value in kwargs.items(): - if isinstance(value, np.ndarray): - kwargs[key] = "tmp_serialized_array" - original[key] = value - elif value == np.ndarray: - kwargs[key] = "tmp_future_serialized_array" - original_outputs[key] = value - args, popen_options = gs.popen_args_command(name, **kwargs) + object_parameter_handler = ObjectParameterHandler() + object_parameter_handler.process_parameters(kwargs) - env = popen_options.get("env", self._env) + args, popen_options = gs.popen_args_command(name, **kwargs) - import subprocess - parameters = json.loads( - subprocess.check_output( - [*args, "--json"], text=True, env=env + interface_result = self._process_parameters(args, popen_options) + if interface_result.returncode != 0: + # This is only for the error states. + return gs.handle_errors( + interface_result.returncode, + result=None, + args=[name], + kwargs=kwargs, + stderr=interface_result.stderr, + handler="raise", ) + parameters = json.loads(interface_result.stdout) + object_parameter_handler.translate_objects_to_data( + kwargs, parameters, env=self._env ) - if "inputs" in parameters: - for param in parameters["inputs"]: - if param["param"] not in original: - continue - map2d = garray.array() - print(param) - map2d[:] = original[param["param"]] - map2d.write("tmp_serialized_array", overwrite=True) # We approximate tool_kwargs as original kwargs. 
- result = self.run_from_list(args, tool_kwargs=kwargs, **popen_options) + result = self.run_from_list( + args, + tool_kwargs=kwargs, + processed_parameters=parameters, + stdin=object_parameter_handler.stdin, + **popen_options, + ) + use_objects = object_parameter_handler.translate_data_to_objects( + kwargs, parameters, env=self._env + ) + if use_objects: + result = object_parameter_handler.result + return result - if "outputs" in parameters: - for param in parameters["outputs"]: - if param["param"] not in original_outputs: - continue - output_array = garray.array("tmp_future_serialized_array") - result = output_array + def run_from_list( + self, + command, + tool_kwargs=None, + stdin=None, + processed_parameters=None, + **popen_options, + ): + if not processed_parameters: + interface_result = self._process_parameters(command, popen_options) + if interface_result.returncode != 0: + # This is only for the error states. + return gs.handle_errors( + interface_result.returncode, + result=None, + args=[command], + kwargs=tool_kwargs, + stderr=interface_result.stderr, + handler="raise", + ) + processed_parameters = json.loads(interface_result.stdout) + pack_importer_exporter = PackImporterExporter(run_function=self.no_nonsense_run) + pack_importer_exporter.modify_and_ingest_argument_list( + command, processed_parameters + ) + pack_importer_exporter.import_data() + + # We approximate tool_kwargs as original kwargs. + result = self.no_nonsense_run_from_list( + command, + tool_kwargs=tool_kwargs, + stdin=stdin, + **popen_options, + ) + pack_importer_exporter.export_data() return result def run_command(self, name, /, **kwargs): - # Adjust error handling or provide custom implementation for full control? + # TODO: Provide custom implementation for full control return gs.run_command(name, **kwargs, env=self._env) def parse_command(self, name, /, **kwargs): + # TODO: Provide custom implementation for full control return gs.parse_command(name, **kwargs, env=self._env) + def no_nonsense_run(self, name, /, *, tool_kwargs=None, stdin=None, **kwargs): + args, popen_options = gs.popen_args_command(name, **kwargs) + return self.no_nonsense_run_from_list( + args, tool_kwargs=tool_kwargs, stdin=stdin, **popen_options + ) + # Make this an overload of run. 
- def run_from_list(self, command, tool_kwargs=None, **popen_options): + def no_nonsense_run_from_list( + self, command, tool_kwargs=None, stdin=None, **popen_options + ): # alternatively use dev null as default or provide it as convenient settings if self._capture_output: stdout_pipe = gs.PIPE @@ -255,6 +491,9 @@ def run_from_list(self, command, tool_kwargs=None, **popen_options): if self._stdin: stdin_pipe = gs.PIPE stdin = gs.utils.encode(self._stdin) + elif stdin: + stdin_pipe = gs.PIPE + stdin = gs.utils.encode(stdin) else: stdin_pipe = None stdin = None @@ -288,70 +527,30 @@ def run_from_list(self, command, tool_kwargs=None, **popen_options): def feed_input_to(self, stdin, /): """Get a new object which will feed text input to a tool or tools""" - return Tools(env=self._env, stdin=stdin) + return Tools( + env=self._env, + stdin=stdin, + freeze_region=self._region_is_frozen, + errors=self._errors, + capture_output=self._capture_output, + prefix=self._prefix, + ) def ignore_errors_of(self): """Get a new object which will ignore errors of the called tools""" return Tools(env=self._env, errors="ignore") - def levenshtein_distance(self, text1: str, text2: str) -> int: - if len(text1) < len(text2): - return self.levenshtein_distance(text2, text1) - - if len(text2) == 0: - return len(text1) - - previous_row = list(range(len(text2) + 1)) - for i, char1 in enumerate(text1): - current_row = [i + 1] - for j, char2 in enumerate(text2): - insertions = previous_row[j + 1] + 1 - deletions = current_row[j] + 1 - substitutions = previous_row[j] + (char1 != char2) - current_row.append(min(insertions, deletions, substitutions)) - previous_row = current_row - - return previous_row[-1] - - def suggest_tools(self, tool): - # TODO: cache commands also for dir - all_names = list(gs.get_commands()[0]) - result = [] - max_suggestions = 10 - for name in all_names: - if self.levenshtein_distance(tool, name) < len(tool) / 2: - result.append(name) - if len(result) >= max_suggestions: - break - return result - def __getattr__(self, name): """Parse attribute to GRASS display module. Attribute should be in the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. """ - # Reformat string - tool_name = name.replace("_", ".") - # Assert module exists - if not shutil.which(tool_name): - suggesions = self.suggest_tools(tool_name) - if suggesions: - msg = ( - f"Tool {tool_name} not found. " - f"Did you mean: {', '.join(suggesions)}?" - ) - raise AttributeError(msg) - msg = ( - f"Tool or attribute {name} not found. " - "If you are executing a tool, is the session set up and the tool on path? " - "If you are looking for an attribute, is it in the documentation?" 
+ if not self._name_helper: + self._name_helper = ToolFunctionNameHelper( + run_function=self.run, + env=self.env, + prefix=self._prefix, ) - raise AttributeError(msg) - - def wrapper(**kwargs): - # Run module - return self.run(tool_name, **kwargs) - - return wrapper + return self._name_helper.get_function(name, exception_type=AttributeError) def _test(): diff --git a/python/grass/script/core.py b/python/grass/script/core.py index 8d8065ac0d8..838e80a39a4 100644 --- a/python/grass/script/core.py +++ b/python/grass/script/core.py @@ -307,7 +307,7 @@ def make_command( return args -def handle_errors(returncode, result, args, kwargs): +def handle_errors(returncode, result, args, kwargs, handler=None, stderr=None): """Error handler for :func:`run_command()` and similar functions The functions which are using this function to handle errors, @@ -352,7 +352,8 @@ def get_module_and_code(args, kwargs): code = " ".join(args) return module, code - handler = kwargs.get("errors", "raise") + if handler is None: + handler = kwargs.get("errors", "raise") if handler.lower() == "status": return returncode if returncode == 0: @@ -370,7 +371,9 @@ def get_module_and_code(args, kwargs): sys.exit(returncode) else: module, code = get_module_and_code(args, kwargs) - raise CalledModuleError(module=module, code=code, returncode=returncode) + raise CalledModuleError( + module=module, code=code, returncode=returncode, errors=stderr + ) def popen_args_command( From ce7c53e430e610fce9f3dd45ab388905e3e312f0 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 11 Jun 2025 11:24:32 -0400 Subject: [PATCH 22/49] Remove NumPy --- .../experimental/tests/grass_tools_test.py | 113 ------------------ python/grass/experimental/tools.py | 56 +-------- 2 files changed, 2 insertions(+), 167 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py index 1cba3e9e1c9..ea8ab12e232 100644 --- a/python/grass/experimental/tests/grass_tools_test.py +++ b/python/grass/experimental/tests/grass_tools_test.py @@ -3,7 +3,6 @@ import os import io -import numpy as np import pytest import grass.script as gs @@ -252,118 +251,6 @@ def test_wrong_attribute(xy_dataset_session): tools.execute_big_command() -def test_numpy_one_input(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - tools = Tools(session=xy_dataset_session) - tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope") - assert tools.r_info(map="slope", format="json")["datatype"] == "FCELL" - - -# NumPy syntax for outputs -# While inputs are straightforward, there is several possible ways how to handle -# syntax for outputs. 
-# Output is the type of function for creating NumPy arrays, return value is now the arrays: -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.ndarray, aspect=np.array) -# Output is explicitly requested: -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect", force_numpy_for_output=True) -# Output is explicitly requested at the object level: -# Tools(force_numpy_for_output=True).r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect") -# Output is always array or arrays when at least on input is an array: -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope="slope", aspect="aspect") -# An empty array is passed to signal the desired output: -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls((0, 0))) -# An array to be filled with data is passed, the return value is kept as is: -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=np.nulls((1, 1))) -# NumPy universal function concept can be used explicitly to indicate, -# possibly more easily allowing for nameless args as opposed to keyword arguments, -# but outputs still need to be explicitly requested: -# Returns by value (tuple: (np.array, np.array)): -# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True) -# Modifies its arguments in-place: -# tools.r_slope_aspect.ufunc(np.ones((1, 1)), slope=True, aspect=True, out=(np.array((1, 1)), np.array((1, 1)))) -# Custom signaling classes or objects are passed (assuming empty classes AsNumpy and AsInput): -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=ToNumpy(), aspect=ToNumpy()) -# tools.r_slope_aspect(elevation=np.ones((1, 1)), slope=AsInput, aspect=AsInput) -# NumPy functions usually return a tuple, for multiple outputs. Universal function does -# unless the output is written to out parameter which is also provided as a tuple. We -# have names, so generally, we can return a dictionary: -# {"slope": np.array(...), "aspect": np.array(...) }. - - -def test_numpy_one_input_one_output(xy_dataset_session): - """Check that a NumPy array works as input and for signaling output - - It tests that the np.ndarray class is supported to signal output. - Return type is not strictly defined, so we are not testing for it explicitly - (only by actually using it as an NumPy array). - """ - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=2, cols=3) - slope = tools.r_slope_aspect(elevation=np.ones((2, 3)), slope=np.ndarray) - assert slope.shape == (2, 3) - assert np.all(slope == np.full((2, 3), 0)) - - -def test_numpy_with_name_and_parameter(xy_dataset_session): - """Check that a NumPy array works as input and for signaling output - - It tests that the np.ndarray class is supported to signal output. - Return type is not strictly defined, so we are not testing for it explicitly - (only by actually using it as an NumPy array). - """ - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=2, cols=3) - slope = tools.run("r.slope.aspect", elevation=np.ones((2, 3)), slope=np.ndarray) - assert slope.shape == (2, 3) - assert np.all(slope == np.full((2, 3), 0)) - - -def test_numpy_one_input_multiple_outputs(xy_dataset_session): - """Check that a NumPy array function works for signaling multiple outputs - - Besides multiple outputs it tests that np.array is supported to signal output. 
- """ - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=2, cols=3) - (slope, aspect) = tools.r_slope_aspect( - elevation=np.ones((2, 3)), slope=np.array, aspect=np.array - ) - assert slope.shape == (2, 3) - assert np.all(slope == np.full((2, 3), 0)) - assert aspect.shape == (2, 3) - assert np.all(aspect == np.full((2, 3), 0)) - - -def test_numpy_multiple_inputs_one_output(xy_dataset_session): - """Check that a NumPy array works for multiple inputs""" - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=2, cols=3) - result = tools.r_mapcalc_simple( - expression="A + B", a=np.full((2, 3), 2), b=np.full((2, 3), 5), output=np.array - ) - assert result.shape == (2, 3) - assert np.all(result == np.full((2, 3), 7)) - - -def test_numpy_grass_array_input_output(xy_dataset_session): - """Check that global overwrite is not used when separate env is used - - When grass array output is requested, we explicitly test the return value type. - """ - tools = Tools(session=xy_dataset_session) - rows = 2 - cols = 3 - tools.g_region(rows=rows, cols=cols) - tools.r_mapcalc_simple(expression="5", output="const_5") - const_5 = gs.array.array("const_5") - result = tools.r_mapcalc_simple( - expression="2 * A", a=const_5, output=gs.array.array - ) - assert result.shape == (rows, cols) - assert np.all(result == np.full((rows, cols), 10)) - assert isinstance(result, gs.array.array) - - def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): """Check that global overwrite is not used when separate env is used""" tools = Tools(session=xy_dataset_session) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index b6a4a2f5539..55ea7467a32 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -21,10 +21,8 @@ from pathlib import Path from io import StringIO -import numpy as np import grass.script as gs -import grass.script.array as garray from grass.exceptions import CalledModuleError @@ -99,55 +97,14 @@ def export_data(self): class ObjectParameterHandler: def __init__(self): - self._numpy_inputs = {} - self._numpy_outputs = {} - self._numpy_inputs_ordered = [] self.stdin = None def process_parameters(self, kwargs): for key, value in kwargs.items(): - if isinstance(value, np.ndarray): - kwargs[key] = gs.append_uuid("tmp_serialized_input_array") - self._numpy_inputs[key] = value - self._numpy_inputs_ordered.append(value) - elif value in (np.ndarray, np.array, garray.array): - # We test for class or the function. 
- kwargs[key] = gs.append_uuid("tmp_serialized_output_array") - self._numpy_outputs[key] = value - elif isinstance(value, StringIO): + if isinstance(value, StringIO): kwargs[key] = "-" self.stdin = value.getvalue() - def translate_objects_to_data(self, kwargs, parameters, env): - if "inputs" in parameters: - for param in parameters["inputs"]: - if param["param"] in self._numpy_inputs: - map2d = garray.array(env=env) - map2d[:] = self._numpy_inputs[param["param"]] - map2d.write(kwargs[param["param"]]) - - def input_rows_columns(self): - if not len(self._numpy_inputs_ordered): - return None - return self._numpy_inputs_ordered[0].shape - - def translate_data_to_objects(self, kwargs, parameters, env): - output_arrays = [] - if "outputs" in parameters: - for param in parameters["outputs"]: - if param["param"] not in self._numpy_outputs: - continue - output_array = garray.array(kwargs[param["param"]], env=env) - output_arrays.append(output_array) - if len(output_arrays) == 1: - self.result = output_arrays[0] - return True - if len(output_arrays) > 1: - self.result = tuple(output_arrays) - return True - self.result = None - return False - class ToolFunctionNameHelper: def __init__(self, *, run_function, env, prefix=None): @@ -406,24 +363,15 @@ def run(self, name, /, **kwargs): handler="raise", ) parameters = json.loads(interface_result.stdout) - object_parameter_handler.translate_objects_to_data( - kwargs, parameters, env=self._env - ) # We approximate tool_kwargs as original kwargs. - result = self.run_from_list( + return self.run_from_list( args, tool_kwargs=kwargs, processed_parameters=parameters, stdin=object_parameter_handler.stdin, **popen_options, ) - use_objects = object_parameter_handler.translate_data_to_objects( - kwargs, parameters, env=self._env - ) - if use_objects: - result = object_parameter_handler.result - return result def run_from_list( self, From bd12384f508aeef2e52bb79b1776bf8594718b0e Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 2 Jul 2025 13:29:20 -0400 Subject: [PATCH 23/49] More robust version of getting the parsed CLI from --json --- python/grass/experimental/tools.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 55ea7467a32..9946041e7ff 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -17,7 +17,6 @@ import json import os import shutil -import subprocess from pathlib import Path from io import StringIO @@ -330,12 +329,13 @@ def env(self): """Internally used environment (reference to it, not a copy)""" return self._env - def _process_parameters(self, command, popen_options): - env = popen_options.get("env", self._env) - - return subprocess.run( - [*command, "--json"], text=True, capture_output=True, env=env - ) + def _process_parameters(self, command, **popen_options): + popen_options["stdin"] = None + popen_options["stdout"] = gs.PIPE + # We respect whatever is in the stderr option because that's what the user + # asked for and will expect to get in case of error (we pretend that it was + # the intended run, not our special run before the actual run). + return self.no_nonsense_run_from_list([*command, "--json"], **popen_options) def run(self, name, /, **kwargs): """Run modules from the GRASS display family (modules starting with "d."). 
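The patch above only changes how the --json interface description is retrieved (through no_nonsense_run_from_list instead of a bare subprocess call); the shape of the data stays the same. A small standalone sketch, with an illustrative tool call, of what _process_parameters works with:

    import json
    import subprocess

    # Assumes a GRASS session so that the tool is on PATH and the
    # environment is set up.
    command = ["r.slope.aspect", "elevation=elevation", "slope=slope"]
    completed = subprocess.run(
        [*command, "--json"], text=True, capture_output=True, check=True
    )
    parameters = json.loads(completed.stdout)
    # Only the "inputs"/"outputs" lists are used; each item carries the
    # parameter name ("param") and the given value ("value"), which is how
    # PackImporterExporter recognizes *.grass_raster arguments.
    for item in parameters.get("inputs", []) + parameters.get("outputs", []):
        print(item["param"], "=", item["value"])
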
From 52b29d8c190e26d683f179d8527873e3d3435fa3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Tue, 2 Sep 2025 16:49:08 -0400 Subject: [PATCH 24/49] Make the code functional again and align with the grass.tools code. Clean up tests and doc-strings. --- .../tests/grass_tools_pack_test.py | 42 ++ .../experimental/tests/grass_tools_test.py | 335 -------------- python/grass/experimental/tools.py | 414 +----------------- 3 files changed, 54 insertions(+), 737 deletions(-) create mode 100644 python/grass/experimental/tests/grass_tools_pack_test.py delete mode 100644 python/grass/experimental/tests/grass_tools_test.py diff --git a/python/grass/experimental/tests/grass_tools_pack_test.py b/python/grass/experimental/tests/grass_tools_pack_test.py new file mode 100644 index 00000000000..bc2e90ae14a --- /dev/null +++ b/python/grass/experimental/tests/grass_tools_pack_test.py @@ -0,0 +1,42 @@ +"""Test grass.experimental.Tools class""" + +import os + +from grass.experimental.tools import Tools + + +def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + + +def test_pack_input_output_with_name_and_parameter_call( + xy_dataset_session, rows_raster_file3x3 +): + """Check that global overwrite is not used when separate env is used""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + tools.run( + "r.slope.aspect", elevation=rows_raster_file3x3, slope="file.grass_raster" + ) + assert os.path.exists("file.grass_raster") + + +def test_pack_input_output_with_subprocess_run_like_call( + xy_dataset_session, rows_raster_file3x3 +): + tools = Tools(session=xy_dataset_session) + assert os.path.exists(rows_raster_file3x3) + tools.run_cmd( + [ + "r.slope.aspect", + f"elevation={rows_raster_file3x3}", + "aspect=file.grass_raster", + ] + ) + assert os.path.exists("file.grass_raster") diff --git a/python/grass/experimental/tests/grass_tools_test.py b/python/grass/experimental/tests/grass_tools_test.py deleted file mode 100644 index ea8ab12e232..00000000000 --- a/python/grass/experimental/tests/grass_tools_test.py +++ /dev/null @@ -1,335 +0,0 @@ -"""Test grass.experimental.Tools class""" - -import os -import io - -import pytest - -import grass.script as gs -from grass.experimental.mapset import TemporaryMapsetSession -from grass.experimental.tools import Tools -from grass.exceptions import CalledModuleError - - -def test_key_value_parser_number(xy_dataset_session): - """Check that numbers are parsed as numbers""" - tools = Tools(session=xy_dataset_session) - assert tools.g_region(flags="g").keyval["nsres"] == 1 - - -@pytest.mark.xfail -def test_key_value_parser_multiple_values(xy_dataset_session): - """Check that strings and floats are parsed""" - tools = Tools(session=xy_dataset_session) - name = "surface" - tools.r_surf_gauss(output=name) # needs seed - result = tools.r_info(map=name, flags="g").keyval - assert result["datatype"] == "DCELL" - assert result["nsres"] == 1 - result = tools.r_univar(map=name, flags="g").keyval - assert result["mean"] == pytest.approx(-0.756762744552762) - - -def test_json_parser(xy_dataset_session): - """Check that JSON is parsed""" - tools = Tools(session=xy_dataset_session) - assert 
( - tools.g_search_modules(keyword="random", flags="j").json[0]["name"] - == "r.random" - ) - - -def test_json_with_name_and_parameter_call(xy_dataset_session): - """Check that JSON is parsed with a name-and-parameters style call""" - tools = Tools(session=xy_dataset_session) - assert ( - tools.run("g.search.modules", keyword="random", flags="j")[0]["name"] - == "r.random" - ) - - -def test_json_with_subprocess_run_like_call(xy_dataset_session): - """Check that JSON is parsed with a name-and-parameters style call""" - tools = Tools(session=xy_dataset_session) - assert ( - tools.run_from_list(["g.search.modules", "keyword=random", "-j"])[0]["name"] - == "r.random" - ) - - -def test_json_direct_access(xy_dataset_session): - """Check that JSON is parsed""" - tools = Tools(session=xy_dataset_session) - assert tools.g_search_modules(keyword="random", flags="j")[0]["name"] == "r.random" - - -def test_json_direct_access_bad_key_type(xy_dataset_session): - """Check that JSON is parsed""" - tools = Tools(session=xy_dataset_session) - with pytest.raises(TypeError): - tools.g_search_modules(keyword="random", flags="j")["name"] - - -def test_json_direct_access_bad_key_value(xy_dataset_session): - """Check that JSON is parsed""" - tools = Tools(session=xy_dataset_session) - high_number = 100_000_000 - with pytest.raises(IndexError): - tools.g_search_modules(keyword="random", flags="j")[high_number] - - -def test_json_direct_access_not_json(xy_dataset_session): - """Check that JSON parsing creates an ValueError - - Specifically, this tests the case when format="json" is not set. - """ - tools = Tools(session=xy_dataset_session) - with pytest.raises(ValueError, match=r"format.*json"): - tools.g_search_modules(keyword="random")[0]["name"] - - -def test_stdout_as_text(xy_dataset_session): - """Check that simple text is parsed and has no whitespace""" - tools = Tools(session=xy_dataset_session) - assert tools.g_mapset(flags="p").text == "PERMANENT" - - -def test_stdout_as_space_items(xy_dataset_session): - """Check that whitespace-separated items are parsed""" - tools = Tools(session=xy_dataset_session) - assert tools.g_mapset(flags="l").space_items == ["PERMANENT"] - - -def test_stdout_split_whitespace(xy_dataset_session): - """Check that whitespace-based split function works""" - tools = Tools(session=xy_dataset_session) - assert tools.g_mapset(flags="l").text_split() == ["PERMANENT"] - - -def test_stdout_split_space(xy_dataset_session): - """Check that the split function works with space""" - tools = Tools(session=xy_dataset_session) - # Not a good example usage, but it tests the functionality. 
- assert tools.g_mapset(flags="l").text_split(" ") == ["PERMANENT", ""] - - -def test_stdout_without_capturing(xy_dataset_session): - """Check that text is not present when not capturing it""" - tools = Tools(session=xy_dataset_session, capture_output=False) - assert not tools.g_mapset(flags="p").text - assert tools.g_mapset(flags="p").text is None - - -def test_direct_overwrite(xy_dataset_session): - """Check overwrite as a parameter""" - tools = Tools(session=xy_dataset_session) - tools.r_random_surface(output="surface", seed=42) - tools.r_random_surface(output="surface", seed=42, overwrite=True) - - -def test_object_overwrite(xy_dataset_session): - """Check overwrite as parameter of the tools object""" - tools = Tools(session=xy_dataset_session, overwrite=True) - tools.r_random_surface(output="surface", seed=42) - tools.r_random_surface(output="surface", seed=42) - - -def test_no_overwrite(xy_dataset_session): - """Check that it fails without overwrite""" - tools = Tools(session=xy_dataset_session) - tools.r_random_surface(output="surface", seed=42) - with pytest.raises(CalledModuleError, match="overwrite"): - tools.r_random_surface(output="surface", seed=42) - - -def test_env_overwrite(xy_dataset_session): - """Check that overwrite from env parameter is used""" - # env = xy_dataset_session.env.copy() # ideally - env = os.environ.copy() # for now - env["GRASS_OVERWRITE"] = "1" - tools = Tools(session=xy_dataset_session, env=env) - tools.r_random_surface(output="surface", seed=42) - tools.r_random_surface(output="surface", seed=42) - - -def test_global_overwrite_vs_env(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - # env = xy_dataset_session.env.copy() # ideally - env = os.environ.copy() # for now - os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env - tools = Tools(session=xy_dataset_session, env=env) - tools.r_random_surface(output="surface", seed=42) - with pytest.raises(CalledModuleError, match="overwrite"): - tools.r_random_surface(output="surface", seed=42) - del os.environ["GRASS_OVERWRITE"] # check or ideally remove this - - -def test_global_overwrite_vs_init(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - tools = Tools(session=xy_dataset_session) - os.environ["GRASS_OVERWRITE"] = "1" # change to xy_dataset_session.env - tools.r_random_surface(output="surface", seed=42) - with pytest.raises(CalledModuleError, match="overwrite"): - tools.r_random_surface(output="surface", seed=42) - del os.environ["GRASS_OVERWRITE"] # check or ideally remove this - - -def test_stdin(xy_dataset_session): - """Test that stdin is accepted""" - tools = Tools(session=xy_dataset_session) - tools.feed_input_to("13.45,29.96,200").v_in_ascii( - input="-", output="point", separator="," - ) - - -def test_raises(xy_dataset_session): - """Test that exception is raised for wrong parameter value""" - tools = Tools(session=xy_dataset_session) - wrong_name = "wrong_standard" - with pytest.raises(CalledModuleError, match=wrong_name): - tools.feed_input_to("13.45,29.96,200").v_in_ascii( - input="-", - output="point", - format=wrong_name, - ) - - -def test_run_command(xy_dataset_session): - """Check run_command and its overwrite parameter""" - tools = Tools(session=xy_dataset_session) - tools.run_command("r.random.surface", output="surface", seed=42) - tools.run_command("r.random.surface", output="surface", seed=42, overwrite=True) - - -def test_parse_command_key_value(xy_dataset_session): - tools = 
Tools(session=xy_dataset_session) - assert tools.parse_command("g.region", flags="g")["nsres"] == "1" - - -def test_parse_command_json(xy_dataset_session): - tools = Tools(session=xy_dataset_session) - assert ( - tools.parse_command("g.region", flags="g", format="json")["region"]["ns-res"] - == 1 - ) - - -def test_with_context_managers(tmpdir): - project = tmpdir / "project" - gs.create_project(project) - with gs.setup.init(project) as session: - tools = Tools(session=session) - tools.r_random_surface(output="surface", seed=42) - with TemporaryMapsetSession(env=tools.env) as mapset: - tools.r_random_surface(output="surface", seed=42, env=mapset.env) - with gs.MaskManager(env=mapset.env) as mask: - # TODO: Do actual test - tools.r_univar(map="surface", env=mask.env, format="json")["mean"] - - -def test_misspelling(xy_dataset_session): - tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match=r"r\.slope\.aspect"): - tools.r_sloppy_respect() - - -def test_multiple_suggestions(xy_dataset_session): - tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match=r"v\.db\.univar|db\.univar"): - tools.db_v_uni_var() - - -def test_tool_group_vs_model_name(xy_dataset_session): - tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match=r"r\.sim\.water"): - tools.rSIMWEwater() - - -def test_wrong_attribute(xy_dataset_session): - tools = Tools(session=xy_dataset_session) - with pytest.raises(AttributeError, match="execute_big_command"): - tools.execute_big_command() - - -def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): - """Check that global overwrite is not used when separate env is used""" - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") - - -def test_pack_input_output_with_name_and_parameter_call( - xy_dataset_session, rows_raster_file3x3 -): - """Check that global overwrite is not used when separate env is used""" - tools = Tools(session=xy_dataset_session) - tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) - tools.run( - "r.slope.aspect", elevation=rows_raster_file3x3, slope="file.grass_raster" - ) - assert os.path.exists("file.grass_raster") - - -def test_pack_input_output_with_subprocess_run_like_call( - xy_dataset_session, rows_raster_file3x3 -): - tools = Tools(session=xy_dataset_session) - assert os.path.exists(rows_raster_file3x3) - tools.run_from_list( - [ - "r.slope.aspect", - f"elevation={rows_raster_file3x3}", - "aspect=file.grass_raster", - ] - ) - assert os.path.exists("file.grass_raster") - - -def test_tool_groups_raster(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - raster = Tools(session=xy_dataset_session, prefix="r") - raster.mapcalc(expression="streams = if(row() > 1, 1, null())") - raster.buffer(input="streams", output="buffer", distance=1) - assert raster.info(map="streams", format="json")["datatype"] == "CELL" - - -def test_tool_groups_vector(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - vector = Tools(prefix="v") - vector.edit(map="points", type="point", tool="create", env=xy_dataset_session.env) - # Here, the feed_input_to style does not make sense, but we are not using StringIO - # here to test the feed_input_to functionality and avoid dependence on the 
StringIO - # functionality. - # The ASCII format is for one point with no categories. - vector.feed_input_to("P 1 0\n 10 20").edit( - map="points", - type="point", - tool="add", - input="-", - flags="n", - env=xy_dataset_session.env, - ) - vector.buffer( - input="points", output="buffer", distance=1, env=xy_dataset_session.env - ) - assert ( - vector.info(map="buffer", format="json", env=xy_dataset_session.env)["areas"] - == 1 - ) - - -def test_stdin_as_stringio_object(xy_dataset_session): - """Check that global overwrite is not used when separate env is used""" - tools = Tools(session=xy_dataset_session) - tools.v_edit(map="points", type="point", tool="create") - tools.v_edit( - map="points", - type="point", - tool="add", - input=io.StringIO("P 1 0\n 10 20"), - flags="n", - ) - assert tools.v_info(map="points", format="json")["points"] == 1 diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 9946041e7ff..9e5fd012c2d 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -15,14 +15,11 @@ """API to call GRASS tools (modules) as Python functions""" import json -import os -import shutil from pathlib import Path -from io import StringIO - import grass.script as gs -from grass.exceptions import CalledModuleError +import grass.tools +from grass.tools.support import ParameterConverter class PackImporterExporter: @@ -94,240 +91,8 @@ def export_data(self): self.export_rasters() -class ObjectParameterHandler: - def __init__(self): - self.stdin = None - - def process_parameters(self, kwargs): - for key, value in kwargs.items(): - if isinstance(value, StringIO): - kwargs[key] = "-" - self.stdin = value.getvalue() - - -class ToolFunctionNameHelper: - def __init__(self, *, run_function, env, prefix=None): - self._run_function = run_function - self._env = env - self._prefix = prefix - - # def __getattr__(self, name): - # self.get_function(name, exception_type=AttributeError) - - def get_function(self, name, exception_type): - """Parse attribute to GRASS display module. Attribute should be in - the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. - """ - if self._prefix: - name = f"{self._prefix}.{name}" - # Reformat string - tool_name = name.replace("_", ".") - # Assert module exists - if not shutil.which(tool_name, path=self._env["PATH"]): - suggestions = self.suggest_tools(tool_name) - if suggestions: - msg = ( - f"Tool {tool_name} not found. " - f"Did you mean: {', '.join(suggestions)}?" - ) - raise AttributeError(msg) - msg = ( - f"Tool or attribute {name} not found. " - "If you are executing a tool, is the session set up and the tool on path? " - "If you are looking for an attribute, is it in the documentation?" 
- ) - raise AttributeError(msg) - - def wrapper(**kwargs): - # Run module - return self._run_function(tool_name, **kwargs) - - return wrapper - - @staticmethod - def levenshtein_distance(text1: str, text2: str) -> int: - if len(text1) < len(text2): - return ToolFunctionNameHelper.levenshtein_distance(text2, text1) - - if len(text2) == 0: - return len(text1) - - previous_row = list(range(len(text2) + 1)) - for i, char1 in enumerate(text1): - current_row = [i + 1] - for j, char2 in enumerate(text2): - insertions = previous_row[j + 1] + 1 - deletions = current_row[j] + 1 - substitutions = previous_row[j] + (char1 != char2) - current_row.append(min(insertions, deletions, substitutions)) - previous_row = current_row - - return previous_row[-1] - - @staticmethod - def suggest_tools(tool): - # TODO: cache commands also for dir - all_names = list(gs.get_commands()[0]) - result = [] - max_suggestions = 10 - for name in all_names: - if ToolFunctionNameHelper.levenshtein_distance(tool, name) < len(tool) / 2: - result.append(name) - if len(result) >= max_suggestions: - break - return result - - -class ExecutedTool: - """Result returned after executing a tool""" - - def __init__(self, name, kwargs, stdout, stderr): - self._name = name - self._kwargs = kwargs - self._stdout = stdout - self._stderr = stderr - if self._stdout is not None: - self._decoded_stdout = gs.decode(self._stdout) - else: - self._decoded_stdout = None - self._cached_json = None - - @property - def text(self) -> str: - """Text output as decoded string""" - if self._decoded_stdout is None: - return None - return self._decoded_stdout.strip() - - @property - def json(self): - """Text output read as JSON - - This returns the nested structure of dictionaries and lists or fails when - the output is not JSON. - """ - if self._cached_json is None: - self._cached_json = json.loads(self._stdout) - return self._cached_json - - @property - def keyval(self): - """Text output read as key-value pairs separated by equal signs""" - - def conversion(value): - """Convert text to int or float if possible, otherwise return it as is""" - try: - return int(value) - except ValueError: - pass - try: - return float(value) - except ValueError: - pass - return value - - return gs.parse_key_val(self._stdout, val_type=conversion) - - @property - def comma_items(self): - """Text output read as comma-separated list""" - return self.text_split(",") - - @property - def space_items(self): - """Text output read as whitespace-separated list""" - return self.text_split(None) - - def text_split(self, separator=None): - """Parse text output read as list separated by separators - - Any leading or trailing newlines are removed prior to parsing. - """ - # The use of strip is assuming that the output is one line which - # ends with a newline character which is for display only. - return self._decoded_stdout.strip("\n").split(separator) - - def __getitem__(self, name): - if self._stdout: - # We are testing just std out and letting rest to the parse and the user. - # This makes no assumption about how JSON is produced by the tool. - try: - return self.json[name] - except json.JSONDecodeError as error: - if self._kwargs.get("format") == "json": - raise - msg = ( - f"Output of {self._name} cannot be parsed as JSON. " - 'Did you use format="json"?' 
- ) - raise ValueError(msg) from error - msg = f"No text output for {self._name} to be parsed as JSON" - raise ValueError(msg) - - -class Tools: - """Call GRASS tools as methods - - GRASS tools (modules) can be executed as methods of this class. - """ - - def __init__( - self, - *, - session=None, - env=None, - overwrite=False, - quiet=False, - verbose=False, - superquiet=False, - freeze_region=False, - stdin=None, - errors=None, - capture_output=True, - prefix=None, - ): - if env: - self._env = env.copy() - elif session and hasattr(session, "env"): - self._env = session.env.copy() - else: - self._env = os.environ.copy() - self._region_is_frozen = False - if freeze_region: - self._freeze_region() - if overwrite: - self._overwrite() - # This hopefully sets the numbers directly. An alternative implementation would - # be to pass the parameter every time. - # Does not check for multiple set at the same time, but the most verbose wins - # for safety. - if superquiet: - self._env["GRASS_VERBOSE"] = "0" - if quiet: - self._env["GRASS_VERBOSE"] = "1" - if verbose: - self._env["GRASS_VERBOSE"] = "3" - self._set_stdin(stdin) - self._errors = errors - self._capture_output = capture_output - self._prefix = prefix - self._name_helper = None - - # These could be public, not protected. - def _freeze_region(self): - self._env["GRASS_REGION"] = gs.region_env(env=self._env) - self._region_is_frozen = True - - def _overwrite(self): - self._env["GRASS_OVERWRITE"] = "1" - - def _set_stdin(self, stdin, /): - self._stdin = stdin - - @property - def env(self): - """Internally used environment (reference to it, not a copy)""" - return self._env +class Tools(grass.tools.Tools): + """In addition to Tools, it processes arguments which are raster pack files""" def _process_parameters(self, command, **popen_options): popen_options["stdin"] = None @@ -335,23 +100,15 @@ def _process_parameters(self, command, **popen_options): # We respect whatever is in the stderr option because that's what the user # asked for and will expect to get in case of error (we pretend that it was # the intended run, not our special run before the actual run). - return self.no_nonsense_run_from_list([*command, "--json"], **popen_options) + return self.call_cmd([*command, "--json"], **popen_options) def run(self, name, /, **kwargs): - """Run modules from the GRASS display family (modules starting with "d."). - - This function passes arguments directly to grass.script.run_command() - so the syntax is the same. - - :param str module: name of GRASS module - :param `**kwargs`: named arguments passed to run_command()""" - - object_parameter_handler = ObjectParameterHandler() + object_parameter_handler = ParameterConverter() object_parameter_handler.process_parameters(kwargs) args, popen_options = gs.popen_args_command(name, **kwargs) - interface_result = self._process_parameters(args, popen_options) + interface_result = self._process_parameters(args, **popen_options) if interface_result.returncode != 0: # This is only for the error states. return gs.handle_errors( @@ -365,7 +122,7 @@ def run(self, name, /, **kwargs): parameters = json.loads(interface_result.stdout) # We approximate tool_kwargs as original kwargs. 
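        # These kwargs may already have been rewritten by the parameter handler
        # above (for example, a StringIO value is replaced by "-"), so they are
        # passed along for error reporting rather than for re-execution.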
- return self.run_from_list( + return self.run_cmd( args, tool_kwargs=kwargs, processed_parameters=parameters, @@ -373,7 +130,7 @@ def run(self, name, /, **kwargs): **popen_options, ) - def run_from_list( + def run_cmd( self, command, tool_kwargs=None, @@ -382,7 +139,7 @@ def run_from_list( **popen_options, ): if not processed_parameters: - interface_result = self._process_parameters(command, popen_options) + interface_result = self._process_parameters(command, **popen_options) if interface_result.returncode != 0: # This is only for the error states. return gs.handle_errors( @@ -395,14 +152,14 @@ def run_from_list( ) processed_parameters = json.loads(interface_result.stdout) - pack_importer_exporter = PackImporterExporter(run_function=self.no_nonsense_run) + pack_importer_exporter = PackImporterExporter(run_function=self.call) pack_importer_exporter.modify_and_ingest_argument_list( command, processed_parameters ) pack_importer_exporter.import_data() # We approximate tool_kwargs as original kwargs. - result = self.no_nonsense_run_from_list( + result = self.call_cmd( command, tool_kwargs=tool_kwargs, stdin=stdin, @@ -410,150 +167,3 @@ def run_from_list( ) pack_importer_exporter.export_data() return result - - def run_command(self, name, /, **kwargs): - # TODO: Provide custom implementation for full control - return gs.run_command(name, **kwargs, env=self._env) - - def parse_command(self, name, /, **kwargs): - # TODO: Provide custom implementation for full control - return gs.parse_command(name, **kwargs, env=self._env) - - def no_nonsense_run(self, name, /, *, tool_kwargs=None, stdin=None, **kwargs): - args, popen_options = gs.popen_args_command(name, **kwargs) - return self.no_nonsense_run_from_list( - args, tool_kwargs=tool_kwargs, stdin=stdin, **popen_options - ) - - # Make this an overload of run. - def no_nonsense_run_from_list( - self, command, tool_kwargs=None, stdin=None, **popen_options - ): - # alternatively use dev null as default or provide it as convenient settings - if self._capture_output: - stdout_pipe = gs.PIPE - stderr_pipe = gs.PIPE - else: - stdout_pipe = None - stderr_pipe = None - if self._stdin: - stdin_pipe = gs.PIPE - stdin = gs.utils.encode(self._stdin) - elif stdin: - stdin_pipe = gs.PIPE - stdin = gs.utils.encode(stdin) - else: - stdin_pipe = None - stdin = None - # Allowing to overwrite env, but that's just to have maximum flexibility when - # the session is actually set up, but it may be confusing. - if "env" not in popen_options: - popen_options["env"] = self._env - process = gs.Popen( - command, - stdin=stdin_pipe, - stdout=stdout_pipe, - stderr=stderr_pipe, - **popen_options, - ) - stdout, stderr = process.communicate(input=stdin) - if stderr: - stderr = gs.utils.decode(stderr) - returncode = process.poll() - if returncode and self._errors != "ignore": - raise CalledModuleError( - command[0], - code=" ".join(command), - returncode=returncode, - errors=stderr, - ) - # TODO: solve tool_kwargs is None - # We don't have the keyword arguments to pass to the resulting object. 
- return ExecutedTool( - name=command[0], kwargs=tool_kwargs, stdout=stdout, stderr=stderr - ) - - def feed_input_to(self, stdin, /): - """Get a new object which will feed text input to a tool or tools""" - return Tools( - env=self._env, - stdin=stdin, - freeze_region=self._region_is_frozen, - errors=self._errors, - capture_output=self._capture_output, - prefix=self._prefix, - ) - - def ignore_errors_of(self): - """Get a new object which will ignore errors of the called tools""" - return Tools(env=self._env, errors="ignore") - - def __getattr__(self, name): - """Parse attribute to GRASS display module. Attribute should be in - the form 'd_module_name'. For example, 'd.rast' is called with 'd_rast'. - """ - if not self._name_helper: - self._name_helper = ToolFunctionNameHelper( - run_function=self.run, - env=self.env, - prefix=self._prefix, - ) - return self._name_helper.get_function(name, exception_type=AttributeError) - - -def _test(): - """Ad-hoc tests and examples of the Tools class""" - session = gs.setup.init("~/grassdata/nc_spm_08_grass7/user1") - - tools = Tools() - tools.g_region(raster="elevation") - tools.r_slope_aspect(elevation="elevation", slope="slope", overwrite=True) - print(tools.r_univar(map="slope", flags="g").keyval) - - print(tools.v_info(map="bridges", flags="c").text) - print( - tools.v_db_univar(map="bridges", column="YEAR_BUILT", format="json").json[ - "statistics" - ]["mean"] - ) - - print(tools.g_mapset(flags="p").text) - print(tools.g_mapsets(flags="l").text_split()) - print(tools.g_mapsets(flags="l").space_items) - print(tools.g_gisenv(get="GISDBASE,LOCATION_NAME,MAPSET", sep="comma").comma_items) - - print(tools.g_region(flags="g").keyval) - - env = os.environ.copy() - env["GRASS_REGION"] = gs.region_env(res=250) - coarse_computation = Tools(env=env) - current_region = coarse_computation.g_region(flags="g").keyval - print(current_region["ewres"], current_region["nsres"]) - coarse_computation.r_slope_aspect( - elevation="elevation", slope="slope", flags="a", overwrite=True - ) - print(coarse_computation.r_info(map="slope", flags="g").keyval) - - independent_computation = Tools(session=session, freeze_region=True) - tools.g_region(res=500) # we would do this for another computation elsewhere - print(independent_computation.g_region(flags="g").keyval["ewres"]) - - tools_pro = Tools( - session=session, freeze_region=True, overwrite=True, superquiet=True - ) - tools_pro.r_slope_aspect(elevation="elevation", slope="slope") - tools_pro.feed_input_to("13.45,29.96,200").v_in_ascii( - input="-", output="point", separator="," - ) - print(tools_pro.v_info(map="point", flags="t").keyval["points"]) - - print(tools_pro.ignore_errors_of().g_version(flags="rge").keyval) - - elevation = "elevation" - exaggerated = "exaggerated" - tools_pro.r_mapcalc(expression=f"{exaggerated} = 5 * {elevation}") - tools_pro.feed_input_to(f"{exaggerated} = 5 * {elevation}").r_mapcalc(file="-") - - -if __name__ == "__main__": - _test() From 0b3f412d921ded2647fe92d55f9d0ec20f765e1e Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 3 Sep 2025 12:14:36 -0400 Subject: [PATCH 25/49] Remove rasters which are copies of pack files --- .../tests/grass_tools_pack_test.py | 5 +++ python/grass/experimental/tools.py | 35 ++++++++++++------- 2 files changed, 28 insertions(+), 12 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_pack_test.py b/python/grass/experimental/tests/grass_tools_pack_test.py index bc2e90ae14a..8aebf3d2cc0 100644 --- 
a/python/grass/experimental/tests/grass_tools_pack_test.py +++ b/python/grass/experimental/tests/grass_tools_pack_test.py @@ -12,6 +12,11 @@ def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): assert os.path.exists(rows_raster_file3x3) tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") assert os.path.exists("file.grass_raster") + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="json") def test_pack_input_output_with_name_and_parameter_call( diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 9e5fd012c2d..2dc0c8ea10a 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -26,6 +26,8 @@ class PackImporterExporter: def __init__(self, *, run_function, env=None): self._run_function = run_function self._env = env + self.input_rasters: list[tuple] = [] + self.output_rasters: list[tuple] = [] @classmethod def is_raster_pack_file(cls, value): @@ -33,36 +35,36 @@ def is_raster_pack_file(cls, value): def modify_and_ingest_argument_list(self, args, parameters): # Uses parameters, but modifies the command, generates list of rasters and vectors. - self.input_rasters = [] if "inputs" in parameters: for item in parameters["inputs"]: if self.is_raster_pack_file(item["value"]): - self.input_rasters.append(Path(item["value"])) + inproject_name = Path(item["value"]).stem + self.input_rasters.append((Path(item["value"]), inproject_name)) # No need to change that for the original kwargs. # kwargs[item["param"]] = Path(item["value"]).stem # Actual parameters to execute are now a list. for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], Path(item["value"]).stem) + arg = arg.replace(item["value"], inproject_name) args[i] = arg - self.output_rasters = [] if "outputs" in parameters: for item in parameters["outputs"]: if self.is_raster_pack_file(item["value"]): - self.output_rasters.append(Path(item["value"])) + inproject_name = Path(item["value"]).stem + self.output_rasters.append((Path(item["value"]), inproject_name)) # kwargs[item["param"]] = Path(item["value"]).stem for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], Path(item["value"]).stem) + arg = arg.replace(item["value"], inproject_name) args[i] = arg def import_rasters(self): - for raster_file in self.input_rasters: + for raster_file, inproject_name in self.input_rasters: # Currently we override the projection check. self._run_function( "r.unpack", input=raster_file, - output=raster_file.stem, + output=inproject_name, overwrite=True, superquiet=True, # flags="o", @@ -71,14 +73,14 @@ def import_rasters(self): def export_rasters(self): # Pack the output raster - for raster in self.output_rasters: + for raster_file, inproject_name in self.output_rasters: # Overwriting a file is a warning, so to avoid it, we delete the file first. 
- Path(raster).unlink(missing_ok=True) + Path(raster_file).unlink(missing_ok=True) self._run_function( "r.pack", - input=raster.stem, - output=raster, + input=inproject_name, + output=raster_file, flags="c", overwrite=True, superquiet=True, @@ -90,6 +92,14 @@ def import_data(self): def export_data(self): self.export_rasters() + def cleanup(self): + remove = [name for (unused, name) in self.input_rasters] + remove.extend([name for (unused, name) in self.output_rasters]) + if remove: + self._run_function( + "g.remove", type="raster", name=remove, superquiet=True, flags="f" + ) + class Tools(grass.tools.Tools): """In addition to Tools, it processes arguments which are raster pack files""" @@ -166,4 +176,5 @@ def run_cmd( **popen_options, ) pack_importer_exporter.export_data() + pack_importer_exporter.cleanup() return result From bd5c6f6c48eea73cbad80f8758490262139d5c16 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 3 Sep 2025 15:07:13 -0400 Subject: [PATCH 26/49] Clean up the tmp files, further sync with grass.tools --- .../tests/grass_tools_pack_test.py | 108 +++++++++++++++- python/grass/experimental/tools.py | 117 ++++++++++++------ 2 files changed, 180 insertions(+), 45 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_pack_test.py b/python/grass/experimental/tests/grass_tools_pack_test.py index 8aebf3d2cc0..c4a5f886742 100644 --- a/python/grass/experimental/tests/grass_tools_pack_test.py +++ b/python/grass/experimental/tests/grass_tools_pack_test.py @@ -5,8 +5,8 @@ from grass.experimental.tools import Tools -def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): - """Check that global overwrite is not used when separate env is used""" +def test_pack_input_output_tool_name_function(xy_dataset_session, rows_raster_file3x3): + """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) assert os.path.exists(rows_raster_file3x3) @@ -16,13 +16,13 @@ def test_pack_input_output(xy_dataset_session, rows_raster_file3x3): assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="json") + assert not tools.g_list(type="raster", format="shell").text def test_pack_input_output_with_name_and_parameter_call( xy_dataset_session, rows_raster_file3x3 ): - """Check that global overwrite is not used when separate env is used""" + """Check input and output pack files work with tool name as string""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) assert os.path.exists(rows_raster_file3x3) @@ -30,11 +30,17 @@ def test_pack_input_output_with_name_and_parameter_call( "r.slope.aspect", elevation=rows_raster_file3x3, slope="file.grass_raster" ) assert os.path.exists("file.grass_raster") + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text def test_pack_input_output_with_subprocess_run_like_call( xy_dataset_session, rows_raster_file3x3 ): + """Check input and output pack files work with command as list""" tools = Tools(session=xy_dataset_session) assert os.path.exists(rows_raster_file3x3) tools.run_cmd( @@ -45,3 +51,97 @@ def test_pack_input_output_with_subprocess_run_like_call( ] ) assert os.path.exists("file.grass_raster") + assert not tools.g_findfile(element="raster", 
file="file", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text + + +def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are deleted after function call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text + + +def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are deleted at the end of context""" + with Tools(session=xy_dataset_session) as tools: + tools.g_region(rows=3, cols=3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + assert tools.g_findfile(element="raster", file="file", format="json")["name"] + tools.r_mapcalc_simple( + expression="100 * A", a="file", output="file2.grass_raster" + ) + assert os.path.exists("file2.grass_raster") + assert tools.g_findfile(element="raster", file="file2", format="json")["name"] + # The pack files should still exist. + assert os.path.exists("file.grass_raster") + assert os.path.exists("file2.grass_raster") + # The in-project rasters should not exist. + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text + + +def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are deleted only with explicit cleanup call""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + # Files should still be available. + assert tools.g_findfile(element="raster", file="file", format="json")["name"] + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + # But an explicit cleanup should delete the files. 
+ tools.cleanup() + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text + + +def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are kept even with context""" + with Tools(session=xy_dataset_session, keep_data=True) as tools: + tools.g_region(rows=3, cols=3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") + assert os.path.exists("file.grass_raster") + assert tools.g_findfile(element="raster", file="file", format="json")["name"] + tools.r_mapcalc_simple( + expression="100 * A", a="file", output="file2.grass_raster" + ) + assert os.path.exists("file2.grass_raster") + assert tools.g_findfile(element="raster", file="file2", format="json")["name"] + # The pack files should still exist. + assert os.path.exists("file.grass_raster") + assert os.path.exists("file2.grass_raster") + # The in-project rasters should also exist. + assert tools.g_findfile(element="raster", file="file", format="json")["name"] + assert tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + # But an explicit cleanup should delete the files. + tools.cleanup() + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="shell").text diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index 2dc0c8ea10a..d4bd727d8aa 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - ############################################################################## # AUTHOR(S): Vaclav Petras # @@ -12,7 +10,9 @@ # for details. ############################################################################## -"""API to call GRASS tools (modules) as Python functions""" +"""The module provides an API to use GRASS tools (modules) as Python functions""" + +from __future__ import annotations import json from pathlib import Path @@ -34,6 +34,7 @@ def is_raster_pack_file(cls, value): return value.endswith((".grass_raster", ".pack", ".rpack", ".grr")) def modify_and_ingest_argument_list(self, args, parameters): + # TODO: Deal with r.pack and r.unpack calls. # Uses parameters, but modifies the command, generates list of rasters and vectors. if "inputs" in parameters: for item in parameters["inputs"]: @@ -104,6 +105,12 @@ def cleanup(self): class Tools(grass.tools.Tools): """In addition to Tools, it processes arguments which are raster pack files""" + def __init__(self, keep_data=None, **kwargs): + super().__init__(**kwargs) + self._delete_on_context_exit = False + self._keep_data = keep_data + self._cleanups = [] + def _process_parameters(self, command, **popen_options): popen_options["stdin"] = None popen_options["stdout"] = gs.PIPE @@ -112,55 +119,63 @@ def _process_parameters(self, command, **popen_options): # the intended run, not our special run before the actual run). 
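        # Appending --json makes the tool print its parsed parameters as JSON
        # rather than run; run_cmd() reads the "inputs" and "outputs" entries
        # (each item with "param" and "value") from that JSON to detect
        # raster pack file arguments.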
return self.call_cmd([*command, "--json"], **popen_options) - def run(self, name, /, **kwargs): - object_parameter_handler = ParameterConverter() - object_parameter_handler.process_parameters(kwargs) + def run(self, tool_name_: str, /, **kwargs): + """Run a tool by specifying its name as a string and parameters. - args, popen_options = gs.popen_args_command(name, **kwargs) + The parameters tool are tool name as a string and parameters as keyword + arguments. The keyword arguments may include an argument *flags* which is a + string of one-character tool flags. - interface_result = self._process_parameters(args, **popen_options) - if interface_result.returncode != 0: - # This is only for the error states. - return gs.handle_errors( - interface_result.returncode, - result=None, - args=[name], - kwargs=kwargs, - stderr=interface_result.stderr, - handler="raise", - ) - parameters = json.loads(interface_result.stdout) + The function may perform additional processing on the parameters. - # We approximate tool_kwargs as original kwargs. + :param tool_name_: name of a GRASS tool + :param kwargs: tool parameters + """ + # Object parameters are handled first before the conversion of the call to a + # list of strings happens. + object_parameter_handler = ParameterConverter() + object_parameter_handler.process_parameters(kwargs) + + # Get a fixed env parameter at at the beginning of each execution, + # but repeat it every time in case the referenced environment is modified. + args, popen_options = gs.popen_args_command(tool_name_, **kwargs) + # We approximate original kwargs with the possibly-modified kwargs. return self.run_cmd( args, tool_kwargs=kwargs, - processed_parameters=parameters, - stdin=object_parameter_handler.stdin, + input=object_parameter_handler.stdin, **popen_options, ) def run_cmd( self, - command, - tool_kwargs=None, - stdin=None, - processed_parameters=None, + command: list[str], + *, + input: str | bytes | None = None, + tool_kwargs: dict | None = None, **popen_options, ): - if not processed_parameters: - interface_result = self._process_parameters(command, **popen_options) - if interface_result.returncode != 0: - # This is only for the error states. - return gs.handle_errors( - interface_result.returncode, - result=None, - args=[command], - kwargs=tool_kwargs, - stderr=interface_result.stderr, - handler="raise", - ) - processed_parameters = json.loads(interface_result.stdout) + """Run a tool by passing its name and parameters a list of strings. + + The function may perform additional processing on the parameters. + + :param command: list of strings to execute as the command + :param input: text input for the standard input of the tool + :param tool_kwargs: named tool arguments used for error reporting (experimental) + :param **popen_options: additional options for :py:func:`subprocess.Popen` + """ + interface_result = self._process_parameters(command, **popen_options) + if interface_result.returncode != 0: + # This is only for the error states. 
+ return gs.handle_errors( + interface_result.returncode, + result=None, + args=[command], + kwargs=tool_kwargs, + stderr=interface_result.stderr, + handler="raise", + ) + processed_parameters = json.loads(interface_result.stdout) pack_importer_exporter = PackImporterExporter(run_function=self.call) pack_importer_exporter.modify_and_ingest_argument_list( @@ -172,9 +187,29 @@ def run_cmd( result = self.call_cmd( command, tool_kwargs=tool_kwargs, - stdin=stdin, + input=input, **popen_options, ) pack_importer_exporter.export_data() - pack_importer_exporter.cleanup() + if self._delete_on_context_exit or self._keep_data: + self._cleanups.append(pack_importer_exporter.cleanup) + else: + pack_importer_exporter.cleanup() return result + + def __enter__(self): + """Enter the context manager context. + + :returns: reference to the object (self) + """ + self._delete_on_context_exit = True + return self + + def __exit__(self, exc_type, exc_value, traceback): + """Exit the context manager context.""" + if not self._keep_data: + self.cleanup() + + def cleanup(self): + for cleanup in self._cleanups: + cleanup() From faf96a1897dffd8ac2fff8bc2eb3af1ca0b32b2c Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 3 Sep 2025 15:15:23 -0400 Subject: [PATCH 27/49] Use g.list with JSON in tests --- .../experimental/tests/grass_tools_pack_test.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/python/grass/experimental/tests/grass_tools_pack_test.py b/python/grass/experimental/tests/grass_tools_pack_test.py index c4a5f886742..6c9d6fd925d 100644 --- a/python/grass/experimental/tests/grass_tools_pack_test.py +++ b/python/grass/experimental/tests/grass_tools_pack_test.py @@ -16,7 +16,7 @@ def test_pack_input_output_tool_name_function(xy_dataset_session, rows_raster_fi assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def test_pack_input_output_with_name_and_parameter_call( @@ -34,7 +34,7 @@ def test_pack_input_output_with_name_and_parameter_call( assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def test_pack_input_output_with_subprocess_run_like_call( @@ -55,7 +55,7 @@ def test_pack_input_output_with_subprocess_run_like_call( assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): @@ -68,7 +68,7 @@ def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3): @@ -92,7 +92,7 @@ def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3): assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def 
test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3): @@ -113,7 +113,7 @@ def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3): assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): @@ -144,4 +144,4 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] - assert not tools.g_list(type="raster", format="shell").text + assert not tools.g_list(type="raster", format="json") From fc489446d48f05beb964434f29a5e5845e5d2bb3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 12:52:36 -0400 Subject: [PATCH 28/49] Start the import-export machinery only when there are files in the parameters --- python/grass/experimental/tools.py | 76 +++++++++++-------- python/grass/tools/support.py | 25 ++++++ .../tests/grass_tools_session_tools_test.py | 2 +- 3 files changed, 72 insertions(+), 31 deletions(-) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py index d4bd727d8aa..93ff2b4dcfc 100644 --- a/python/grass/experimental/tools.py +++ b/python/grass/experimental/tools.py @@ -23,16 +23,25 @@ class PackImporterExporter: + raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") + + @classmethod + def is_recognized_file(cls, value): + return cls.is_raster_pack_file(value) + + @classmethod + def is_raster_pack_file(cls, value): + if isinstance(value, (str, bytes)): + return value.endswith(cls.raster_pack_suffixes) + if isinstance(value, Path): + return value.suffix in cls.raster_pack_suffixes + return False + def __init__(self, *, run_function, env=None): self._run_function = run_function - self._env = env self.input_rasters: list[tuple] = [] self.output_rasters: list[tuple] = [] - @classmethod - def is_raster_pack_file(cls, value): - return value.endswith((".grass_raster", ".pack", ".rpack", ".grr")) - def modify_and_ingest_argument_list(self, args, parameters): # TODO: Deal with r.pack and r.unpack calls. # Uses parameters, but modifies the command, generates list of rasters and vectors. @@ -61,15 +70,12 @@ def modify_and_ingest_argument_list(self, args, parameters): def import_rasters(self): for raster_file, inproject_name in self.input_rasters: - # Currently we override the projection check. self._run_function( "r.unpack", input=raster_file, output=inproject_name, overwrite=True, superquiet=True, - # flags="o", - env=self._env, ) def export_rasters(self): @@ -144,6 +150,7 @@ def run(self, tool_name_: str, /, **kwargs): args, tool_kwargs=kwargs, input=object_parameter_handler.stdin, + import_export=object_parameter_handler.import_export, **popen_options, ) @@ -152,6 +159,7 @@ def run_cmd( command: list[str], *, input: str | bytes | None = None, + import_export: bool | None = None, tool_kwargs: dict | None = None, **popen_options, ): @@ -164,24 +172,31 @@ def run_cmd( :param tool_kwargs: named tool arguments used for error reporting (experimental) :param **popen_options: additional options for :py:func:`subprocess.Popen` """ - interface_result = self._process_parameters(command, **popen_options) - if interface_result.returncode != 0: - # This is only for the error states. 
- return gs.handle_errors( - interface_result.returncode, - result=None, - args=[command], - kwargs=tool_kwargs, - stderr=interface_result.stderr, - handler="raise", + if import_export is None: + import_export = False + for item in command: + if PackImporterExporter.is_recognized_file(item): + import_export = True + break + if import_export: + interface_result = self._process_parameters(command, **popen_options) + if interface_result.returncode != 0: + # This is only for the error states. + return gs.handle_errors( + interface_result.returncode, + result=None, + args=[command], + kwargs=tool_kwargs, + stderr=interface_result.stderr, + handler="raise", + ) + processed_parameters = json.loads(interface_result.stdout) + + pack_importer_exporter = PackImporterExporter(run_function=self.call) + pack_importer_exporter.modify_and_ingest_argument_list( + command, processed_parameters ) - processed_parameters = json.loads(interface_result.stdout) - - pack_importer_exporter = PackImporterExporter(run_function=self.call) - pack_importer_exporter.modify_and_ingest_argument_list( - command, processed_parameters - ) - pack_importer_exporter.import_data() + pack_importer_exporter.import_data() # We approximate tool_kwargs as original kwargs. result = self.call_cmd( @@ -190,11 +205,12 @@ def run_cmd( input=input, **popen_options, ) - pack_importer_exporter.export_data() - if self._delete_on_context_exit or self._keep_data: - self._cleanups.append(pack_importer_exporter.cleanup) - else: - pack_importer_exporter.cleanup() + if import_export: + pack_importer_exporter.export_data() + if self._delete_on_context_exit or self._keep_data: + self._cleanups.append(pack_importer_exporter.cleanup) + else: + pack_importer_exporter.cleanup() return result def __enter__(self): diff --git a/python/grass/tools/support.py b/python/grass/tools/support.py index 3ca33b2ed64..d8265c8c8cd 100644 --- a/python/grass/tools/support.py +++ b/python/grass/tools/support.py @@ -24,6 +24,7 @@ import json import shutil from io import StringIO +from pathlib import Path from collections import namedtuple try: @@ -41,6 +42,23 @@ ga = None +# Partial copy to avoid import issues for now +class PackImporterExporter: + raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") + + @classmethod + def is_recognized_file(cls, value): + return cls.is_raster_pack_file(value) + + @classmethod + def is_raster_pack_file(cls, value): + if isinstance(value, (str, bytes)): + return value.endswith(cls.raster_pack_suffixes) + if isinstance(value, Path): + return value.suffix in cls.raster_pack_suffixes + return False + + class ParameterConverter: """Converts parameter values to strings and facilitates flow of the data.""" @@ -51,6 +69,7 @@ def __init__(self): self.stdin = None self.result = None self.temporary_rasters = [] + self.import_export = None def process_parameters(self, kwargs): """Converts high level parameter values to strings. 
@@ -81,6 +100,12 @@ def process_parameters(self, kwargs): elif isinstance(value, StringIO): kwargs[key] = "-" self.stdin = value.getvalue() + elif self.import_export is None and PackImporterExporter.is_recognized_file( + value + ): + self.import_export = True + if self.import_export is None: + self.import_export = False def translate_objects_to_data(self, kwargs, env): """Convert NumPy arrays to GRASS data""" diff --git a/python/grass/tools/tests/grass_tools_session_tools_test.py b/python/grass/tools/tests/grass_tools_session_tools_test.py index bd4151cb178..5e428009cc9 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_test.py @@ -8,7 +8,7 @@ import grass.script as gs from grass.exceptions import CalledModuleError from grass.experimental.mapset import TemporaryMapsetSession -from grass.tools import Tools +from grass.experimental.tools import Tools has_pandas = False From 998255f5ba8d8f9e14e7708bcf62df68d0dc4c39 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 13:35:36 -0400 Subject: [PATCH 29/49] Integrate pack code into grass.tools --- python/grass/experimental/Makefile | 3 +- python/grass/experimental/tools.py | 231 ------------------ python/grass/tools/Makefile | 1 + python/grass/tools/importexport.py | 87 +++++++ python/grass/tools/session_tools.py | 60 ++++- python/grass/tools/support.py | 18 +- python/grass/tools/tests/conftest.py | 38 +++ .../grass_tools_session_tools_pack_test.py} | 4 +- .../tests/grass_tools_session_tools_test.py | 2 +- 9 files changed, 190 insertions(+), 254 deletions(-) delete mode 100644 python/grass/experimental/tools.py create mode 100644 python/grass/tools/importexport.py rename python/grass/{experimental/tests/grass_tools_pack_test.py => tools/tests/grass_tools_session_tools_pack_test.py} (98%) diff --git a/python/grass/experimental/Makefile b/python/grass/experimental/Makefile index e3c0c6d3c84..e28f932a0a8 100644 --- a/python/grass/experimental/Makefile +++ b/python/grass/experimental/Makefile @@ -7,8 +7,7 @@ DSTDIR = $(ETC)/python/grass/experimental MODULES = \ create \ - mapset \ - tools + mapset PYFILES := $(patsubst %,$(DSTDIR)/%.py,$(MODULES) __init__) PYCFILES := $(patsubst %,$(DSTDIR)/%.pyc,$(MODULES) __init__) diff --git a/python/grass/experimental/tools.py b/python/grass/experimental/tools.py deleted file mode 100644 index 93ff2b4dcfc..00000000000 --- a/python/grass/experimental/tools.py +++ /dev/null @@ -1,231 +0,0 @@ -############################################################################## -# AUTHOR(S): Vaclav Petras -# -# PURPOSE: API to call GRASS tools (modules) as Python functions -# -# COPYRIGHT: (C) 2023-2025 Vaclav Petras and the GRASS Development Team -# -# This program is free software under the GNU General Public -# License (>=v2). Read the file COPYING that comes with GRASS -# for details. 
-############################################################################## - -"""The module provides an API to use GRASS tools (modules) as Python functions""" - -from __future__ import annotations - -import json -from pathlib import Path - -import grass.script as gs -import grass.tools -from grass.tools.support import ParameterConverter - - -class PackImporterExporter: - raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") - - @classmethod - def is_recognized_file(cls, value): - return cls.is_raster_pack_file(value) - - @classmethod - def is_raster_pack_file(cls, value): - if isinstance(value, (str, bytes)): - return value.endswith(cls.raster_pack_suffixes) - if isinstance(value, Path): - return value.suffix in cls.raster_pack_suffixes - return False - - def __init__(self, *, run_function, env=None): - self._run_function = run_function - self.input_rasters: list[tuple] = [] - self.output_rasters: list[tuple] = [] - - def modify_and_ingest_argument_list(self, args, parameters): - # TODO: Deal with r.pack and r.unpack calls. - # Uses parameters, but modifies the command, generates list of rasters and vectors. - if "inputs" in parameters: - for item in parameters["inputs"]: - if self.is_raster_pack_file(item["value"]): - inproject_name = Path(item["value"]).stem - self.input_rasters.append((Path(item["value"]), inproject_name)) - # No need to change that for the original kwargs. - # kwargs[item["param"]] = Path(item["value"]).stem - # Actual parameters to execute are now a list. - for i, arg in enumerate(args): - if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], inproject_name) - args[i] = arg - if "outputs" in parameters: - for item in parameters["outputs"]: - if self.is_raster_pack_file(item["value"]): - inproject_name = Path(item["value"]).stem - self.output_rasters.append((Path(item["value"]), inproject_name)) - # kwargs[item["param"]] = Path(item["value"]).stem - for i, arg in enumerate(args): - if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], inproject_name) - args[i] = arg - - def import_rasters(self): - for raster_file, inproject_name in self.input_rasters: - self._run_function( - "r.unpack", - input=raster_file, - output=inproject_name, - overwrite=True, - superquiet=True, - ) - - def export_rasters(self): - # Pack the output raster - for raster_file, inproject_name in self.output_rasters: - # Overwriting a file is a warning, so to avoid it, we delete the file first. 
- Path(raster_file).unlink(missing_ok=True) - - self._run_function( - "r.pack", - input=inproject_name, - output=raster_file, - flags="c", - overwrite=True, - superquiet=True, - ) - - def import_data(self): - self.import_rasters() - - def export_data(self): - self.export_rasters() - - def cleanup(self): - remove = [name for (unused, name) in self.input_rasters] - remove.extend([name for (unused, name) in self.output_rasters]) - if remove: - self._run_function( - "g.remove", type="raster", name=remove, superquiet=True, flags="f" - ) - - -class Tools(grass.tools.Tools): - """In addition to Tools, it processes arguments which are raster pack files""" - - def __init__(self, keep_data=None, **kwargs): - super().__init__(**kwargs) - self._delete_on_context_exit = False - self._keep_data = keep_data - self._cleanups = [] - - def _process_parameters(self, command, **popen_options): - popen_options["stdin"] = None - popen_options["stdout"] = gs.PIPE - # We respect whatever is in the stderr option because that's what the user - # asked for and will expect to get in case of error (we pretend that it was - # the intended run, not our special run before the actual run). - return self.call_cmd([*command, "--json"], **popen_options) - - def run(self, tool_name_: str, /, **kwargs): - """Run a tool by specifying its name as a string and parameters. - - The parameters tool are tool name as a string and parameters as keyword - arguments. The keyword arguments may include an argument *flags* which is a - string of one-character tool flags. - - The function may perform additional processing on the parameters. - - :param tool_name_: name of a GRASS tool - :param kwargs: tool parameters - """ - # Object parameters are handled first before the conversion of the call to a - # list of strings happens. - object_parameter_handler = ParameterConverter() - object_parameter_handler.process_parameters(kwargs) - - # Get a fixed env parameter at at the beginning of each execution, - # but repeat it every time in case the referenced environment is modified. - args, popen_options = gs.popen_args_command(tool_name_, **kwargs) - # We approximate original kwargs with the possibly-modified kwargs. - return self.run_cmd( - args, - tool_kwargs=kwargs, - input=object_parameter_handler.stdin, - import_export=object_parameter_handler.import_export, - **popen_options, - ) - - def run_cmd( - self, - command: list[str], - *, - input: str | bytes | None = None, - import_export: bool | None = None, - tool_kwargs: dict | None = None, - **popen_options, - ): - """Run a tool by passing its name and parameters a list of strings. - - The function may perform additional processing on the parameters. - - :param command: list of strings to execute as the command - :param input: text input for the standard input of the tool - :param tool_kwargs: named tool arguments used for error reporting (experimental) - :param **popen_options: additional options for :py:func:`subprocess.Popen` - """ - if import_export is None: - import_export = False - for item in command: - if PackImporterExporter.is_recognized_file(item): - import_export = True - break - if import_export: - interface_result = self._process_parameters(command, **popen_options) - if interface_result.returncode != 0: - # This is only for the error states. 
- return gs.handle_errors( - interface_result.returncode, - result=None, - args=[command], - kwargs=tool_kwargs, - stderr=interface_result.stderr, - handler="raise", - ) - processed_parameters = json.loads(interface_result.stdout) - - pack_importer_exporter = PackImporterExporter(run_function=self.call) - pack_importer_exporter.modify_and_ingest_argument_list( - command, processed_parameters - ) - pack_importer_exporter.import_data() - - # We approximate tool_kwargs as original kwargs. - result = self.call_cmd( - command, - tool_kwargs=tool_kwargs, - input=input, - **popen_options, - ) - if import_export: - pack_importer_exporter.export_data() - if self._delete_on_context_exit or self._keep_data: - self._cleanups.append(pack_importer_exporter.cleanup) - else: - pack_importer_exporter.cleanup() - return result - - def __enter__(self): - """Enter the context manager context. - - :returns: reference to the object (self) - """ - self._delete_on_context_exit = True - return self - - def __exit__(self, exc_type, exc_value, traceback): - """Exit the context manager context.""" - if not self._keep_data: - self.cleanup() - - def cleanup(self): - for cleanup in self._cleanups: - cleanup() diff --git a/python/grass/tools/Makefile b/python/grass/tools/Makefile index 8820c884347..61a10a62472 100644 --- a/python/grass/tools/Makefile +++ b/python/grass/tools/Makefile @@ -6,6 +6,7 @@ include $(MODULE_TOPDIR)/include/Make/Python.make DSTDIR = $(ETC)/python/grass/tools MODULES = \ + importexport \ session_tools \ support diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py new file mode 100644 index 00000000000..971d5563115 --- /dev/null +++ b/python/grass/tools/importexport.py @@ -0,0 +1,87 @@ +from pathlib import Path + + +class PackImporterExporter: + raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") + + @classmethod + def is_recognized_file(cls, value): + return cls.is_raster_pack_file(value) + + @classmethod + def is_raster_pack_file(cls, value): + if isinstance(value, (str, bytes)): + return value.endswith(cls.raster_pack_suffixes) + if isinstance(value, Path): + return value.suffix in cls.raster_pack_suffixes + return False + + def __init__(self, *, run_function, env=None): + self._run_function = run_function + self.input_rasters: list[tuple] = [] + self.output_rasters: list[tuple] = [] + + def modify_and_ingest_argument_list(self, args, parameters): + # TODO: Deal with r.pack and r.unpack calls. + # Uses parameters, but modifies the command, generates list of rasters and vectors. + if "inputs" in parameters: + for item in parameters["inputs"]: + if self.is_raster_pack_file(item["value"]): + inproject_name = Path(item["value"]).stem + self.input_rasters.append((Path(item["value"]), inproject_name)) + # No need to change that for the original kwargs. + # kwargs[item["param"]] = Path(item["value"]).stem + # Actual parameters to execute are now a list. 
+ for i, arg in enumerate(args): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], inproject_name) + args[i] = arg + if "outputs" in parameters: + for item in parameters["outputs"]: + if self.is_raster_pack_file(item["value"]): + inproject_name = Path(item["value"]).stem + self.output_rasters.append((Path(item["value"]), inproject_name)) + # kwargs[item["param"]] = Path(item["value"]).stem + for i, arg in enumerate(args): + if arg.startswith(f"{item['param']}="): + arg = arg.replace(item["value"], inproject_name) + args[i] = arg + + def import_rasters(self): + for raster_file, inproject_name in self.input_rasters: + self._run_function( + "r.unpack", + input=raster_file, + output=inproject_name, + overwrite=True, + superquiet=True, + ) + + def export_rasters(self): + # Pack the output raster + for raster_file, inproject_name in self.output_rasters: + # Overwriting a file is a warning, so to avoid it, we delete the file first. + Path(raster_file).unlink(missing_ok=True) + + self._run_function( + "r.pack", + input=inproject_name, + output=raster_file, + flags="c", + overwrite=True, + superquiet=True, + ) + + def import_data(self): + self.import_rasters() + + def export_data(self): + self.export_rasters() + + def cleanup(self): + remove = [name for (unused, name) in self.input_rasters] + remove.extend([name for (unused, name) in self.output_rasters]) + if remove: + self._run_function( + "g.remove", type="raster", name=remove, superquiet=True, flags="f" + ) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index fc1f30e6d02..c91a7fb9ca4 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -18,6 +18,7 @@ import grass.script as gs +from .importexport import PackImporterExporter from .support import ParameterConverter, ToolFunctionResolver, ToolResult @@ -130,6 +131,7 @@ def __init__( capture_output=True, capture_stderr=None, consistent_return_value=False, + keep_data=None, ): """ If session is provided and has an env attribute, it is used to execute tools. @@ -196,6 +198,11 @@ def __init__( self._capture_stderr = capture_stderr self._name_resolver = None self._consistent_return_value = consistent_return_value + # Decides if we delete at each run or only at the end of context. + self._delete_on_context_exit = False + # User request to keep the data. + self._keep_data = keep_data + self._cleanups = [] def _modified_env_if_needed(self): """Get the environment for subprocesses @@ -274,6 +281,7 @@ def run(self, tool_name_: str, /, **kwargs): args, tool_kwargs=kwargs, input=object_parameter_handler.stdin, + import_export=object_parameter_handler.import_export, **popen_options, ) use_objects = object_parameter_handler.translate_data_to_objects( @@ -299,6 +307,7 @@ def run_cmd( command: list[str], *, input: str | bytes | None = None, + import_export: bool | None = None, tool_kwargs: dict | None = None, **popen_options, ): @@ -311,12 +320,46 @@ def run_cmd( :param tool_kwargs: named tool arguments used for error reporting (experimental) :param **popen_options: additional options for :py:func:`subprocess.Popen` """ - return self.call_cmd( + if import_export is None: + import_export = False + for item in command: + if PackImporterExporter.is_recognized_file(item): + import_export = True + break + if import_export: + interface_result = self._process_parameters(command, **popen_options) + if interface_result.returncode != 0: + # This is only for the error states. 
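                    # Rewrite the "param=value" string so the tool sees the
                    # in-project name, e.g. "elevation=rows3x3.grass_raster"
                    # becomes "elevation=rows3x3".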
+ return gs.handle_errors( + interface_result.returncode, + result=None, + args=[command], + kwargs=tool_kwargs, + stderr=interface_result.stderr, + handler="raise", + ) + processed_parameters = interface_result.json + + pack_importer_exporter = PackImporterExporter(run_function=self.call) + pack_importer_exporter.modify_and_ingest_argument_list( + command, processed_parameters + ) + pack_importer_exporter.import_data() + + # We approximate tool_kwargs as original kwargs. + result = self.call_cmd( command, tool_kwargs=tool_kwargs, input=input, **popen_options, ) + if import_export: + pack_importer_exporter.export_data() + if self._delete_on_context_exit or self._keep_data: + self._cleanups.append(pack_importer_exporter.cleanup) + else: + pack_importer_exporter.cleanup() + return result def call(self, tool_name_: str, /, **kwargs): """Run a tool by specifying its name as a string and parameters. @@ -423,7 +466,22 @@ def __enter__(self): :returns: reference to the object (self) """ + self._delete_on_context_exit = True return self def __exit__(self, exc_type, exc_value, traceback): """Exit the context manager context.""" + if not self._keep_data: + self.cleanup() + + def cleanup(self): + for cleanup in self._cleanups: + cleanup() + + def _process_parameters(self, command, **popen_options): + popen_options["stdin"] = None + popen_options["stdout"] = gs.PIPE + # We respect whatever is in the stderr option because that's what the user + # asked for and will expect to get in case of error (we pretend that it was + # the intended run, not our special run before the actual run). + return self.call_cmd([*command, "--json"], **popen_options) diff --git a/python/grass/tools/support.py b/python/grass/tools/support.py index d8265c8c8cd..a2c7257aec9 100644 --- a/python/grass/tools/support.py +++ b/python/grass/tools/support.py @@ -24,7 +24,6 @@ import json import shutil from io import StringIO -from pathlib import Path from collections import namedtuple try: @@ -41,22 +40,7 @@ # ga is present as well because that's the only import-time failure we expect. 
ga = None - -# Partial copy to avoid import issues for now -class PackImporterExporter: - raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") - - @classmethod - def is_recognized_file(cls, value): - return cls.is_raster_pack_file(value) - - @classmethod - def is_raster_pack_file(cls, value): - if isinstance(value, (str, bytes)): - return value.endswith(cls.raster_pack_suffixes) - if isinstance(value, Path): - return value.suffix in cls.raster_pack_suffixes - return False +from .importexport import PackImporterExporter class ParameterConverter: diff --git a/python/grass/tools/tests/conftest.py b/python/grass/tools/tests/conftest.py index f4b9145af1c..1cda23c6591 100644 --- a/python/grass/tools/tests/conftest.py +++ b/python/grass/tools/tests/conftest.py @@ -33,3 +33,41 @@ def empty_string_result(): @pytest.fixture def echoing_resolver(): return ToolFunctionResolver(run_function=lambda x: x, env=os.environ.copy()) + + +@pytest.fixture +def rows_raster_file3x3(tmp_path): + project = tmp_path / "xy_test3x3" + gs.create_project(project) + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=3, cols=3, env=session.env) + gs.mapcalc("rows = row()", env=session.env) + output_file = tmp_path / "rows3x3.grass_raster" + gs.run_command( + "r.pack", + input="rows", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file + + +@pytest.fixture +def rows_raster_file4x5(tmp_path): + project = tmp_path / "xy_test4x5" + gs.create_project(project) + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=4, cols=5, env=session.env) + gs.mapcalc("rows = row()", env=session.env) + output_file = tmp_path / "rows4x5.grass_raster" + gs.run_command( + "r.pack", + input="rows", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file diff --git a/python/grass/experimental/tests/grass_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py similarity index 98% rename from python/grass/experimental/tests/grass_tools_pack_test.py rename to python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 6c9d6fd925d..42ca1a3cc1c 100644 --- a/python/grass/experimental/tests/grass_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -1,8 +1,8 @@ -"""Test grass.experimental.Tools class""" +"""Test grass.tools.Tools class""" import os -from grass.experimental.tools import Tools +from grass.tools import Tools def test_pack_input_output_tool_name_function(xy_dataset_session, rows_raster_file3x3): diff --git a/python/grass/tools/tests/grass_tools_session_tools_test.py b/python/grass/tools/tests/grass_tools_session_tools_test.py index 5e428009cc9..bd4151cb178 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_test.py @@ -8,7 +8,7 @@ import grass.script as gs from grass.exceptions import CalledModuleError from grass.experimental.mapset import TemporaryMapsetSession -from grass.experimental.tools import Tools +from grass.tools import Tools has_pandas = False From 353152df4ad93312a49817e81357d810643dbff2 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 15:41:45 -0400 Subject: [PATCH 30/49] Move code out of the Tools class, clean up code around ImporterExporter class --- python/grass/tools/importexport.py | 63 +++++++---- python/grass/tools/session_tools.py | 46 ++------ 
python/grass/tools/support.py | 16 ++- .../grass_tools_session_tools_pack_test.py | 106 +++++++++++++++++- 4 files changed, 172 insertions(+), 59 deletions(-) diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py index 971d5563115..351aa4f2d50 100644 --- a/python/grass/tools/importexport.py +++ b/python/grass/tools/importexport.py @@ -1,7 +1,8 @@ +import subprocess from pathlib import Path -class PackImporterExporter: +class ImporterExporter: raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") @classmethod @@ -16,56 +17,76 @@ def is_raster_pack_file(cls, value): return value.suffix in cls.raster_pack_suffixes return False - def __init__(self, *, run_function, env=None): + def __init__(self, *, run_function, run_cmd_function): self._run_function = run_function + self._run_cmd_function = run_cmd_function self.input_rasters: list[tuple] = [] self.output_rasters: list[tuple] = [] - def modify_and_ingest_argument_list(self, args, parameters): - # TODO: Deal with r.pack and r.unpack calls. - # Uses parameters, but modifies the command, generates list of rasters and vectors. + def process_parameter_list(self, command, **popen_options): + """Ingests any file for later imports and exports and replaces arguments + + This function is relatively costly as it calls a subprocess to digest the parameters. + + Returns the list of parameters with inputs and outputs replaced so that a tool + will understand that, i.e., file paths into data names in a project. + """ + # Get processed parameters to distinguish inputs and outputs. + parameters = self._process_parameters(command, **popen_options) + tool_name = parameters["module"] + args = command.copy() if "inputs" in parameters: for item in parameters["inputs"]: - if self.is_raster_pack_file(item["value"]): - inproject_name = Path(item["value"]).stem - self.input_rasters.append((Path(item["value"]), inproject_name)) - # No need to change that for the original kwargs. - # kwargs[item["param"]] = Path(item["value"]).stem - # Actual parameters to execute are now a list. + if tool_name != "r.unpack" and self.is_raster_pack_file(item["value"]): + in_project_name = self._to_name(item["value"]) + self.input_rasters.append((Path(item["value"]), in_project_name)) for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], inproject_name) + arg = arg.replace(item["value"], in_project_name) args[i] = arg if "outputs" in parameters: for item in parameters["outputs"]: - if self.is_raster_pack_file(item["value"]): - inproject_name = Path(item["value"]).stem - self.output_rasters.append((Path(item["value"]), inproject_name)) - # kwargs[item["param"]] = Path(item["value"]).stem + if tool_name != "r.pack" and self.is_raster_pack_file(item["value"]): + in_project_name = self._to_name(item["value"]) + self.output_rasters.append((Path(item["value"]), in_project_name)) for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): - arg = arg.replace(item["value"], inproject_name) + arg = arg.replace(item["value"], in_project_name) args[i] = arg + return args + + def _process_parameters(self, command, **popen_options): + """Get parameters processed by the tool itself""" + popen_options["stdin"] = None + popen_options["stdout"] = subprocess.PIPE + # We respect whatever is in the stderr option because that's what the user + # asked for and will expect to get in case of error (we pretend that it was + # the intended run, not our special run before the actual run). 
+ return self._run_cmd_function([*command, "--json"], **popen_options) + + def _to_name(self, value, /): + return Path(value).stem def import_rasters(self): - for raster_file, inproject_name in self.input_rasters: + for raster_file, in_project_name in self.input_rasters: self._run_function( "r.unpack", input=raster_file, - output=inproject_name, + output=in_project_name, overwrite=True, superquiet=True, ) def export_rasters(self): # Pack the output raster - for raster_file, inproject_name in self.output_rasters: + for raster_file, in_project_name in self.output_rasters: # Overwriting a file is a warning, so to avoid it, we delete the file first. + # This creates a behavior consistent with command line tools. Path(raster_file).unlink(missing_ok=True) self._run_function( "r.pack", - input=inproject_name, + input=in_project_name, output=raster_file, flags="c", overwrite=True, diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index c91a7fb9ca4..5b62f2f8ee4 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -18,7 +18,7 @@ import grass.script as gs -from .importexport import PackImporterExporter +from .importexport import ImporterExporter from .support import ParameterConverter, ToolFunctionResolver, ToolResult @@ -281,7 +281,7 @@ def run(self, tool_name_: str, /, **kwargs): args, tool_kwargs=kwargs, input=object_parameter_handler.stdin, - import_export=object_parameter_handler.import_export, + parameter_converter=object_parameter_handler, **popen_options, ) use_objects = object_parameter_handler.translate_data_to_objects( @@ -307,7 +307,7 @@ def run_cmd( command: list[str], *, input: str | bytes | None = None, - import_export: bool | None = None, + parameter_converter: ParameterConverter | None = None, tool_kwargs: dict | None = None, **popen_options, ): @@ -320,30 +320,14 @@ def run_cmd( :param tool_kwargs: named tool arguments used for error reporting (experimental) :param **popen_options: additional options for :py:func:`subprocess.Popen` """ - if import_export is None: - import_export = False - for item in command: - if PackImporterExporter.is_recognized_file(item): - import_export = True - break - if import_export: - interface_result = self._process_parameters(command, **popen_options) - if interface_result.returncode != 0: - # This is only for the error states. - return gs.handle_errors( - interface_result.returncode, - result=None, - args=[command], - kwargs=tool_kwargs, - stderr=interface_result.stderr, - handler="raise", - ) - processed_parameters = interface_result.json - - pack_importer_exporter = PackImporterExporter(run_function=self.call) - pack_importer_exporter.modify_and_ingest_argument_list( - command, processed_parameters + if parameter_converter is None: + parameter_converter = ParameterConverter() + parameter_converter.process_parameter_list(command[1:]) + if parameter_converter.import_export: + pack_importer_exporter = ImporterExporter( + run_function=self.call, run_cmd_function=self.call_cmd ) + command = pack_importer_exporter.process_parameter_list(command) pack_importer_exporter.import_data() # We approximate tool_kwargs as original kwargs. 
@@ -353,7 +337,7 @@ def run_cmd( input=input, **popen_options, ) - if import_export: + if parameter_converter.import_export: pack_importer_exporter.export_data() if self._delete_on_context_exit or self._keep_data: self._cleanups.append(pack_importer_exporter.cleanup) @@ -477,11 +461,3 @@ def __exit__(self, exc_type, exc_value, traceback): def cleanup(self): for cleanup in self._cleanups: cleanup() - - def _process_parameters(self, command, **popen_options): - popen_options["stdin"] = None - popen_options["stdout"] = gs.PIPE - # We respect whatever is in the stderr option because that's what the user - # asked for and will expect to get in case of error (we pretend that it was - # the intended run, not our special run before the actual run). - return self.call_cmd([*command, "--json"], **popen_options) diff --git a/python/grass/tools/support.py b/python/grass/tools/support.py index a2c7257aec9..593e7be654d 100644 --- a/python/grass/tools/support.py +++ b/python/grass/tools/support.py @@ -40,7 +40,7 @@ # ga is present as well because that's the only import-time failure we expect. ga = None -from .importexport import PackImporterExporter +from .importexport import ImporterExporter class ParameterConverter: @@ -84,7 +84,19 @@ def process_parameters(self, kwargs): elif isinstance(value, StringIO): kwargs[key] = "-" self.stdin = value.getvalue() - elif self.import_export is None and PackImporterExporter.is_recognized_file( + elif self.import_export is None and ImporterExporter.is_recognized_file( + value + ): + self.import_export = True + if self.import_export is None: + self.import_export = False + + def process_parameter_list(self, command): + """Converts or at least processes parameters passed as list of strings""" + for item in command: + splitted = item.split("=", maxsplit=1) + value = splitted[1] if len(splitted) > 1 else item + if self.import_export is None and ImporterExporter.is_recognized_file( value ): self.import_export = True diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 42ca1a3cc1c..a7d68212235 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -1,7 +1,10 @@ -"""Test grass.tools.Tools class""" +"""Test pack import-export functionality of grass.tools.Tools class""" import os +import pytest + +from grass.exceptions import CalledModuleError from grass.tools import Tools @@ -58,6 +61,20 @@ def test_pack_input_output_with_subprocess_run_like_call( assert not tools.g_list(type="raster", format="json") +def test_no_modify_command(xy_dataset_session, rows_raster_file3x3): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + command = [ + "r.slope.aspect", + f"elevation={rows_raster_file3x3}", + "slope=file.grass_raster", + ] + original = command.copy() + tools.run_cmd(command) + assert original == command + + def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): """Check input and output rasters are deleted after function call""" tools = Tools(session=xy_dataset_session) @@ -145,3 +162,90 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") + + +def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3): + """Check 
input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + with pytest.raises(CalledModuleError, match="does_not_exist"): + tools.r_slope_aspect( + elevation=rows_raster_file3x3, + slope="file.grass_raster", + does_not_exist="test", + ) + + +def test_direct_r_unpack_to_data(xy_dataset_session, rows_raster_file3x3): + """Check that we can r.unpack data as usual""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + name = "data_1" + tools.r_unpack(input=rows_raster_file3x3, output=name) + assert tools.g_findfile(element="raster", file=name, format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def test_direct_r_unpack_to_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): + """Check that roundtrip from existing packed raster to new packed raster works""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + name = "auto_packed_data_1.grass_raster" + packed_file = tmp_path / name + tools.r_unpack(input=rows_raster_file3x3, output=packed_file) + assert packed_file.exists() + assert tools.g_findfile(element="raster", file=packed_file.stem, format="json")[ + "name" + ] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def test_direct_r_pack_from_data(xy_dataset_session, tmp_path): + """Check that we can r.pack data as usual""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + tools.r_mapcalc(expression="data_1 = 1") + name = "manually_packed_data_1.grass_raster" + packed_file = tmp_path / name + tools.r_pack(input="data_1", output=packed_file) + # New file was created. + assert packed_file.exists() + # Input still exists. + assert tools.g_findfile(element="raster", file="data_1", format="json")["name"] + # There should be no raster created automatically. + assert not tools.g_findfile(element="raster", file=packed_file.stem, format="json")[ + "name" + ] + tools.cleanup() + # Input still exists even after cleaning. + assert tools.g_findfile(element="raster", file="data_1", format="json")["name"] + + +def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): + """Check that roundtrip from existing packed raster to raster works""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + name = "manually_packed_data_1.grass_raster" + packed_file = tmp_path / name + tools.r_pack(input=rows_raster_file3x3, output=packed_file) + # New file was created. + assert packed_file.exists() + # Input still exists. + assert rows_raster_file3x3.exists() + # Auto-imported raster should exist. + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + # There should be no raster created automatically. + assert not tools.g_findfile(element="raster", file=packed_file.stem, format="json")[ + "name" + ] + tools.cleanup() + # Auto-imported raster should be deleted. 
+ assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] From fcbf52ffcd0c4804adaa62253311380c6da0067c Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 15:44:22 -0400 Subject: [PATCH 31/49] Doc for tests --- .../tools/tests/grass_tools_session_tools_pack_test.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index a7d68212235..f09ff9aac1e 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -62,7 +62,7 @@ def test_pack_input_output_with_subprocess_run_like_call( def test_no_modify_command(xy_dataset_session, rows_raster_file3x3): - """Check input and output pack files work with tool name call""" + """Check that input command is not modified by the function""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) command = [ @@ -165,7 +165,11 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3): - """Check input and output pack files work with tool name call""" + """Check wrong parameter causes standard exception + + Since the tool is called to process its parameters with pack IO, + the error handling takes a different path than without pack IO active. + """ tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) with pytest.raises(CalledModuleError, match="does_not_exist"): From 2eaba0e0f6962810b800412633df3a182001355b Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 16:12:51 -0400 Subject: [PATCH 32/49] Use tmp_path for files in tests. Test supported path representations. 
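
A minimal sketch of the path handling this change is after, not a definitive
example: both a str and a pathlib.Path pointing at a pack file are meant to be
recognized, while bytes is no longer treated as a pack file. The session object
and the .grass_raster file names below are placeholders.

    from pathlib import Path
    from grass.tools import Tools

    tools = Tools(session=session)
    # A plain string path and a Path object should behave the same way.
    tools.r_slope_aspect(elevation="rows.grass_raster", slope="slope.grass_raster")
    tools.r_slope_aspect(
        elevation=Path("rows.grass_raster"), slope=Path("slope2.grass_raster")
    )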
--- python/grass/tools/importexport.py | 2 +- .../grass_tools_session_tools_pack_test.py | 88 +++++++++++++------ 2 files changed, 60 insertions(+), 30 deletions(-) diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py index 351aa4f2d50..87a7dee6531 100644 --- a/python/grass/tools/importexport.py +++ b/python/grass/tools/importexport.py @@ -11,7 +11,7 @@ def is_recognized_file(cls, value): @classmethod def is_raster_pack_file(cls, value): - if isinstance(value, (str, bytes)): + if isinstance(value, str): return value.endswith(cls.raster_pack_suffixes) if isinstance(value, Path): return value.suffix in cls.raster_pack_suffixes diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index f09ff9aac1e..b20c85780f2 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -1,6 +1,7 @@ """Test pack import-export functionality of grass.tools.Tools class""" import os +from pathlib import Path import pytest @@ -8,13 +9,40 @@ from grass.tools import Tools -def test_pack_input_output_tool_name_function(xy_dataset_session, rows_raster_file3x3): +def test_pack_input_output_tool_name_function( + xy_dataset_session, rows_raster_file3x3, tmp_path +): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) assert os.path.exists(rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() + assert not tools.g_findfile(element="raster", file="file", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert not tools.g_list(type="raster", format="json") + + +@pytest.mark.parametrize("parameter_type", [str, Path]) +def test_pack_input_output_tool_name_function_string_value( + xy_dataset_session, rows_raster_file3x3, tmp_path, parameter_type +): + """Check input and output pack files work string a parameter + + We make no assumption about the fixture types and explicitly test all + supported parameter types. 
+ """ + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect( + elevation=parameter_type(rows_raster_file3x3), slope=parameter_type(output_file) + ) + assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" @@ -23,16 +51,15 @@ def test_pack_input_output_tool_name_function(xy_dataset_session, rows_raster_fi def test_pack_input_output_with_name_and_parameter_call( - xy_dataset_session, rows_raster_file3x3 + xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check input and output pack files work with tool name as string""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) assert os.path.exists(rows_raster_file3x3) - tools.run( - "r.slope.aspect", elevation=rows_raster_file3x3, slope="file.grass_raster" - ) - assert os.path.exists("file.grass_raster") + output_file = tmp_path / "file.grass_raster" + tools.run("r.slope.aspect", elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" @@ -41,19 +68,20 @@ def test_pack_input_output_with_name_and_parameter_call( def test_pack_input_output_with_subprocess_run_like_call( - xy_dataset_session, rows_raster_file3x3 + xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check input and output pack files work with command as list""" tools = Tools(session=xy_dataset_session) assert os.path.exists(rows_raster_file3x3) + output_file = tmp_path / "file.grass_raster" tools.run_cmd( [ "r.slope.aspect", f"elevation={rows_raster_file3x3}", - "aspect=file.grass_raster", + f"aspect={output_file}", ] ) - assert os.path.exists("file.grass_raster") + assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" @@ -61,26 +89,28 @@ def test_pack_input_output_with_subprocess_run_like_call( assert not tools.g_list(type="raster", format="json") -def test_no_modify_command(xy_dataset_session, rows_raster_file3x3): +def test_no_modify_command(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check that input command is not modified by the function""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) + output_file = tmp_path / "file.grass_raster" command = [ "r.slope.aspect", f"elevation={rows_raster_file3x3}", - "slope=file.grass_raster", + f"slope={output_file}", ] original = command.copy() tools.run_cmd(command) assert original == command -def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): +def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check input and output rasters are deleted after function call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not 
tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" @@ -88,21 +118,21 @@ def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3): assert not tools.g_list(type="raster", format="json") -def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3): +def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check input and output rasters are deleted at the end of context""" + output_file_1 = tmp_path / "file.grass_raster" + output_file_2 = tmp_path / "file2.grass_raster" with Tools(session=xy_dataset_session) as tools: tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file_1) + assert output_file_1.exists() assert tools.g_findfile(element="raster", file="file", format="json")["name"] - tools.r_mapcalc_simple( - expression="100 * A", a="file", output="file2.grass_raster" - ) - assert os.path.exists("file2.grass_raster") + tools.r_mapcalc_simple(expression="100 * A", a="file", output=output_file_2) + assert output_file_2.exists() assert tools.g_findfile(element="raster", file="file2", format="json")["name"] # The pack files should still exist. - assert os.path.exists("file.grass_raster") - assert os.path.exists("file2.grass_raster") + assert output_file_1.exists() + assert output_file_2.exists() # The in-project rasters should not exist. assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] @@ -112,12 +142,13 @@ def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3): assert not tools.g_list(type="raster", format="json") -def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3): +def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check input and output rasters are deleted only with explicit cleanup call""" + output_file = tmp_path / "file.grass_raster" tools = Tools(session=xy_dataset_session, keep_data=True) tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() # Files should still be available. assert tools.g_findfile(element="raster", file="file", format="json")["name"] assert tools.g_findfile( @@ -126,7 +157,6 @@ def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3): # But an explicit cleanup should delete the files. 
tools.cleanup() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] - assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] From b162e39f75cfe818be61ae6a8862b9b82ae7fa27 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 10 Sep 2025 16:15:58 -0400 Subject: [PATCH 33/49] Remove last piece of grass.experimental.tools from CLI --- python/grass/app/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/app/cli.py b/python/grass/app/cli.py index 6d2399a7d7c..efdddebf65d 100644 --- a/python/grass/app/cli.py +++ b/python/grass/app/cli.py @@ -26,7 +26,7 @@ import grass.script as gs from grass.app.data import lock_mapset, unlock_mapset, MapsetLockingException -from grass.experimental.tools import Tools +from grass.tools import Tools # Special flags supported besides help and --json which does not need special handling: SPECIAL_FLAGS = [ From 2a245410c7b153baa5cf8161b4ad6c16dd7c18a0 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 11 Sep 2025 16:59:04 -0400 Subject: [PATCH 34/49] Remove changes from experimental --- python/grass/experimental/tests/conftest.py | 46 --------------------- 1 file changed, 46 deletions(-) diff --git a/python/grass/experimental/tests/conftest.py b/python/grass/experimental/tests/conftest.py index ea20bfe2ade..5bd44da1de0 100644 --- a/python/grass/experimental/tests/conftest.py +++ b/python/grass/experimental/tests/conftest.py @@ -33,14 +33,6 @@ def xy_session_for_module(tmp_path_factory): yield session -@pytest.fixture -def xy_dataset_session(tmp_path): - """Creates a session with a mapset which has vector with a float column""" - gs.core._create_location_xy(tmp_path, "test") # pylint: disable=protected-access - with gs.setup.init(tmp_path / "test") as session: - yield session - - @pytest.fixture def unique_id(): """A unique alphanumeric identifier""" @@ -77,41 +69,3 @@ def xy_mapset_non_permament(xy_session): # pylint: disable=redefined-outer-name "test1", create=True, env=xy_session.env ) as session: yield session - - -@pytest.fixture -def rows_raster_file3x3(tmp_path): - project = tmp_path / "xy_test3x3" - gs.create_project(project) - with gs.setup.init(project, env=os.environ.copy()) as session: - gs.run_command("g.region", rows=3, cols=3, env=session.env) - gs.mapcalc("rows = row()", env=session.env) - output_file = tmp_path / "rows3x3.grass_raster" - gs.run_command( - "r.pack", - input="rows", - output=output_file, - flags="c", - superquiet=True, - env=session.env, - ) - return output_file - - -@pytest.fixture -def rows_raster_file4x5(tmp_path): - project = tmp_path / "xy_test4x5" - gs.create_project(project) - with gs.setup.init(project, env=os.environ.copy()) as session: - gs.run_command("g.region", rows=4, cols=5, env=session.env) - gs.mapcalc("rows = row()", env=session.env) - output_file = tmp_path / "rows4x5.grass_raster" - gs.run_command( - "r.pack", - input="rows", - output=output_file, - flags="c", - superquiet=True, - env=session.env, - ) - return output_file From 461dd8f5651d6008fcfcede69237bc0cd95953cd Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 12 Sep 2025 16:43:18 -0400 Subject: [PATCH 35/49] Add understanding of what is imported and track function call versus object level pending tasks and cleaning. Respect overwrite setting as for normal GRASS rasters. 
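
A rough usage sketch of the intended overwrite and cleanup behavior, illustrative
rather than authoritative; the session object and the file names are placeholders.

    from grass.tools import Tools

    tools = Tools(session=session)
    tools.r_slope_aspect(elevation="rows.grass_raster", slope="slope.grass_raster")
    # Writing the same pack file again now requires overwrite, just like an
    # in-project raster would, either per call or for the whole object:
    tools.r_slope_aspect(
        elevation="rows.grass_raster", slope="slope.grass_raster", overwrite=True
    )
    tools = Tools(session=session, overwrite=True)

    # A plain call removes its temporarily imported rasters right away;
    # keep_data=True (or a with-block) defers that until cleanup() or exit.
    tools = Tools(session=session, keep_data=True)
    tools.r_slope_aspect(elevation="rows.grass_raster", slope="slope.grass_raster")
    tools.cleanup()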
--- python/grass/tools/importexport.py | 95 ++++++-- python/grass/tools/session_tools.py | 34 ++- .../grass_tools_session_tools_pack_test.py | 207 ++++++++++++++++-- 3 files changed, 289 insertions(+), 47 deletions(-) diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py index 87a7dee6531..6342415e18e 100644 --- a/python/grass/tools/importexport.py +++ b/python/grass/tools/importexport.py @@ -1,16 +1,26 @@ +from __future__ import annotations + import subprocess from pathlib import Path +from typing import Literal class ImporterExporter: + """Imports and exports data while keeping track of it + + This is a class for internal use, but it may mature into a generally useful tool. + """ + raster_pack_suffixes = (".grass_raster", ".pack", ".rpack", ".grr") @classmethod def is_recognized_file(cls, value): + """Return `True` if file type is a recognized type, `False` otherwise""" return cls.is_raster_pack_file(value) @classmethod def is_raster_pack_file(cls, value): + """Return `True` if file type is GRASS raster pack, `False` otherwise""" if isinstance(value, str): return value.endswith(cls.raster_pack_suffixes) if isinstance(value, Path): @@ -20,8 +30,11 @@ def is_raster_pack_file(cls, value): def __init__(self, *, run_function, run_cmd_function): self._run_function = run_function self._run_cmd_function = run_cmd_function - self.input_rasters: list[tuple] = [] - self.output_rasters: list[tuple] = [] + # At least for reading purposes, public access to the lists makes sense. + self.input_rasters: list[tuple[Path, str]] = [] + self.output_rasters: list[tuple[Path, str]] = [] + self.current_input_rasters: list[tuple[Path, str]] = [] + self.current_output_rasters: list[tuple[Path, str]] = [] def process_parameter_list(self, command, **popen_options): """Ingests any file for later imports and exports and replaces arguments @@ -32,14 +45,25 @@ def process_parameter_list(self, command, **popen_options): will understand that, i.e., file paths into data names in a project. """ # Get processed parameters to distinguish inputs and outputs. + # We actually don't know the type of the input or outputs) because that is + # currently not included in --json. Consequently, we are only assuming that the + # files are meant to be used as in-project data. So, we need to deal with cases + # where that's not true one by one, such as r.unpack taking file, + # not raster (cell), so the file needs to be left as is. 
parameters = self._process_parameters(command, **popen_options) tool_name = parameters["module"] args = command.copy() + # We will deal with inputs right away if "inputs" in parameters: for item in parameters["inputs"]: if tool_name != "r.unpack" and self.is_raster_pack_file(item["value"]): in_project_name = self._to_name(item["value"]) - self.input_rasters.append((Path(item["value"]), in_project_name)) + record = (Path(item["value"]), in_project_name) + if ( + record not in self.input_rasters + and record not in self.current_input_rasters + ): + self.current_input_rasters.append(record) for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): arg = arg.replace(item["value"], in_project_name) @@ -48,7 +72,14 @@ def process_parameter_list(self, command, **popen_options): for item in parameters["outputs"]: if tool_name != "r.pack" and self.is_raster_pack_file(item["value"]): in_project_name = self._to_name(item["value"]) - self.output_rasters.append((Path(item["value"]), in_project_name)) + record = (Path(item["value"]), in_project_name) + # Following the logic of r.slope.aspect, we don't deal with one output repeated + # more than once, but this would be the place to address it. + if ( + record not in self.output_rasters + and record not in self.current_output_rasters + ): + self.current_output_rasters.append(record) for i, arg in enumerate(args): if arg.startswith(f"{item['param']}="): arg = arg.replace(item["value"], in_project_name) @@ -67,42 +98,68 @@ def _process_parameters(self, command, **popen_options): def _to_name(self, value, /): return Path(value).stem - def import_rasters(self): - for raster_file, in_project_name in self.input_rasters: + def import_rasters(self, rasters, *, env): + for raster_file, in_project_name in rasters: + # Overwriting here is driven by the run function. self._run_function( "r.unpack", input=raster_file, output=in_project_name, - overwrite=True, superquiet=True, + env=env, ) - def export_rasters(self): + def export_rasters( + self, rasters, *, env, delete_first: bool, overwrite: Literal[True] | None + ): # Pack the output raster - for raster_file, in_project_name in self.output_rasters: + for raster_file, in_project_name in rasters: # Overwriting a file is a warning, so to avoid it, we delete the file first. # This creates a behavior consistent with command line tools. - Path(raster_file).unlink(missing_ok=True) + if delete_first: + Path(raster_file).unlink(missing_ok=True) + # Overwriting here is driven by the run function and env. self._run_function( "r.pack", input=in_project_name, output=raster_file, flags="c", - overwrite=True, superquiet=True, + env=env, + overwrite=overwrite, ) - def import_data(self): - self.import_rasters() - - def export_data(self): - self.export_rasters() - - def cleanup(self): + def import_data(self, *, env): + # We import the data, make records for later, and the clear the current list. + self.import_rasters(self.current_input_rasters, env=env) + self.input_rasters.extend(self.current_input_rasters) + self.current_input_rasters = [] + + def export_data( + self, *, env, delete_first: bool = False, overwrite: Literal[True] | None = None + ): + # We export the data, make records for later, and the clear the current list. 
+ self.export_rasters( + self.current_output_rasters, + env=env, + delete_first=delete_first, + overwrite=overwrite, + ) + self.output_rasters.extend(self.current_output_rasters) + self.current_output_rasters = [] + + def cleanup(self, *, env): remove = [name for (unused, name) in self.input_rasters] remove.extend([name for (unused, name) in self.output_rasters]) if remove: self._run_function( - "g.remove", type="raster", name=remove, superquiet=True, flags="f" + "g.remove", + type="raster", + name=remove, + superquiet=True, + flags="f", + env=env, ) + self.input_rasters = [] + self.output_rasters = [] diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 5b62f2f8ee4..3a955fb4c08 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -198,11 +198,11 @@ def __init__( self._capture_stderr = capture_stderr self._name_resolver = None self._consistent_return_value = consistent_return_value + self._importer_exporter = None # Decides if we delete at each run or only at the end of context. self._delete_on_context_exit = False # User request to keep the data. self._keep_data = keep_data - self._cleanups = [] def _modified_env_if_needed(self): """Get the environment for subprocesses @@ -320,15 +320,22 @@ def run_cmd( :param tool_kwargs: named tool arguments used for error reporting (experimental) :param **popen_options: additional options for :py:func:`subprocess.Popen` """ + # Compute the environment for subprocesses and store it for later use. + if "env" not in popen_options: + popen_options["env"] = self._modified_env_if_needed() + if parameter_converter is None: parameter_converter = ParameterConverter() parameter_converter.process_parameter_list(command[1:]) if parameter_converter.import_export: - pack_importer_exporter = ImporterExporter( - run_function=self.call, run_cmd_function=self.call_cmd + if self._importer_exporter is None: + self._importer_exporter = ImporterExporter( + run_function=self.call, run_cmd_function=self.call_cmd + ) + command = self._importer_exporter.process_parameter_list( + command, **popen_options ) - command = pack_importer_exporter.process_parameter_list(command) - pack_importer_exporter.import_data() + self._importer_exporter.import_data(env=popen_options["env"]) # We approximate tool_kwargs as original kwargs. 
result = self.call_cmd( @@ -338,11 +345,14 @@ def run_cmd( **popen_options, ) if parameter_converter.import_export: - pack_importer_exporter.export_data() - if self._delete_on_context_exit or self._keep_data: - self._cleanups.append(pack_importer_exporter.cleanup) - else: - pack_importer_exporter.cleanup() + overwrite = None + if "--o" in command or "--overwrite" in command: + overwrite = True + self._importer_exporter.export_data( + env=popen_options["env"], overwrite=overwrite + ) + if not self._delete_on_context_exit and not self._keep_data: + self._importer_exporter.cleanup(env=popen_options["env"]) return result def call(self, tool_name_: str, /, **kwargs): @@ -459,5 +469,5 @@ def __exit__(self, exc_type, exc_value, traceback): self.cleanup() def cleanup(self): - for cleanup in self._cleanups: - cleanup() + if self._importer_exporter is not None: + self._importer_exporter.cleanup(env=self._modified_env_if_needed()) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index b20c85780f2..03019487ce6 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -163,38 +163,213 @@ def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3, tmp_path): assert not tools.g_list(type="raster", format="json") -def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3): +def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check input and output rasters are kept even with context""" + file_1 = tmp_path / "file_1.grass_raster" + file_2 = tmp_path / "file_2.grass_raster" with Tools(session=xy_dataset_session, keep_data=True) as tools: tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="file.grass_raster") - assert os.path.exists("file.grass_raster") - assert tools.g_findfile(element="raster", file="file", format="json")["name"] - tools.r_mapcalc_simple( - expression="100 * A", a="file", output="file2.grass_raster" - ) - assert os.path.exists("file2.grass_raster") - assert tools.g_findfile(element="raster", file="file2", format="json")["name"] + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=file_1) + assert file_1.exists() + assert tools.g_findfile(element="raster", file=file_1.stem, format="json")[ + "name" + ] + tools.r_mapcalc_simple(expression="100 * A", a=file_1.stem, output=file_2) + assert file_2.exists() + assert tools.g_findfile(element="raster", file=file_2.stem, format="json")[ + "name" + ] # The pack files should still exist. - assert os.path.exists("file.grass_raster") - assert os.path.exists("file2.grass_raster") + assert file_1.exists() + assert file_2.exists() # The in-project rasters should also exist. - assert tools.g_findfile(element="raster", file="file", format="json")["name"] - assert tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert tools.g_findfile(element="raster", file=file_1.stem, format="json")["name"] + assert tools.g_findfile(element="raster", file=file_2.stem, format="json")["name"] assert tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] # But an explicit cleanup should delete the files. 
tools.cleanup() - assert not tools.g_findfile(element="raster", file="file", format="json")["name"] - assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] + assert not tools.g_findfile(element="raster", file=file_1.stem, format="json")[ + "name" + ] + assert not tools.g_findfile(element="raster", file=file_2.stem, format="json")[ + "name" + ] assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") + # The pack files should still exist after cleanup. + assert file_1.exists() + assert file_2.exists() + + +def test_multiple_input_usages_with_context(xy_dataset_session, rows_raster_file3x3): + """Check multiple usages of the same input raster with context""" + with Tools(session=xy_dataset_session) as tools: + tools.g_region(raster=rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="slope") + tools.r_mapcalc_simple( + expression="100 * A", a=rows_raster_file3x3, output="a100" + ) + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert tools.g_findfile(element="raster", file="slope", format="json")["name"] + assert tools.g_findfile(element="raster", file="a100", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def test_multiple_input_usages_with_keep_data(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are kept even with context""" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(raster=rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="slope") + tools.r_mapcalc_simple(expression="100 * A", a=rows_raster_file3x3, output="a100") + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert tools.g_findfile(element="raster", file="slope", format="json")["name"] + assert tools.g_findfile(element="raster", file="a100", format="json")["name"] + tools.cleanup() + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def test_multiple_input_usages_with_defaults(xy_dataset_session, rows_raster_file3x3): + """Check input and output rasters are kept even with context""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + tools.r_mapcalc_simple( + expression="A + B", + a=rows_raster_file3x3, + b=rows_raster_file3x3, + output="output", + ) + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert tools.g_findfile(element="raster", file="output", format="json")["name"] + + +def test_repeated_input_usages_with_context(xy_dataset_session, rows_raster_file3x3): + """Check multiple usages of the same input raster with context""" + with Tools(session=xy_dataset_session) as tools: + tools.g_region(rows=3, cols=3) + tools.r_mapcalc_simple( + expression="A + B", + a=rows_raster_file3x3, + b=rows_raster_file3x3, + output="output", + ) + assert tools.g_findfile(element="raster", file="output", format="json")["name"] + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert tools.g_findfile(element="raster", file="output", format="json")["name"] + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def 
test_repeated_output(xy_dataset_session, rows_raster_file3x3, tmp_path): + """Check behavior when two outputs have the same name + + This would ideally result in error or some other clear state, but at least + r.slope.aspect has that as undefined behavior, so we follow the same logic. + Here, we test the current behavior which is that no error is produced + and one of the outputs is produced (but it is not defined which one). + """ + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect( + elevation=rows_raster_file3x3, slope=output_file, aspect=output_file + ) + assert output_file.exists() + + +def test_output_without_overwrite(xy_dataset_session, rows_raster_file3x3, tmp_path): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + with pytest.raises(CalledModuleError, match=r"[Oo]verwrite"): + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() + + +def test_output_with_object_level_overwrite( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session, overwrite=True) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + # Same call the second time. + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert output_file.exists() + + +def test_output_with_function_level_overwrite( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + assert os.path.exists(rows_raster_file3x3) + output_file = tmp_path / "file.grass_raster" + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + # Same call the second time. 
+ tools.r_slope_aspect( + elevation=rows_raster_file3x3, slope=output_file, overwrite=True + ) + assert output_file.exists() + + +def test_non_existent_pack_input(xy_dataset_session, tmp_path): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + input_file = tmp_path / "does_not_exist.grass_raster" + assert not input_file.exists() + with pytest.raises( + CalledModuleError, + match=rf"(?s)[^/\/a-zA-Z_]{input_file}[^/\/a-zA-Z_].*not found", + ): + tools.r_slope_aspect(elevation=input_file, slope="slope") + assert not tools.g_findfile(element="raster", file=input_file.stem, format="json")[ + "name" + ] + assert not tools.g_findfile(element="raster", file="slope", format="json")["name"] + + +def test_non_existent_output_pack_directory( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check input and output pack files work with tool name call""" + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "does_not_exist" / "file.grass_raster" + assert not output_file.exists() + assert not output_file.parent.exists() + assert rows_raster_file3x3.exists() + with pytest.raises( + CalledModuleError, + match=rf"(?s)[^/\/a-zA-Z_]{output_file.parent}[^/\/a-zA-Z_].*does not exist", + ): + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) -def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3): +def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check wrong parameter causes standard exception Since the tool is called to process its parameters with pack IO, From f233987c2532ee8789a0fce6fa45e2db7fed3a15 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 24 Sep 2025 09:16:52 -0400 Subject: [PATCH 36/49] Skip import of inputs when they are known outputs --- python/grass/tools/importexport.py | 3 +- .../grass_tools_session_tools_pack_test.py | 49 ++++++++++++++++++- 2 files changed, 50 insertions(+), 2 deletions(-) diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py index 6342415e18e..47e0c42ef41 100644 --- a/python/grass/tools/importexport.py +++ b/python/grass/tools/importexport.py @@ -60,7 +60,8 @@ def process_parameter_list(self, command, **popen_options): in_project_name = self._to_name(item["value"]) record = (Path(item["value"]), in_project_name) if ( - record not in self.input_rasters + record not in self.output_rasters + and record not in self.input_rasters and record not in self.current_input_rasters ): self.current_input_rasters.append(record) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 03019487ce6..1e78aaecfbb 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -1,6 +1,7 @@ """Test pack import-export functionality of grass.tools.Tools class""" import os +import re from pathlib import Path import pytest @@ -256,6 +257,52 @@ def test_multiple_input_usages_with_defaults(xy_dataset_session, rows_raster_fil assert tools.g_findfile(element="raster", file="output", format="json")["name"] +def test_creation_and_use_with_context( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check that we can create an external file and then use the file later""" + slope = tmp_path / "slope.grass_raster" + with Tools(session=xy_dataset_session) as tools: + 
tools.g_region(raster=rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) + tools.r_univar(map=slope, format="json")["cells"] == 9 + assert tools.g_findfile(element="raster", file=slope.stem, format="json")[ + "name" + ] + assert not tools.g_findfile(element="raster", file=slope.stem, format="json")[ + "name" + ] + assert slope.exists() + + +def test_creation_and_use_with_keep_data( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check that we can create an external file and then use the file later""" + slope = tmp_path / "slope.grass_raster" + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(raster=rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) + tools.r_univar(map=slope, format="json")["cells"] == 9 + assert tools.g_findfile(element="raster", file=slope.stem, format="json")["name"] + assert slope.exists() + + +def test_creation_and_use_with_defaults( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check that we can create an external file and then use the file later""" + slope = tmp_path / "slope.grass_raster" + tools = Tools(session=xy_dataset_session) + tools.g_region(raster=rows_raster_file3x3) + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) + tools.r_univar(map=slope, format="json")["cells"] == 9 + assert not tools.g_findfile(element="raster", file=slope.stem, format="json")[ + "name" + ] + assert slope.exists() + + def test_repeated_input_usages_with_context(xy_dataset_session, rows_raster_file3x3): """Check multiple usages of the same input raster with context""" with Tools(session=xy_dataset_session) as tools: @@ -364,7 +411,7 @@ def test_non_existent_output_pack_directory( assert rows_raster_file3x3.exists() with pytest.raises( CalledModuleError, - match=rf"(?s)[^/\/a-zA-Z_]{output_file.parent}[^/\/a-zA-Z_].*does not exist", + match=rf"(?s)[^/\/a-zA-Z_]{re.escape(str(output_file.parent))}[^/\/a-zA-Z_].*does not exist", ): tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) From 222e7a6693b823af2a684bb38c4cb0c11248ca31 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 24 Sep 2025 17:02:04 -0400 Subject: [PATCH 37/49] Clean the imported data in case of exceptions --- python/grass/tools/session_tools.py | 42 ++-- .../grass_tools_session_tools_pack_test.py | 185 ++++++++++++++++++ 2 files changed, 213 insertions(+), 14 deletions(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 3a955fb4c08..905442a1a19 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -17,6 +17,7 @@ import os import grass.script as gs +from grass.exceptions import CalledModuleError from .importexport import ImporterExporter from .support import ParameterConverter, ToolFunctionResolver, ToolResult @@ -335,24 +336,37 @@ def run_cmd( command = self._importer_exporter.process_parameter_list( command, **popen_options ) - self._importer_exporter.import_data(env=popen_options["env"]) - - # We approximate tool_kwargs as original kwargs. - result = self.call_cmd( - command, - tool_kwargs=tool_kwargs, - input=input, - **popen_options, - ) + try: + self._importer_exporter.import_data(env=popen_options["env"]) + except CalledModuleError: + if parameter_converter.import_export: + if not self._delete_on_context_exit and not self._keep_data: + self._importer_exporter.cleanup(env=popen_options["env"]) + raise + try: + # We approximate tool_kwargs as original kwargs. 
+ result = self.call_cmd( + command, + tool_kwargs=tool_kwargs, + input=input, + **popen_options, + ) + except CalledModuleError: + if parameter_converter.import_export: + if not self._delete_on_context_exit and not self._keep_data: + self._importer_exporter.cleanup(env=popen_options["env"]) + raise if parameter_converter.import_export: overwrite = None if "--o" in command or "--overwrite" in command: overwrite = True - self._importer_exporter.export_data( - env=popen_options["env"], overwrite=overwrite - ) - if not self._delete_on_context_exit and not self._keep_data: - self._importer_exporter.cleanup(env=popen_options["env"]) + try: + self._importer_exporter.export_data( + env=popen_options["env"], overwrite=overwrite + ) + finally: + if not self._delete_on_context_exit and not self._keep_data: + self._importer_exporter.cleanup(env=popen_options["env"]) return result def call(self, tool_name_: str, /, **kwargs): diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 1e78aaecfbb..76f0d5fc376 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -505,3 +505,188 @@ def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x3, tmp_pa assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" )["name"] + + +def test_clean_after_tool_failure_with_context_and_try( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we delete imported input when we fail after that import. + + A realistic code example with try-finally blocks, but without an explicit check + that the exception was raised. + + We don't test multiple raster, assuming that either all are removed or all kept. + """ + try: + with Tools(session=xy_dataset_session) as tools: + tools.r_mapcalc_simple( + expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + ) + except CalledModuleError: + pass + finally: + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + + +def test_clean_after_tool_failure_with_context_and_raises( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check input and output pack files work with tool name call + + Checks that the exception was actually raised, but does not show the intention + as clearly as the test with try-finally. + """ + with ( + pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"), + Tools(session=xy_dataset_session) as tools, + ): + tools.r_mapcalc_simple( + expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + ) + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_tool_failure_without_context( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we delete imported input when we fail after that import. + + A single call should clean after itself unless told otherwise. 
+ """ + tools = Tools(session=xy_dataset_session) + with pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"): + tools.r_mapcalc_simple( + expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + ) + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_tool_failure_without_context_with_keep_data( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we don't delete imported input even after failure when asked. + + When explicitly requested, we wait for explicit request to delete the imported + data even after a failure. + """ + tools = Tools(session=xy_dataset_session, keep_data=True) + with pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"): + tools.r_mapcalc_simple( + expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + ) + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + tools.cleanup() + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_call_failure_with_context_and_try( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we delete imported input when we fail after that import. + + A realistic code example with try-finally blocks, but without an explicit check + that the exception was raised. + + We don't test multiple raster, assuming that either all are removed or all kept. + """ + try: + with Tools(session=xy_dataset_session) as tools: + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "does_not_exist" / "file.grass_raster" + assert not output_file.parent.exists() + # Non-existence of a directory will be failure inside r.pack which is + # what we use to get an internal failure inside the call. + # This relies on inputs being resolved before outputs. + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + except CalledModuleError: + pass + finally: + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_call_with_context_and_raises( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check input and output pack files work with tool name call + + Checks that the exception was actually raised, but does not show the intention + as clearly as the test with try-finally. + """ + with Tools(session=xy_dataset_session) as tools: + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "does_not_exist" / "file.grass_raster" + assert not output_file.parent.exists() + # Non-existence of a directory will be failure inside r.pack which is + # what we use to get an internal failure inside the call. + # This relies on inputs being resolved before outputs. + with pytest.raises(CalledModuleError, match=r"r\.pack"): + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_call_without_context( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we delete imported input when we fail after that import. + + A single call should clean after itself unless told otherwise. 
+ """ + tools = Tools(session=xy_dataset_session) + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "does_not_exist" / "file.grass_raster" + assert not output_file.parent.exists() + with pytest.raises(CalledModuleError, match=r"r\.pack"): + # Non-existence of a directory will be failure inside r.pack which is + # what we use to get an internal failure inside the call. + # This relies on inputs being resolved before outputs. + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() + + +def test_clean_after_call_without_context_with_keep_data( + xy_dataset_session, rows_raster_file3x3, tmp_path +): + """Check we don't delete imported input even after failure when asked. + + When explicitly requested, we wait for explict request to delete the imported + data even after a failure. + """ + tools = Tools(session=xy_dataset_session, keep_data=True) + tools.g_region(rows=3, cols=3) + output_file = tmp_path / "does_not_exist" / "file.grass_raster" + assert not output_file.parent.exists() + with pytest.raises(CalledModuleError, match=r"r\.pack"): + # Non-existence of a directory will be failure inside r.pack which is + # what we use to get an internal failure inside the call. + # This relies on inputs being resolved before outputs. + tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + assert tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + tools.cleanup() + assert not tools.g_findfile( + element="raster", file=rows_raster_file3x3.stem, format="json" + )["name"] + assert rows_raster_file3x3.exists() From 5f332248e88c9e5c0657d46f5c7b13ace5476cad Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 24 Sep 2025 17:03:32 -0400 Subject: [PATCH 38/49] Clean the imported data in case of exceptions --- .../tools/tests/grass_tools_session_tools_pack_test.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 76f0d5fc376..2112d174edc 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -620,7 +620,7 @@ def test_clean_after_call_failure_with_context_and_try( assert rows_raster_file3x3.exists() -def test_clean_after_call_with_context_and_raises( +def test_clean_after_call_failure_with_context_and_raises( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check input and output pack files work with tool name call @@ -643,7 +643,7 @@ def test_clean_after_call_with_context_and_raises( assert rows_raster_file3x3.exists() -def test_clean_after_call_without_context( +def test_clean_after_call_failure_without_context( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check we delete imported input when we fail after that import. @@ -665,7 +665,7 @@ def test_clean_after_call_without_context( assert rows_raster_file3x3.exists() -def test_clean_after_call_without_context_with_keep_data( +def test_clean_after_call_failure_without_context_with_keep_data( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check we don't delete imported input even after failure when asked. 
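The behavior these tests exercise can be summarized in a short usage sketch. This is illustrative only; the project path and the pack file name are placeholders, and it reflects the API at this point in the series (the keep_data parameter, renamed to use_cache in a later commit).

import grass.script as gs
from grass.exceptions import CalledModuleError
from grass.tools import Tools

with gs.setup.init("path/to/project") as session:
    tools = Tools(session=session)
    try:
        # The pack file given as input is imported before the tool runs;
        # when the tool fails, the just-imported raster is removed again.
        tools.r_mapcalc_simple(
            expression="A + does_not_exist",
            a="elevation.grass_raster",
            output="output",
        )
    except CalledModuleError:
        pass
    # Only the pack file on disk remains; no temporary raster is left in
    # the current mapset (unless keep_data=True was requested).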
From 4dba4d3229124dc28877cbef9276db362589d530 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Wed, 24 Sep 2025 17:12:05 -0400 Subject: [PATCH 39/49] Use single try-finally block --- python/grass/tools/session_tools.py | 38 ++++++++++------------------- 1 file changed, 13 insertions(+), 25 deletions(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 905442a1a19..5ad750c2a53 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -17,7 +17,6 @@ import os import grass.script as gs -from grass.exceptions import CalledModuleError from .importexport import ImporterExporter from .support import ParameterConverter, ToolFunctionResolver, ToolResult @@ -328,22 +327,16 @@ def run_cmd( if parameter_converter is None: parameter_converter = ParameterConverter() parameter_converter.process_parameter_list(command[1:]) - if parameter_converter.import_export: - if self._importer_exporter is None: - self._importer_exporter = ImporterExporter( - run_function=self.call, run_cmd_function=self.call_cmd + try: + if parameter_converter.import_export: + if self._importer_exporter is None: + self._importer_exporter = ImporterExporter( + run_function=self.call, run_cmd_function=self.call_cmd + ) + command = self._importer_exporter.process_parameter_list( + command, **popen_options ) - command = self._importer_exporter.process_parameter_list( - command, **popen_options - ) - try: self._importer_exporter.import_data(env=popen_options["env"]) - except CalledModuleError: - if parameter_converter.import_export: - if not self._delete_on_context_exit and not self._keep_data: - self._importer_exporter.cleanup(env=popen_options["env"]) - raise - try: # We approximate tool_kwargs as original kwargs. result = self.call_cmd( command, @@ -351,20 +344,15 @@ def run_cmd( input=input, **popen_options, ) - except CalledModuleError: if parameter_converter.import_export: - if not self._delete_on_context_exit and not self._keep_data: - self._importer_exporter.cleanup(env=popen_options["env"]) - raise - if parameter_converter.import_export: - overwrite = None - if "--o" in command or "--overwrite" in command: - overwrite = True - try: + overwrite = None + if "--o" in command or "--overwrite" in command: + overwrite = True self._importer_exporter.export_data( env=popen_options["env"], overwrite=overwrite ) - finally: + finally: + if parameter_converter.import_export: if not self._delete_on_context_exit and not self._keep_data: self._importer_exporter.cleanup(env=popen_options["env"]) return result From fd82a7dd3ecba260c6347cf57649691f146a40af Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 25 Sep 2025 10:23:40 -0400 Subject: [PATCH 40/49] Use use_cache for enabling and disabling the imported and exported data preservation by the user (as opposed to the alternative name keep_data). Document pack inputs and the cache behavior. --- python/grass/tools/importexport.py | 2 + python/grass/tools/session_tools.py | 49 +++++++++++++++++-- .../grass_tools_session_tools_pack_test.py | 28 +++++------ 3 files changed, 61 insertions(+), 18 deletions(-) diff --git a/python/grass/tools/importexport.py b/python/grass/tools/importexport.py index 47e0c42ef41..045648ad226 100644 --- a/python/grass/tools/importexport.py +++ b/python/grass/tools/importexport.py @@ -151,6 +151,8 @@ def export_data( self.current_output_rasters = [] def cleanup(self, *, env): + # We don't track in what mapset the rasters are, and we assume + # the mapset was not changed in the meantime. 
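+        # Both the imported inputs and the exported outputs are removed here.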
remove = [name for (unused, name) in self.input_rasters] remove.extend([name for (unused, name) in self.output_rasters]) if remove: diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 5ad750c2a53..e689c1712af 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -116,6 +116,40 @@ class Tools: >>> result.text '' + + If a tool accepts a single raster input or output, a native GRASS raster pack + format can be used in the same way as an in-project raster or NumPy array. + GRASS native rasters are recognized by `.grass_raster`, `.grr`, and `.rpack` + extensions. All approaches can be combined in one workflow: + + >>> with Tools(session=session) as tools: + ... tools.r_slope_aspect( + ... elevation=np.ones((2, 3)), slope="slope.grass_raster", aspect="aspect" + ... ) + ... statistics = tools.r_univar(map="slope.grass_raster", format="json") + >>> # File now exists + >>> from pathlib import Path + >>> Path("slope.grass_raster").is_file() + True + >>> # In-project raster now exists + >>> tools.r_info(map="aspect", format="json")["cells"] + 6 + + When the *Tools* object is used as a context manager, in-project data created as + part of handling the raster files will be cached and will not be imported again + when used in the following steps. The cache is cleared at the end of the context. + When the *Tools* object is not used as a context manager, the cashing can be + enabled by `use_cache=True`. Explicitly enabled cache requires explicit cleanup: + + >>> tools = Tools(session=session, use_cache=True) + >>> tools.r_univar(map="slope.grass_raster", format="json")["cells"] + 6 + >>> tools.r_info(map="slope.grass_raster", format="json")["cells"] + 6 + >>> tools.cleanup() + + Notably, the above code works also with `use_cache=False` (or the default), + but the file will be imported twice, once for each tool call. """ def __init__( @@ -131,7 +165,7 @@ def __init__( capture_output=True, capture_stderr=None, consistent_return_value=False, - keep_data=None, + use_cache=None, ): """ If session is provided and has an env attribute, it is used to execute tools. @@ -176,6 +210,13 @@ def __init__( Additionally, this can be used to obtain both NumPy arrays and text outputs from a tool call. + While using of cache is primarily driven by the use of the object as + a context manager, cashing can be explicitly enabled or disabled with + the *use_cache* parameter. The cached data is kept in the current + mapset so that it is available as tool inputs. Without a context manager, + explicit `use_cache=True` requires explicit call to *cleanup* to remove + the data from the current mapset. + If *env* or other *Popen* arguments are provided to one of the tool running functions, the constructor parameters except *errors* are ignored. """ @@ -202,7 +243,7 @@ def __init__( # Decides if we delete at each run or only at the end of context. self._delete_on_context_exit = False # User request to keep the data. 
- self._keep_data = keep_data + self._use_cache = use_cache def _modified_env_if_needed(self): """Get the environment for subprocesses @@ -353,7 +394,7 @@ def run_cmd( ) finally: if parameter_converter.import_export: - if not self._delete_on_context_exit and not self._keep_data: + if not self._delete_on_context_exit and not self._use_cache: self._importer_exporter.cleanup(env=popen_options["env"]) return result @@ -467,7 +508,7 @@ def __enter__(self): def __exit__(self, exc_type, exc_value, traceback): """Exit the context manager context.""" - if not self._keep_data: + if not self._use_cache: self.cleanup() def cleanup(self): diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 2112d174edc..2431079235b 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -146,7 +146,7 @@ def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3, tmp_p def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check input and output rasters are deleted only with explicit cleanup call""" output_file = tmp_path / "file.grass_raster" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) assert output_file.exists() @@ -168,7 +168,7 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp """Check input and output rasters are kept even with context""" file_1 = tmp_path / "file_1.grass_raster" file_2 = tmp_path / "file_2.grass_raster" - with Tools(session=xy_dataset_session, keep_data=True) as tools: + with Tools(session=xy_dataset_session, use_cache=True) as tools: tools.g_region(rows=3, cols=3) tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=file_1) assert file_1.exists() @@ -224,9 +224,9 @@ def test_multiple_input_usages_with_context(xy_dataset_session, rows_raster_file )["name"] -def test_multiple_input_usages_with_keep_data(xy_dataset_session, rows_raster_file3x3): +def test_multiple_input_usages_with_use_cache(xy_dataset_session, rows_raster_file3x3): """Check input and output rasters are kept even with context""" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(raster=rows_raster_file3x3) tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="slope") tools.r_mapcalc_simple(expression="100 * A", a=rows_raster_file3x3, output="a100") @@ -275,12 +275,12 @@ def test_creation_and_use_with_context( assert slope.exists() -def test_creation_and_use_with_keep_data( +def test_creation_and_use_with_use_cache( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check that we can create an external file and then use the file later""" slope = tmp_path / "slope.grass_raster" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(raster=rows_raster_file3x3) tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) tools.r_univar(map=slope, format="json")["cells"] == 9 @@ -434,7 +434,7 @@ def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3, tmp_path): def test_direct_r_unpack_to_data(xy_dataset_session, rows_raster_file3x3): """Check that we can r.unpack data as usual""" - tools = 
Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "data_1" tools.r_unpack(input=rows_raster_file3x3, output=name) @@ -446,7 +446,7 @@ def test_direct_r_unpack_to_data(xy_dataset_session, rows_raster_file3x3): def test_direct_r_unpack_to_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check that roundtrip from existing packed raster to new packed raster works""" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "auto_packed_data_1.grass_raster" packed_file = tmp_path / name @@ -462,7 +462,7 @@ def test_direct_r_unpack_to_pack(xy_dataset_session, rows_raster_file3x3, tmp_pa def test_direct_r_pack_from_data(xy_dataset_session, tmp_path): """Check that we can r.pack data as usual""" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) tools.r_mapcalc(expression="data_1 = 1") name = "manually_packed_data_1.grass_raster" @@ -483,7 +483,7 @@ def test_direct_r_pack_from_data(xy_dataset_session, tmp_path): def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): """Check that roundtrip from existing packed raster to raster works""" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "manually_packed_data_1.grass_raster" packed_file = tmp_path / name @@ -569,7 +569,7 @@ def test_clean_after_tool_failure_without_context( assert rows_raster_file3x3.exists() -def test_clean_after_tool_failure_without_context_with_keep_data( +def test_clean_after_tool_failure_without_context_with_use_cache( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check we don't delete imported input even after failure when asked. @@ -577,7 +577,7 @@ def test_clean_after_tool_failure_without_context_with_keep_data( When explicitly requested, we wait for explicit request to delete the imported data even after a failure. """ - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) with pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"): tools.r_mapcalc_simple( expression="A + does_not_exist", a=rows_raster_file3x3, output="output" @@ -665,7 +665,7 @@ def test_clean_after_call_failure_without_context( assert rows_raster_file3x3.exists() -def test_clean_after_call_failure_without_context_with_keep_data( +def test_clean_after_call_failure_without_context_with_use_cache( xy_dataset_session, rows_raster_file3x3, tmp_path ): """Check we don't delete imported input even after failure when asked. @@ -673,7 +673,7 @@ def test_clean_after_call_failure_without_context_with_keep_data( When explicitly requested, we wait for explict request to delete the imported data even after a failure. 
""" - tools = Tools(session=xy_dataset_session, keep_data=True) + tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) output_file = tmp_path / "does_not_exist" / "file.grass_raster" assert not output_file.parent.exists() From 0190eb3a79a12233f34a9f26480b60ff493a04a6 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 25 Sep 2025 11:08:03 -0400 Subject: [PATCH 41/49] Use ToolError --- .../grass_tools_session_tools_pack_test.py | 27 +++++++++---------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 2431079235b..8837926fe46 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -6,8 +6,7 @@ import pytest -from grass.exceptions import CalledModuleError -from grass.tools import Tools +from grass.tools import Tools, ToolError def test_pack_input_output_tool_name_function( @@ -347,7 +346,7 @@ def test_output_without_overwrite(xy_dataset_session, rows_raster_file3x3, tmp_p assert os.path.exists(rows_raster_file3x3) output_file = tmp_path / "file.grass_raster" tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) - with pytest.raises(CalledModuleError, match=r"[Oo]verwrite"): + with pytest.raises(ToolError, match=r"[Oo]verwrite"): tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) assert output_file.exists() @@ -389,7 +388,7 @@ def test_non_existent_pack_input(xy_dataset_session, tmp_path): input_file = tmp_path / "does_not_exist.grass_raster" assert not input_file.exists() with pytest.raises( - CalledModuleError, + ToolError, match=rf"(?s)[^/\/a-zA-Z_]{input_file}[^/\/a-zA-Z_].*not found", ): tools.r_slope_aspect(elevation=input_file, slope="slope") @@ -410,7 +409,7 @@ def test_non_existent_output_pack_directory( assert not output_file.parent.exists() assert rows_raster_file3x3.exists() with pytest.raises( - CalledModuleError, + ToolError, match=rf"(?s)[^/\/a-zA-Z_]{re.escape(str(output_file.parent))}[^/\/a-zA-Z_].*does not exist", ): tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) @@ -424,7 +423,7 @@ def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3, tmp_path): """ tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - with pytest.raises(CalledModuleError, match="does_not_exist"): + with pytest.raises(ToolError, match="does_not_exist"): tools.r_slope_aspect( elevation=rows_raster_file3x3, slope="file.grass_raster", @@ -522,7 +521,7 @@ def test_clean_after_tool_failure_with_context_and_try( tools.r_mapcalc_simple( expression="A + does_not_exist", a=rows_raster_file3x3, output="output" ) - except CalledModuleError: + except ToolError: pass finally: assert not tools.g_findfile( @@ -539,7 +538,7 @@ def test_clean_after_tool_failure_with_context_and_raises( as clearly as the test with try-finally. """ with ( - pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"), + pytest.raises(ToolError, match=r"r\.mapcalc\.simple"), Tools(session=xy_dataset_session) as tools, ): tools.r_mapcalc_simple( @@ -559,7 +558,7 @@ def test_clean_after_tool_failure_without_context( A single call should clean after itself unless told otherwise. 
""" tools = Tools(session=xy_dataset_session) - with pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"): + with pytest.raises(ToolError, match=r"r\.mapcalc\.simple"): tools.r_mapcalc_simple( expression="A + does_not_exist", a=rows_raster_file3x3, output="output" ) @@ -578,7 +577,7 @@ def test_clean_after_tool_failure_without_context_with_use_cache( data even after a failure. """ tools = Tools(session=xy_dataset_session, use_cache=True) - with pytest.raises(CalledModuleError, match=r"r\.mapcalc\.simple"): + with pytest.raises(ToolError, match=r"r\.mapcalc\.simple"): tools.r_mapcalc_simple( expression="A + does_not_exist", a=rows_raster_file3x3, output="output" ) @@ -611,7 +610,7 @@ def test_clean_after_call_failure_with_context_and_try( # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) - except CalledModuleError: + except ToolError: pass finally: assert not tools.g_findfile( @@ -635,7 +634,7 @@ def test_clean_after_call_failure_with_context_and_raises( # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. - with pytest.raises(CalledModuleError, match=r"r\.pack"): + with pytest.raises(ToolError, match=r"r\.pack"): tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) assert not tools.g_findfile( element="raster", file=rows_raster_file3x3.stem, format="json" @@ -654,7 +653,7 @@ def test_clean_after_call_failure_without_context( tools.g_region(rows=3, cols=3) output_file = tmp_path / "does_not_exist" / "file.grass_raster" assert not output_file.parent.exists() - with pytest.raises(CalledModuleError, match=r"r\.pack"): + with pytest.raises(ToolError, match=r"r\.pack"): # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. @@ -677,7 +676,7 @@ def test_clean_after_call_failure_without_context_with_use_cache( tools.g_region(rows=3, cols=3) output_file = tmp_path / "does_not_exist" / "file.grass_raster" assert not output_file.parent.exists() - with pytest.raises(CalledModuleError, match=r"r\.pack"): + with pytest.raises(ToolError, match=r"r\.pack"): # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. From 43b326c6cc36a00263d8a3a604dc53d6482cb87a Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 25 Sep 2025 11:09:43 -0400 Subject: [PATCH 42/49] Mention overhead in the doc. Use module scope to reduce fixture setup by couple seconds. --- python/grass/tools/session_tools.py | 8 +++++++- python/grass/tools/tests/conftest.py | 10 ++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index a21ee1c906e..048291697c2 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -196,6 +196,10 @@ class Tools: >>> result.text '' + Although using arrays incurs an overhead cost compared to using only + in-project data, the array interface provides a convenient workflow + when NumPy arrays are in use. 
+ If a tool accepts a single raster input or output, a native GRASS raster pack format can be used in the same way as an in-project raster or NumPy array. GRASS native rasters are recognized by `.grass_raster`, `.grr`, and `.rpack` @@ -228,7 +232,9 @@ class Tools: >>> tools.cleanup() Notably, the above code works also with `use_cache=False` (or the default), - but the file will be imported twice, once for each tool call. + but the file will be imported twice, once for each tool call, so using + context manager or managing the cache explicity is good for reducing the + overhead which the external rasters bring compared to using in-project data. """ def __init__( diff --git a/python/grass/tools/tests/conftest.py b/python/grass/tools/tests/conftest.py index 38ac2c22de4..8315722af8c 100644 --- a/python/grass/tools/tests/conftest.py +++ b/python/grass/tools/tests/conftest.py @@ -51,8 +51,9 @@ def echoing_resolver(): return ToolFunctionResolver(run_function=lambda x: x, env=os.environ.copy()) -@pytest.fixture -def rows_raster_file3x3(tmp_path): +@pytest.fixture(scope="module") +def rows_raster_file3x3(tmp_path_factory): + tmp_path = tmp_path_factory.mktemp("rows_raster_file3x3") project = tmp_path / "xy_test3x3" gs.create_project(project) with gs.setup.init(project, env=os.environ.copy()) as session: @@ -70,8 +71,9 @@ def rows_raster_file3x3(tmp_path): return output_file -@pytest.fixture -def rows_raster_file4x5(tmp_path): +@pytest.fixture(scope="module") +def rows_raster_file4x5(tmp_path_factory): + tmp_path = tmp_path_factory.mktemp("rows_raster_file4x5") project = tmp_path / "xy_test4x5" gs.create_project(project) with gs.setup.init(project, env=os.environ.copy()) as session: From 91912b88c3622414f52ba8e13f2a2743b929eecf Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Thu, 25 Sep 2025 13:40:37 -0400 Subject: [PATCH 43/49] Slightly better wording for arrays. --- python/grass/tools/session_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 048291697c2..e73ecdb5c2f 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -198,7 +198,7 @@ class Tools: Although using arrays incurs an overhead cost compared to using only in-project data, the array interface provides a convenient workflow - when NumPy arrays are in use. + when NumPy arrays are used with other array functions. If a tool accepts a single raster input or output, a native GRASS raster pack format can be used in the same way as an in-project raster or NumPy array. 
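As a companion to the documentation added above, the following sketch contrasts the two caching modes. It is a minimal example, not part of the shipped docstring; the project path and raster names are placeholders.

import grass.script as gs
from grass.tools import Tools

with gs.setup.init("path/to/project") as session:
    # Context manager: the pack file is imported once, reused by later calls,
    # and the temporary in-project data is removed when the context ends.
    with Tools(session=session) as tools:
        tools.g_region(raster="elevation.grass_raster")
        tools.r_slope_aspect(elevation="elevation.grass_raster", slope="slope")
        cells = tools.r_univar(map="slope", format="json")["cells"]

    # No context manager: use_cache=True keeps the imported data between
    # calls until cleanup() is called explicitly.
    tools = Tools(session=session, use_cache=True)
    tools.g_region(raster="elevation.grass_raster")
    tools.r_slope_aspect(elevation="elevation.grass_raster", slope="slope2")
    cells = tools.r_univar(map="slope2", format="json")["cells"]
    tools.cleanup()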
From 7ee02085b948d6bb2917b71cc1a112f26c8d9a07 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 3 Oct 2025 14:40:26 -0400 Subject: [PATCH 44/49] Missing regexp escape for path --- .../app/tests/grass_app_cli_run_pack_test.py | 132 ++++++++++++++++++ .../grass_tools_session_tools_pack_test.py | 2 +- 2 files changed, 133 insertions(+), 1 deletion(-) create mode 100644 python/grass/app/tests/grass_app_cli_run_pack_test.py diff --git a/python/grass/app/tests/grass_app_cli_run_pack_test.py b/python/grass/app/tests/grass_app_cli_run_pack_test.py new file mode 100644 index 00000000000..8ffe0645941 --- /dev/null +++ b/python/grass/app/tests/grass_app_cli_run_pack_test.py @@ -0,0 +1,132 @@ +import json +import sys +import subprocess + +import pytest + + +def test_run_with_crs_as_pack_as_input(pack_raster_file4x5_rows): + """Check that we accept pack as input.""" + result = subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + str(pack_raster_file4x5_rows), + "r.univar", + f"map={pack_raster_file4x5_rows}", + "format=json", + ], + capture_output=True, + text=True, + check=True, + ) + assert ( + json.loads(result.stdout)["cells"] == 1 + ) # because we don't set the computational region + + +@pytest.mark.parametrize("crs", ["EPSG:3358", "EPSG:4326"]) +@pytest.mark.parametrize("extension", [".grass_raster", ".grr", ".rpack"]) +def test_run_with_crs_as_pack_as_output(tmp_path, crs, extension): + """Check outputting pack with different CRSs and extensions""" + raster = tmp_path / f"test{extension}" + subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + crs, + "r.mapcalc.simple", + "expression=row() + col()", + f"output={raster}", + ], + check=True, + ) + assert raster.exists() + assert raster.is_file() + result = subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + str(raster), + "g.proj", + "-p", + "format=json", + ], + capture_output=True, + text=True, + check=True, + ) + assert json.loads(result.stdout)["srid"] == crs + + +def test_run_with_crs_as_pack_with_multiple_steps(tmp_path): + """Check that we accept pack as both input and output. + + The extension is only tested for the output. + Tests basic properties of the output. 
+ """ + crs = "EPSG:3358" + extension = ".grass_raster" + raster_a = tmp_path / f"test_a{extension}" + raster_b = tmp_path / f"test_b{extension}" + subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + crs, + "r.mapcalc.simple", + "expression=row() + col()", + f"output={raster_a}", + ], + check=True, + ) + assert raster_a.exists() + assert raster_a.is_file() + subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + crs, + "r.mapcalc.simple", + "expression=1.5 * A", + f"a={raster_a}", + f"output={raster_b}", + ], + check=True, + ) + assert raster_b.exists() + assert raster_b.is_file() + result = subprocess.run( + [ + sys.executable, + "-m", + "grass.app", + "run", + "--crs", + crs, + "r.univar", + f"map={raster_b}", + "format=json", + ], + capture_output=True, + text=True, + check=True, + ) + assert ( + json.loads(result.stdout)["cells"] == 1 + ) # because we don't set the computational region diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 8837926fe46..13c764ae038 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -389,7 +389,7 @@ def test_non_existent_pack_input(xy_dataset_session, tmp_path): assert not input_file.exists() with pytest.raises( ToolError, - match=rf"(?s)[^/\/a-zA-Z_]{input_file}[^/\/a-zA-Z_].*not found", + match=rf"(?s)[^/\/a-zA-Z_]{re.escape(str(input_file))}[^/\/a-zA-Z_].*not found", ): tools.r_slope_aspect(elevation=input_file, slope="slope") assert not tools.g_findfile(element="raster", file=input_file.stem, format="json")[ From e056ba65cefc12dac32a34e683197a4df2e73852 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 3 Oct 2025 14:57:54 -0400 Subject: [PATCH 45/49] Use non-square raster, fix tests --- python/grass/tools/tests/conftest.py | 19 +- .../grass_tools_session_tools_pack_test.py | 256 +++++++++--------- 2 files changed, 142 insertions(+), 133 deletions(-) diff --git a/python/grass/tools/tests/conftest.py b/python/grass/tools/tests/conftest.py index 8315722af8c..a413c15766a 100644 --- a/python/grass/tools/tests/conftest.py +++ b/python/grass/tools/tests/conftest.py @@ -52,14 +52,19 @@ def echoing_resolver(): @pytest.fixture(scope="module") -def rows_raster_file3x3(tmp_path_factory): - tmp_path = tmp_path_factory.mktemp("rows_raster_file3x3") - project = tmp_path / "xy_test3x3" +def rows_raster_file3x2(tmp_path_factory): + """Native raster pack file + + Smallest possible file, but with rows and columns greater than one, + and a different number of rows and columns. + """ + tmp_path = tmp_path_factory.mktemp("rows_raster_file3x2") + project = tmp_path / "xy_test3x2" gs.create_project(project) with gs.setup.init(project, env=os.environ.copy()) as session: - gs.run_command("g.region", rows=3, cols=3, env=session.env) + gs.run_command("g.region", rows=3, cols=2, env=session.env) gs.mapcalc("rows = row()", env=session.env) - output_file = tmp_path / "rows3x3.grass_raster" + output_file = tmp_path / "rows3x2.grass_raster" gs.run_command( "r.pack", input="rows", @@ -73,6 +78,10 @@ def rows_raster_file3x3(tmp_path_factory): @pytest.fixture(scope="module") def rows_raster_file4x5(tmp_path_factory): + """Native raster pack file + + Small file, but slightly larger than the smallest. 
+ """ tmp_path = tmp_path_factory.mktemp("rows_raster_file4x5") project = tmp_path / "xy_test4x5" gs.create_project(project) diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 13c764ae038..20edd0fc41f 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -10,25 +10,25 @@ def test_pack_input_output_tool_name_function( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") @pytest.mark.parametrize("parameter_type", [str, Path]) def test_pack_input_output_tool_name_function_string_value( - xy_dataset_session, rows_raster_file3x3, tmp_path, parameter_type + xy_dataset_session, rows_raster_file3x2, tmp_path, parameter_type ): """Check input and output pack files work string a parameter @@ -37,66 +37,66 @@ def test_pack_input_output_tool_name_function_string_value( """ tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" tools.r_slope_aspect( - elevation=parameter_type(rows_raster_file3x3), slope=parameter_type(output_file) + elevation=parameter_type(rows_raster_file3x2), slope=parameter_type(output_file) ) assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") def test_pack_input_output_with_name_and_parameter_call( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name as string""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" - tools.run("r.slope.aspect", elevation=rows_raster_file3x3, slope=output_file) + tools.run("r.slope.aspect", elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") def test_pack_input_output_with_subprocess_run_like_call( - xy_dataset_session, rows_raster_file3x3, tmp_path + 
xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with command as list""" tools = Tools(session=xy_dataset_session) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" tools.run_cmd( [ "r.slope.aspect", - f"elevation={rows_raster_file3x3}", + f"elevation={rows_raster_file3x2}", f"aspect={output_file}", ] ) assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") -def test_no_modify_command(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_no_modify_command(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check that input command is not modified by the function""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) output_file = tmp_path / "file.grass_raster" command = [ "r.slope.aspect", - f"elevation={rows_raster_file3x3}", + f"elevation={rows_raster_file3x2}", f"slope={output_file}", ] original = command.copy() @@ -104,27 +104,27 @@ def test_no_modify_command(xy_dataset_session, rows_raster_file3x3, tmp_path): assert original == command -def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_io_cleanup_after_function(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check input and output rasters are deleted after function call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) output_file = tmp_path / "file.grass_raster" - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") -def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check input and output rasters are deleted at the end of context""" output_file_1 = tmp_path / "file.grass_raster" output_file_2 = tmp_path / "file2.grass_raster" with Tools(session=xy_dataset_session) as tools: tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file_1) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file_1) assert output_file_1.exists() assert tools.g_findfile(element="raster", file="file", format="json")["name"] tools.r_mapcalc_simple(expression="100 * A", a="file", output=output_file_2) @@ -137,39 +137,39 @@ def test_io_cleanup_after_context(xy_dataset_session, rows_raster_file3x3, tmp_p assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile(element="raster", file="file2", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") -def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x3, 
tmp_path): +def test_io_no_cleanup(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check input and output rasters are deleted only with explicit cleanup call""" output_file = tmp_path / "file.grass_raster" tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() # Files should still be available. assert tools.g_findfile(element="raster", file="file", format="json")["name"] assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] # But an explicit cleanup should delete the files. tools.cleanup() assert not tools.g_findfile(element="raster", file="file", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") -def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check input and output rasters are kept even with context""" file_1 = tmp_path / "file_1.grass_raster" file_2 = tmp_path / "file_2.grass_raster" with Tools(session=xy_dataset_session, use_cache=True) as tools: tools.g_region(rows=3, cols=3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=file_1) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=file_1) assert file_1.exists() assert tools.g_findfile(element="raster", file=file_1.stem, format="json")[ "name" @@ -186,7 +186,7 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp assert tools.g_findfile(element="raster", file=file_1.stem, format="json")["name"] assert tools.g_findfile(element="raster", file=file_2.stem, format="json")["name"] assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] # But an explicit cleanup should delete the files. tools.cleanup() @@ -197,7 +197,7 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp "name" ] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert not tools.g_list(type="raster", format="json") # The pack files should still exist after cleanup. 
@@ -205,66 +205,66 @@ def test_io_no_cleanup_with_context(xy_dataset_session, rows_raster_file3x3, tmp assert file_2.exists() -def test_multiple_input_usages_with_context(xy_dataset_session, rows_raster_file3x3): +def test_multiple_input_usages_with_context(xy_dataset_session, rows_raster_file3x2): """Check multiple usages of the same input raster with context""" with Tools(session=xy_dataset_session) as tools: - tools.g_region(raster=rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="slope") + tools.g_region(raster=rows_raster_file3x2) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope="slope") tools.r_mapcalc_simple( - expression="100 * A", a=rows_raster_file3x3, output="a100" + expression="100 * A", a=rows_raster_file3x2, output="a100" ) assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert tools.g_findfile(element="raster", file="slope", format="json")["name"] assert tools.g_findfile(element="raster", file="a100", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] -def test_multiple_input_usages_with_use_cache(xy_dataset_session, rows_raster_file3x3): +def test_multiple_input_usages_with_use_cache(xy_dataset_session, rows_raster_file3x2): """Check input and output rasters are kept even with context""" tools = Tools(session=xy_dataset_session, use_cache=True) - tools.g_region(raster=rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope="slope") - tools.r_mapcalc_simple(expression="100 * A", a=rows_raster_file3x3, output="a100") + tools.g_region(raster=rows_raster_file3x2) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope="slope") + tools.r_mapcalc_simple(expression="100 * A", a=rows_raster_file3x2, output="a100") assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert tools.g_findfile(element="raster", file="slope", format="json")["name"] assert tools.g_findfile(element="raster", file="a100", format="json")["name"] tools.cleanup() assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] -def test_multiple_input_usages_with_defaults(xy_dataset_session, rows_raster_file3x3): +def test_multiple_input_usages_with_defaults(xy_dataset_session, rows_raster_file3x2): """Check input and output rasters are kept even with context""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) tools.r_mapcalc_simple( expression="A + B", - a=rows_raster_file3x3, - b=rows_raster_file3x3, + a=rows_raster_file3x2, + b=rows_raster_file3x2, output="output", ) assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert tools.g_findfile(element="raster", file="output", format="json")["name"] def test_creation_and_use_with_context( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check that we can create an external file and then use the file later""" slope = tmp_path / "slope.grass_raster" with Tools(session=xy_dataset_session) as tools: - 
tools.g_region(raster=rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) - tools.r_univar(map=slope, format="json")["cells"] == 9 + tools.g_region(raster=rows_raster_file3x2) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=slope) + assert tools.r_univar(map=slope, format="json")["cells"] == 6 assert tools.g_findfile(element="raster", file=slope.stem, format="json")[ "name" ] @@ -275,54 +275,54 @@ def test_creation_and_use_with_context( def test_creation_and_use_with_use_cache( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check that we can create an external file and then use the file later""" slope = tmp_path / "slope.grass_raster" tools = Tools(session=xy_dataset_session, use_cache=True) - tools.g_region(raster=rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) - tools.r_univar(map=slope, format="json")["cells"] == 9 + tools.g_region(raster=rows_raster_file3x2) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=slope) + assert tools.r_univar(map=slope, format="json")["cells"] == 6 assert tools.g_findfile(element="raster", file=slope.stem, format="json")["name"] assert slope.exists() def test_creation_and_use_with_defaults( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check that we can create an external file and then use the file later""" slope = tmp_path / "slope.grass_raster" tools = Tools(session=xy_dataset_session) - tools.g_region(raster=rows_raster_file3x3) - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=slope) - tools.r_univar(map=slope, format="json")["cells"] == 9 + tools.g_region(raster=rows_raster_file3x2) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=slope) + assert tools.r_univar(map=slope, format="json")["cells"] == 6 assert not tools.g_findfile(element="raster", file=slope.stem, format="json")[ "name" ] assert slope.exists() -def test_repeated_input_usages_with_context(xy_dataset_session, rows_raster_file3x3): +def test_repeated_input_usages_with_context(xy_dataset_session, rows_raster_file3x2): """Check multiple usages of the same input raster with context""" with Tools(session=xy_dataset_session) as tools: tools.g_region(rows=3, cols=3) tools.r_mapcalc_simple( expression="A + B", - a=rows_raster_file3x3, - b=rows_raster_file3x3, + a=rows_raster_file3x2, + b=rows_raster_file3x2, output="output", ) assert tools.g_findfile(element="raster", file="output", format="json")["name"] assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert tools.g_findfile(element="raster", file="output", format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] -def test_repeated_output(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_repeated_output(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check behavior when two outputs have the same name This would ideally result in error or some other clear state, but at least @@ -334,49 +334,49 @@ def test_repeated_output(xy_dataset_session, rows_raster_file3x3, tmp_path): tools.g_region(rows=3, cols=3) output_file = tmp_path / "file.grass_raster" tools.r_slope_aspect( - elevation=rows_raster_file3x3, slope=output_file, aspect=output_file + 
elevation=rows_raster_file3x2, slope=output_file, aspect=output_file ) assert output_file.exists() -def test_output_without_overwrite(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_output_without_overwrite(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) with pytest.raises(ToolError, match=r"[Oo]verwrite"): - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() def test_output_with_object_level_overwrite( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session, overwrite=True) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) # Same call the second time. - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert output_file.exists() def test_output_with_function_level_overwrite( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) tools.g_region(rows=3, cols=3) - assert os.path.exists(rows_raster_file3x3) + assert os.path.exists(rows_raster_file3x2) output_file = tmp_path / "file.grass_raster" - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) # Same call the second time. 
tools.r_slope_aspect( - elevation=rows_raster_file3x3, slope=output_file, overwrite=True + elevation=rows_raster_file3x2, slope=output_file, overwrite=True ) assert output_file.exists() @@ -399,7 +399,7 @@ def test_non_existent_pack_input(xy_dataset_session, tmp_path): def test_non_existent_output_pack_directory( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call""" tools = Tools(session=xy_dataset_session) @@ -407,15 +407,15 @@ def test_non_existent_output_pack_directory( output_file = tmp_path / "does_not_exist" / "file.grass_raster" assert not output_file.exists() assert not output_file.parent.exists() - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() with pytest.raises( ToolError, match=rf"(?s)[^/\/a-zA-Z_]{re.escape(str(output_file.parent))}[^/\/a-zA-Z_].*does not exist", ): - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) -def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_wrong_parameter(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check wrong parameter causes standard exception Since the tool is called to process its parameters with pack IO, @@ -425,37 +425,37 @@ def test_wrong_parameter(xy_dataset_session, rows_raster_file3x3, tmp_path): tools.g_region(rows=3, cols=3) with pytest.raises(ToolError, match="does_not_exist"): tools.r_slope_aspect( - elevation=rows_raster_file3x3, + elevation=rows_raster_file3x2, slope="file.grass_raster", does_not_exist="test", ) -def test_direct_r_unpack_to_data(xy_dataset_session, rows_raster_file3x3): +def test_direct_r_unpack_to_data(xy_dataset_session, rows_raster_file3x2): """Check that we can r.unpack data as usual""" tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "data_1" - tools.r_unpack(input=rows_raster_file3x3, output=name) + tools.r_unpack(input=rows_raster_file3x2, output=name) assert tools.g_findfile(element="raster", file=name, format="json")["name"] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] -def test_direct_r_unpack_to_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_direct_r_unpack_to_pack(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check that roundtrip from existing packed raster to new packed raster works""" tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "auto_packed_data_1.grass_raster" packed_file = tmp_path / name - tools.r_unpack(input=rows_raster_file3x3, output=packed_file) + tools.r_unpack(input=rows_raster_file3x2, output=packed_file) assert packed_file.exists() assert tools.g_findfile(element="raster", file=packed_file.stem, format="json")[ "name" ] assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] @@ -480,20 +480,20 @@ def test_direct_r_pack_from_data(xy_dataset_session, tmp_path): assert tools.g_findfile(element="raster", file="data_1", format="json")["name"] -def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x3, tmp_path): +def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x2, tmp_path): """Check that roundtrip from existing packed 
raster to raster works""" tools = Tools(session=xy_dataset_session, use_cache=True) tools.g_region(rows=3, cols=3) name = "manually_packed_data_1.grass_raster" packed_file = tmp_path / name - tools.r_pack(input=rows_raster_file3x3, output=packed_file) + tools.r_pack(input=rows_raster_file3x2, output=packed_file) # New file was created. assert packed_file.exists() # Input still exists. - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() # Auto-imported raster should exist. assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] # There should be no raster created automatically. assert not tools.g_findfile(element="raster", file=packed_file.stem, format="json")[ @@ -502,12 +502,12 @@ def test_direct_r_pack_from_pack(xy_dataset_session, rows_raster_file3x3, tmp_pa tools.cleanup() # Auto-imported raster should be deleted. assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] def test_clean_after_tool_failure_with_context_and_try( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we delete imported input when we fail after that import. @@ -519,18 +519,18 @@ def test_clean_after_tool_failure_with_context_and_try( try: with Tools(session=xy_dataset_session) as tools: tools.r_mapcalc_simple( - expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + expression="A + does_not_exist", a=rows_raster_file3x2, output="output" ) except ToolError: pass finally: assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] def test_clean_after_tool_failure_with_context_and_raises( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call @@ -542,16 +542,16 @@ def test_clean_after_tool_failure_with_context_and_raises( Tools(session=xy_dataset_session) as tools, ): tools.r_mapcalc_simple( - expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + expression="A + does_not_exist", a=rows_raster_file3x2, output="output" ) assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_tool_failure_without_context( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we delete imported input when we fail after that import. 
@@ -560,16 +560,16 @@ def test_clean_after_tool_failure_without_context( tools = Tools(session=xy_dataset_session) with pytest.raises(ToolError, match=r"r\.mapcalc\.simple"): tools.r_mapcalc_simple( - expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + expression="A + does_not_exist", a=rows_raster_file3x2, output="output" ) assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_tool_failure_without_context_with_use_cache( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we don't delete imported input even after failure when asked. @@ -579,20 +579,20 @@ def test_clean_after_tool_failure_without_context_with_use_cache( tools = Tools(session=xy_dataset_session, use_cache=True) with pytest.raises(ToolError, match=r"r\.mapcalc\.simple"): tools.r_mapcalc_simple( - expression="A + does_not_exist", a=rows_raster_file3x3, output="output" + expression="A + does_not_exist", a=rows_raster_file3x2, output="output" ) assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] tools.cleanup() assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_call_failure_with_context_and_try( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we delete imported input when we fail after that import. @@ -609,18 +609,18 @@ def test_clean_after_call_failure_with_context_and_try( # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) except ToolError: pass finally: assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_call_failure_with_context_and_raises( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check input and output pack files work with tool name call @@ -635,15 +635,15 @@ def test_clean_after_call_failure_with_context_and_raises( # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. 
with pytest.raises(ToolError, match=r"r\.pack"): - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_call_failure_without_context( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we delete imported input when we fail after that import. @@ -657,15 +657,15 @@ def test_clean_after_call_failure_without_context( # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. - tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() def test_clean_after_call_failure_without_context_with_use_cache( - xy_dataset_session, rows_raster_file3x3, tmp_path + xy_dataset_session, rows_raster_file3x2, tmp_path ): """Check we don't delete imported input even after failure when asked. @@ -680,12 +680,12 @@ def test_clean_after_call_failure_without_context_with_use_cache( # Non-existence of a directory will be failure inside r.pack which is # what we use to get an internal failure inside the call. # This relies on inputs being resolved before outputs. 
- tools.r_slope_aspect(elevation=rows_raster_file3x3, slope=output_file) + tools.r_slope_aspect(elevation=rows_raster_file3x2, slope=output_file) assert tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] tools.cleanup() assert not tools.g_findfile( - element="raster", file=rows_raster_file3x3.stem, format="json" + element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] - assert rows_raster_file3x3.exists() + assert rows_raster_file3x2.exists() From 07b9c355f358d2a3f099c5198f7c0a6779746aa3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Fri, 3 Oct 2025 15:44:08 -0400 Subject: [PATCH 46/49] Test whole workflow with create_project --- python/grass/tools/tests/conftest.py | 42 ++++++++++ .../grass_tools_session_tools_pack_test.py | 78 ++++++++++++++++++- 2 files changed, 119 insertions(+), 1 deletion(-) diff --git a/python/grass/tools/tests/conftest.py b/python/grass/tools/tests/conftest.py index a413c15766a..ae1edd1f76c 100644 --- a/python/grass/tools/tests/conftest.py +++ b/python/grass/tools/tests/conftest.py @@ -98,3 +98,45 @@ def rows_raster_file4x5(tmp_path_factory): env=session.env, ) return output_file + + +@pytest.fixture(scope="module") +def ones_raster_file_epsg3358(tmp_path_factory): + """Native raster pack with EPSG:3358""" + tmp_path = tmp_path_factory.mktemp("ones_raster_file4x5") + project = tmp_path / "xy_test4x5" + gs.create_project(project, crs="EPSG:3358") + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=4, cols=5, env=session.env) + gs.mapcalc("ones = 1", env=session.env) + output_file = tmp_path / "ones4x5.grass_raster" + gs.run_command( + "r.pack", + input="ones", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file + + +@pytest.fixture(scope="module") +def ones_raster_file_epsg4326(tmp_path_factory): + """Native raster pack with EPSG:4326 (LL)""" + tmp_path = tmp_path_factory.mktemp("ones_raster_file4x5") + project = tmp_path / "xy_test4x5" + gs.create_project(project, crs="EPSG:4326") + with gs.setup.init(project, env=os.environ.copy()) as session: + gs.run_command("g.region", rows=4, cols=5, env=session.env) + gs.mapcalc("ones = 1", env=session.env) + output_file = tmp_path / "ones4x5.grass_raster" + gs.run_command( + "r.pack", + input="ones", + output=output_file, + flags="c", + superquiet=True, + env=session.env, + ) + return output_file diff --git a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py index 20edd0fc41f..afd36d26ea7 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_pack_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_pack_test.py @@ -6,6 +6,7 @@ import pytest +import grass.script as gs from grass.tools import Tools, ToolError @@ -669,7 +670,7 @@ def test_clean_after_call_failure_without_context_with_use_cache( ): """Check we don't delete imported input even after failure when asked. - When explicitly requested, we wait for explict request to delete the imported + When explicitly requested, we wait for explicit request to delete the imported data even after a failure. 
""" tools = Tools(session=xy_dataset_session, use_cache=True) @@ -689,3 +690,78 @@ def test_clean_after_call_failure_without_context_with_use_cache( element="raster", file=rows_raster_file3x2.stem, format="json" )["name"] assert rows_raster_file3x2.exists() + + +def test_workflow_create_project_and_run_general_crs( + tmp_path, ones_raster_file_epsg3358 +): + """Check workflow with create project""" + project = tmp_path / "project" + raster = tmp_path / "raster.grass_raster" + gs.create_project(project, crs=ones_raster_file_epsg3358) + with ( + gs.setup.init(project) as session, + Tools(session=session) as tools, + ): + assert tools.g_region(flags="p", format="json")["crs"]["type"] == "other" + assert tools.g_proj(flags="p", format="json")["srid"] == "EPSG:3358" + tools.g_region(raster=ones_raster_file_epsg3358) + assert tools.g_region(flags="p", format="json")["cells"] == 4 * 5 + tools.r_mapcalc_simple( + expression="2 * A", a=ones_raster_file_epsg3358, output=raster + ) + stats = tools.r_univar(map=raster, format="json") + assert stats["cells"] == 4 * 5 + assert stats["min"] == 2 + assert stats["max"] == 2 + assert stats["mean"] == 2 + assert stats["sum"] == 4 * 5 * 1 * 2 + assert raster.exists() + assert raster.is_file() + + +def test_workflow_create_project_and_run_ll_crs(tmp_path, ones_raster_file_epsg4326): + """Check workflow with create project""" + project = tmp_path / "project" + raster = tmp_path / "raster.grass_raster" + gs.create_project(project, crs=ones_raster_file_epsg4326) + with ( + gs.setup.init(project) as session, + Tools(session=session) as tools, + ): + assert tools.g_region(flags="p", format="json")["crs"]["type"] == "ll" + assert tools.g_proj(flags="p", format="json")["srid"] == "EPSG:4326" + tools.g_region(raster=ones_raster_file_epsg4326) + assert tools.g_region(flags="p", format="json")["cells"] == 4 * 5 + tools.r_mapcalc_simple( + expression="2 * A", a=ones_raster_file_epsg4326, output=raster + ) + stats = tools.r_univar(map=raster, format="json") + assert stats["cells"] == 4 * 5 + assert stats["min"] == 2 + assert stats["max"] == 2 + assert stats["mean"] == 2 + assert stats["sum"] == 4 * 5 * 1 * 2 + assert raster.exists() + assert raster.is_file() + + +def test_workflow_create_project_and_run_xy_crs(tmp_path, rows_raster_file4x5): + """Check workflow with create project""" + project = tmp_path / "project" + raster = tmp_path / "raster.grass_raster" + gs.create_project(project, crs=rows_raster_file4x5) + with ( + gs.setup.init(project) as session, + Tools(session=session) as tools, + ): + assert tools.g_region(flags="p", format="json")["crs"]["type"] == "xy" + tools.g_region(raster=rows_raster_file4x5) + assert tools.g_region(flags="p", format="json")["cells"] == 4 * 5 + tools.r_mapcalc_simple(expression="2 * A", a=rows_raster_file4x5, output=raster) + stats = tools.r_univar(map=raster, format="json") + assert stats["cells"] == 4 * 5 + assert stats["min"] == 2 + assert stats["max"] == 8 + assert raster.exists() + assert raster.is_file() From f66730279f378406e1a7e012683b614958d2c473 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sat, 4 Oct 2025 22:33:30 -0400 Subject: [PATCH 47/49] Clarify interface of run_cmd by creating a private/protected version which has the internal-only parameters. Document the import-export code. 
--- python/grass/tools/session_tools.py | 55 +++++++++++++++++++++++++---- 1 file changed, 48 insertions(+), 7 deletions(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index e73ecdb5c2f..6de9c6b061b 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -233,8 +233,17 @@ class Tools: Notably, the above code works also with `use_cache=False` (or the default), but the file will be imported twice, once for each tool call, so using - context manager or managing the cache explicity is good for reducing the + context manager or managing the cache explicitly is good for reducing the overhead which the external rasters bring compared to using in-project data. + + For parallel processing, create separate Tools objects. Each Tools instance + can operate with the same or different sessions or environments, as well as with + :py:class:`grass.script.RegionManager` and :py:class:`grass.script.MaskManager`. + When working exclusively with data within a project, objects are lightweight + and add negligible overhead compared to direct subprocess calls. + Using NumPy or out-of-project native GRASS raster files, adds computational + and IO cost, but generally not more than the cost of the same operation done + directly without the aid of a Tools object. """ def __init__( @@ -403,9 +412,9 @@ def run(self, tool_name_: str, /, **kwargs): ) # We approximate original kwargs with the possibly-modified kwargs. - result = self.run_cmd( + result = self._run_cmd( args, - tool_kwargs=kwargs, + tool_kwargs=kwargs, # We send the original kwargs for error reporting. input=object_parameter_handler.stdin, parameter_converter=object_parameter_handler, **popen_options, @@ -439,10 +448,33 @@ def run_cmd( ): """Run a tool by passing its name and parameters a list of strings. - The function may perform additional processing on the parameters. + The function will perform additional processing on the parameters + such as importing GRASS native raster files to in-project data. :param command: list of strings to execute as the command :param input: text input for the standard input of the tool + :param **popen_options: additional options for :py:func:`subprocess.Popen` + """ + return self._run_cmd(command, input=input, **popen_options) + + def _run_cmd( + self, + command: list[str], + *, + input: str | bytes | None = None, + parameter_converter: ParameterConverter | None = None, + tool_kwargs: dict | None = None, + **popen_options, + ): + """Run a tool by passing its name and parameters a list of strings. + + If parameters were already processed using a *ParameterConverter* instance, + the instance can be passed as the *parameter_converter* parameter, avoiding + re-processing. + + :param command: list of strings to execute as the command + :param input: text input for the standard input of the tool + :param parameter_converter: a Parameter converter instance if already used :param tool_kwargs: named tool arguments used for error reporting (experimental) :param **popen_options: additional options for :py:func:`subprocess.Popen` """ @@ -451,26 +483,34 @@ def run_cmd( popen_options["env"] = self._modified_env_if_needed() if parameter_converter is None: + # Parameters were not processed yet, so process them now. parameter_converter = ParameterConverter() parameter_converter.process_parameter_list(command[1:]) try: + # Processing parameters for import and export is costly, so we do it + # only when we previously determined it there might be such parameters. 
if parameter_converter.import_export: if self._importer_exporter is None: + # The importer exporter instance may be reused in later calls + # based on how the cache is used. self._importer_exporter = ImporterExporter( run_function=self.call, run_cmd_function=self.call_cmd ) command = self._importer_exporter.process_parameter_list( command, **popen_options ) + # The command now has external files replaced with in-project data, + # so now we import the data. self._importer_exporter.import_data(env=popen_options["env"]) - # We approximate tool_kwargs as original kwargs. result = self.call_cmd( command, - tool_kwargs=tool_kwargs, + tool_kwargs=tool_kwargs, # used in error reporting input=input, **popen_options, ) if parameter_converter.import_export: + # Exporting data inherits the overwrite flag from the command + # if provided, otherwise it is driven by the environment. overwrite = None if "--o" in command or "--overwrite" in command: overwrite = True @@ -480,6 +520,7 @@ def run_cmd( finally: if parameter_converter.import_export: if not self._delete_on_context_exit and not self._use_cache: + # Delete the in-project data after each call. self._importer_exporter.cleanup(env=popen_options["env"]) return result @@ -507,7 +548,7 @@ def call_cmd(self, command, tool_kwargs=None, input=None, **popen_options): defaults and return value. :param command: list of strings to execute as the command - :param tool_kwargs: named tool arguments used for error reporting (experimental) + :param tool_kwargs: named tool arguments used for error reporting :param input: text input for the standard input of the tool :param **popen_options: additional options for :py:func:`subprocess.Popen` """ From de5e79730fb125e0144e5ca94794fa8fedf5f3f3 Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Sat, 4 Oct 2025 22:34:27 -0400 Subject: [PATCH 48/49] Minor cleanup of unrelated comments in tests (related because of the --json option). --- python/grass/tools/tests/grass_tools_session_tools_test.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/python/grass/tools/tests/grass_tools_session_tools_test.py b/python/grass/tools/tests/grass_tools_session_tools_test.py index 02848789791..b420b28657c 100644 --- a/python/grass/tools/tests/grass_tools_session_tools_test.py +++ b/python/grass/tools/tests/grass_tools_session_tools_test.py @@ -135,7 +135,8 @@ def test_json_with_direct_subprocess_run_like_call(xy_dataset_session): def test_json_as_list(xy_dataset_session): """Check that a JSON result behaves as a list""" tools = Tools(session=xy_dataset_session) - # This also tests JSON parsing with a format option. + # This also tests JSON parsing without a format option + # (which should not have any influence). result = tools.g_search_modules(keyword="random", flags="j") for item in result: assert "name" in item @@ -146,7 +147,6 @@ def test_json_as_list(xy_dataset_session): def test_json_for_pandas(xy_dataset_session): """Check that JSON can be read into Pandas dataframe""" tools = Tools(session=xy_dataset_session) - # This also tests JSON parsing with a format option. result = tools.run("g.region", flags="p", format="json") assert not pd.DataFrame(result).empty @@ -171,7 +171,8 @@ def test_help_call_with_parameters(xy_dataset_session): def test_json_call_with_low_level_call(xy_dataset_session): """Check that --json call works including JSON data parsing""" tools = Tools(session=xy_dataset_session) - # This also tests JSON parsing with a format option. 
+ # This also tests JSON parsing without a format option + # (which should not have any influence). data = tools.call_cmd( ["r.slope.aspect", "elevation=dem", "slope=slope", "--json"] ).json From 110041d9fb10d61ae2e878ecc46d5e86db49c08b Mon Sep 17 00:00:00 2001 From: Vaclav Petras Date: Mon, 6 Oct 2025 10:41:04 -0400 Subject: [PATCH 49/49] Fix typo Co-authored-by: Anna Petrasova --- python/grass/tools/session_tools.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python/grass/tools/session_tools.py b/python/grass/tools/session_tools.py index 6de9c6b061b..61c9b1db3d2 100644 --- a/python/grass/tools/session_tools.py +++ b/python/grass/tools/session_tools.py @@ -488,7 +488,7 @@ def _run_cmd( parameter_converter.process_parameter_list(command[1:]) try: # Processing parameters for import and export is costly, so we do it - # only when we previously determined it there might be such parameters. + # only when we previously determined there might be such parameters. if parameter_converter.import_export: if self._importer_exporter is None: # The importer exporter instance may be reused in later calls
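Taken together, the workflow exercised by the new create_project tests can be summarized outside of pytest roughly as follows. This is a minimal sketch based on the tests above; the project path and raster file names are illustrative only, and the input pack file is assumed to already exist.

    from pathlib import Path

    import grass.script as gs
    from grass.tools import Tools

    # Illustrative names; any GRASS native raster pack file works the same way.
    packed_input = Path("elevation.grass_raster")
    packed_output = Path("slope.grass_raster")

    # Create a project whose CRS is taken from the packed raster itself,
    # then run tools directly against the external files.
    gs.create_project("project", crs=packed_input)
    with gs.setup.init("project") as session, Tools(session=session) as tools:
        tools.g_region(raster=packed_input)
        tools.r_slope_aspect(elevation=packed_input, slope=packed_output)
        print(tools.r_univar(map=packed_output, format="json")["mean"])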