diff --git a/.github/workflows/scenario.yaml b/.github/workflows/scenario.yaml
index b7bf09a9..2cc289e4 100644
--- a/.github/workflows/scenario.yaml
+++ b/.github/workflows/scenario.yaml
@@ -55,6 +55,9 @@ jobs:
         pip3 install -r cicd/requirements.txt
 
     - name: Run Walkthrough Scenarios
+      env:
+        AWS_ACCESS_KEY_ID: ${{ secrets.CI_SCENARIO_RO_AWS_ACCESS_KEY_ID }}
+        AWS_SECRET_ACCESS_KEY: ${{ secrets.CI_SCENARIO_RO_AWS_SECRET_ACCESS_KEY }}
       run: |
         python3 test/python/markdown_testing/markdown_testing.py 2>&1 | tee cicd/log/markdown-testing-results.log
diff --git a/.vscode/.gitignore b/.vscode/.gitignore
index ec83ec27..9a4c1809 100644
--- a/.vscode/.gitignore
+++ b/.vscode/.gitignore
@@ -1,4 +1,5 @@
 *
 !.gitignore
 !launch.json
-!settings.json
\ No newline at end of file
+!settings.json
+!example.env
diff --git a/.vscode/example.env b/.vscode/example.env
new file mode 100644
index 00000000..6232bb0d
--- /dev/null
+++ b/.vscode/example.env
@@ -0,0 +1,4 @@
+DUMMY_DIGITALOCEAN_USERNAME=myusername
+DUMMY_DIGITALOCEAN_PASSWORD=mypassword
+DD_API_KEY=myusername
+DD_APPLICATION_KEY=mypassword
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 70ea776e..c6972e7b 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -155,7 +155,10 @@
         "select JSON_EXTRACT(saml.samlIdentity, '$.username') as saml_username from github.scim.saml_ids saml where saml.org = 'dummyorg';",
         "select kind, name, maximumCardsPerInstance from google.compute.acceleratorTypes where project = 'defective-response-content-project' and zone = 'australia-southeast1-a' order by name desc;",
         "registry pull google;",
-        "create materialized view nv as select BackupId, BackupState from aws.cloudhsm.backups where region = 'ap-southeast-2' order by BackupId;"
+        "create materialized view nv as select BackupId, BackupState from aws.cloudhsm.backups where region = 'ap-southeast-2' order by BackupId;",
+        "SELECT instance_id FROM aws.ec2.instances WHERE region IN ('us-east-1', 'ap-southeast-2');",
+        "SELECT instance_id FROM aws.ec2_solid_gold.instances WHERE region IN ('us-east-1', 'ap-southeast-2');",
+        "SELECT region FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id; SELECT region, instance_id, tenancy, security_groups FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id; SELECT region, instance_id, tenancy, security_groups FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id; SELECT region, instance_id, tenancy, security_groups FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id; SELECT region, instance_id, tenancy, security_groups FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id; SELECT region, instance_id, tenancy, security_groups FROM aws.ec2_nextgen.instances WHERE region IN ('us-east-1', 'ap-southeast-2', 'ap-southeast-1') order by region, instance_id;",
       ],
       "default": "show providers;"
     },
@@ -182,12 +185,13 @@
       "type": "pickString",
       "id": "authString",
       "description": "Auth Input arg String",
-      "default": "{ \"azure\": { \"type\": \"azure_default\" }, \"digitalocean\": { \"type\": \"bearer\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/digitalocean-key.txt\" }, \"google\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/stackql-security-reviewer.json\" }, \"googleadmin\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/ryuk-it-query.json\" }, \"okta\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/okta-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"SSWS \" }, \"github\": { \"type\": \"basic\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/github-key.txt\" }, \"aws\": { \"type\": \"aws_signing_v4\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/aws-secret-key.txt\", \"keyID\": \"AKIA376P4FQSS2ONB2NS\" }, \"netlify\": { \"type\": \"api_key\", \"valuePrefix\": \"Bearer \", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/netlify-token.txt\" }, \"k8s\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/k8s-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"Bearer \" }, \"sumologic\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/sumologic-token.txt\", \"type\": \"basic\" } }",
+      "default": "{}",
       "options": [
         "{ \"azure\": { \"type\": \"azure_default\" }, \"digitalocean\": { \"type\": \"bearer\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/digitalocean-key.txt\" }, \"google\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/stackql-security-reviewer.json\" }, \"googleadmin\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/ryuk-it-query.json\" }, \"okta\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/okta-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"SSWS \" }, \"github\": { \"type\": \"basic\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/github-key.txt\" }, \"aws\": { \"type\": \"aws_signing_v4\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/aws-secret-key.txt\", \"keyID\": \"AKIA376P4FQSS2ONB2NS\" }, \"netlify\": { \"type\": \"api_key\", \"valuePrefix\": \"Bearer \", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/netlify-token.txt\" }, \"k8s\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/k8s-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"Bearer \" }, \"sumologic\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/sumologic-token.txt\", \"type\": \"basic\" } }",
         "{ \"google\": { \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/google/functional-test-dummy-sa-key.json\" }, \"googleadmin\": { \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/google/functional-test-dummy-sa-key.json\" },s \"okta\": { \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/okta/api-key.txt\", \"type\": \"api_key\", \"valuePrefix\": \"SSWS \" }, \"github\": { \"type\": \"basic\", \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/okta/api-key.txt\" }, \"aws\": { \"type\": \"aws_signing_v4\", \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/aws/functional-test-dummy-aws-key.txt\", \"keyID\": \"AKIA376P4FQSS2ONB2NS\" }, \"netlify\": { \"type\": \"api_key\", \"valuePrefix\": \"Bearer \", \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/netlify/netlify-token.txt\" }, \"k8s\": { \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/k8s/k8s-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"Bearer \" }, \"sumologic\": { \"credentialsfilepath\": \"${workspaceFolder}/test/assets/credentials/dummy/sumologic/sumologic-token.txt\", \"type\": \"basic\" } }",
         "{ \"pgi\": { \"type\": \"sql_data_source::postgres\", \"sqlDataSource\": { \"dsn\": \"postgres://stackql:stackql@127.0.0.1:8432\" } }, \"azure\": { \"type\": \"azure_default\" }, \"google\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/stackql-security-reviewer.json\" }, \"okta\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/okta-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"SSWS \" }, \"github\": { \"type\": \"basic\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/github-key.txt\" }, \"aws\": { \"type\": \"aws_signing_v4\", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/aws-secret-key.txt\", \"keyID\": \"AKIA376P4FQSS2ONB2NS\" }, \"netlify\": { \"type\": \"api_key\", \"valuePrefix\": \"Bearer \", \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/netlify-token.txt\" }, \"k8s\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/k8s-token.txt\", \"type\": \"api_key\", \"valuePrefix\": \"Bearer \" }, \"sumologic\": { \"credentialsfilepath\": \"${workspaceFolder}/cicd/keys/integration/sumologic-token.txt\", \"type\": \"basic\" } }",
-        "{ \"digitalocean\": { \"username_var\": \"DUMMY_DIGITALOCEAN_USERNAME\", \"password_var\": \"DUMMY_DIGITALOCEAN_PASSWORD\", \"type\": \"bearer\" } }"
+        "{ \"digitalocean\": { \"username_var\": \"DUMMY_DIGITALOCEAN_USERNAME\", \"password_var\": \"DUMMY_DIGITALOCEAN_PASSWORD\", \"type\": \"bearer\" } }",
+        "{}"
       ]
     },
     {
@@ -381,12 +385,7 @@
     {
       "name": "generic exec",
       "type": "go",
-      "env": {
-        "DUMMY_DIGITALOCEAN_USERNAME": "myusername",
-        "DUMMY_DIGITALOCEAN_PASSWORD": "mypassword",
-        "DD_API_KEY": "myusername",
-        "DD_APPLICATION_KEY": "mypassword"
-      },
+      "envFile": "${workspaceFolder}/.vscode/.env",
       "request": "launch",
       "mode": "debug",
       "program": "${workspaceFolder}/stackql",
diff --git a/cicd/requirements.txt b/cicd/requirements.txt
index b9fc2e8c..7a457881 100644
--- a/cicd/requirements.txt
+++ b/cicd/requirements.txt
@@ -1,4 +1,5 @@
 Flask==3.0.3
+Jinja2==3.1.4
 mistune==3.0.2
 psycopg2-binary>=2.9.9
 psycopg[binary]>=3.1.16
@@ -6,3 +7,4 @@ PyYaml>=6.0.1
 requests==2.32.3
 robotframework==6.1.1
 sqlalchemy==1.4.44
+tabulate==0.9.0
diff --git a/docs/developer_guide.md b/docs/developer_guide.md
index 7b75e110..2aadea37 100644
--- a/docs/developer_guide.md
+++ b/docs/developer_guide.md
@@ -15,6 +15,7 @@ The short of things is that for basic build and unit testing, these are needed:
 
 - Install `golang` on your system **if you do not already have version >= 1.21**, per [the `golang` doco](https://go.dev/doc/install).
 - Install `python` on your system **if you do not already have version >= 3.11**, available from [the `python` website](https://www.python.org/downloads/) and numerous package managers.
+- Using a `venv` or otherwise, install the requisite python packages, eg (system permitting), from the repository root: `pip install -r cicd/requirements.txt`.
 
 Then, each of these should be run from the repository root:
 
@@ -25,7 +26,6 @@ Then, each of these should be run from the repository root:
 
 For serious development, simulated integration tests are essential. So, there are more dependencies:
 
-- Install the python dependencies (including `robot` framework). Simplest way, system permitting, is `pip install -r cicd/requirements.txt`.
 - Install `psql`. On some systems, this can be done as client only and/or with various package managers; fallback is to just [install postgres manually](https://www.postgresql.org/download/).
 
 Having installed all dependencies, the `robot` tests should be run from the repository root directory (this relies upon the executable in `./build/stackql`, built above):
@@ -88,9 +88,15 @@ robot --variable SHOULD_RUN_DOCKER_EXTERNAL_TESTS:true -d test/robot/functional
 
 ### Manually Testing
 
-Please see [the mock testing doco](/test/mockserver/README.md#manually-testing-mocks).
+Please see [the mock testing doco](/test/python/flask/README.md).
 
+## Debuggers
+
+The `vscode` tooling configuration is mostly ready to use, as seen in the `.vscode` directory. You will need to create a file at the `.gitignore`d location `.vscode/.env`. The simplest thing is to just copy the example to get going: `cp .vscode/example.env .vscode/.env`.
+
+The debugger config is pretty messy, and we will probably slim it down over time. That said, it is far from useless as an example.
+
 ## Provider development
 
 Keen to expose some new functionality though `stackql`? We are very keen on this!
 
diff --git a/docs/walkthroughs/README.md b/docs/walkthroughs/README.md
index 436e2639..1b10d333 100644
--- a/docs/walkthroughs/README.md
+++ b/docs/walkthroughs/README.md
@@ -16,7 +16,13 @@ in order to setup, run, verify and tear down testing scenarios. The tests *can*
 
 ## Running from CI
 
-The canonical, **ruleset-protected** tag form is `scenario--`. At this stage, `run_number` must refer to a `stackql` run for which a `linux` `amd64` stackql binary archive is present at the time the tag is run.
+The canonical, **ruleset-protected** tag form is `scenario-<>-<>`. At this stage, `run_number` must refer to a `stackql` run for which a `linux` `amd64` stackql binary archive is present at the time the tag is run.
+
+## Plumbing
+
+These walkthroughs are runnable using CI. This is built upon:
+
+- `jinja2` templates, with `<<` and `>>` as delimiters.
+
diff --git a/docs/walkthroughs/list-aws-instances.md b/docs/walkthroughs/list-aws-instances.md
new file mode 100644
index 00000000..b433fb5d
--- /dev/null
+++ b/docs/walkthroughs/list-aws-instances.md
@@ -0,0 +1,47 @@
+
+## Setup
+
+First, for whichever AWS user you would like to use, grant read-only privileges on EC2 (eg: using `arn:aws:iam::aws:policy/ReadOnlyAccess`). Then, create a set of AWS CLI credentials per [the AWS documentation](https://docs.aws.amazon.com/cli/latest/userguide/cli-authentication-user.html#cli-authentication-user-get), and store them in the appropriate environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`.
+
+Then, do this in bash:
+
+```bash setup stackql-shell app_root_path=./test/tmp/.list-aws-instances.stackql
+
+stackql shell --approot=<<app_root_path>> --registry="{ \"url\": \"file://$(pwd)/test/registry\", \"localDocRoot\": \"$(pwd)/test/registry\", \"verifyConfig\": { \"nopVerify\": true } }"
+```
+
+## Method
+
+Do this in the `stackql` shell, replacing the tuple of regions with whichever AWS regions hold interest for you (these are not templated in the example):
+
+```sql stackql-shell
+
+
+SELECT instance_id, region
+FROM aws.ec2_nextgen.instances
+WHERE region IN ('us-east-1', 'ap-southeast-2', 'eu-west-1');
+
+```
+
+## Result
+
+
+Assuming you have chosen regions wisely, you will see something like this included in the output:
+
+```sql stackql stdout expectation stdout-table-contains-data
+|---------------------|----------------|
+| instance_id         | region         |
+|---------------------|----------------|
+| i-some-silly-id-011 | us-east-1      |
+|---------------------|----------------|
+| i-some-other-id-011 | ap-southeast-2 |
+|---------------------|----------------|
+```
+
+## Cleanup
+
+```bash teardown best-effort app_root_path=./test/tmp/.list-aws-instances.stackql
+
+rm -rf <<app_root_path>>
+
+```
\ No newline at end of file
diff --git a/docs/walkthroughs/get-google-accelerator-types.md b/docs/walkthroughs/list-google-accelerator-types.md
similarity index 52%
rename from docs/walkthroughs/get-google-accelerator-types.md
rename to docs/walkthroughs/list-google-accelerator-types.md
index 42f84752..3de08e4f 100644
--- a/docs/walkthroughs/get-google-accelerator-types.md
+++ b/docs/walkthroughs/list-google-accelerator-types.md
@@ -1,20 +1,20 @@
 
 ## Setup
 
-First, create a google service account key using the GCP Console, per [the GCP documentation](https://cloud.google.com/iam/docs/keys-create-delete). Grant the service account at least `Viewer` role equivalent privileges, per [the GCP dumentation](https://cloud.google.com/iam/docs/create-service-agents#grant-roles).
+First, create a google service account key using the GCP Console, per [the GCP documentation](https://cloud.google.com/iam/docs/keys-create-delete). Grant the service account at least `Viewer` role equivalent privileges, per [the GCP documentation](https://cloud.google.com/iam/docs/create-service-agents#grant-roles).
 
 Then, do this in bash:
 
-```bash setup stackql-shell credentials-path=cicd/keys/testing/google-ro-credentials.json app-root-path=./test/tmp/.get-google-accel.stackql
+```bash setup stackql-shell credentials_path=cicd/keys/testing/google-ro-credentials.json app_root_path=./test/tmp/.get-google-accel.stackql
 
-export GOOGLE_CREDENTIALS="$(cat )";
+export GOOGLE_CREDENTIALS="$(cat <<credentials_path>>)";
 
-stackql shell --approot=
+stackql shell --approot=<<app_root_path>>
 
 ```
 
 ## Method
 
-Do this in the `stackql` shell, replacing `` with your GCP project name, and `` as desired, eg: `australia-southeast1-a`:
+Do this in the `stackql` shell, replacing `<<project>>` with your GCP project name, and `<<zone>>` as desired, eg: `australia-southeast1-a`:
 
 ```sql stackql-shell input required project=stackql-demo zone=australia-southeast1-a
 
@@ -25,8 +25,8 @@ select kind
 FROM google.compute.accelerator_types
 WHERE
-  project = ''
-  AND zone = ''
+  project = '<<project>>'
+  AND zone = '<<zone>>'
 ORDER BY
   name desc
 ;
@@ -36,7 +36,7 @@
 
 ## Result
 
-You will see something very much like this included in the output, presuming you have one VM (if you have zero, only the headers should appper, more VMs means more rows):
+You will see exactly this included in the output:
 
 ```sql expectation stdout-contains-all
 |---------------------|-------------------------|
@@ -52,19 +52,10 @@ You will see something very much like this included in the output, presuming you
 |---------------------|-------------------------|
 ```
 
-
-
-
-
-
-
 ## Cleanup
 
-```bash teardown best-effort app-root-path=./test/tmp/.get-google-accel.stackql
+```bash teardown best-effort app_root_path=./test/tmp/.get-google-accel.stackql
 
-rm -rf 
+rm -rf <<app_root_path>>
 
 ```
\ No newline at end of file
diff --git a/docs/walkthroughs/get-google-vms.md b/docs/walkthroughs/list-google-vms.md
similarity index 67%
rename from docs/walkthroughs/get-google-vms.md
rename to docs/walkthroughs/list-google-vms.md
index f2f70059..27e5b511 100644
--- a/docs/walkthroughs/get-google-vms.md
+++ b/docs/walkthroughs/list-google-vms.md
@@ -1,20 +1,20 @@
 
 ## Setup
 
-First, create a google service account key using the GCP Console, per [the GCP documentation](https://cloud.google.com/iam/docs/keys-create-delete). Grant the service account at least `Viewer` role equivalent privileges, per [the GCP dumentation](https://cloud.google.com/iam/docs/create-service-agents#grant-roles).
+First, create a google service account key using the GCP Console, per [the GCP documentation](https://cloud.google.com/iam/docs/keys-create-delete). Grant the service account at least `Viewer` role equivalent privileges, per [the GCP documentation](https://cloud.google.com/iam/docs/create-service-agents#grant-roles).
 
 Then, do this in bash:
 
-```bash setup stackql-shell credentials-path=cicd/keys/testing/google-ro-credentials.json app-root-path=./test/tmp/.get-google-vms.stackql
+```bash setup stackql-shell credentials_path=cicd/keys/testing/google-ro-credentials.json app_root_path=./test/tmp/.get-google-vms.stackql
 
-export GOOGLE_CREDENTIALS="$(cat )";
+export GOOGLE_CREDENTIALS="$(cat <<credentials_path>>)";
 
-stackql shell --approot=
+stackql shell --approot=<<app_root_path>>
 
 ```
 
 ## Method
 
-Do this in the `stackql` shell, replacing `` with your GCP project name, and `` as desired, eg: `australia-southeast1-a`:
+Do this in the `stackql` shell, replacing `<<project>>` with your GCP project name, and `<<zone>>` as desired, eg: `australia-southeast1-a`:
 
 ```sql stackql-shell input required project=stackql-demo zone=australia-southeast1-a
 
@@ -25,8 +25,8 @@ select id
 FROM google.compute.instances
 WHERE
-  project = ''
-  AND zone = ''
+  project = '<<project>>'
+  AND zone = '<<zone>>'
 ;
 
 ```
@@ -55,8 +55,8 @@ goodbye
 
 ## Cleanup
 
-```bash teardown best-effort app-root-path=./test/tmp/.get-google-vms.stackql
+```bash teardown best-effort app_root_path=./test/tmp/.get-google-vms.stackql
 
-rm -rf 
+rm -rf <<app_root_path>>
 
 ```
\ No newline at end of file
diff --git a/test/python/markdown_testing/markdown_testing.py b/test/python/markdown_testing/markdown_testing.py
index 5ce4e1da..01db9e7d 100644
--- a/test/python/markdown_testing/markdown_testing.py
+++ b/test/python/markdown_testing/markdown_testing.py
@@ -6,6 +6,10 @@
 
 import json
 
+import jinja2
+
+from tabulate import tabulate
+
 _REPOSITORY_ROOT_PATH = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', '..'))
 
 """
@@ -18,6 +22,15 @@ def eprint(*args, **kwargs):
     print(*args, file=sys.stderr, **kwargs)
 
+
+_TEMPLATE_ENV = jinja2.Environment(
+    trim_blocks=True,
+    block_start_string='<%',
+    block_end_string='%>',
+    variable_start_string='<<',
+    variable_end_string='>>'
+)
+
 
 class ASTNode(object):
 
     _STACKQL_SHELL_INVOCATION: str = 'stackql-shell'
@@ -83,7 +96,9 @@ def get_execution_language(self) -> str:
         return self.node.get('lang', '')
 
     def expand(self) -> str:
-        return self.get_text().replace("<", "{").replace(">", "}").format(**self._local_vars)
+        tpl = _TEMPLATE_ENV.from_string(self.get_text())
+        rv = tpl.render(**self._local_vars)
+        return rv
 
     def __str__(self):
         return json.dumps(self.node, indent=2)
@@ -94,8 +109,12 @@ def __repr__(self):
 
 class MdAST(object):
 
-    def __init__(self, ast: List[ASTNode]):
+    def __init__(self, ast: List[ASTNode], path: str):
         self._ast: List[ASTNode] = ast
+        self._path: str = path
+
+    def get_name(self) -> str:
+        return os.path.basename(self._path)
 
     def get_ordered(self) -> List[ASTNode]:
         return self._ast
@@ -118,7 +137,7 @@ def parse_markdown_file(self, file_path: str, lang=None) -> MdAST:
         with open(file_path, 'r') as f:
             txt = f.read()
         raw_list: List[dict] = markdown(txt)
-        return MdAST([ASTNode(node) for node in raw_list])
+        return MdAST([ASTNode(node) for node in raw_list], file_path)
 
 
 class Expectation(object):
@@ -166,16 +185,21 @@ class WorkloadDTO(object):
 
     def __init__(
         self,
+        name: str,
         setup: str,
         in_session: List[str],
        teardown: str,
        expectations: List[Expectation]
    ):
+        self._name = name
         self._setup = setup
         self._in_session = in_session
         self._teardown = teardown
         self._expectations = expectations
 
+    def get_name(self) -> str:
+        return self._name
+
     def get_setup(self) -> List[str]:
         return self._setup
 
@@ -244,14 +268,25 @@ def orchestrate(self, file_path: str) -> WorkloadDTO:
                 invocation_count += 1
             else:
                 raise KeyError(f'Maximum invocation blocks exceeded: {self._max_invocations_blocks}')
-        return WorkloadDTO(setup_str, in_session_commands, teardown_str, expectations)
+        return WorkloadDTO(ast.get_name(), setup_str, in_session_commands, teardown_str, expectations)
 
 
 class WalkthroughResult:
 
-    def __init__(self, stdout_str :str, stderr_str :str, rc :int) -> None:
+    def __init__(
+        self,
+        name: str,
+        stdout_str :str,
+        stderr_str :str,
+        rc :int,
+        passes_stdout: bool,
+        passes_stderr: bool
+    ) -> None:
+        self.name: str = name
         self.stdout :str = stdout_str
         self.stderr :str = stderr_str
         self.rc = rc
+        self.passes_stdout_check = passes_stdout
+        self.passes_stderr_check = passes_stderr
 
 
 class SimpleRunner(object):
@@ -276,12 +311,28 @@ def run(self) -> WalkthroughResult:
         stdout_str: str = stdout_bytes.decode(sys.getdefaultencoding())
         stderr_str: str = stderr_bytes.decode(sys.getdefaultencoding())
 
+        fails_stdout: bool = False
+        fails_stderr: bool = False
         for expectation in self._workload.get_expectations():
+            passes_stdout: bool = expectation.passes_stdout(stdout_str)
+            passes_stderr: bool = expectation.passes_stderr(stderr_str)
+            if not passes_stdout:
+                fails_stdout = True
+            if not passes_stderr:
+                fails_stderr = True
             print(f'Expectation: {expectation}')
-            print(f'Passes stdout: {expectation.passes_stdout(stdout_str)}')
-            print(f'Passes stderr: {expectation.passes_stderr(stderr_str)}')
+            print(f'Passes stdout: {passes_stdout}')
+            print(f'Passes stderr: {passes_stderr}')
             print('---')
-        return WalkthroughResult(stdout_str, stderr_str, pr.returncode)
+        return WalkthroughResult(
+            self._workload.get_name(),
+            stdout_str,
+            stderr_str,
+            pr.returncode,
+            not fails_stdout,
+            not fails_stderr
+        )
 
 
 class AllWalkthroughsRunner(object):
@@ -324,13 +375,23 @@ def run_all(self, walkthrough_inodes: List[str], recursive=True, skip_readme=Tru
                 raise FileNotFoundError(f'Path not tractable: {inode_path}')
         return results
 
+def collate_results(results: List[WalkthroughResult]) -> bool:
+    failed: int = 0
+    for result in results:
+        if result.rc != 0 or not result.passes_stdout_check or not result.passes_stderr_check:
+            failed += 1
+    print(f'Failed test count: {failed}')
+    print(tabulate([[result.name, result.rc, result.passes_stdout_check, result.passes_stderr_check] for result in results], headers=['Test Name', 'Return Code', 'Passes Stdout Checks', 'Passes Stderr Checks']))
+    return failed == 0
+
 def main():
     runner: AllWalkthroughsRunner = AllWalkthroughsRunner()
     results: List[WalkthroughResult] = runner.run_all([os.path.join(_REPOSITORY_ROOT_PATH, 'docs', 'walkthroughs')])
-    for result in results:
-        print(f'RC: {result.rc}')
-        print(f'STDOUT: {result.stdout}')
-        print(f'STDERR: {result.stderr}')
+    if collate_results(results):
+        print('All tests passed.')
+        sys.exit(0)
+    print('Some tests failed.')
+    sys.exit(1)
 
 if __name__ == '__main__':
     main()
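
For reference, a minimal sketch of how the `<<` / `>>` jinja2 templating introduced in this patch expands a walkthrough snippet; the variable values below are invented for illustration and are not taken from the patch:

```python
# Minimal sketch, assuming only the delimiter configuration visible in the diff;
# mirrors _TEMPLATE_ENV in test/python/markdown_testing/markdown_testing.py.
import jinja2

template_env = jinja2.Environment(
    trim_blocks=True,
    block_start_string='<%',
    block_end_string='%>',
    variable_start_string='<<',
    variable_end_string='>>'
)

# Illustrative variables, as would be parsed from a fence header such as
# "bash setup stackql-shell app_root_path=./test/tmp/.list-aws-instances.stackql".
demo_vars = {'app_root_path': './test/tmp/.list-aws-instances.stackql'}

snippet = 'stackql shell --approot=<<app_root_path>>'
print(template_env.from_string(snippet).render(**demo_vars))
# -> stackql shell --approot=./test/tmp/.list-aws-instances.stackql
```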
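
Likewise, a minimal sketch of the pass/fail summary that the new `collate_results` function prints via `tabulate`; the result rows here are made up purely to show the table shape:

```python
# Minimal sketch of the tabulate-based summary in the same shape as
# collate_results(); the rows are invented, not real walkthrough output.
from tabulate import tabulate

# (name, return code, stdout checks passed, stderr checks passed)
results = [
    ('list-aws-instances.md', 0, True, True),
    ('list-google-vms.md', 1, False, True),
]

failed = sum(
    1 for _, rc, ok_out, ok_err in results
    if rc != 0 or not ok_out or not ok_err
)
print(f'Failed test count: {failed}')
print(tabulate(
    [list(row) for row in results],
    headers=['Test Name', 'Return Code', 'Passes Stdout Checks', 'Passes Stderr Checks']
))
```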