diff --git a/.github/workflows/business-pay-ocp-cd.yml b/.github/workflows/business-pay-ocp-cd.yml index 908121310f..ff8dfbdbd1 100644 --- a/.github/workflows/business-pay-ocp-cd.yml +++ b/.github/workflows/business-pay-ocp-cd.yml @@ -24,7 +24,7 @@ env: jobs: entity-pay-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -67,7 +72,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-pay-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/colin-api-cd.yml b/.github/workflows/colin-api-cd.yml index 9b3e921239..ed5a1443ce 100644 --- a/.github/workflows/colin-api-cd.yml +++ b/.github/workflows/colin-api-cd.yml @@ -24,7 +24,7 @@ env: jobs: colin-api-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -67,7 +72,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} colin-api-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' 
environment: @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/colin-api-ci.yml b/.github/workflows/colin-api-ci.yml index 2f0a954179..52763afb5f 100644 --- a/.github/workflows/colin-api-ci.yml +++ b/.github/workflows/colin-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -64,7 +64,11 @@ jobs: JWT_OIDC_JWKS_CACHE_TIMEOUT: 300 GO_LIVE_DATE: 2019-08-12 - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -81,7 +85,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -102,7 +106,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/data-reset-tool-cd.yml b/.github/workflows/data-reset-tool-cd.yml index 6d0497e245..4268199b14 100644 --- a/.github/workflows/data-reset-tool-cd.yml +++ b/.github/workflows/data-reset-tool-cd.yml @@ -24,7 +24,7 @@ env: jobs: data-reset-tool-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && 
github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -67,7 +72,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} data-reset-tool-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/data-reset-tool-ci.yml b/.github/workflows/data-reset-tool-ci.yml index 2c3cb28994..a8092af1d9 100644 --- a/.github/workflows/data-reset-tool-ci.yml +++ b/.github/workflows/data-reset-tool-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -50,12 +50,16 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -75,7 +79,7 @@ jobs: build-check: needs: setup-job - runs-on: 
ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/email-reminder-cd.yml b/.github/workflows/email-reminder-cd.yml index 5c6ff84bfb..17691779f2 100644 --- a/.github/workflows/email-reminder-cd.yml +++ b/.github/workflows/email-reminder-cd.yml @@ -24,7 +24,7 @@ env: jobs: email-reminder-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} email-reminder-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/email-reminder-ci.yml b/.github/workflows/email-reminder-ci.yml index cf1a3fb317..83e5263868 100644 --- a/.github/workflows/email-reminder-ci.yml +++ b/.github/workflows/email-reminder-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,11 +49,14 @@ 
jobs: # testing: # needs: setup-job - # runs-on: ubuntu-20.04 + # runs-on: ubuntu-24.04 + # strategy: + # matrix: + # python-version: [3.8] # steps: # - uses: actions/checkout@v3 # - name: Set up Python ${{ matrix.python-version }} - # uses: actions/setup-python@v1 + # uses: actions/setup-python@v5 # with: # python-version: ${{ matrix.python-version }} # - name: Install dependencies @@ -66,7 +69,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-bn-cd.yml b/.github/workflows/entity-bn-cd.yml index 57ea0d4c29..2807e67cd3 100644 --- a/.github/workflows/entity-bn-cd.yml +++ b/.github/workflows/entity-bn-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-bn-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -68,7 +73,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-bn-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-bn-ci.yml b/.github/workflows/entity-bn-ci.yml index a4b71479a4..c0674fbc4b 100644 --- a/.github/workflows/entity-bn-ci.yml +++ b/.github/workflows/entity-bn-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 
'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -67,7 +67,11 @@ jobs: BN_HUB_CLIENT_ID: id BN_HUB_CLIENT_SECRET: secret - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -84,7 +88,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -105,7 +109,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-digital-credentials-cd.yml b/.github/workflows/entity-digital-credentials-cd.yml index 56ceda2230..e9d9b7388f 100644 --- a/.github/workflows/entity-digital-credentials-cd.yml +++ b/.github/workflows/entity-digital-credentials-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-digital-credentials-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -68,7 +73,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-digital-credentials-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo 
"TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-digital-credentials-ci.yml b/.github/workflows/entity-digital-credentials-ci.yml index bca691e437..1afdc52c25 100644 --- a/.github/workflows/entity-digital-credentials-ci.yml +++ b/.github/workflows/entity-digital-credentials-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -63,7 +63,11 @@ jobs: TEST_NATS_DOCKER: True STAN_CLUSTER_NAME: test-cluster - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -80,7 +84,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -101,7 +105,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-emailer-cd.yml b/.github/workflows/entity-emailer-cd.yml index 9643b0bd78..3c41b87722 100644 --- a/.github/workflows/entity-emailer-cd.yml +++ b/.github/workflows/entity-emailer-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-emailer-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ 
-34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -68,7 +73,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-emailer-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-emailer-ci.yml b/.github/workflows/entity-emailer-ci.yml index 3b51c68ac5..45a45cf4f0 100644 --- a/.github/workflows/entity-emailer-ci.yml +++ b/.github/workflows/entity-emailer-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -73,7 +73,11 @@ jobs: TEST_NATS_DOCKER: True STAN_CLUSTER_NAME: test-cluster - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -100,7 +104,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -121,7 +125,7 @@ jobs: build-check: needs: 
setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-filer-cd.yml b/.github/workflows/entity-filer-cd.yml index c02c180cff..fb967a593e 100644 --- a/.github/workflows/entity-filer-cd.yml +++ b/.github/workflows/entity-filer-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-filer-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -68,7 +73,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-filer-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-filer-ci.yml b/.github/workflows/entity-filer-ci.yml index 8dfe6e208e..90c50151fa 100644 --- a/.github/workflows/entity-filer-ci.yml +++ b/.github/workflows/entity-filer-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -74,8 +74,7 @@ 
jobs: ACCOUNT_SVC_CLIENT_SECRET: account_svc_client_secret BUSINESS_EVENTS_TOPIC: projects/project-id/topics/test - - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -96,7 +95,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install docker-compose @@ -122,7 +121,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/entity-pay-cd.yml b/.github/workflows/entity-pay-cd.yml index 77508a607b..0b894e07a9 100644 --- a/.github/workflows/entity-pay-cd.yml +++ b/.github/workflows/entity-pay-cd.yml @@ -25,7 +25,7 @@ env: jobs: entity-pay-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -34,6 +34,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -68,7 +73,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} entity-pay-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -76,10 +81,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/entity-pay-ci.yml b/.github/workflows/entity-pay-ci.yml index d6163cf3da..0a492a0445 100644 --- a/.github/workflows/entity-pay-ci.yml +++ b/.github/workflows/entity-pay-ci.yml @@ -14,7 +14,7 @@ defaults: jobs: setup-job: - runs-on: 
ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -24,7 +24,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -33,7 +33,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -69,7 +69,11 @@ jobs: NATS_SUBJECT: entity.filings NATS_EMAILER_SUBJECT: entity.email - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -86,7 +90,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -107,7 +111,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/expired-limited-restoration-cd.yml b/.github/workflows/expired-limited-restoration-cd.yml new file mode 100644 index 0000000000..04293b6f90 --- /dev/null +++ b/.github/workflows/expired-limited-restoration-cd.yml @@ -0,0 +1,114 @@ +name: Expired Limited Restoration Job CD + +on: + push: + branches: + - main + paths: + - "jobs/expired-limited-restoration/**" + workflow_dispatch: + inputs: + environment: + description: "Environment (dev/test/prod)" + required: true + default: "dev" + +defaults: + run: + shell: bash + working-directory: ./jobs/expired-limited-restoration + +env: + APP_NAME: "expired-limited-restoration" + TAG_NAME: "dev" + +jobs: + expired-limited-restoration-cd-by-push: + runs-on: ubuntu-24.04 + + if: github.event_name == 'push' && github.repository == 'bcgov/lear' + environment: + name: "dev" + + steps: + - uses: actions/checkout@v3 + + - name: Install CLI tools from OpenShift + uses: 
redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: CD Flow + shell: bash + env: + OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} + OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} + OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} + OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} + OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} + TAG_NAME: ${{ env.TAG_NAME }} + run: | + make cd + + - name: Rocket.Chat Notification + uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master + if: failure() + with: + type: ${{ job.status }} + job_name: "*Expired Limited Restoration Job Built and Deployed to ${{env.TAG_NAME}}*" + channel: "#registries-bot" + url: ${{ secrets.ROCKETCHAT_WEBHOOK }} + commit: true + token: ${{ secrets.GITHUB_TOKEN }} + + expired-limited-restoration-cd-by-dispatch: + runs-on: ubuntu-24.04 + + if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' + environment: + name: "${{ github.event.inputs.environment }}" + + steps: + - uses: actions/checkout@v3 + + - name: Set env by input + run: | + echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + + - name: Login Openshift + shell: bash + run: | + oc login --server=${{secrets.OPENSHIFT4_LOGIN_REGISTRY}} --token=${{secrets.OPENSHIFT4_SA_TOKEN}} + + - name: CD Flow + shell: bash + env: + OPS_REPOSITORY: ${{ secrets.OPS_REPOSITORY }} + OPENSHIFT_DOCKER_REGISTRY: ${{ secrets.OPENSHIFT4_DOCKER_REGISTRY }} + OPENSHIFT_SA_NAME: ${{ secrets.OPENSHIFT4_SA_NAME }} + OPENSHIFT_SA_TOKEN: ${{ secrets.OPENSHIFT4_SA_TOKEN }} + OPENSHIFT_REPOSITORY: ${{ secrets.OPENSHIFT4_REPOSITORY }} + TAG_NAME: ${{ env.TAG_NAME }} + run: | + make cd + + - name:
Rocket.Chat Notification + uses: RocketChat/Rocket.Chat.GitHub.Action.Notification@master + if: failure() + with: + type: ${{ job.status }} + job_name: "*Expired Limited Restoration Job Built and Deployed to ${{env.TAG_NAME}}*" + channel: "#registries-bot" + url: ${{ secrets.ROCKETCHAT_WEBHOOK }} + commit: true + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/expired-limited-restoration-ci.yml b/.github/workflows/expired-limited-restoration-ci.yml new file mode 100644 index 0000000000..19fd1ff777 --- /dev/null +++ b/.github/workflows/expired-limited-restoration-ci.yml @@ -0,0 +1,88 @@ +name: Expired Limited Restoration Job CI + +on: + pull_request: + types: [assigned, synchronize] + paths: + - "jobs/expired-limited-restoration/**" + +defaults: + run: + shell: bash + working-directory: ./jobs/expired-limited-restoration + +jobs: + setup-job: + runs-on: ubuntu-24.04 + + if: github.repository == 'bcgov/lear' + + steps: + - uses: actions/checkout@v3 + - run: "true" + + linting: + needs: setup-job + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + make setup + - name: Lint with pylint + id: pylint + run: | + make pylint + - name: Lint with flake8 + id: flake8 + run: | + make flake8 + + testing: + needs: setup-job + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + make setup + - name: Test with pytest + id: test + run: | + make test + #- name: Upload coverage to Codecov + # uses: codecov/codecov-action@v3 + # with: + # file: ./queue_services/entity-pay/coverage.xml + # flags: 
entitypay + # name: codecov-entity-pay + # fail_ci_if_error: false + + build-check: + needs: setup-job + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v3 + - name: build to check strictness + id: build + run: | + make build-nc diff --git a/.github/workflows/filings-notebook-report-cd.yml b/.github/workflows/filings-notebook-report-cd.yml index e1465ef216..c3f1ed9e5a 100644 --- a/.github/workflows/filings-notebook-report-cd.yml +++ b/.github/workflows/filings-notebook-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: filings-notebook-report-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} filings-notebook-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/filings-notebook-report-ci.yml b/.github/workflows/filings-notebook-report-ci.yml index 7dbd4adac6..c2b19db992 100644 --- a/.github/workflows/filings-notebook-report-ci.yml +++ b/.github/workflows/filings-notebook-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 
@@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -48,11 +48,16 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -72,7 +77,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/furnishings-cd.yml b/.github/workflows/furnishings-cd.yml index 0c43cacf23..5257ba742d 100644 --- a/.github/workflows/furnishings-cd.yml +++ b/.github/workflows/furnishings-cd.yml @@ -24,7 +24,7 @@ env: jobs: furnishings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} furnishings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/furnishings-ci.yml 
b/.github/workflows/furnishings-ci.yml index b57ee06f73..58d8841ad4 100644 --- a/.github/workflows/furnishings-ci.yml +++ b/.github/workflows/furnishings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 env: DATABASE_USERNAME: postgres DATABASE_PASSWORD: postgres @@ -64,6 +64,11 @@ jobs: NATS_CLIENT_NAME: entity.job.tester NATS_ENTITY_EVENTS_SUBJECT: entity.events SECOND_NOTICE_DELAY: 5 + + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -77,7 +82,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -98,7 +103,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/future-effective-filings-cd.yml b/.github/workflows/future-effective-filings-cd.yml index ccde9f3172..f4f9fbca4d 100644 --- a/.github/workflows/future-effective-filings-cd.yml +++ b/.github/workflows/future-effective-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: future-effective-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: 
redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} future-effective-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/future-effective-filings-ci.yml b/.github/workflows/future-effective-filings-ci.yml index 83e9babd84..9de4695517 100644 --- a/.github/workflows/future-effective-filings-ci.yml +++ b/.github/workflows/future-effective-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,11 +49,16 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -73,7 +78,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git 
a/.github/workflows/involuntary-dissolutions-cd.yml b/.github/workflows/involuntary-dissolutions-cd.yml index a06f83284f..d9c0eaa064 100644 --- a/.github/workflows/involuntary-dissolutions-cd.yml +++ b/.github/workflows/involuntary-dissolutions-cd.yml @@ -24,7 +24,7 @@ env: jobs: involuntary-dissolutions-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} involuntary-dissolutions-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/involuntary-dissolutions-ci.yml b/.github/workflows/involuntary-dissolutions-ci.yml index 1b1c1c0a2a..172ce77241 100644 --- a/.github/workflows/involuntary-dissolutions-ci.yml +++ b/.github/workflows/involuntary-dissolutions-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ 
-49,7 +49,7 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 env: DATABASE_USERNAME: postgres DATABASE_PASSWORD: postgres @@ -65,6 +65,11 @@ jobs: NATS_ENTITY_EVENTS_SUBJECT: entity.events STAGE_1_DELAY: 42 STAGE_2_DELAY: 30 + + strategy: + matrix: + python-version: [3.8] + services: postgres: image: postgres:12 @@ -78,7 +83,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -99,7 +104,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/legal-api-cd.yml b/.github/workflows/legal-api-cd.yml index 9be7c2c570..4f80bd9a2e 100644 --- a/.github/workflows/legal-api-cd.yml +++ b/.github/workflows/legal-api-cd.yml @@ -24,7 +24,7 @@ env: jobs: legal-api-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -67,7 +72,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} legal-api-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -75,10 +80,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/legal-api-ci.yml b/.github/workflows/legal-api-ci.yml index 
9b58e4d715..2a9d49dfac 100644 --- a/.github/workflows/legal-api-ci.yml +++ b/.github/workflows/legal-api-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -69,7 +69,11 @@ jobs: BUSINESS_SCHEMA_NAME: digital_business_card BUSINESS_SCHEMA_VERSION: "1.0.0" - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] services: postgres: @@ -86,7 +90,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install docker-compose @@ -112,7 +116,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/sftp-icbc-report-cd.yml b/.github/workflows/sftp-icbc-report-cd.yml index b5c70f7a84..2391fafd9d 100644 --- a/.github/workflows/sftp-icbc-report-cd.yml +++ b/.github/workflows/sftp-icbc-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: sftp-icbc-report-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} sftp-icbc-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: 
ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/sftp-icbc-report-ci.yml b/.github/workflows/sftp-icbc-report-ci.yml index 7643027782..a6a31b1102 100644 --- a/.github/workflows/sftp-icbc-report-ci.yml +++ b/.github/workflows/sftp-icbc-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -48,11 +48,16 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -72,7 +77,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/sftp-nuans-report-cd.yml b/.github/workflows/sftp-nuans-report-cd.yml index b26330c4d8..0829e9568b 100644 --- a/.github/workflows/sftp-nuans-report-cd.yml +++ b/.github/workflows/sftp-nuans-report-cd.yml @@ -24,7 +24,7 @@ env: jobs: sftp-nuans-report-cd-by-push: - runs-on: 
ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} sftp-nuans-report-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/sftp-nuans-report-ci.yml b/.github/workflows/sftp-nuans-report-ci.yml index eddfd6dcb4..c33e2cc7c6 100644 --- a/.github/workflows/sftp-nuans-report-ci.yml +++ b/.github/workflows/sftp-nuans-report-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -48,11 +48,16 @@ jobs: make flake8 testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - 
name: Install dependencies @@ -72,7 +77,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/update-colin-filings-cd.yml b/.github/workflows/update-colin-filings-cd.yml index 72f32fc312..057a11b77b 100644 --- a/.github/workflows/update-colin-filings-cd.yml +++ b/.github/workflows/update-colin-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: update-colin-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} update-colin-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/update-colin-filings-ci.yml b/.github/workflows/update-colin-filings-ci.yml index f40f45fe28..b5aad8b85d 100644 --- a/.github/workflows/update-colin-filings-ci.yml +++ b/.github/workflows/update-colin-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: 
actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,11 +49,16 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -73,7 +78,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/update-legal-filings-cd.yml b/.github/workflows/update-legal-filings-cd.yml index 18cdc2a743..1c67e81346 100644 --- a/.github/workflows/update-legal-filings-cd.yml +++ b/.github/workflows/update-legal-filings-cd.yml @@ -24,7 +24,7 @@ env: jobs: update-legal-filings-cd-by-push: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'push' && github.repository == 'bcgov/lear' environment: @@ -33,6 +33,11 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | @@ -62,7 +67,7 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} update-legal-filings-cd-by-dispatch: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.event_name == 'workflow_dispatch' && github.repository == 'bcgov/lear' environment: @@ -70,10 +75,16 @@ jobs: steps: - uses: actions/checkout@v3 + - name: Set env by input run: | echo "TAG_NAME=${{ github.event.inputs.environment }}" >> $GITHUB_ENV + - name: Install CLI tools from OpenShift + uses: redhat-actions/openshift-tools-installer@v1 + with: + oc: "4" + - name: Login Openshift shell: bash run: | diff --git a/.github/workflows/update-legal-filings-ci.yml b/.github/workflows/update-legal-filings-ci.yml index 
95ec9b62aa..201300e9f4 100644 --- a/.github/workflows/update-legal-filings-ci.yml +++ b/.github/workflows/update-legal-filings-ci.yml @@ -13,7 +13,7 @@ defaults: jobs: setup-job: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 if: github.repository == 'bcgov/lear' @@ -23,7 +23,7 @@ jobs: linting: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: matrix: @@ -32,7 +32,7 @@ jobs: steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -49,11 +49,16 @@ jobs: testing: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + + strategy: + matrix: + python-version: [3.8] + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies @@ -73,7 +78,7 @@ jobs: build-check: needs: setup-job - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v3 diff --git a/colin-api/devops/vaults.json b/colin-api/devops/vaults.json index ace7440da6..4e7deeb8c4 100644 --- a/colin-api/devops/vaults.json +++ b/colin-api/devops/vaults.json @@ -6,7 +6,8 @@ "test-oracle", "sentry", "jwt", - "launchdarkly" + "launchdarkly", + "entity-service-account" ] } ] diff --git a/colin-api/src/colin_api/config.py b/colin-api/src/colin_api/config.py index f4308a675c..bb6c9e7e07 100644 --- a/colin-api/src/colin_api/config.py +++ b/colin-api/src/colin_api/config.py @@ -93,6 +93,15 @@ class _Config: # pylint: disable=too-few-public-methods except (TypeError, ValueError): JWT_OIDC_JWKS_CACHE_TIMEOUT = 300 + # legal api + LEGAL_API_URL = os.getenv('LEGAL_API_URL') + + # service accounts + ACCOUNT_SVC_AUTH_URL = os.getenv('ACCOUNT_SVC_AUTH_URL') + ACCOUNT_SVC_CLIENT_ID = os.getenv('ACCOUNT_SVC_CLIENT_ID') + 
ACCOUNT_SVC_CLIENT_SECRET = os.getenv('ACCOUNT_SVC_CLIENT_SECRET') + ACCOUNT_SVC_TIMEOUT = os.getenv('ACCOUNT_SVC_TIMEOUT') + TESTING = False DEBUG = False diff --git a/colin-api/src/colin_api/models/business.py b/colin-api/src/colin_api/models/business.py index 5f4421771b..a6509d5889 100644 --- a/colin-api/src/colin_api/models/business.py +++ b/colin-api/src/colin_api/models/business.py @@ -66,9 +66,10 @@ class CorpStateTypes(Enum): AMALGAMATED = 'HAM' CONTINUE_IN = 'HCI' CONTINUE_OUT = 'HCO' - INVOLUNTARY_DISSOLUTION_NO_AR = 'HDF' + INVOLUNTARY_DISSOLUTION_NO_AR = 'HDF' # this corp state is also used for Put back off INVOLUNTARY_DISSOLUTION_NO_TR = 'HDT' LIMITED_RESTORATION = 'LRS' + RESTORATION_EXPIRATION = 'EXR' VOLUNTARY_DISSOLUTION = 'HDV' CORPS = [TypeCodes.BCOMP.value, TypeCodes.BC_COMP.value, diff --git a/colin-api/src/colin_api/models/filing.py b/colin-api/src/colin_api/models/filing.py index 9b687b164b..8ad22e597f 100644 --- a/colin-api/src/colin_api/models/filing.py +++ b/colin-api/src/colin_api/models/filing.py @@ -174,7 +174,8 @@ class FilingSource(Enum): 'CORPS_NAME': 'CO_BC', # company name/translated name 'CORPS_DIRECTOR': 'CO_DI', 'CORPS_OFFICE': 'CO_RR', # registered and record offices - 'CORPS_SHARE': 'CO_SS' + 'CORPS_SHARE': 'CO_SS', + 'CORPS_COMMENT_ONLY': 'CO_LI' # Called local correction (adding a comment only) }, 'specialResolution': { 'type_code_list': ['OTSPE'], @@ -352,6 +353,30 @@ class FilingSource(Enum): 'courtOrder': { 'type_code_list': ['COURT'], Business.TypeCodes.BC_COMP.value: 'COURT' + }, + 'putBackOn': { + 'type_code_list': ['CO_PO'], + Business.TypeCodes.COOP.value: 'CO_PO', + Business.TypeCodes.BCOMP.value: 'CO_PO', + Business.TypeCodes.BC_COMP.value: 'CO_PO', + Business.TypeCodes.ULC_COMP.value: 'CO_PO', + Business.TypeCodes.CCC_COMP.value: 'CO_PO', + Business.TypeCodes.BCOMP_CONTINUE_IN.value: 'CO_PO', + Business.TypeCodes.CONTINUE_IN.value: 'CO_PO', + Business.TypeCodes.ULC_CONTINUE_IN.value: 'CO_PO', + 
Business.TypeCodes.CCC_CONTINUE_IN.value: 'CO_PO', + }, + 'putBackOff': { + 'type_code_list': ['CO_PF'], + Business.TypeCodes.COOP.value: 'CO_PF', + Business.TypeCodes.BCOMP.value: 'CO_PF', + Business.TypeCodes.BC_COMP.value: 'CO_PF', + Business.TypeCodes.ULC_COMP.value: 'CO_PF', + Business.TypeCodes.CCC_COMP.value: 'CO_PF', + Business.TypeCodes.BCOMP_CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.ULC_CONTINUE_IN.value: 'CO_PF', + Business.TypeCodes.CCC_CONTINUE_IN.value: 'CO_PF', } } @@ -503,6 +528,14 @@ def _get_event_id(cls, cursor, corp_num: str, filing_dt: str, event_type: str = filing_dt=filing_dt, event_type=event_type ) + cursor.execute( + """ + INSERT INTO event_insert (event_id, corp_num, insert_date) + VALUES (:event_id, :corp_num, sysdate) + """, + event_id=event_id, + corp_num=corp_num + ) except Exception as err: current_app.logger.error('Error in filing: Failed to create new event.') raise err @@ -610,7 +643,7 @@ def _insert_filing(cls, cursor, filing, # pylint: disable=too-many-statements, filing_date=filing.filing_date[:10] ) elif filing_type_code in ['NOCAD', 'TRANS', - 'CO_BC', 'CO_DI', 'CO_RR', 'CO_SS', + 'CO_BC', 'CO_DI', 'CO_RR', 'CO_SS', 'CO_LI', 'BEINC', 'ICORP', 'ICORU', 'ICORC', 'AMLRB', 'AMALR', 'AMLRU', 'AMLRC', 'AMLHB', 'AMALH', 'AMLHU', 'AMLHC', @@ -618,7 +651,7 @@ def _insert_filing(cls, cursor, filing, # pylint: disable=too-many-statements, 'CONTB', 'CONTI', 'CONTU', 'CONTC', 'NOABE', 'NOALE', 'NOALR', 'NOALD', 'NOALA', 'NOALB', 'NOALU', 'NOALC', - 'CONTO', 'COUTI', + 'CONTO', 'COUTI', 'CO_PO', 'CO_PF', 'AGMDT', 'AGMLC', 'RESTF', 'RESTL', 'RESXL', 'RESXF', 'REGSN', 'REGSO', 'COURT']: @@ -1205,16 +1238,25 @@ def add_involuntary_dissolution_event(cls, con, corp_num, filing_dt, filing_body return None + @classmethod + def add_limited_restoration_expiration_event(cls, con, corp_num, filing_dt) -> int: + """Add limited restoration expiration event .""" + cursor = con.cursor() + event_id = 
cls._get_event_id(cursor=cursor, corp_num=corp_num, filing_dt=filing_dt, event_type='SYSDL') + Business.update_corp_state(cursor, event_id, corp_num, + Business.CorpStateTypes.RESTORATION_EXPIRATION.value) + return event_id + # pylint: disable=too-many-locals,too-many-statements,too-many-branches,too-many-nested-blocks; @classmethod - def add_filing(cls, con, filing: Filing) -> int: + def add_filing(cls, con, filing: Filing, lear_identifier: str) -> int: """Add new filing to COLIN tables.""" try: if filing.filing_type not in ['agmExtension', 'agmLocationChange', 'alteration', 'amalgamationApplication', 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationIn', - 'continuationOut', 'courtOrder', - 'dissolution', 'incorporationApplication', 'registrarsNotation', + 'continuationOut', 'courtOrder', 'dissolution', 'incorporationApplication', + 'putBackOn', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'restoration', 'specialResolution', 'transition']: raise InvalidFilingTypeException(filing_type=filing.filing_type) @@ -1252,6 +1294,10 @@ def add_filing(cls, con, filing: Filing) -> int: cls._process_continuation_out(cursor, filing) elif filing.filing_type == 'restoration': cls._process_restoration(cursor, filing) + elif filing.filing_type == 'putBackOn': + cls._process_put_back_on(cursor, filing) + elif filing.filing_type == 'putBackOff': + cls._process_put_back_off(cursor, filing) elif filing.filing_type == 'alteration': # alter corp type if ( @@ -1359,11 +1405,14 @@ def add_filing(cls, con, filing: Filing) -> int: Business.TypeCodes.BCOMP_CONTINUE_IN.value, ]) - # Freeze all entities except CP if 'enable-bc-ccc-ulc' flag is on else just freeze BEN + # Freeze all entities except CP if business exists in lear and + # 'enable-bc-ccc-ulc' flag is on else just freeze BEN is_frozen_condition = ( flags.is_on('enable-bc-ccc-ulc') and - business['business']['legalType'] != Business.TypeCodes.COOP.value + 
business['business']['legalType'] != Business.TypeCodes.COOP.value and + filing_source == cls.FilingSource.LEAR.value ) + current_app.logger.debug(f'Business {lear_identifier}, is_frozen_condition:{is_frozen_condition}') is_new_or_altered_ben = is_new_ben or is_new_cben or is_alteration_to_ben_or_cben @@ -1447,6 +1496,29 @@ def _process_restoration(cls, cursor, filing): corp_state = Business.CorpStateTypes.LIMITED_RESTORATION.value Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod + def _process_put_back_on(cls, cursor, filing): + """Process Put Back On.""" + corp_num = filing.get_corp_num() + + Office.end_office(cursor=cursor, + event_id=filing.event_id, + corp_num=corp_num, + office_code=Office.OFFICE_TYPES_CODES['custodialOffice']) + + Party.end_current(cursor, filing.event_id, corp_num, 'Custodian') + + corp_state = Business.CorpStateTypes.ACTIVE.value # Active for Put Back On + Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + + @classmethod + def _process_put_back_off(cls, cursor, filing): + """Process Put Back Off.""" + corp_num = filing.get_corp_num() + + corp_state = Business.CorpStateTypes.INVOLUNTARY_DISSOLUTION_NO_AR.value + Business.update_corp_state(cursor, filing.event_id, corp_num, corp_state) + @classmethod def _process_continuation_out(cls, cursor, filing): """Process continuation out.""" @@ -1744,6 +1816,9 @@ def _process_share_structure(cls, cursor, filing: Filing, corp_num: str): @classmethod def _process_name_translations(cls, cursor, filing: Filing, corp_num: str): """Process name translations.""" + if 'nameTranslations' not in filing.body: + return + name_translations = filing.body.get('nameTranslations', []) old_translations = CorpName.get_current_by_type( cursor=cursor, @@ -1845,6 +1920,19 @@ def _process_share_correction(cls, cursor, filing: Filing, corp_num: str, filing return filing.event_id + @classmethod + def _process_comment_correction(cls, cursor, filing: Filing, 
corp_num: str, filing_type_code: str): + """Process comment correction.""" + # create new event record, return event ID + filing.event_id = cls._get_event_id(cursor=cursor, corp_num=corp_num, filing_dt=filing.filing_date) + cls._insert_filing_user(cursor=cursor, filing=filing) + cls._insert_filing(cursor=cursor, filing=filing, filing_type_code=filing_type_code) + + ledger_text = filing.body.get('comment', '') + cls._insert_ledger_text(cursor, filing, ledger_text) + + return filing.event_id + @classmethod def add_correction_filings(cls, con, filing: Filing) -> list: """Create correction filings.""" @@ -1898,11 +1986,13 @@ def add_correction_filings(cls, con, filing: Filing) -> list: 'filing_type': filing.filing_type, 'filing_sub_type': None}) - if not filings_added: # if no filing created - raise GenericException( # pylint: disable=broad-exception-raised - f'No filing created for this correction identifier:{corp_num}.', - HTTPStatus.NOT_IMPLEMENTED - ) + if not filings_added: # only comment added + filing_type_code = Filing.FILING_TYPES[filing.filing_type][f'{sub_type}_COMMENT_ONLY'] + event_id = cls._process_comment_correction(cursor, filing, corp_num, filing_type_code) + + filings_added.append({'event_id': event_id, + 'filing_type': filing.filing_type, + 'filing_sub_type': None}) return filings_added diff --git a/colin-api/src/colin_api/resources/filing.py b/colin-api/src/colin_api/resources/filing.py index a22337920d..cd4654781f 100644 --- a/colin-api/src/colin_api/resources/filing.py +++ b/colin-api/src/colin_api/resources/filing.py @@ -92,7 +92,8 @@ def get(legal_type, identifier, filing_type, filing_sub_type=None): @jwt.requires_roles([COLIN_SVC_ROLE]) def post(legal_type, identifier, **kwargs): """Create a new filing.""" - # pylint: disable=unused-argument,too-many-branches; filing_type is only used for the get + # pylint: disable=too-many-return-statements,unused-argument,too-many-branches; + # filing_type is only used for the get try: if legal_type not in 
[x.value for x in Business.TypeCodes]: return jsonify({'message': 'Must provide a valid legal type.'}), HTTPStatus.BAD_REQUEST @@ -117,6 +118,9 @@ def post(legal_type, identifier, **kwargs): {'message': 'Error: Identifier in URL does not match identifier in filing data'} ), HTTPStatus.BAD_REQUEST + # setting this for lear business check as identifier is converted from lear to colin below + lear_identifier = identifier + # convert identifier if BC legal_type identifier = Business.get_colin_identifier(identifier, legal_type) @@ -137,6 +141,8 @@ def post(legal_type, identifier, **kwargs): 'courtOrder': json_data.get('courtOrder', None), 'dissolution': json_data.get('dissolution', None), 'incorporationApplication': json_data.get('incorporationApplication', None), + 'putBackOff': json_data.get('putBackOff', None), + 'putBackOn': json_data.get('putBackOn', None), 'registrarsNotation': json_data.get('registrarsNotation', None), 'registrarsOrder': json_data.get('registrarsOrder', None), 'restoration': json_data.get('restoration', None), @@ -175,7 +181,20 @@ def post(legal_type, identifier, **kwargs): } }), HTTPStatus.CREATED - filings_added = FilingInfo._add_filings(con, json_data, filing_list, identifier) + # filing will not be created for Limited restoration expiration-Put back off (make business Historical) + # Create an event and update corp state. + if ('putBackOff' in filing_list and json_data['header']['hideInLedger'] is True): + filing_dt = convert_to_pacific_time(json_data['header']['date']) + event_id = Filing.add_limited_restoration_expiration_event(con, identifier, filing_dt) + + con.commit() + return jsonify({ + 'filing': { + 'header': {'colinIds': [event_id]} + } + }), HTTPStatus.CREATED + + filings_added = FilingInfo._add_filings(con, json_data, filing_list, identifier, lear_identifier) # success! 
commit the db changes con.commit() @@ -200,7 +219,7 @@ def post(legal_type, identifier, **kwargs): }), HTTPStatus.INTERNAL_SERVER_ERROR @staticmethod - def _add_filings(con, json_data: dict, filing_list: list, identifier: str) -> list: + def _add_filings(con, json_data: dict, filing_list: list, identifier: str, lear_identifier: str) -> list: """Process all parts of the filing.""" filings_added = [] for filing_type in filing_list: @@ -211,8 +230,11 @@ def _add_filings(con, json_data: dict, filing_list: list, identifier: str) -> li filing_body = filing_list[filing_type] filing.filing_sub_type = Filing.get_filing_sub_type(filing_type, filing_body) filing.body = filing_body - # get utc lear effective date and convert to pacific time for insert into oracle - filing.effective_date = convert_to_pacific_time(filing.header['learEffectiveDate']) + if filing.header['isFutureEffective']: + # get utc lear effective date and convert to pacific time for insert into oracle + filing.effective_date = convert_to_pacific_time(filing.header['learEffectiveDate']) + else: + filing.effective_date = filing.filing_date if filing_type in ['amalgamationApplication', 'continuationIn', 'incorporationApplication']: filing.business = Business.create_corporation(con, json_data) @@ -222,7 +244,7 @@ def _add_filings(con, json_data: dict, filing_list: list, identifier: str) -> li if filing_type == 'correction': filings_added.extend(Filing.add_correction_filings(con, filing)) else: - event_id = Filing.add_filing(con, filing) + event_id = Filing.add_filing(con, filing, lear_identifier) filings_added.append({'event_id': event_id, 'filing_type': filing_type, 'filing_sub_type': filing.filing_sub_type}) diff --git a/colin-api/src/colin_api/services/account.py b/colin-api/src/colin_api/services/account.py new file mode 100644 index 0000000000..5366a8dfbd --- /dev/null +++ b/colin-api/src/colin_api/services/account.py @@ -0,0 +1,52 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the 
Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This class provides the service for auth calls.""" + +import requests +from flask import current_app + + +# pylint: disable=too-few-public-methods +class AccountService: + """Provides service to call Authentication Services.""" + + BEARER: str = 'Bearer ' + CONTENT_TYPE_JSON = {'Content-Type': 'application/json'} + + try: + timeout = int(current_app.config.get('ACCOUNT_SVC_TIMEOUT', 20)) + except Exception: # pylint: disable=broad-except + timeout = 20 + + @classmethod + def get_bearer_token(cls): + """Get a valid Bearer token for the service to use.""" + token_url = current_app.config.get('ACCOUNT_SVC_AUTH_URL') + client_id = current_app.config.get('ACCOUNT_SVC_CLIENT_ID') + client_secret = current_app.config.get('ACCOUNT_SVC_CLIENT_SECRET') + + data = 'grant_type=client_credentials' + + # get service account token + res = requests.post(url=token_url, + data=data, + headers={'content-type': 'application/x-www-form-urlencoded'}, + auth=(client_id, client_secret), + timeout=cls.timeout) + + try: + return res.json().get('access_token') + except Exception: # pylint: disable=broad-except + return None diff --git a/colin-api/src/colin_api/version.py b/colin-api/src/colin_api/version.py index 6a26e494ec..81c66b5c24 100644 --- a/colin-api/src/colin_api/version.py +++ b/colin-api/src/colin_api/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.140.0' # 
pylint: disable=invalid-name diff --git a/data-tool/.corps.env.sample b/data-tool/.corps.env.sample index e680ad60b6..87df82910b 100644 --- a/data-tool/.corps.env.sample +++ b/data-tool/.corps.env.sample @@ -44,6 +44,9 @@ UPDATE_ENTITY=False AFFILIATE_ENTITY=False AFFILIATE_ENTITY_ACCOUNT_ID= +USE_CUSTOM_CONTACT_EMAIL=False +CUSTOM_CONTACT_EMAIL= + AUTH_SVC_URL= ACCOUNT_SVC_AUTH_URL= @@ -69,6 +72,11 @@ TOMBSTONE_BATCH_SIZE=300 DELETE_BATCHES=1 DELETE_BATCH_SIZE=300 +VERIFY_BATCH_SIZE=300 + ## delete corps record in auth db, corp_processing of colin extract DELETE_AUTH_RECORDS=False -DELETE_CORP_PROCESSING_RECORDS=True \ No newline at end of file +DELETE_CORP_PROCESSING_RECORDS=True + +# verify script +VERIFY_SUMMARY_PATH=results.csv diff --git a/data-tool/Makefile b/data-tool/Makefile index a19866731c..3c50a2e04a 100644 --- a/data-tool/Makefile +++ b/data-tool/Makefile @@ -105,6 +105,10 @@ run-tombstone-migration: ## Run corp tombstone migration flow . $(VENV_DIR)/bin/activate && \ python flows/corps_tombstone_flow.py +run-tombstone-verify: ## Run corp tombstone verify flow + . 
$(VENV_DIR)/bin/activate && \ + python flows/corps_verify_flow.py + ################################################################################# # Self Documenting Commands # diff --git a/data-tool/flows/batch_delete_flow.py b/data-tool/flows/batch_delete_flow.py index d2292f59c8..4f002a4529 100644 --- a/data-tool/flows/batch_delete_flow.py +++ b/data-tool/flows/batch_delete_flow.py @@ -85,6 +85,10 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): 'source': 'resolutions', 'params': {'business_id': business_ids}, }, + { + 'source': 'amalgamations', + 'params': {'business_id': business_ids}, + }, ] query_futures_one = [] @@ -92,7 +96,7 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): query_futures_one.append( execute_query.submit(conn, plan) ) - + results_one = {} for future in query_futures_one: result = future.result() @@ -114,6 +118,14 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): 'source': 'share_series', 'params': {'share_class_id': results_one['share_classes']}, }, + { + 'source': 'amalgamating_businesses', + 'params': { 'amalgamation_id': results_one['amalgamations']}, + }, + { + 'source': 'offices_held', + 'params': {'party_role_id': results_one['party_roles']}, + } ] query_futures_two = [] @@ -121,7 +133,7 @@ def lear_delete_non_versioned(conn: Connection, business_ids: list): query_futures_two.append( execute_query.submit(conn, plan) ) - + delete_futures = [] # delete for first query results for table, ids in results_one.items(): @@ -207,6 +219,10 @@ def lear_delete_versioned(conn: Connection, business_ids: list): 'source': 'share_series_version', 'params': {'transaction_id': transaction_ids}, }, + { + 'source': 'offices_held_version', + 'params': {'transaction_id': transaction_ids}, + }, # based on others { 'source': 'batch_processing', @@ -223,6 +239,10 @@ def lear_delete_versioned(conn: Connection, business_ids: list): 'source': 'comments', 'params': {'filing_id': filing_ids}, }, 
+ { + 'source': 'furnishings', + 'params': {'business_id': business_ids}, + }, # there're some Comment records saved by legal-api directly instead of filer # some of them are linked via business_id { @@ -330,7 +350,7 @@ def auth_delete(db_engine: Engine, identifiers: list): delete_futures.append( execute_delete_plan.submit(conn, table, ids) ) - + # delete records in entities table delete_futures.append( execute_delete_plan.submit(conn, 'entities', entity_ids) @@ -424,7 +444,7 @@ def delete_entities(identifiers: list, auth_svc_url, headers, timeout=None): else: failed += 1 - print(f'👷 Auth entity delete complete for this round. Succeeded: {succeeded}. Failed: {failed}. Skipped: {skipped}') + print(f'👷 Auth entity delete complete for this round. Succeeded: {succeeded}. Failed: {failed}. Skipped: {skipped}') def filter_none(values: list) -> list: @@ -439,20 +459,20 @@ def execute_query(conn: Connection, template: dict) -> dict: :param template: A dictionary specifying the query structure. Expected keys in `template` include: - + - **source** (`str`): The table to query. - - **columns** (`list[str]`, optional): The columns to select from the `source` table. + - **columns** (`list[str]`, optional): The columns to select from the `source` table. Defaults to `['id']`. - - **params** (`dict`, optional): A dictionary with filter conditions + - **params** (`dict`, optional): A dictionary with filter conditions for the query. Defaults to `None`. - - **targets** (`list[str]`, optional): A list of tables where the results will be mapped + - **targets** (`list[str]`, optional): A list of tables where the results will be mapped to targets for delete operations. Defaults to `[source]`. :return: A dictionary containing the mapping results. The format is: `{ 'target_table_name': [id1, id2, ...] 
}` - - where each `target_table_name` is a table specified in `targets` or the origin table of a + + where each `target_table_name` is a table specified in `targets` or the origin table of a `_version` table. The associated value is a list of IDs for records to delete in that table. """ @@ -474,7 +494,10 @@ def execute_query(conn: Connection, template: dict) -> dict: else: # now only consider str and int in the list v_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), filter_none(v))) - query += f' AND {k} IN ({v_str})' + if v_str: + query += f' AND {k} IN ({v_str})' + else: + query += ' AND 1 != 1' results = conn.execute(text(query)) @@ -482,7 +505,7 @@ def execute_query(conn: Connection, template: dict) -> dict: if not rows: # if source table is version table and has no record, then won't generate plan for origin table ret = {t: [] for t in targets} - else: + else: cols = zip(*rows) ret = defaultdict(list) for t, c in zip(targets, cols): @@ -492,7 +515,7 @@ def execute_query(conn: Connection, template: dict) -> dict: if (origin := (t.rsplit('_version', 1)[0])) != t: ret[origin].extend(c) ret[t].extend(c) - + return ret @@ -506,8 +529,9 @@ def execute_delete_plan(conn: Connection, table: str, ids: list): @task(persist_result=False) def delete_by_ids(conn: Connection, table_name: str, ids: list, id_name: str = 'id'): + ids = filter_none(ids) if ids: - ids_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), filter_none(ids))) + ids_str = ', '.join(map(lambda x: f'\'{x}\'' if isinstance(x, str) else str(x), ids)) query_str = f'DELETE FROM {table_name} WHERE {id_name} IN ({ids_str})' query = text(query_str) results = conn.execute(query, {'ids': ids}) diff --git a/data-tool/flows/common/auth_service.py b/data-tool/flows/common/auth_service.py index 5fb5c9d357..faacb9db68 100644 --- a/data-tool/flows/common/auth_service.py +++ b/data-tool/flows/common/auth_service.py @@ -128,7 +128,7 @@ def create_entity(cls, 
timeout=cls.get_time_out(config) ) - if entity_record.status_code != HTTPStatus.OK: + if entity_record.status_code not in (HTTPStatus.ACCEPTED, HTTPStatus.CREATED): return HTTPStatus.BAD_REQUEST return HTTPStatus.OK @@ -200,3 +200,43 @@ def delete_affiliation(cls, config, account: int, business_registration: str) -> or entity_record.status_code not in (HTTPStatus.OK, HTTPStatus.NO_CONTENT): return HTTPStatus.BAD_REQUEST return HTTPStatus.OK + + @classmethod + def update_contact_email(cls, config, identifier: str, email: str) -> Dict: + """Update contact email of the business.""" + token = cls.get_bearer_token(config) + auth_url = config.AUTH_SVC_URL + account_svc_entity_url = f'{auth_url}/entities' + + # Create an entity record + data = { + 'email': email, + 'phone': '', + 'phoneExtension': '' + } + + rv = requests.post( + url=f'{account_svc_entity_url}/{identifier}/contacts', + headers={ + **cls.CONTENT_TYPE_JSON, + 'Authorization': cls.BEARER + token + }, + data=json.dumps(data), + timeout=cls.get_time_out(config) + ) + + if (rv.status_code == HTTPStatus.BAD_REQUEST and 'DATA_ALREADY_EXISTS' in rv.text): + rv = requests.put( + url=f'{account_svc_entity_url}/{identifier}/contacts', + headers={ + **cls.CONTENT_TYPE_JSON, + 'Authorization': cls.BEARER + token + }, + data=json.dumps(data), + timeout=cls.get_time_out(config) + ) + + if rv.status_code in (HTTPStatus.OK, HTTPStatus.CREATED): + return HTTPStatus.OK + + return rv.status_code diff --git a/data-tool/flows/common/corp_processing_queue_service.py b/data-tool/flows/common/corp_processing_queue_service.py index eecd621b00..cc348149c5 100644 --- a/data-tool/flows/common/corp_processing_queue_service.py +++ b/data-tool/flows/common/corp_processing_queue_service.py @@ -8,6 +8,7 @@ class ProcessingStatuses(str, Enum): PROCESSING = 'PROCESSING' COMPLETED = 'COMPLETED' FAILED = 'FAILED' + PARTIAL = 'PARTIAL' class CorpProcessingQueueService: def __init__(self, environment: str, db_engine, flow_name: str): @@ -87,7 
+88,7 @@ def claim_batch(self, flow_run_id: str, batch_size: int) -> List[str]: """ query = """ WITH claimable AS ( - SELECT corp_num + SELECT corp_num, id FROM corp_processing WHERE processed_status = :pending_status AND environment = :environment @@ -103,6 +104,7 @@ def claim_batch(self, flow_run_id: str, batch_size: int) -> List[str]: last_modified = NOW() FROM claimable WHERE corp_processing.corp_num = claimable.corp_num + AND corp_processing.id = claimable.id RETURNING corp_processing.corp_num, corp_processing.claimed_at """ diff --git a/data-tool/flows/config.py b/data-tool/flows/config.py index 534b2e79fe..fa13f0e4e0 100644 --- a/data-tool/flows/config.py +++ b/data-tool/flows/config.py @@ -61,6 +61,9 @@ class _Config(): # pylint: disable=too-few-public-methods else: AFFILIATE_ENTITY_ACCOUNT_ID = None + USE_CUSTOM_CONTACT_EMAIL = os.getenv('USE_CUSTOM_CONTACT_EMAIL', 'False') == 'True' + CUSTOM_CONTACT_EMAIL = os.getenv('CUSTOM_CONTACT_EMAIL', '') + # POSTGRESQL COLIN MIGRATION DB DB_USER_COLIN_MIGR = os.getenv('DATABASE_USERNAME_COLIN_MIGR', '') DB_PASSWORD_COLIN_MIGR = os.getenv('DATABASE_PASSWORD_COLIN_MIGR', '') @@ -139,6 +142,11 @@ class _Config(): # pylint: disable=too-few-public-methods TOMBSTONE_BATCH_SIZE = os.getenv('TOMBSTONE_BATCH_SIZE') TOMBSTONE_BATCH_SIZE = int(TOMBSTONE_BATCH_SIZE) if TOMBSTONE_BATCH_SIZE.isnumeric() else 0 + # verify flow + VERIFY_BATCH_SIZE = os.getenv('VERIFY_BATCH_SIZE') + VERIFY_BATCH_SIZE = int(VERIFY_BATCH_SIZE) if VERIFY_BATCH_SIZE.isnumeric() else 0 + VERIFY_SUMMARY_PATH = os.getenv('VERIFY_SUMMARY_PATH') + TESTING = False DEBUG = False diff --git a/data-tool/flows/corps_tombstone_flow.py b/data-tool/flows/corps_tombstone_flow.py index bfbb06c49e..5ec57c67c9 100644 --- a/data-tool/flows/corps_tombstone_flow.py +++ b/data-tool/flows/corps_tombstone_flow.py @@ -1,3 +1,7 @@ +import contextlib +from http import HTTPStatus +from pathlib import Path + import math from datetime import datetime, timedelta @@ -7,6 +11,9 @@ 
from prefect import flow, task, serve from prefect.futures import wait from prefect.context import get_run_context +from prefect.task_runners import ConcurrentTaskRunner +from prefect.states import Failed +from prefect_dask import DaskTaskRunner from sqlalchemy import Connection, text from sqlalchemy.engine import Engine @@ -18,7 +25,7 @@ from tombstone.tombstone_utils import (build_epoch_filing, format_users_data, formatted_data_cleanup, get_data_formatters, load_data, - unsupported_event_file_types, + all_unsupported_types, update_data) @@ -43,7 +50,7 @@ def reserve_unprocessed_corps(config, processing_service, flow_run_id, num_corps @task def get_unprocessed_count(config, colin_engine: Engine) -> int: query = get_total_unprocessed_count_query( - 'local', + 'tombstone-flow', config.DATA_LOAD_ENV ) @@ -60,7 +67,6 @@ def get_unprocessed_count(config, colin_engine: Engine) -> int: def get_corp_users(colin_engine: Engine, corp_nums: list) -> list[dict]: """Get user information.""" query = get_corp_users_query(corp_nums) - sql_text = text(query) with colin_engine.connect() as conn: @@ -102,6 +108,7 @@ def get_snapshot_filings_data(config, colin_engine: Engine, corp_num: str) -> di return raw_data + @task(name='2.2-Corp-Snapshot-Placeholder-Filings-Cleanup-Task') def clean_snapshot_filings_data(data: dict) -> dict: """Clean corp snapshot and placeholder filings data.""" @@ -117,12 +124,12 @@ def clean_snapshot_filings_data(data: dict) -> dict: @task(name='3.1-Corp-Snapshot-Migrate-Task') -def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: +def load_corp_snapshot(conn: Connection, tombstone_data: dict, users_mapper: dict) -> int: """Migrate corp snapshot.""" # Note: The business info is partially loaded for businesses table now. And it will be fully # updated by the following placeholder historical filings migration. But it depends on the # implementation of next step. 
- business_id = load_data(conn, 'businesses', tombstone_data['businesses']) + business_id = load_data(conn, 'businesses', tombstone_data['businesses'], 'identifier', conflict_error=True) for office in tombstone_data['offices']: office['offices']['business_id'] = business_id @@ -133,6 +140,7 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: address['office_id'] = office_id load_data(conn, 'addresses', address) + party_roles_map = {} for party in tombstone_data['parties']: mailing_address_id = None delivery_address_id = None @@ -146,12 +154,26 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: party['parties']['mailing_address_id'] = mailing_address_id party['parties']['delivery_address_id'] = delivery_address_id + source_full_name = party['parties']['cp_full_name'] + del party['parties']['cp_full_name'] party_id = load_data(conn, 'parties', party['parties']) for party_role in party['party_roles']: party_role['business_id'] = business_id party_role['party_id'] = party_id - load_data(conn, 'party_roles', party_role) + party_role_id = load_data(conn, 'party_roles', party_role, expecting_id=True) + + # Create a unique key for mapping + key = (source_full_name, party_role['role']) + party_roles_map[key] = party_role_id + + for office_held in tombstone_data.get('offices_held', []): + # Map to party_role_id using the key + key = (office_held['cp_full_name'], 'officer') + party_role_id = party_roles_map.get(key) + office_held['party_role_id'] = party_role_id + del office_held['cp_full_name'] + load_data(conn,'offices_held', office_held) for share_class in tombstone_data['share_classes']: share_class['share_classes']['business_id'] = business_id @@ -169,36 +191,115 @@ def load_corp_snapshot(conn: Connection, tombstone_data: dict) -> int: resolution['business_id'] = business_id load_data(conn, 'resolutions', resolution) + for comment in tombstone_data['comments']: + comment['business_id'] = business_id + username = 
comment['staff_id'] + staff_id = users_mapper.get(username) + comment['staff_id'] = staff_id + load_data(conn, 'comments', comment) + + if in_dissolution := tombstone_data['in_dissolution']: + batch = in_dissolution['batches'] + batch_id = load_data(conn, 'batches', batch) + batch_processing = in_dissolution['batch_processing'] + + batch_processing['batch_id'] = batch_id + batch_processing['business_id'] = business_id + load_data(conn, 'batch_processing', batch_processing) + + furnishing = in_dissolution['furnishings'] + furnishing['batch_id'] = batch_id + furnishing['business_id'] = business_id + load_data(conn, 'furnishings', furnishing) + return business_id -@task(name='3.2-Placeholder-Historical-Filings-Migrate-Task') +@task(name='3.2.1-Placeholder-Historical-Filings-Migrate-Task') def load_placeholder_filings(conn: Connection, tombstone_data: dict, business_id: int, users_mapper: dict): """Migrate placeholder historical filings.""" filings_data = tombstone_data['filings'] update_info = tombstone_data['updates'] state_filing_index = update_info['state_filing_index'] update_business_data = update_info['businesses'] + filing_ids_mapper = {} # load placeholder filings - for i, f in enumerate(filings_data): + for i, data in enumerate(filings_data): + f = data['filings'] transaction_id = load_data(conn, 'transaction', {'issued_at': datetime.utcnow().isoformat()}) username = f['submitter_id'] user_id = users_mapper.get(username) f['submitter_id'] = user_id f['transaction_id'] = transaction_id f['business_id'] = business_id + if (withdrawn_idx := f['withdrawn_filing_id']) is not None: + f['withdrawn_filing_id'] = filing_ids_mapper[withdrawn_idx] + filing_id = load_data(conn, 'filings', f) + filing_ids_mapper[i] = filing_id + + data['colin_event_ids']['filing_id'] = filing_id + load_data(conn, 'colin_event_ids', data['colin_event_ids'], expecting_id=False) if i == state_filing_index: update_info['businesses']['state_filing_id'] = filing_id + if jurisdiction := 
data['jurisdiction']: + jurisdiction['business_id'] = business_id + jurisdiction['filing_id'] = filing_id + load_data(conn, 'jurisdictions', jurisdiction) + + # load amalgamation snapshot linked to the current filing + if amalgamation_data := data['amalgamations']: + load_amalgamation_snapshot(conn, amalgamation_data, business_id, filing_id) + + if comments_data := data['comments']: + for comment in comments_data: + comment['business_id'] = business_id + comment['filing_id'] = filing_id + username = comment['staff_id'] + staff_id = users_mapper.get(username) + comment['staff_id'] = staff_id + load_data(conn, 'comments', comment) + + if cco_data := data['consent_continuation_out']: + cco_data['business_id'] = business_id + cco_data['filing_id'] = filing_id + load_data(conn, 'consent_continuation_outs', cco_data) + # load epoch filing epoch_filing_data = build_epoch_filing(business_id) load_data(conn, 'filings', epoch_filing_data) # load updates for business if update_business_data: - update_data(conn, 'businesses', update_business_data, business_id) + update_data(conn, 'businesses', update_business_data, 'id', business_id) + + +@task(name='3.2.2-Amalgamation-Snapshot-Migrate-Task') +def load_amalgamation_snapshot(conn: Connection, amalgamation_data: dict, business_id: int, filing_id: int): + """Migrate amalgamation snapshot.""" + amalgamation = amalgamation_data['amalgamations'] + amalgamation['business_id'] = business_id + amalgamation['filing_id'] = filing_id + amalgamation_id = load_data(conn, 'amalgamations', amalgamation) + + for ting in amalgamation_data['amalgamating_businesses']: + if ting_identifier := ting.get('ting_identifier'): + # TODO: avoid update info for withdrawn amalg filing (will handle in NoW work) + # TING must exists in db before updating state filing info, + del ting['ting_identifier'] + temp_ting = { + 'identifier': ting_identifier, + 'state_filing_id': filing_id, + 'dissolution_date': amalgamation['amalgamation_date'] + } + ting_business_id 
= update_data(conn, 'businesses', temp_ting, 'identifier', ting_identifier) + if not ting_business_id: + raise Exception(f'TING {ting_identifier} does not exist, cannot migrate TED before TING') + ting['business_id'] = ting_business_id + ting['amalgamation_id'] = amalgamation_id + load_data(conn, 'amalgamating_businesses', ting) @task(name='3.3-Update-Auth-Task') @@ -209,22 +310,44 @@ def update_auth(conn: Connection, config, corp_num: str, tombstone_data: dict): if config.AFFILIATE_ENTITY: business_data = tombstone_data['businesses'] account_id = config.AFFILIATE_ENTITY_ACCOUNT_ID - AuthService.create_affiliation( + affiliation_status = AuthService.create_affiliation( config=config, account=account_id, business_registration=business_data['identifier'], business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) + if affiliation_status != HTTPStatus.OK: + with contextlib.suppress(Exception): + AuthService.delete_affiliation( + config=config, + account=account_id, + business_registration=business_data['identifier']) + raise Exception(f"""Failed to affiliate business {business_data['identifier']}""") if config.UPDATE_ENTITY: business_data = tombstone_data['businesses'] - AuthService.create_entity( + entity_status = AuthService.create_entity( config=config, business_registration=business_data['identifier'], business_name=business_data['legal_name'], corp_type_code=business_data['legal_type'] ) + admin_email = tombstone_data.get('admin_email') + if config.USE_CUSTOM_CONTACT_EMAIL: + admin_email = config.CUSTOM_CONTACT_EMAIL + + if entity_status == HTTPStatus.OK and admin_email: + update_email_status = AuthService.update_contact_email( + config=config, + identifier=business_data['identifier'], + email=admin_email + ) + if update_email_status != HTTPStatus.OK: + raise Exception(f"""Failed to update admin email in auth {business_data['identifier']}""") + else: + raise Exception(f"""Failed to create entity in auth 
{business_data['identifier']}""") + @task(name='1-Migrate-Corp-Users-Task') def migrate_corp_users(colin_engine: Engine, lear_engine: Engine, corp_nums: list) -> dict: @@ -236,7 +359,7 @@ def migrate_corp_users(colin_engine: Engine, lear_engine: Engine, corp_nums: lis print(f'👷 Complete collecting and migrating users for {len(corp_nums)} corps: {", ".join(corp_nums[:5])}...') except Exception as e: print(f'❌ Error collecting and migrating users: {repr(e)}') - return None + raise e return users_mapper @@ -254,7 +377,7 @@ def get_tombstone_data(config, colin_engine: Engine, corp_num: str) -> tuple[str return corp_num, clean_data except Exception as e: print(f'❌ Error collecting corp snapshot and filings data for {corp_num}: {repr(e)}') - return corp_num, None + return corp_num, e @task(name='3-Corp-Tombstone-Migrate-Task-Async') @@ -266,26 +389,33 @@ def migrate_tombstone(config, lear_engine: Engine, corp_num: str, clean_data: di with lear_engine.connect() as lear_conn: transaction = lear_conn.begin() try: - business_id = load_corp_snapshot(lear_conn, clean_data) + business_id = load_corp_snapshot(lear_conn, clean_data, users_mapper) load_placeholder_filings(lear_conn, clean_data, business_id, users_mapper) update_auth(lear_conn, config, corp_num, clean_data) transaction.commit() except Exception as e: transaction.rollback() + print(f'❌ Error migrating corp snapshot and filings data for {corp_num}: {repr(e)}') return corp_num, e print(f'✅ Complete migrating {corp_num}!') - return corp_num, None + + additional_info = clean_data['unsupported_types'] + return corp_num, additional_info @flow( name='Corps-Tombstone-Migrate-Flow', log_prints=True, - persist_result=False + persist_result=False, + # use ConcurrentTaskRunner when using work pool based deployments + # task_runner=ConcurrentTaskRunner(max_workers=100) + # task_runner=DaskTaskRunner(cluster_kwargs={"n_workers": 3, "threads_per_worker": 2}) ) def tombstone_flow(): """Entry of tombstone pipeline""" # TODO: track 
migration progress + error handling # TODO: update unprocessed query + count query + # TODO: current pipeline doesn't support migrating TED & TING at the same time, need a better strategy try: config = get_config() colin_engine = colin_init(config) @@ -304,13 +434,15 @@ def tombstone_flow(): # Calculate max corps to initialize max_corps = min(total, config.TOMBSTONE_BATCHES * config.TOMBSTONE_BATCH_SIZE) - print(f'max_corps: {max_corps}') + print(f'👷 max_corps: {max_corps}') reserved_corps = reserve_unprocessed_corps(config, processing_service, flow_run_id, max_corps) print(f'👷 Reserved {reserved_corps} corps for processing') print(f'👷 Going to migrate {total} corps with batch size of {batch_size}') cnt = 0 migrated_cnt = 0 + total_corp_failed = 0 + is_user_failed = False while cnt < batches: # Claim next batch of reserved corps for current flow corp_nums = processing_service.claim_batch(flow_run_id, batch_size) @@ -320,12 +452,20 @@ def tombstone_flow(): print(f'👷 Start processing {len(corp_nums)} corps: {", ".join(corp_nums[:5])}...') - users_mapper = migrate_corp_users(colin_engine, lear_engine, corp_nums) - - # TODO: skip the following migration or continue? 
- if users_mapper is None: - print(f'❗ Skip populating user info for corps in this round due to user migration error.') - users_mapper = {} + try: + users_mapper = migrate_corp_users(colin_engine, lear_engine, corp_nums) + except Exception as e: + # skip migration if there's user migration error + print('❗ Skip corp migration in this round due to user migration error.') + for corp_num in corp_nums: + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.FAILED, + error=f'Failed due to user migration error in round {cnt}: {repr(e)}' + ) + is_user_failed = True + continue data_futures = [] for corp_num in corp_nums: @@ -334,44 +474,65 @@ def tombstone_flow(): ) corp_futures = [] - skipped = 0 + failed = 0 for f in data_futures: corp_num, clean_data = f.result() - if clean_data: + if clean_data and not isinstance(clean_data, Exception): corp_futures.append( migrate_tombstone.submit(config, lear_engine, corp_num, clean_data, users_mapper) ) else: - skipped += 1 + failed += 1 + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.FAILED, + error=f'Failed due to data collection error: {repr(clean_data)}' + ) print(f'❗ Skip migrating {corp_num} due to data collection error.') wait(corp_futures) - + complete = 0 + partial = 0 for f in corp_futures: corp_num, e = f.result() if not e: + complete += 1 processing_service.update_corp_status( flow_run_id, corp_num, ProcessingStatuses.COMPLETED ) - else: + elif isinstance(e, Exception): # Handle error case if needed + failed += 1 processing_service.update_corp_status( flow_run_id, corp_num, ProcessingStatuses.FAILED, - error=f"Migration failed - {repr(e)}" + error=f'Failed due to {repr(e)}' + ) + else: + partial += 1 + processing_service.update_corp_status( + flow_run_id, + corp_num, + ProcessingStatuses.PARTIAL, + error=f"Partial due to unsupported event_file types: {', '.join(e)}" ) - succeeded = sum(1 for f in corp_futures if f.state.is_completed()) - failed = 
len(corp_futures) - succeeded - print(f'🌟 Complete round {cnt}. Succeeded: {succeeded}. Failed: {failed}. Skip: {skipped}') + total_corp_failed += failed + print(f'🌟 Complete round {cnt}. Complete: {complete}. Partial: {partial}. Failed: {failed}.') cnt += 1 - migrated_cnt += succeeded + migrated_cnt += complete + partial print(f'🌰 Complete {cnt} rounds, migrate {migrated_cnt} corps.') - print(f"🌰 All unsupport event file types: {', '.join(unsupported_event_file_types)}") + print(f"🌰 All unsupport event file types: {', '.join(all_unsupported_types)}") + + if is_user_failed: + return Failed(message='Failed due to user migration error.') + if total_corp_failed > 0: + return Failed(message=f'{total_corp_failed} corps failed due to corp migration error.') except Exception as e: raise e @@ -389,3 +550,51 @@ def tombstone_flow(): # # # Start serving the deployment # serve(deployment) + + + # Work pool based deployments + # + # Only one of deployments 1-3 should be running at any given time. + # + # Note: the following deployment is used strictly for maximizing local resource usage for production + # dry runs and the actual final tombstone migration to the production environment. If there is no need + # to run multiple parallel flows, the following set-ups are not req'd. + + # flow_source = Path(__file__).parent + + # # 1. TINGs deployment setup + # # subquery = subqueries[1] + # # ensure "and cs.state_type_cd = 'ACT'" is commented out as TINGS are historical + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-tings-deployment", + # tags=["tombstone-tings-migration"], + # work_pool_name="tombstone-tings-pool", + # interval=timedelta(seconds=60) # Run every x seconds + # ) + + # # 2. 
TEDs deployment setup + # # subquery = subqueries[2] + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-teds-deployment", + # tags=["tombstone-teds-migration"], + # work_pool_name="tombstone-teds-pool", + # interval=timedelta(seconds=60) # Run every x seconds + # ) + + # # 3. OTHERs deployment setup + # # subquery = subqueries[3] + # tombstone_flow.from_source( + # source=flow_source, + # entrypoint="corps_tombstone_flow.py:tombstone_flow" + # ).deploy( + # name="tombstone-deployment", + # tags=["tombstone-migration"], + # work_pool_name="tombstone-pool", + # interval=timedelta(seconds=70) # Run every x seconds + # ) diff --git a/data-tool/flows/corps_verify_flow.py b/data-tool/flows/corps_verify_flow.py new file mode 100644 index 0000000000..4048a34922 --- /dev/null +++ b/data-tool/flows/corps_verify_flow.py @@ -0,0 +1,105 @@ +import math + +import pandas as pd +from common.init_utils import colin_init, get_config, lear_init +from prefect import flow, task +from sqlalchemy import Engine, text + + +# TODO: adjust clause in different phases +where_clause = """ +1 = 1 +""" + +colin_cnt_query = f""" + SELECT COUNT(*) FROM corporation c WHERE {where_clause} + """ + +colin_query = f""" + SELECT corp_num FROM corporation c WHERE {where_clause} ORDER BY corp_num LIMIT :limit OFFSET :offset +""" + +lear_query = f""" + SELECT colin_corps.identifier FROM UNNEST(ARRAY[:identifiers]) AS colin_corps(identifier) + LEFT JOIN businesses b on colin_corps.identifier = b.identifier + WHERE b.identifier IS NULL +""" + + +@task(name='1-Count') +def get_verify_count(colin_engine: Engine) -> int: + with colin_engine.connect() as colin_conn: + rs = colin_conn.execute(text(colin_cnt_query)) + total = rs.scalar() + return total + + +@task(name='2-Verify') +def verify(colin_engine: Engine, lear_engine: Engine, limit: int, offset: int) -> list: + + identifiers = None + + with colin_engine.connect() 
as colin_conn: + rs = colin_conn.execute(text(colin_query), {'limit': limit, 'offset': offset}) + colin_results = rs.fetchall() + identifiers = [row[0] for row in colin_results] + + if identifiers: + with lear_engine.connect() as lear_conn: + rs = lear_conn.execute(text(lear_query), {'identifiers': identifiers}) + lear_results = rs.fetchall() + missing = [row[0] for row in lear_results] + return missing + + return [] + + +@flow( + name='Corps-Tombstone-Verify-Flow', + log_prints=True, + persist_result=False, +) +def verify_flow(): + try: + config = get_config() + colin_engine = colin_init(config) + lear_engine = lear_init(config) + + total = get_verify_count(colin_engine) + + if config.VERIFY_BATCH_SIZE <= 0: + raise ValueError('VERIFY_BATCH_SIZE must be explicitly set to a positive integer') + batch_size = config.VERIFY_BATCH_SIZE + batches = math.ceil(total/batch_size) + + print(f'🚀 Verifying {total} busiesses...') + + cnt = 0 + offset = 0 + results = [] + futures = [] + while cnt < batches: + print(f'🚀 Running {cnt} round...') + futures.append(verify.submit(colin_engine, lear_engine, batch_size, offset)) + offset += batch_size + cnt += 1 + + for f in futures: + r = f.result() + results.extend(r) + + print(f'🌟 Complete round {cnt}') + + if summary_path:=config.VERIFY_SUMMARY_PATH: + df = pd.DataFrame(results, columns=['identifier']) + df.to_csv(summary_path, index=False) + print(f"🌰 Save {len(results)} corps which meet the selection criteria but don't exsit in LEAR to {summary_path}") + else: + print(f"🌰 {len(results)} corps which meet the selection criteria don't exsit in LEAR: {results}") + + except Exception as e: + raise e + + +if __name__ == '__main__': + verify_flow() diff --git a/data-tool/flows/tombstone/tombstone_base_data.py b/data-tool/flows/tombstone/tombstone_base_data.py index cd3a2d247a..362f246ce6 100644 --- a/data-tool/flows/tombstone/tombstone_base_data.py +++ b/data-tool/flows/tombstone/tombstone_base_data.py @@ -17,6 +17,28 @@ } +# ======== 
user ======== +USER = { + 'username': None, + 'firstname': None, + 'middlename': None, + 'lastname': None, + 'email': None, + 'creation_date': None +} + + +# ======== comment ======== +COMMENT = { + 'comment': None, + 'timestamp': None, + # FK + 'business_id': None, + 'staff_id': None, + 'filing_id': None +} + + # ======== address ======== ADDRESS = { 'address_type': None, # mailing or delivery @@ -81,6 +103,10 @@ ] } +OFFICES_HELD = { + 'party_role_id': None, + 'title': None # enum +} # ======== share structure (composite) ======== # insert: share_class -> share_series(if any) @@ -104,6 +130,7 @@ 'par_value_flag': False, 'par_value': None, # float 'currency': None, + 'currency_additional': None, 'special_rights_flag': False, # FK 'business_id': None @@ -132,16 +159,20 @@ } -# ======== filing ======== -USER = { - 'username': None, - 'firstname': None, - 'middlename': None, - 'lastname': None, - 'email': None, - 'creation_date': None +# ======== jurisdiction ======== +JURISDICTION = { + 'country': None, + 'region': None, + 'identifier': None, + 'legal_name': None, + 'tax_id': None, + 'incorporation_date': None, # date + 'expro_identifier': None, + 'expro_legal_name': None, } + +# ======== filing ======== FILING_JSON = { 'filing': { 'header': {} @@ -149,25 +180,34 @@ } FILING = { - 'filing_date': None, # timestamptz - 'filing_json': FILING_JSON, - 'filing_type': None, - 'filing_sub_type': None, - 'status': 'COMPLETED', - 'completion_date': None, # timestamptz - 'effective_date': None, # timestamptz - 'meta_data': None, - # default values for now - 'paper_only': True, - 'source': 'COLIN', - 'colin_only': False, - 'deletion_locked': False, - # FK - 'business_id': None, - 'transaction_id': None, - 'submitter_id': None, - # others - 'submitter_roles': None, + 'filings': { + 'filing_date': None, # timestamptz + 'filing_json': FILING_JSON, + 'filing_type': None, + 'filing_sub_type': None, + 'status': 'COMPLETED', + 'completion_date': None, # timestamptz + 'effective_date': 
None, # timestamptz + 'meta_data': None, + # default values for now + 'paper_only': True, + 'source': 'COLIN', + 'colin_only': False, + 'deletion_locked': False, + 'hide_in_ledger': False, # TODO: double check when doing cleanup - dissolution (invol, admin) + 'withdrawal_pending': False, + # FK + 'business_id': None, + 'transaction_id': None, + 'submitter_id': None, + 'withdrawn_filing_id': None, + # others + 'submitter_roles': None, + }, + 'jurisdiction': None, # optional + 'amalgamations': None, # optional + 'comments': None, # optional + 'colin_event_ids': None } FILING_COMBINED = { @@ -175,7 +215,78 @@ 'update_business_info': { # business info to update }, - 'state_filing_index': -1 + 'state_filing_index': -1, + 'unsupported_types': None, +} + +AMALGAMATION = { + 'amalgamations': { + 'amalgamation_date': None, + 'court_approval': None, + 'amalgamation_type': None, + # FK + 'business_id': None, + 'filing_id': None, + }, + 'amalgamating_businesses': [] +} + +AMALGAMTING_BUSINESS = { + 'foreign_jurisdiction': None, + 'foreign_name': None, + 'foreign_identifier': None, + 'role': None, + 'foreign_jurisdiction_region': None, + # FK + 'business_id': None, + 'amalgamation_id': None, +} + + +# ======== in_dissoluion ======== +BATCH = { + 'batch_type': 'INVOLUNTARY_DISSOLUTION', + 'status': 'PROCESSING', + 'size': 1, + 'max_size': 1, + 'start_date': None, # timestamptz, required + 'notes': 'Import from COLIN', +} + +BATCH_PROCESSING = { + 'business_identifier': None, + 'step': None, + 'meta_data': None, + 'created_date': None, # timestamptz, required + 'last_modified': None, # timestamptz, required + 'trigger_date': None, # timestamptz + 'status': 'PROCESSING', + 'notes': 'Import from COLIN', + # FK + 'batch_id': None, + 'business_id': None, +} + +FURNISHING = { + 'business_identifier': None, + 'furnishing_type': None, + 'furnishing_name': None, + 'meta_data': None, + 'created_date': None, # timestamptz, required + 'last_modified': None, # timestamptz, required + 
'processed_date': None, # timestamptz + 'status': 'PROCESSED', + 'notes': 'Import from COLIN', + # FK + 'batch_id': None, + 'business_id': None, + +} + +IN_DISSOLUTION = { + 'batches': BATCH, + 'batch_processing': BATCH_PROCESSING, + 'furnishings': FURNISHING, } @@ -188,8 +299,11 @@ 'aliases': [ALIAS], 'resolutions': [RESOLUTION], 'filings': [FILING], + 'comments': [COMMENT], + 'in_dissolution': IN_DISSOLUTION, 'updates': { 'businesses': BUSINESS, 'state_filing_index': -1 - } + }, + 'unsupported_types': None, } diff --git a/data-tool/flows/tombstone/tombstone_mappings.py b/data-tool/flows/tombstone/tombstone_mappings.py index ebd6ed059b..0b63893f00 100644 --- a/data-tool/flows/tombstone/tombstone_mappings.py +++ b/data-tool/flows/tombstone/tombstone_mappings.py @@ -19,8 +19,9 @@ class EventFilings(str, Enum): FILE_AM_SS = 'FILE_AM_SS' # TODO: FILE_AM_AR = 'FILE_AM_AR' - # TODO: Amalgamation Out Consent - unsupported - # TODO: Amalgamation Out - unsupported + + FILE_IAMGO = 'FILE_IAMGO' + FILE_AMALO = 'FILE_AMALO' # Amalgamation Appliation FILE_AMALH = 'FILE_AMALH' @@ -35,6 +36,7 @@ class EventFilings(str, Enum): FILE_AMLRC = 'FILE_AMLRC' FILE_AMLVC = 'FILE_AMLVC' + # Annual Report FILE_ANNBC = 'FILE_ANNBC' @@ -60,14 +62,31 @@ class EventFilings(str, Enum): FILE_CONTU = 'FILE_CONTU' FILE_CONTC = 'FILE_CONTC' + # Conversion Ledger + FILE_CONVL = 'FILE_CONVL' + + # Conversion + CONVAMAL_NULL = 'CONVAMAL_NULL' + CONVCIN_NULL = 'CONVCIN_NULL' + CONVCOUT_NULL = 'CONVCOUT_NULL' + CONVDS_NULL = 'CONVDS_NULL' + CONVDSF_NULL = 'CONVDSF_NULL' + CONVDSL_NULL = 'CONVDSL_NULL' + CONVDSO_NULL = 'CONVDSO_NULL' + CONVICORP_NULL = 'CONVICORP_NULL' + CONVID1_NULL = 'CONVID1_NULL' + CONVID2_NULL = 'CONVID2_NULL' + CONVILIQ_NULL = 'CONVILIQ_NULL' + CONVLRSTR_NULL = 'CONVLRSTR_NULL' + CONVNC_NULL = 'CONVNC_NULL' + CONVRSTR_NULL = 'CONVRSTR_NULL' + # Correction FILE_CO_AR = 'FILE_CO_AR' FILE_CO_BC = 'FILE_CO_BC' FILE_CO_DI = 'FILE_CO_DI' FILE_CO_DO = 'FILE_CO_DO' FILE_CO_LI = 
'FILE_CO_LI' - FILE_CO_PF = 'FILE_CO_PF' - FILE_CO_PO = 'FILE_CO_PO' FILE_CO_RM = 'FILE_CO_RM' FILE_CO_RR = 'FILE_CO_RR' FILE_CO_SS = 'FILE_CO_SS' @@ -98,21 +117,28 @@ class EventFilings(str, Enum): FILE_ICORP = 'FILE_ICORP' FILE_ICORU = 'FILE_ICORU' FILE_ICORC = 'FILE_ICORC' - CONVICORP_NULL = 'CONVICORP_NULL' # TODO: may need to be removed - - # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported - FILE_AM_PF = 'FILE_AM_PF' - FILE_AM_PO = 'FILE_AM_PO' + ADCORP_NULL = 'ADCORP_NULL' + ADFIRM_NULL = 'ADFIRM_NULL' + ADMIN_NULL = 'ADMIN_NULL' FILE_AM_TR = 'FILE_AM_TR' # TODO: Liquidation - unsupported # FILE_ADCOL = 'FILE_ADCOL' - # Notice of Withdrawal + # TODO: Notice of Withdrawal - unsupported FILE_NWITH = 'FILE_NWITH' + # Put Back Off + SYSDL_NULL = 'SYSDL_NULL' + FILE_AM_PF = 'FILE_AM_PF' + FILE_CO_PF = 'FILE_CO_PF' + + # Put Back On + FILE_AM_PO = 'FILE_AM_PO' + FILE_CO_PO = 'FILE_CO_PO' + # Registrar's Notation FILE_REGSN = 'FILE_REGSN' @@ -135,7 +161,7 @@ class EventFilings(str, Enum): # TODO: # Other COLIN events: - # CONV*, Adim Corp (ADCORP, BNUPD, ADMIN), XPRO filing + # Adim Corp (ADCORP, BNUPD, ADMIN), XPRO filing # SYSDL, SYST # more legacyOther filings @@ -158,8 +184,8 @@ def has_value(cls, value): EventFilings.FILE_AM_RM: 'alteration', EventFilings.FILE_AM_SS: 'alteration', - # TODO: Amalgamation Out Consent - unsupported - # TODO: Amalgamation Out - unsupported + EventFilings.FILE_IAMGO: 'consentAmalgamationOut', + EventFilings.FILE_AMALO: 'amalgamationOut', EventFilings.FILE_AMALH: ['amalgamationApplication', 'horizontal'], EventFilings.FILE_AMALR: ['amalgamationApplication', 'regular'], @@ -189,13 +215,28 @@ def has_value(cls, value): EventFilings.FILE_CONTU: 'continuationIn', EventFilings.FILE_CONTC: 'continuationIn', + EventFilings.FILE_CONVL: 'conversionLedger', + + EventFilings.CONVAMAL_NULL: ['conversion', ('amalgamationApplication', 'unknown')], + EventFilings.CONVCIN_NULL: ['conversion', 'continuationIn'], + 
EventFilings.CONVCOUT_NULL: ['conversion', 'continuationOut'], + EventFilings.CONVDS_NULL: ['conversion', ('dissolution', 'voluntary')], + EventFilings.CONVDSF_NULL: ['conversion', ('dissolution', 'involuntary')], + EventFilings.CONVDSL_NULL: 'conversion', # TODO: liquidation + EventFilings.CONVDSO_NULL: ['conversion', ('dissolution', 'unknown')], + EventFilings.CONVICORP_NULL: 'conversion', + EventFilings.CONVID1_NULL: ['conversion', 'putBackOn'], # TODO: to confirm + EventFilings.CONVID2_NULL: ['conversion', 'putBackOn'], # TODO: to confirm + EventFilings.CONVILIQ_NULL: 'conversion', # TODO: liquidation + EventFilings.CONVLRSTR_NULL: ['conversion', ('restoration', 'limitedRestoration')], + EventFilings.CONVNC_NULL: ['conversion', 'changeOfName'], + EventFilings.CONVRSTR_NULL: ['conversion', ('restoration', 'fullRestoration')], + EventFilings.FILE_CO_AR: 'correction', EventFilings.FILE_CO_BC: 'correction', EventFilings.FILE_CO_DI: 'correction', EventFilings.FILE_CO_DO: 'correction', EventFilings.FILE_CO_LI: 'correction', - EventFilings.FILE_CO_PF: 'correction', - EventFilings.FILE_CO_PO: 'correction', EventFilings.FILE_CO_RM: 'correction', EventFilings.FILE_CO_RR: 'correction', EventFilings.FILE_CO_SS: 'correction', @@ -210,8 +251,8 @@ def has_value(cls, value): EventFilings.FILE_ADVD2: ['dissolution', 'voluntary'], EventFilings.FILE_ADVDS: ['dissolution', 'voluntary'], - EventFilings.DISLV_NULL: ['dissolution', 'voluntary'], - EventFilings.DISLC_NULL: ['dissolution', 'administrative'], + EventFilings.DISLV_NULL: ['dissolution', 'voluntary'], # TODO: re-map + EventFilings.DISLC_NULL: ['dissolution', 'administrative'], # TODO: re-map EventFilings.SYSDA_NULL: ['dissolution', 'administrative'], EventFilings.SYSDS_NULL: ['dissolution', 'administrative'], EventFilings.SYSDF_NULL: ['dissolution', 'involuntary'], @@ -220,18 +261,24 @@ def has_value(cls, value): EventFilings.FILE_ICORP: 'incorporationApplication', EventFilings.FILE_ICORU: 'incorporationApplication', 
EventFilings.FILE_ICORC: 'incorporationApplication', - EventFilings.CONVICORP_NULL: 'incorporationApplication', - # TODO: Ledger - unsupported # TODO: Legacy Other - unsupported - EventFilings.FILE_AM_PF: 'legacyOther', - EventFilings.FILE_AM_PO: 'legacyOther', + EventFilings.ADCORP_NULL: 'legacyOther', + EventFilings.ADFIRM_NULL: 'legacyOther', + EventFilings.ADMIN_NULL: 'legacyOther', EventFilings.FILE_AM_TR: 'legacyOther', # TODO: Liquidation - unsupported EventFilings.FILE_NWITH: 'noticeOfWithdrawal', + EventFilings.SYSDL_NULL: 'putBackOff', + EventFilings.FILE_AM_PF: 'putBackOff', + EventFilings.FILE_CO_PF: 'putBackOff', + + EventFilings.FILE_AM_PO: 'putBackOn', + EventFilings.FILE_CO_PO: 'putBackOn', + EventFilings.FILE_REGSN: 'registrarsNotation', EventFilings.FILE_REGSO: 'registrarsOrder', @@ -249,14 +296,179 @@ def has_value(cls, value): } +EVENT_FILING_DISPLAY_NAME_MAPPING = { + EventFilings.FILE_AGMDT: 'Notice of Change - AGM Date', + EventFilings.FILE_AGMLC: 'Notice of Change - AGM Location', + + EventFilings.FILE_NOALA: 'Notice of Alteration', + EventFilings.FILE_NOALB: 'Notice of Alteration from a BC Unlimited Liability Company to Become a BC Company', + EventFilings.FILE_NOALU: 'Notice of Alteration from a BC Company to Become a BC Unlimited Liability Company', + EventFilings.FILE_NOALC: 'Notice of Alteration from a BC Company to Become a Community Contribution Company', + EventFilings.FILE_AM_BC: 'Amendment - Translated Name', + EventFilings.FILE_AM_LI: 'Amendment - Ledger Information', + EventFilings.FILE_AM_RM: 'Amendment - Receiver or Receiver Manager', + EventFilings.FILE_AM_SS: 'Amendment - Share Structure', + + EventFilings.FILE_IAMGO: 'Application For Authorization For Amalgamation (into a Foreign Corporation) with 6 months consent granted', + EventFilings.FILE_AMALO: 'Record of Amalgamation', + + EventFilings.FILE_AMALH: 'Amalgamation Application Short Form (Horizontal)', + EventFilings.FILE_AMALR: 'Amalgamation Application (Regular)', + 
EventFilings.FILE_AMALV: 'Amalgamation Application Short Form (Vertical)', + EventFilings.FILE_AMLHU: 'Amalgamation Application Short Form (Horizontal) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLRU: 'Amalgamation Application (Regular) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLVU: 'Amalgamation Application Short Form (Vertical) for a BC Unlimited Liability Company', + EventFilings.FILE_AMLHC: 'Amalgamation Application Short Form (Horizontal) for a Community Contribution Company', + EventFilings.FILE_AMLRC: 'Amalgamation Application (Regular) for a Community Contribution Company', + EventFilings.FILE_AMLVC: 'Amalgamation Application Short Form (Vertical) for a Community Contribution Company', + + EventFilings.FILE_ANNBC: 'BC Annual Report', # has suffix of date, dynamically add it during formatting + + EventFilings.FILE_APTRA: 'Application to Transfer Registered Office', + EventFilings.FILE_NOERA: 'Notice of Elimination of Registered Office', + EventFilings.FILE_NOCAD: 'Notice of Change of Address', + EventFilings.FILE_AM_DO: 'Amendment - Dissolved Office', + EventFilings.FILE_AM_RR: 'Amendment - Registered and Records Offices', + + EventFilings.FILE_NOCDR: 'Notice of Change of Directors', # dynamically add suffix for some scenarios + EventFilings.FILE_AM_DI: 'Amendment - Director', + + EventFilings.FILE_CONTO: '6 Months Consent to Continue Out', + EventFilings.FILE_COUTI: 'Instrument of Continuation Out', + + EventFilings.FILE_CONTI: 'Continuation Application', + EventFilings.FILE_CONTU: 'Continuation Application for a BC Unlimited Liability Company', + EventFilings.FILE_CONTC: 'Continuation Application for a Community Contribution Company', + + EventFilings.FILE_CO_AR: 'Correction - Annual Report', + EventFilings.FILE_CO_BC: 'Correction - BC Company Name/Translated Name', + EventFilings.FILE_CO_DI: 'Correction - Director', + EventFilings.FILE_CO_DO: 'Correction - Dissolved Office', + EventFilings.FILE_CO_LI: 'Correction - 
Ledger Information', + EventFilings.FILE_CO_RM: 'Correction - Receiver or Receiver Manager', + EventFilings.FILE_CO_RR: 'Correction - Registered and Records Offices', + EventFilings.FILE_CO_SS: 'Correction - Share Structure', + EventFilings.FILE_CO_TR: 'Correction - Transition', + EventFilings.FILE_CORRT: 'Correction', + + EventFilings.FILE_COURT: 'Court Order', + + # TODO: Delay of Dissolution - unsupported (need confirmation) + # no ledger item in colin + + EventFilings.DISD1_DISDE: "Registrar's Notation - Dissolution or Cancellation Delay", # has prefix "Registrar's Notation - " + EventFilings.DISD2_DISDE: "Registrar's Notation - Dissolution or Cancellation Delay", + + EventFilings.FILE_ADVD2: 'Application for Dissolution (Voluntary Dissolution)', + EventFilings.FILE_ADVDS: 'Application for Dissolution (Voluntary Dissolution)', + EventFilings.DISLV_NULL: None, # TODO: re-map, voluntary - no ledger in colin + status liquidated + EventFilings.DISLC_NULL: None, # TODO: re-map, admin - no ledger in colin + status liquidated + EventFilings.SYSDA_NULL: None, # admin - status Administrative Dissolution + EventFilings.SYSDS_NULL: None, # admin - status Administrative Dissolution + EventFilings.SYSDF_NULL: None, # invol - no ledger in lear & colin + EventFilings.SYSDT_NULL: None, # invol - no ledger in lear & colin + + EventFilings.FILE_ICORP: 'Incorporation Application', + EventFilings.FILE_ICORU: 'Incorporation Application for a BC Unlimited Liability Company', + EventFilings.FILE_ICORC: 'Incorporation Application for a Community Contribution Company', + + # TODO: Legacy Other - unsupported + EventFilings.ADCORP_NULL: None, + EventFilings.ADFIRM_NULL: None, + EventFilings.ADMIN_NULL: None, + EventFilings.FILE_AM_TR: 'Amendment - Transition', + + # TODO: Liquidation - unsupported (need to check if anything missing) + # NOLDS: "Notice of Location of Dissolved Company's Records" + # NOCDS: "Notice of Change Respecting Dissolved Company's Records" + # NOTRA: 'Notice of 
Transfer of Records' + # NOAPL: 'Notice of Appointment of Liquidator' + # NOCAL: 'Notice of Change of Address of Liquidator And/Or Liquidation Records Office' + # NOCEL: 'Notice of Ceasing to Act as Liquidator' + # LIQUR: 'Liquidation Report' + # LQWOS: 'Notice of Withdrawal Statement of Intent to Liquidate' + # NOARM: 'Notice of Appointment of Receiver or Receiver Manager' + # NOCER: 'Notice of Ceasing to Act as Receiver or Receiver Manager' + # LQSIN: 'Statement of Intent to Liquidate' + # LQSCO: 'Stay of Liquidation - Court Ordered' + # LQDIS: 'Discontinuance of Liquidation - Court Ordered' + # LQCON: 'Continuance of Liquidation - Court Ordered' + # NOCRM: 'Notice of Change of Address of Receiver or Receiver Manager' + # ADVLQ: 'Application for Dissolution (Voluntary Liquidation)' + # AM_LR: 'Amendment - Liquidation Report' + # CO_LR: 'Correction - Liquidation Report' + # AM_LQ: 'Amendment - Liquidator' + # CO_LQ: 'Correction - Liquidator' + + EventFilings.FILE_NWITH: 'Notice of Withdrawal', + + EventFilings.SYSDL_NULL: None, + EventFilings.FILE_AM_PF: 'Amendment - Put Back Off', + EventFilings.FILE_CO_PF: 'Correction - Put Back Off', + + EventFilings.FILE_AM_PO: 'Amendment - Put Back On', + EventFilings.FILE_CO_PO: 'Correction - Put Back On', + + EventFilings.FILE_REGSN: "Registrar's Notation", + EventFilings.FILE_REGSO: "Registrar's Order", + + EventFilings.FILE_RESTL: 'Restoration Application - Limited', + EventFilings.FILE_RESTF: 'Restoration Application - Full', + EventFilings.FILE_RESXL: 'Restoration Application (Extend Time Limit)', + EventFilings.FILE_RESXF: 'Restoration Application (Convert Limited to Full)', + EventFilings.FILE_RUSTL: 'Restoration Application - Limited for a BC Unlimited Liability Company', + EventFilings.FILE_RUSTF: 'Restoration Application - Full for a BC Unlimited Liability Company', + EventFilings.FILE_RUSXL: 'Restoration Application (Extend Time Limit) for a BC Unlimited Liability Company', + EventFilings.FILE_RUSXF: 'Restoration 
Application (Convert Limited to Full) for a BC Unlimited Liability Company', + + EventFilings.FILE_TRANS: 'Transition Application', + EventFilings.FILE_TRANP: 'Post Restoration Transition Application', +} + + +SKIPPED_EVENT_FILE_TYPES = [ + # XPRO + 'FILE_CHGJU', + 'FILE_NWPTA', + 'FILE_PARES', + 'FILE_TILAT', + 'FILE_TILHO', + 'FILE_TILMA', + 'SYST_CANPS', + 'SYST_CHGJU', + 'SYST_CHGPN', + 'SYST_CO_PN', + 'SYST_LNKPS', + 'SYST_NWPTA', + 'SYST_PARES', + 'SYST_RIPFL', + 'SYST_TILAT', + 'SYST_TILHO', + 'SYST_NULL', + 'TRESP_NULL', + 'TRESP_COUTI', + # Others + 'FILE_COGS1', + # TODO: decide on the final list +] + + +NO_FILING_EVENT_FILE_TYPES = [ + 'SYSD1_NULL', + 'SYSD2_NULL', + # TODO: decide on the final list +] + + LEAR_FILING_BUSINESS_UPDATE_MAPPING = { 'incorporationApplication': ['last_coa_date', 'last_cod_date'], 'changeOfAddress': ['last_coa_date'], 'changeOfDirectors': ['last_cod_date'], 'agmExtension': ['last_agm_date'], - # TODO: 'dissolution_date' - Amalgamating business, continuation out - # TODO: 'continuation_out_date' - continuation out + 'amalgamationApplication': ['last_coa_date', 'last_cod_date'], + 'continuationIn': ['last_coa_date', 'last_cod_date'], 'dissolution': ['dissolution_date'], + 'putBackOff': ['restoration_expiry_date', 'dissolution_date'], 'putBackOn': ['dissolution_date'], 'restoration': ['dissolution_date', 'restoration_expiry_date'], } @@ -265,11 +477,19 @@ def has_value(cls, value): LEAR_STATE_FILINGS = [ 'dissolution', 'restoration', + 'putBackOff', 'putBackOn', 'continuationOut', + 'amalgamationOut', # TODO: other state filings that lear doesn't support for now e.g. 
liquidation # ingore the following since we won't map to them # 'dissolved', 'restorationApplication', 'continuedOut' ] + +LEGAL_TYPE_CHANGE_FILINGS = { + EventFilings.FILE_NOALB: ['ULC', 'BC'], + EventFilings.FILE_NOALU: ['BC', 'ULC'], + EventFilings.FILE_NOALC: ['BC', 'CC'], +} diff --git a/data-tool/flows/tombstone/tombstone_queries.py b/data-tool/flows/tombstone/tombstone_queries.py index 0103bd8812..420097abc4 100644 --- a/data-tool/flows/tombstone/tombstone_queries.py +++ b/data-tool/flows/tombstone/tombstone_queries.py @@ -1,5 +1,93 @@ + +def get_unprocessed_corps_subquery(flow_name, environment): + subqueries = [ + { + 'name': 'default(all corps)', + 'cte': '', + 'where': '' + }, + { + 'name':'TING', + 'cte': """ + with ting_corps as ( + select distinct ting_corp_num + from corp_involved_amalgamating + ), + ted_corps as ( + select distinct ted_corp_num + from corp_involved_amalgamating + ) + """, + 'where': """ + and exists ( + select 1 from ting_corps t where t.ting_corp_num = c.corp_num + ) + and not exists ( + select 1 from ted_corps t where t.ted_corp_num = c.corp_num + ) + """ + }, + { + 'name':'TED that all its TINGs(XP excluded) have been migrated', + 'cte': f""" + with t2 as ( + select distinct cia1.ted_corp_num + from corp_involved_amalgamating cia1 + where not exists ( + select 1 + from corp_involved_amalgamating cia2 + left join corp_processing cp + on cia2.ting_corp_num = cp.corp_num + and cp.flow_name = '{flow_name}' + and cp.environment = '{environment}' + and cp.processed_status in ('COMPLETED', 'PARTIAL') + where cia2.ted_corp_num = cia1.ted_corp_num + and (cia2.ting_corp_num like 'BC%' or cia2.ting_corp_num like 'Q%' or cia2.ting_corp_num like 'C%') + and cp.corp_num is null + ) + ) + """, + 'where': """ + and exists ( + select 1 from t2 where c.corp_num = t2.ted_corp_num + ) + """ + }, + { + 'name':'Other corps, non-TING and non-TED', + 'cte': """ + with t3 as ( + select ting_corp_num as corp_num + from corp_involved_amalgamating + union + 
select ted_corp_num as corp_num + from corp_involved_amalgamating + ) + """, + 'where': """ + and not exists ( + select 1 + from t3 + where t3.corp_num = c.corp_num + ) + """ + } + ] + # Note: change index to select subset of corps + # [0] all, [1] TING, [2] TED that linked TINGs are migrated, [3] exclude TING & TED + # Acceptable order when it comes to the actual migration: + # [1]->[2]->[3] + # [2]->[1]->[3] (may fetch fewer eligible corps in [2] at the beginning, if so, go to [1] and then go back to [2], repeatedly) + # Other usage: + # [0] is used for other purposes, e.g. tweak query to select specific corps + subquery = subqueries[3] + return subquery['cte'], subquery['where'] + def get_unprocessed_corps_query(flow_name, environment, batch_size): + cte_clause, where_clause = get_unprocessed_corps_subquery(flow_name, environment) + query = f""" + {cte_clause} select c.corp_num, c.corp_type_cd, cs.state_type_cd, cp.flow_name, cp.processed_status, cp.last_processed_event_id, cp.failed_event_id, cp.failed_event_file_type from corporation c left outer join corp_state cs @@ -9,6 +97,7 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): and cp.flow_name = '{flow_name}' and cp.environment = '{environment}' where 1 = 1 + {where_clause} -- and c.corp_type_cd like 'BC%' -- some are 'Q%' -- and c.corp_num = 'BC0000621' -- state changes a lot -- and c.corp_num = 'BC0883637' -- one pary with multiple roles, but werid address_ids, same filing submitter but diff email @@ -18,9 +107,9 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- and c.corp_num = 'BC0326163' -- double quotes in corp name, no share structure, city in street additional of party's address -- and c.corp_num = 'BC0395512' -- long RG, RC addresses -- and c.corp_num = 'BC0043406' -- lots of directors --- and c.corp_num in ('BC0326163', 'BC0395512', 'BC0883637') -- TODO: re-migrate issue (can be solved by adding tracking) +-- and c.corp_num in ('BC0326163', 'BC0395512', 
'BC0883637') -- and c.corp_num = 'BC0870626' -- lots of filings - IA, CoDs, ARs --- and c.corp_num = 'BC0004969' -- lots of filings - IA, ARs, transition, alteration, COD, COA +-- and c.corp_num = 'BC0004969' -- lots of filings - IA, ARs, transition, alteration, COD, COA -- and c.corp_num = 'BC0002567' -- lots of filings - IA, ARs, transition, COD -- and c.corp_num in ('BC0068889', 'BC0441359') -- test users mapping -- and c.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') @@ -29,12 +118,21 @@ def get_unprocessed_corps_query(flow_name, environment, batch_size): -- 'BC0472301', 'BC0649417', 'BC0808085', 'BC0803411', 'BC0511226', 'BC0833000', 'BC0343855', 'BC0149266', -- dissolution -- 'BC0548839', 'BC0541207', 'BC0462424', 'BC0021973', -- restoration -- 'BC0034290', -- legacy other +-- 'C0870179', 'C0870343', 'C0883424', -- continuation in (C, CCC, CUL) +-- 'BC0019921', 'BC0010385', -- conversion ledger -- 'BC0207097', 'BC0693625', 'BC0754041', 'BC0072008', 'BC0355241', 'BC0642237', 'BC0555891', 'BC0308683', -- correction -- 'BC0688906', 'BC0870100', 'BC0267106', 'BC0873461', -- alteration -- 'BC0536998', 'BC0574096', 'BC0663523' -- new mappings of CoA, CoD + -- TED +-- 'BC0812196', -- amalg - r (with xpro) +-- 'BC0870100', -- amalg - v +-- 'BC0747392' -- amalg - h + -- TING +-- 'BC0593394', -- amalg - r (with xpro) +-- 'BC0805986', 'BC0561086', -- amalg - v +-- 'BC0543231', 'BC0358476' -- amalg - h -- ) - - and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') -- TODO: update transfer script + and c.corp_type_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and cs.end_event_id is null -- and ((cp.processed_status is null or cp.processed_status != 'COMPLETED')) and cp.processed_status is null @@ -58,7 +156,7 @@ def get_total_unprocessed_count_query(flow_name, environment): and cp.environment = '{environment}' where 1 = 1 and cs.end_event_id is null - and 
((cp.processed_status is null or cp.processed_status != 'COMPLETED')) + and ((cp.processed_status is null or cp.processed_status not in ('COMPLETED', 'PARTIAL'))) """ return query @@ -68,40 +166,53 @@ def get_corp_users_query(corp_nums: list): query = f""" select u_user_id, - u_full_name, string_agg(event_type_cd || '_' || coalesce(filing_type_cd, 'NULL'), ',') as event_file_types, u_first_name, u_middle_name, u_last_name, to_char( - min(event_timerstamp::timestamp at time zone 'UTC'), + min(u_timestamp::timestamptz at time zone 'UTC'), 'YYYY-MM-DD HH24:MI:SSTZH:TZM' ) as earliest_event_dt_str, min(u_email_addr) as u_email_addr, - u_role_typ_cd + u_role_typ_cd, + p_cc_holder_name from ( - select - upper(u.user_id) as u_user_id, - u.last_name as u_last_name, - u.first_name as u_first_name, - u.middle_name as u_middle_name, - e.event_type_cd, - f.filing_type_cd, - e.event_timerstamp, - case - when u.first_name is null and u.middle_name is null and u.last_name is null then null - else upper(concat_ws('_', nullif(trim(u.first_name),''), nullif(trim(u.middle_name),''), nullif(trim(u.last_name),''))) - end as u_full_name, - u.email_addr as u_email_addr, - u.role_typ_cd as u_role_typ_cd - from event e - left outer join filing f on e.event_id = f.event_id - left outer join filing_user u on u.event_id = e.event_id - where 1 = 1 --- and e.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') - and e.corp_num in ({corp_nums_str}) + select + upper(u.user_id) as u_user_id, + trim(u.last_name) as u_last_name, + trim(u.first_name) as u_first_name, + trim(u.middle_name) as u_middle_name, + e.event_type_cd, + f.filing_type_cd, + e.event_timerstamp as u_timestamp, + u.email_addr as u_email_addr, + u.role_typ_cd as u_role_typ_cd, + p.cc_holder_nme as p_cc_holder_name + from event e + left outer join filing f on e.event_id = f.event_id + left outer join filing_user u on e.event_id = u.event_id + left outer join payment p on e.event_id = p.event_id 
+ where 1 = 1 + -- and e.corp_num in ('BC0326163', 'BC0046540', 'BC0883637', 'BC0043406', 'BC0068889', 'BC0441359') + and e.corp_num in ({corp_nums_str}) + union + -- staff comment at business level + select + upper(cc.user_id) as u_user_id, + trim(cc.last_nme) as u_last_name, + trim(cc.first_nme) as u_first_name, + trim(cc.middle_nme) as u_middle_name, + 'STAFF' as event_type_cd, -- placeholder + 'COMMENT' as filing_type_cd, -- placeholder + comment_dts as u_timestamp, + null as u_email_addr, + null as u_role_typ_cd, + null as p_cc_holder_name + from corp_comments cc + where cc.corp_num in ({corp_nums_str}) ) sub - group by sub.u_user_id, sub.u_full_name, sub.u_first_name, sub.u_middle_name, sub.u_last_name, sub.u_role_typ_cd + group by sub.u_user_id, sub.u_first_name, sub.u_middle_name, sub.u_last_name, sub.u_role_typ_cd, sub.p_cc_holder_name order by sub.u_user_id; """ return query @@ -134,7 +245,7 @@ def get_business_query(corp_num, suffix): (case when (c.recognition_dts is null and e.event_timerstamp is not null) then e.event_timerstamp else c.recognition_dts - end)::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as founding_date, + end)::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as founding_date, -- state ( select op_state_type_cd @@ -158,15 +269,17 @@ def get_business_query(corp_num, suffix): -- TODO: submitter_userid -- c.send_ar_ind, - to_char(c.last_ar_filed_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as last_ar_date, + c.last_ar_reminder_year, + to_char(c.last_ar_filed_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as last_ar_date, -- admin_freeze case when c.corp_frozen_type_cd = 'C' then true else false - end admin_freeze + end admin_freeze, + c.admin_email from corporation c - left outer join event e on e.corp_num = c.corp_num and e.event_type_cd = 'CONVICORP' -- need to add other event like CONVAMAL, CONVCIN... 
+ left outer join event e on e.corp_num = c.corp_num and e.event_type_cd IN ('CONVICORP', 'CONVAMAL') -- need to add other event like CONVCIN... where 1 = 1 --and c.corp_num = 'BC0684912' -- state - ACT --and c.corp_num = 'BC0000621' -- state - HLD @@ -267,8 +380,8 @@ def get_parties_and_addresses_query(corp_num): when cp.appointment_dt is null and f.effective_dt is not null then date_trunc('day', f.effective_dt) when cp.appointment_dt is null and f.effective_dt is null then date_trunc('day', e.event_timerstamp) else null - end)::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cp_appointment_dt_str, - to_char(cp.cessation_dt::timestamp at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cp_cessation_dt_str, + end), 'YYYY-MM-DD') as cp_appointment_dt_str, + to_char(cp.cessation_dt, 'YYYY-MM-DD') as cp_cessation_dt_str, cp.last_name as cp_last_name, cp.middle_name as cp_middle_name, cp.first_name as cp_first_name, @@ -345,7 +458,7 @@ def get_parties_and_addresses_query(corp_num): -- and e.corp_num = 'BC0883637' -- INC, DIR and e.corp_num = '{corp_num}' and cp.end_event_id is null - and cp.party_typ_cd in ('INC', 'DIR') + and cp.party_typ_cd in ('INC', 'DIR', 'OFF') --order by e.event_id order by cp_full_name, e.event_id ; @@ -429,58 +542,261 @@ def get_resolutions_query(corp_num): return query +def get_jurisdictions_query(corp_num): + query = f""" + select + j.corp_num as j_corp_num, + j.start_event_id as j_start_event_id, + j.can_jur_typ_cd as j_can_jur_typ_cd, + j.xpro_typ_cd as j_xpro_typ_cd, + j.home_company_nme as j_home_company_nme, + j.home_juris_num as j_home_juris_num, + to_char( + j.home_recogn_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM' + ) as j_home_recogn_dt, + j.othr_juris_desc as j_othr_juris_desc, + j.bc_xpro_num as j_bc_xpro_num + from jurisdiction j + where corp_num = '{corp_num}' + ; + """ + return query + + def get_filings_query(corp_num): query = f""" - select + select -- event e.event_id as e_event_id, 
def get_filings_query(corp_num):
    """Return the SQL that fetches every event/filing row for a corp.

    One row per event, left-joined to its filing, filing user, payment,
    conversion ledger/event, old/new corp name (for name changes) and
    continuation-out record.  All timestamps are converted to UTC strings.
    The scalar sub-select `cs_state_event_id` carries the event id of the
    corp's current (open) corp_state row so callers can identify the state
    filing.  Note: `event_timerstamp` is the column's actual spelling in the
    legacy schema, not a typo.
    NOTE(review): corp_num is interpolated via f-string — trusted input only.
    """
    query = f"""
    select
        -- event
        e.event_id as e_event_id,
        e.corp_num as e_corp_num,
        e.event_type_cd as e_event_type_cd,
        to_char(e.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str,
        to_char(e.trigger_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_trigger_dt_str,
        e.event_type_cd || '_' || COALESCE(f.filing_type_cd, 'NULL') as event_file_type,
        -- filing
        f.event_id as f_event_id,
        f.filing_type_cd as f_filing_type_cd,
        to_char(f.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str,
        f.withdrawn_event_id as f_withdrawn_event_id,
        case
            when f.withdrawn_event_id is null then null
            else (
                select
                    to_char(we.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM')
                from event we
                where we.event_id = f.withdrawn_event_id
            )
        end as f_withdrawn_event_ts_str,
        -- paper only now -> f_ods_type
        f.nr_num as f_nr_num,
        to_char(f.period_end_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_period_end_dt_str,
        to_char(f.change_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_change_at_str,
        -- state filing info
        (
            select start_event_id
            from corp_state
            where 1 = 1
            and corp_num = '{corp_num}'
            and end_event_id is null
        ) as cs_state_event_id,
        --- filing user
        upper(u.user_id) as u_user_id,
        trim(u.last_name) as u_last_name,
        trim(u.first_name) as u_first_name,
        trim(u.middle_name) as u_middle_name,
        u.email_addr as u_email_addr,
        u.role_typ_cd as u_role_typ_cd,
        p.cc_holder_nme as p_cc_holder_name,
        --- conversion ledger
        cl.ledger_title_txt as cl_ledger_title_txt,
        -- conv event
        to_char(ce.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as ce_effective_dt_str,
        -- corp name change
        cn_old.corp_name as old_corp_name,
        cn_new.corp_name as new_corp_name,

        -- continuation out
        co.can_jur_typ_cd as out_can_jur_typ_cd,
        to_char(co.cont_out_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as cont_out_dt,
        co.othr_juri_desc as out_othr_juri_desc,
        co.home_company_nme as out_home_company_nme
    from event e
        left outer join filing f on e.event_id = f.event_id
        left outer join filing_user u on u.event_id = e.event_id
        left outer join payment p on p.event_id = e.event_id
        left outer join conv_ledger cl on cl.event_id = e.event_id
        left outer join conv_event ce on e.event_id = ce.event_id
        left outer join corp_name cn_old on e.event_id = cn_old.end_event_id and cn_old.corp_name_typ_cd in ('CO', 'NB')
        left outer join corp_name cn_new on e.event_id = cn_new.start_event_id and cn_new.corp_name_typ_cd in ('CO', 'NB')
        left outer join cont_out co on co.start_event_id = e.event_id
    where 1 = 1
    and e.corp_num = '{corp_num}'
--    and e.corp_num = 'BC0068889'
--    and e.corp_num = 'BC0449924' -- AR, ADCORP
--    and e.trigger_dts is not null
    order by e.event_timerstamp, e.event_id
    ;
    """
    return query
def get_amalgamation_query(corp_num):
    """Return the SQL for the amalgamating (TING) corps of an amalgamated (TED) corp.

    One row per amalgamating corp, joined to the amalgamation event/filing
    and to the TING's open corp_state row (end_event_id is null).
    `event_timerstamp` is the legacy column's actual spelling.
    NOTE(review): corp_num is interpolated via f-string — trusted input only.
    """
    query = f"""
    select
        e.event_id as e_event_id,
        ted_corp_num,
        ting_corp_num,
        cs.state_type_cd as ting_state_type_cd,
        cs.end_event_id as ting_state_end_event_id,
        corp_involve_id,
        can_jur_typ_cd,
        adopted_corp_ind,
        home_juri_num,
        othr_juri_desc,
        foreign_nme,
        -- event
        e.event_type_cd as e_event_type_cd,
        to_char(e.event_timerstamp::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as e_event_dt_str,
        -- filing
        f.filing_type_cd as f_filing_type_cd,
        to_char(f.effective_dt::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM') as f_effective_dt_str,
        f.court_appr_ind as f_court_approval,
        -- event_file
        e.event_type_cd || '_' || COALESCE(f.filing_type_cd, 'NULL') as event_file_type
    from corp_involved_amalgamating cig
        left outer join event e on e.event_id = cig.event_id
        left outer join filing f on e.event_id = f.event_id
        left outer join corp_state cs on cig.ting_corp_num = cs.corp_num and cs.start_event_id = e.event_id
    where 1 = 1
    and cs.end_event_id is null
    and cig.ted_corp_num = '{corp_num}'
    order by cig.corp_involve_id;
    """
    return query
def get_in_dissolution_query(corp_num):
    """Return the SQL for a corp's current in-dissolution state, if any.

    Selects only the open corp_state row (end_event_id is null) whose state
    code is one of the involuntary-dissolution stages; downstream formatting
    treats D1F/D2F as overdue-AR stages 1/2 and D1T/D2T as
    overdue-transition stages 1/2 (see format_in_dissolution_data).
    NOTE(review): corp_num is interpolated via f-string — trusted input only.
    """
    query = f"""
    select
        cs.corp_num as cs_corp_num,
        cs.state_type_cd as cs_state_type_cd,
        e.event_id as e_event_id,
        e.event_type_cd as e_event_type_cd,
        to_char(
            e.trigger_dts::timestamptz at time zone 'UTC', 'YYYY-MM-DD HH24:MI:SSTZH:TZM'
        ) as e_trigger_dts_str
    from corp_state cs
    join event e on e.event_id = cs.start_event_id
    where 1 = 1
    and cs.corp_num = '{corp_num}'
    and cs.end_event_id is null
    and cs.state_type_cd in ('D1F', 'D2F', 'D1T', 'D2T')
    """
    return query
def get_corp_snapshot_filings_queries(config, corp_num):
    """Return the named set of extraction queries for one corp.

    Keys match the formatter names used downstream (see the data formatters
    module); each value is a ready-to-execute SQL string for ``corp_num``.
    ``config`` supplies CORP_NAME_SUFFIX for the business query.
    """
    queries = {
        'businesses': get_business_query(corp_num, config.CORP_NAME_SUFFIX),
        'offices': get_offices_and_addresses_query(corp_num),
        'parties': get_parties_and_addresses_query(corp_num),
        'offices_held': get_offices_held_query(corp_num),
        'share_classes': get_share_classes_share_series_query(corp_num),
        'aliases': get_aliases_query(corp_num),
        'resolutions': get_resolutions_query(corp_num),
        'jurisdictions': get_jurisdictions_query(corp_num),
        'filings': get_filings_query(corp_num),
        'amalgamations': get_amalgamation_query(corp_num),
        'business_comments': get_business_comments_query(corp_num),
        'filing_comments': get_filing_comments_query(corp_num),
        'in_dissolution': get_in_dissolution_query(corp_num),
        'out_data': get_out_data_query(corp_num),  # continuation/amalgamation out
    }

    return queries
LEGAL_TYPE_CHANGE_FILINGS, + NO_FILING_EVENT_FILE_TYPES, + SKIPPED_EVENT_FILE_TYPES, + EventFilings) +all_unsupported_types = set() +date_format_with_tz: Final = '%Y-%m-%d %H:%M:%S%z' def format_business_data(data: dict) -> dict: business_data = data['businesses'][0] @@ -21,18 +31,26 @@ def format_business_data(data: dict) -> dict: state = business_data['state'] business_data['state'] = 'ACTIVE' if state == 'ACT' else 'HISTORICAL' - if not (last_ar_date := business_data['last_ar_date']): - last_ar_date = business_data['founding_date'] - - last_ar_year = int(last_ar_date.split('-')[0]) + if last_ar_date := business_data['last_ar_date']: + last_ar_year = int(last_ar_date.split('-')[0]) + else: + last_ar_date = None + last_ar_year = None + + last_ar_reminder_year = business_data['last_ar_reminder_year'] + + # last_ar_reminder_year can be None if send_ar_ind is false or the business is in the 1st financial year + if business_data['send_ar_ind'] and last_ar_reminder_year is None: + last_ar_reminder_year = last_ar_year formatted_business = { **business_data, 'last_ar_date': last_ar_date, 'last_ar_year': last_ar_year, + 'last_ar_reminder_year': last_ar_reminder_year, 'fiscal_year_end_date': business_data['founding_date'], 'last_ledger_timestamp': business_data['founding_date'], - 'last_modified': datetime.utcnow().replace(tzinfo=timezone.utc).isoformat() + 'last_modified': datetime.now(tz=timezone.utc).isoformat() } return formatted_business @@ -41,7 +59,7 @@ def format_business_data(data: dict) -> dict: def format_address_data(address_data: dict, prefix: str) -> dict: # Note: all corps have a format type of null or FOR address_type = 'mailing' if prefix == 'ma_' else 'delivery' - + street = address_data[f'{prefix}addr_line_1'] street_additional_elements = [] if (line_2 := address_data[f'{prefix}addr_line_2']) and (line_2 := line_2.strip()): @@ -51,7 +69,7 @@ def format_address_data(address_data: dict, prefix: str) -> dict: street_additional = ' 
'.join(street_additional_elements) if not (delivery_instructions := address_data[f'{prefix}delivery_instructions']) \ - or not (delivery_instructions := delivery_instructions.strip()): + or not (delivery_instructions := delivery_instructions.strip()): delivery_instructions = '' formatted_address = { @@ -84,7 +102,7 @@ def format_offices_data(data: dict) -> list[dict]: office['addresses'].append(delivery_address) formatted_offices.append(office) - + return formatted_offices @@ -96,11 +114,20 @@ def format_parties_data(data: dict) -> list[dict]: formatted_parties = [] + # Map role codes to role names + role_mapping = { + 'INC': 'incorporator', + 'DIR': 'director', + 'OFF': 'officer' + # Additional roles can be added here in the future + } + df = pd.DataFrame(parties_data) grouped_parties = df.groupby('cp_full_name') for _, group in grouped_parties: party = copy.deepcopy(PARTY) party_info = group.iloc[0].to_dict() + party['parties']['cp_full_name'] = party_info['cp_full_name'] party['parties']['first_name'] = party_info['cp_first_name'] party['parties']['middle_initial'] = party_info['cp_middle_name'] party['parties']['last_name'] = party_info['cp_last_name'] @@ -111,7 +138,7 @@ def format_parties_data(data: dict) -> list[dict]: mailing_addr_data = group.loc[ma_index].to_dict() else: mailing_addr_data = None - + if (da_index := group['cp_delivery_addr_id'].first_valid_index()) is not None: delivery_addr_data = group.loc[da_index].to_dict() else: @@ -126,19 +153,49 @@ def format_parties_data(data: dict) -> list[dict]: formatted_party_roles = party['party_roles'] for _, r in group.iterrows(): - if (role_code := r['cp_party_typ_cd']) not in ['INC', 'DIR']: + if (role_code := r['cp_party_typ_cd']) not in ['INC', 'DIR', 'OFF']: continue - role = 'incorporator' if role_code == 'INC' else 'director' + + role = role_mapping[role_code] # Will raise KeyError if role_code not in mapping + party_role = copy.deepcopy(PARTY_ROLE) party_role['role'] = role 
def format_offices_held_data(data: dict) -> list[dict]:
    """Shape raw COLIN offices-held rows into OFFICES_HELD records.

    The COLIN officer type code is translated to the LEAR enum value; an
    unknown code raises KeyError on purpose so bad data surfaces early.
    """
    rows = data['offices_held']
    if not rows:
        return []

    # COLIN officer type code -> LEAR OfficesHeld enum value
    code_to_title = {
        'ASC': 'ASSISTANT_SECRETARY',
        'CEO': 'CEO',
        'CFO': 'CFO',
        'CHR': 'CHAIR',
        'OTH': 'OTHER_OFFICES',
        'PRE': 'PRESIDENT',
        'SEC': 'SECRETARY',
        'TRE': 'TREASURER',
        'VIP': 'VICE_PRESIDENT'
    }

    formatted = []
    for row in rows:
        record = copy.deepcopy(OFFICES_HELD)
        record['cp_full_name'] = row['cp_full_name']
        record['title'] = code_to_title[row['oh_officer_typ_cd']]
        formatted.append(record)

    return formatted
def format_jurisdictions_data(data: dict, event_id: Decimal) -> dict:
    """Build the home-jurisdiction record tied to a continuation-in event.

    Returns None when no jurisdiction row matches ``event_id``.
    Country/region resolution:
      * Canadian code (anything but 'OT'): country CA, region = code
        ('FD' maps to FEDERAL)
      * 'OT' with a 2-char description: description is the country code
      * 'OT' with a 6-char description (e.g. "US, SS"): chars 0-1 are the
        country, chars 4-5 the region
      * anything else: country gets the 'UNKNOWN' placeholder
    """
    match = next(
        (row for row in data['jurisdictions'] if row.get('j_start_event_id') == event_id),
        None
    )
    if match is None:
        return None

    formatted = copy.deepcopy(JURISDICTION)
    formatted['legal_name'] = match['j_home_company_nme']
    formatted['identifier'] = match['j_home_juris_num']
    formatted['incorporation_date'] = match['j_home_recogn_dt']
    formatted['expro_identifier'] = match['j_bc_xpro_num']
    formatted['country'] = None
    formatted['region'] = None

    can_code = match['j_can_jur_typ_cd'] or ''
    other_desc = match['j_othr_juris_desc'] or ''

    if can_code != 'OT':
        # Canadian jurisdiction — othr_juris_desc is ignored
        formatted['country'] = 'CA'
        formatted['region'] = 'FEDERAL' if can_code == 'FD' else can_code
    elif len(other_desc) == 2:
        # other jurisdiction, 2-char desc is a bare country code
        formatted['country'] = other_desc
    elif len(other_desc) == 6:
        # other jurisdiction, "CC, RR" style desc carries country + region
        formatted['country'] = other_desc[:2]
        formatted['region'] = other_desc[4:]
    else:
        # placeholder for unavailable information
        formatted['country'] = 'UNKNOWN'

    return formatted
current_unsupported_types.add(event_file_type) + all_unsupported_types.add(event_file_type) continue - effective_date = x['f_effective_dt_str'] - if not effective_date: - effective_date = x['e_event_dt_str'] + # get converted filing_type and filing_subtype + if raw_filing_type == 'conversion': + if isinstance(raw_filing_subtype, tuple): + filing_type, filing_subtype = raw_filing_subtype + else: + filing_type = raw_filing_subtype + filing_subtype = None + raw_filing_subtype = None + else: + filing_type = raw_filing_type + filing_subtype = raw_filing_subtype + + effective_date = x['ce_effective_dt_str'] or x['f_effective_dt_str'] or x['e_event_dt_str'] + if filing_type == 'annualReport': + effective_date = x['f_period_end_dt_str'] + + filing_date = x['ce_effective_dt_str'] or x['e_event_dt_str'] trigger_date = x['e_trigger_dt_str'] - filing_json, meta_data = build_filing_json_meta_data(filing_type, filing_subtype, + filing_json, meta_data = build_filing_json_meta_data(raw_filing_type, filing_type, filing_subtype, effective_date, x) - filing = copy.deepcopy(FILING) + filing_body = copy.deepcopy(FILING['filings']) + jurisdiction = None + amalgamation = None + consent_continuation_out = None - # make it None if no valid value - if not (user_id := x['u_user_id']): - user_id = x['u_full_name'] if x['u_full_name'] else None + user_id = get_username(x) - filing = { - **filing, - 'filing_date': effective_date, - 'filing_type': filing_type, - 'filing_sub_type': filing_subtype, - 'completion_date': effective_date, + if ( + raw_filing_type == 'conversion' + or raw_filing_subtype == 'involuntary' + or event_file_type in ['SYSDL_NULL', 'ADCORP_NULL', 'ADFIRM_NULL', 'ADMIN_NULL'] + ): + hide_in_ledger = True + else: + hide_in_ledger = False + + if x['f_withdrawn_event_id']: + if filing_type in [ + 'amalgamationApplication', + 'incorporationApplication', + 'continuationIn' + ]: + raise Exception('Stop migrating withdrawn corp') + status = 'WITHDRAWN' + completion_date = None + 
withdrawn_filing_idx = idx + else: + status = 'COMPLETED' + completion_date = effective_date + + filing_body = { + **filing_body, + 'filing_date': filing_date, + 'filing_type': raw_filing_type, + 'filing_sub_type': raw_filing_subtype, + 'completion_date': completion_date, 'effective_date': effective_date, 'filing_json': filing_json, 'meta_data': meta_data, - 'submitter_id': user_id # will be updated to real user_id when loading data into db + 'hide_in_ledger': hide_in_ledger, + 'status': status, + 'submitter_id': user_id, # will be updated to real user_id when loading data into db + } + + # conversion still need to populate create-new-business info + # based on converted filing type + if filing_type == 'continuationIn': + jurisdiction = format_jurisdictions_data(data, x['e_event_id']) + elif filing_type == 'amalgamationApplication': + amalgamation = format_amalgamations_data(data, x['e_event_id'], effective_date, filing_subtype) + elif filing_type == 'noticeOfWithdrawal': + filing_body['withdrawn_filing_id'] = withdrawn_filing_idx # will be updated to real filing_id when loading data + withdrawn_filing_idx = -1 + elif filing_type in ('consentContinuationOut', 'consentAmalgamationOut'): + consent_continuation_out = format_consent_continuation_out(filing_type, effective_date) + + comments = format_filing_comments_data(data, x['e_event_id']) + + colin_event_ids = {'colin_event_id': x['e_event_id']} + filing = { + 'filings': filing_body, + 'jurisdiction': jurisdiction, + 'amalgamations': amalgamation, + 'comments': comments, + 'colin_event_ids': colin_event_ids, + 'consent_continuation_out': consent_continuation_out } formatted_filings.append(filing) @@ -275,17 +450,215 @@ def format_filings_data(data: dict) -> list[dict]: business_update_dict[k] = get_business_update_value(k, effective_date, trigger_date, filing_type, filing_subtype) # save state filing index - if filing_type in LEAR_STATE_FILINGS: - last_state_filing_idx = idx - + if filing_type in LEAR_STATE_FILINGS 
def format_consent_continuation_out(filing_type: str, effective_date_str: str):
    """Build a consent-out record; expiry is six months past the effective date."""
    consent_type = (
        'continuation_out' if filing_type == 'consentContinuationOut'
        else 'amalgamation_out'
    )
    return {
        'consent_type': consent_type,
        'expiry_date': get_expiry_date(effective_date_str).isoformat(),
        'foreign_jurisdiction': '',
        'foreign_jurisdiction_region': '',
    }
def format_amalgamations_data(data: dict, event_id: Decimal, amalgamation_date: str, amalgamation_type: str) -> dict:
    """Build the amalgamation record (with its TING businesses) for one event.

    Returns None when no amalgamating-corp rows match ``event_id``.
    Court approval and the TING list come from the matched rows; the date
    and type are supplied by the caller.
    """
    matches = [row for row in data['amalgamations'] if row.get('e_event_id') == event_id]
    if not matches:
        return None

    formatted = copy.deepcopy(AMALGAMATION)
    head = matches[0]

    body = formatted['amalgamations']
    body['amalgamation_date'] = amalgamation_date
    body['court_approval'] = bool(head['f_court_approval'])
    body['amalgamation_type'] = amalgamation_type

    tings = formatted['amalgamating_businesses']
    for row in matches:
        tings.append(format_amalgamating_businesses(row))

    return formatted
def format_amalgamating_businesses(ting_data: dict) -> dict:
    """Shape one amalgamating-business (TING) row.

    A corp number without a BC/Q/C prefix is treated as foreign. Foreign
    corps default to jurisdiction CA with the Canadian region code; an 'OT'
    region code means a US jurisdiction described by othr_juri_desc.
    """
    role = 'holding' if ting_data['adopted_corp_ind'] else 'amalgamating'
    corp_num = ting_data['ting_corp_num']

    # Domestic prefixes keep only their identifier.
    foreign_identifier = None if corp_num.startswith(('BC', 'Q', 'C')) else corp_num

    if not foreign_identifier:
        return {
            'ting_identifier': corp_num,
            'role': role,
        }

    jurisdiction, region = 'CA', ting_data['can_jur_typ_cd']
    if region == 'OT':
        jurisdiction, region = 'US', ting_data['othr_juri_desc']

    return {
        'foreign_jurisdiction': jurisdiction,
        'foreign_name': ting_data['foreign_nme'],
        'foreign_identifier': foreign_identifier,
        'role': role,
        'foreign_jurisdiction_region': region
    }
def format_in_dissolution_data(data: dict) -> dict:
    """Build batch / batch_processing / furnishing records for a corp that is
    in an involuntary-dissolution pipeline, or return None when it is not.

    Stage mapping from the COLIN corp_state code:
      D1F/D1T -> WARNING_LEVEL_1 (stage 1, 'MAIL' furnishing placeholder)
      D2F/D2T -> WARNING_LEVEL_2 (stage 2, 'GAZETTE' INTENT_TO_DISSOLVE)
    The *F codes flag overdue annual reports, the *T codes an overdue
    transition filing.
    """
    if not (in_dissolution_data := data['in_dissolution']):
        return None

    in_dissolution_data = in_dissolution_data[0]

    formatted_in_dissolution = copy.deepcopy(IN_DISSOLUTION)
    batch = formatted_in_dissolution['batches']
    batch_processing = formatted_in_dissolution['batch_processing']
    furnishing = formatted_in_dissolution['furnishings']

    # timezone-aware now; datetime.utcnow() is deprecated and this keeps the
    # module consistent with format_business_data
    utc_now_str = datetime.now(tz=timezone.utc).isoformat()
    batch['start_date'] = utc_now_str

    corp_state = in_dissolution_data['cs_state_type_cd']

    batch_processing['business_identifier'] = in_dissolution_data['cs_corp_num']
    batch_processing['created_date'] = batch_processing['last_modified'] = utc_now_str
    batch_processing['trigger_date'] = in_dissolution_data['e_trigger_dts_str']
    batch_processing['meta_data'] = {
        'importFromColin': True,
        'colinDissolutionState': corp_state,
    }

    furnishing['business_identifier'] = in_dissolution_data['cs_corp_num']
    furnishing['created_date'] = furnishing['last_modified'] = furnishing['processed_date'] = utc_now_str
    furnishing['meta_data'] = {
        'importFromColin': True,
        'colinDissolutionState': corp_state,
    }

    # *F codes = overdue ARs, *T codes = overdue transition filing
    overdue_ar = corp_state in ('D1F', 'D2F')
    if corp_state in ('D1F', 'D1T'):
        # stage 1
        batch_processing['step'] = 'WARNING_LEVEL_1'
        batch_processing['meta_data'] = {
            **batch_processing['meta_data'],
            'overdueARs': overdue_ar,
            'overdueTransition': not overdue_ar,
            'stage_1_date': utc_now_str,
        }

        furnishing['furnishing_type'] = 'MAIL'  # as placeholder
        furnishing['furnishing_name'] = 'DISSOLUTION_COMMENCEMENT_NO_AR' if overdue_ar \
            else 'DISSOLUTION_COMMENCEMENT_NO_TR'
    else:
        # stage 2
        batch_processing['step'] = 'WARNING_LEVEL_2'
        batch_processing['meta_data'] = {
            **batch_processing['meta_data'],
            'overdueARs': overdue_ar,
            'overdueTransition': not overdue_ar,
            'stage_2_date': utc_now_str,
        }

        furnishing['furnishing_type'] = 'GAZETTE'
        furnishing['furnishing_name'] = 'INTENT_TO_DISSOLVE'

    return formatted_in_dissolution
def map_country_region(can_jur_typ_cd):
    """Translate a COLIN Canadian-jurisdiction code to a (country, region) pair.

    'FD' -> ('CA', 'FEDERAL'); any other non-'OT' code is returned as a CA
    region; 'OT' carries no detail here, so both fields get the 'UNKNOWN'
    placeholder.
    """
    if can_jur_typ_cd == 'OT':
        # placeholder for "other" jurisdictions
        return 'UNKNOWN', 'UNKNOWN'

    region = 'FEDERAL' if can_jur_typ_cd == 'FD' else can_jur_typ_cd
    return 'CA', region
data['businesses']['admin_email'] + data['businesses'].update(data['out_data']) + return data def get_data_formatters() -> dict: @@ -336,10 +744,14 @@ def get_data_formatters() -> dict: 'businesses': format_business_data, 'offices': format_offices_data, 'parties': format_parties_data, + 'offices_held': format_offices_held_data, 'share_classes': format_share_classes_data, 'aliases': format_aliases_data, 'resolutions': format_resolutions_data, - 'filings': format_filings_data + 'filings': format_filings_data, + 'comments': format_business_comments_data, # only for business level, filing level will be formatted ith filings + 'in_dissolution': format_in_dissolution_data, + 'out_data': format_out_data_data, # continuation/amalgamation out } return ret @@ -358,9 +770,14 @@ def get_target_filing_type(event_file_type: str) -> tuple[str, str]: def get_business_update_value(key: str, effective_date: str, trigger_date: str, filing_type: str, filing_subtype: str) -> str: if filing_type == 'putBackOn': value = None + elif filing_type == 'putBackOff': + if key == 'restoration_expiry_date': + value = None + else: + value = effective_date elif filing_type == 'restoration': - if key == 'restoration_expiry_date' and\ - filing_subtype in ['limitedRestoration', 'limitedRestorationExtension']: + if key == 'restoration_expiry_date' and \ + filing_subtype in ['limitedRestoration', 'limitedRestorationExtension']: value = trigger_date else: value = None @@ -370,18 +787,55 @@ def get_business_update_value(key: str, effective_date: str, trigger_date: str, return value -def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective_date: str, data: dict) -> tuple[dict, dict]: +def build_filing_json_meta_data(raw_filing_type: str, filing_type: str, filing_subtype: str, effective_date: str, data: dict) -> tuple[dict, dict]: filing_json = copy.deepcopy(FILING_JSON) - filing_json['filing'][filing_type] = {} + filing_json['filing'][raw_filing_type] = {} + # if conversion has conv 
filing type, set filing_json + if raw_filing_type != filing_type and filing_type: + filing_json['filing'][filing_type] = {} meta_data = { 'colinFilingInfo': { 'eventType': data['e_event_type_cd'], - 'filingType': data['f_filing_type_cd'] + 'filingType': data['f_filing_type_cd'], + 'eventId': int(data['e_event_id']) }, 'isLedgerPlaceholder': True, + 'colinDisplayName': get_colin_display_name(data) } + if raw_filing_type == 'conversion': + # will populate state filing info for conversion in the following steps + # based on converted filing type and converted filing subtype + if filing_type in LEAR_STATE_FILINGS: + state_change = True + else: + state_change = False + if filing_type == 'changeOfName': + name_change = True + filing_json['filing']['changeOfName'] = { + 'fromLegalName': data['old_corp_name'], + 'toLegalName': data['new_corp_name'], + } + meta_data['changeOfName'] = { + 'fromLegalName': data['old_corp_name'], + 'toLegalName': data['new_corp_name'], + } + else: + name_change = False + filing_json['filing']['conversion'] = { + 'convFilingType': filing_type, + 'convFilingSubType': filing_subtype, + 'stateChange': state_change, + 'nameChange': name_change, + } + meta_data['conversion'] = { + 'convFilingType': filing_type, + 'convFilingSubType': filing_subtype, + 'stateChange': state_change, + 'nameChange': name_change, + } + if filing_type == 'annualReport': meta_data['annualReport'] = { 'annualReportFilingYear': int(effective_date[:4]), @@ -409,14 +863,78 @@ def build_filing_json_meta_data(filing_type: str, filing_subtype: str, effective **filing_json['filing']['restoration'], 'type': filing_subtype, } + elif filing_type == 'alteration': + meta_data['alteration'] = {} + if (event_file_type := data['event_file_type']) in LEGAL_TYPE_CHANGE_FILINGS.keys(): + meta_data['alteration'] = { + **meta_data['alteration'], + 'fromLegalType': LEGAL_TYPE_CHANGE_FILINGS[event_file_type][0], + 'toLegalType': LEGAL_TYPE_CHANGE_FILINGS[event_file_type][1], + } + if 
(old_corp_name := data['old_corp_name']) and (new_corp_name := data['new_corp_name']): + meta_data['alteration'] = { + **meta_data['alteration'], + 'fromLegalName': old_corp_name, + 'toLegalName': new_corp_name, + } + elif filing_type == 'putBackOff': + if (event_file_type := data['event_file_type']) == 'SYSDL_NULL': + filing_json['filing']['putBackOff'] = { + 'details': 'Put back off filing due to expired limited restoration.' + } + meta_data['putBackOff'] = { + 'reason': 'Limited Restoration Expired', + 'expiryDate': effective_date[:10] + } + elif filing_type in ('amalgamationOut', 'continuationOut'): + country, region = map_country_region(data['out_can_jur_typ_cd']) + meta_data[filing_type] = { + 'country': country, + 'region': region, + 'legalName': data['out_home_company_nme'], + f'{filing_type}Date': data['cont_out_dt'][:10] + } + if data['out_othr_juri_desc']: + meta_data[filing_type]['otherJurisdictionDesc'] = data['out_othr_juri_desc'] + + if withdrawn_ts_str := data['f_withdrawn_event_ts_str']: + withdrawn_ts = datetime.strptime(withdrawn_ts_str, date_format_with_tz) + meta_data = { + **meta_data, + 'withdrawnDate': withdrawn_ts.isoformat() + } + # TODO: populate meta_data for correction to display correct filing name return filing_json, meta_data +def get_colin_display_name(data: dict) -> str: + event_file_type = data['event_file_type'] + name = EVENT_FILING_DISPLAY_NAME_MAPPING.get(event_file_type) + + # Annual Report + if event_file_type == EventFilings.FILE_ANNBC.value: + ar_dt_str = data['f_period_end_dt_str'] + ar_dt = datetime.strptime(ar_dt_str, date_format_with_tz) + suffix = ar_dt.strftime('%b %d, %Y').upper() + name = f'{name} - {suffix}' + + # Change of Directors + elif event_file_type == EventFilings.FILE_NOCDR.value: + if not data['f_change_at_str']: + name = f'{name} - Address Change or Name Correction Only' + + # Conversion Ledger + elif event_file_type == EventFilings.FILE_CONVL.value: + name = data['cl_ledger_title_txt'] + + return name 
+ + def build_epoch_filing(business_id: int) -> dict: now = datetime.utcnow().replace(tzinfo=pytz.UTC) - filing = copy.deepcopy(FILING) + filing = copy.deepcopy(FILING['filings']) filing = { **filing, 'filing_type': 'lear_tombstone', @@ -429,43 +947,74 @@ def build_epoch_filing(business_id: int) -> dict: return filing -def load_data(conn: Connection, table_name: str, data: dict, conflict_column: str=None) -> int: +def get_username(data: dict) -> str: + first_name = data.get('u_first_name') + middle_name = data.get('u_middle_name') + last_name = data.get('u_last_name') + + username = ' '.join([name for name in [first_name, middle_name, last_name] if name]) + if not username: + username = data.get('u_user_id') + if not username: + username = data.get('p_cc_holder_name') + + return username + + +def load_data(conn: Connection, + table_name: str, + data: dict, + conflict_column: str = None, + conflict_error = False, + expecting_id: bool = True) -> Optional[int]: columns = ', '.join(data.keys()) - values = ', '.join([format_value(v) for v in data.values()]) + placeholders = ', '.join([f':{key}' for key in data.keys()]) if conflict_column: - conflict_value = format_value(data[conflict_column]) - check_query = f"select id from {table_name} where {conflict_column} = {conflict_value}" - check_result = conn.execute(text(check_query)).scalar() + conflict_value = data[conflict_column] + check_query = f"select id from {table_name} where {conflict_column} = :conflict_value" + check_result = conn.execute(text(check_query), {'conflict_value': format_value(conflict_value)}).scalar() if check_result: - return check_result + if not conflict_error: + return check_result + else: + raise Exception('Trying to reload corp existing in db, run delete script first') - query = f"""insert into {table_name} ({columns}) values ({values}) returning id""" + query = f"""insert into {table_name} ({columns}) values ({placeholders})""" + if expecting_id: + query = query + ' returning id' - result = 
conn.execute(text(query)) - id = result.scalar() + result = conn.execute(text(query), format_params(data)) - return id + if expecting_id: + id = result.scalar() + return id + + return None -def update_data(conn: Connection, table_name: str, data: dict, id: int) -> bool: - update_pairs = [f'{k} = {format_value(v)}' for k, v in data.items()] +def update_data(conn: Connection, table_name: str, data: dict, column: str, value: any) -> int: + update_pairs = [f'{k} = :{k}' for k in data.keys()] update_pairs_str = ', '.join(update_pairs) - query = f"""update {table_name} set {update_pairs_str} where id={id}""" + query = f"""update {table_name} set {update_pairs_str} where {column}=:condition_value returning id""" - result = conn.execute(text(query)) + params = copy.deepcopy(data) + params['condition_value'] = value - return result.rowcount > 0 + result = conn.execute(text(query), format_params(params)) + id = result.scalar() + + return id def format_value(value) -> str: - if value is None: - return 'NULL' - elif isinstance(value, (int, float)): - return str(value) - elif isinstance(value, dict): - return f"'{json.dumps(value)}'" - else: - # Note: handle single quote issue - value = str(value).replace("'", "''") - return f"'{value}'" + if isinstance(value, dict): + return json.dumps(value) + return value + + +def format_params(data: dict) -> dict: + formatted = {} + for k, v in data.items(): + formatted[k] = format_value(v) + return formatted diff --git a/data-tool/requirements.txt b/data-tool/requirements.txt index 17c45d16c9..9b877e0a7e 100755 --- a/data-tool/requirements.txt +++ b/data-tool/requirements.txt @@ -1,4 +1,5 @@ prefect==3.0.8 +prefect[dask] Babel==2.9.1 Flask>=2.0.0,<2.1.0 # Pin to 2.0.x series for flask-restx compatibility Flask-Babel==2.0.0 diff --git a/data-tool/requirements/prefect.txt b/data-tool/requirements/prefect.txt index 670302f365..9ead3f5fe2 100644 --- a/data-tool/requirements/prefect.txt +++ b/data-tool/requirements/prefect.txt @@ -1 +1,2 @@ 
prefect==3.0.8 +prefect[dask] diff --git a/data-tool/scripts/README_COLIN_Corps_Extract.md b/data-tool/scripts/README_COLIN_Corps_Extract.md index 471284278a..18c17e4753 100644 --- a/data-tool/scripts/README_COLIN_Corps_Extract.md +++ b/data-tool/scripts/README_COLIN_Corps_Extract.md @@ -7,7 +7,7 @@ ``` # create empty db for the first time createdb -h localhost -p 5432 -U postgres -T template0 colin-mig-corps-data-test && \ -psql -h localhost -p 5432 -U postgres -d colin-mig-corps-test -f /data-tool/scripts/colin_corps_extract_postgres_ddl +psql -h localhost -p 5432 -U postgres -d colin-mig-corps-data-test -f /data-tool/scripts/colin_corps_extract_postgres_ddl # kill connection & recreate empty db psql -h localhost -p 5432 -U postgres -d colin-mig-corps-data-test -c "SELECT pg_terminate_backend(pg_stat_activity.pid) FROM pg_stat_activity WHERE datname = 'colin-mig-corps-data-test' AND pid <> pg_backend_pid();" && \ @@ -25,7 +25,7 @@ connection cprd -d Oracle -u -p -h -P

:/ "port=5432" -connection cprd_pg -d PostgreSql -u postgres -p -h localhost -P -D colin-mig-corps-test +connection cprd_pg -d PostgreSql -u postgres -p -h localhost -P -D colin-mig-corps-data-test ``` 7. Transfer data `dbschemacli /data-tool/scripts/transfer_cprd_corps.sql` 8. Successful output will look something like following: diff --git a/data-tool/scripts/colin_corps_extract_postgres_ddl b/data-tool/scripts/colin_corps_extract_postgres_ddl index 16938f1c02..708e951f4a 100644 --- a/data-tool/scripts/colin_corps_extract_postgres_ddl +++ b/data-tool/scripts/colin_corps_extract_postgres_ddl @@ -6,10 +6,6 @@ create sequence corp_processing_id_seq; alter sequence corp_processing_id_seq owner to postgres; -create sequence synonym_id_seq; - -alter sequence synonym_id_seq owner to postgres; - create table if not exists address ( addr_id numeric(10) @@ -55,7 +51,8 @@ create table if not exists corporation admin_email varchar(254), accession_num varchar(10), send_ar_ind boolean, - last_ar_filed_dt timestamp with time zone + last_ar_filed_dt timestamp with time zone, + last_ar_reminder_year numeric(4) ); alter table corporation @@ -492,8 +489,11 @@ create table if not exists corp_involved_amalgamating event_id numeric(9) not null constraint fk_corp_involved_event references event (event_id), - corp_num varchar(10) not null - constraint fk_corp_involved_corporation + ted_corp_num varchar(10) not null + constraint fk_corp_involved_ted_corporation + references corporation (corp_num), + ting_corp_num varchar(10) not null + constraint fk_corp_involved_ting_corporation references corporation (corp_num), corp_involve_id numeric(9) not null, can_jur_typ_cd char(2), @@ -772,3 +772,138 @@ comment on table corp_involved_cont_in is 'new table\n\n"Optionally, a ""Continu alter table corp_involved_cont_in owner to postgres; +create table if not exists payment +( + event_id numeric(9) not null + constraint fk_payment + references event (event_id), + payment_typ_cd varchar(4) not null, 
+ cc_holder_nme varchar(80) +); + +alter table payment + owner to postgres; + +CREATE INDEX if not exists ix_conv_event_event_id ON conv_event (event_id); + +CREATE INDEX if not exists ix_conv_ledger_event_id ON conv_ledger (event_id); + +CREATE INDEX if not exists ix_corp_comments_corp_num ON corp_comments (corp_num); + +CREATE INDEX if not exists ix_corp_comments_first_nme ON corp_comments (first_nme); + +CREATE INDEX if not exists ix_corp_comments_last_nme ON corp_comments (last_nme); + +CREATE INDEX if not exists ix_corp_comments_middle_nme ON corp_comments (middle_nme); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_event_id ON corp_involved_amalgamating (event_id); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_ted_corp_num ON corp_involved_amalgamating (ted_corp_num); + +CREATE INDEX if not exists ix_corp_involved_amalgamating_ting_corp_num ON corp_involved_amalgamating (ting_corp_num); + +CREATE INDEX if not exists ix_corp_name_corp_num ON corp_name (corp_num); + +CREATE INDEX if not exists ix_corp_name_start_event_id ON corp_name (start_event_id); + +CREATE INDEX if not exists ix_corp_name_end_event_id ON corp_name (end_event_id); + +CREATE INDEX if not exists ix_corp_name_corp_name_typ_cd ON corp_name (corp_name_typ_cd); + +CREATE INDEX if not exists ix_corp_party_mailing_addr_id ON corp_party (mailing_addr_id); + +CREATE INDEX if not exists ix_corp_party_delivery_addr_id ON corp_party (delivery_addr_id); + +CREATE INDEX if not exists ix_corp_party_corp_num ON corp_party (corp_num); + +CREATE INDEX if not exists ix_corp_party_start_event_id ON corp_party (start_event_id); + +CREATE INDEX if not exists ix_corp_party_end_event_id ON corp_party (end_event_id); + +CREATE INDEX if not exists ix_corp_party_appointment_dt ON corp_party (appointment_dt); + +CREATE INDEX if not exists ix_corp_processing_id ON corp_processing (id); + +CREATE INDEX if not exists ix_corp_processing_flow_run_id ON corp_processing (flow_run_id); + +CREATE 
INDEX if not exists ix_corp_processing_claimed_at ON corp_processing (claimed_at); + +CREATE INDEX if not exists ix_corp_state_corp_num ON corp_state (corp_num); + +CREATE INDEX if not exists ix_corp_state_start_event_id ON corp_state (start_event_id); + +CREATE INDEX if not exists ix_corp_state_end_event_id ON corp_state (end_event_id); + +CREATE INDEX if not exists ix_corp_state_state_type_cd ON corp_state (state_type_cd); + +CREATE INDEX if not exists ix_corporation_recognition_dts ON corporation (recognition_dts); + +CREATE INDEX if not exists ix_corporation_bn_9 ON corporation (bn_9); + +CREATE INDEX if not exists ix_corporation_bn_15 ON corporation (bn_15); + +CREATE INDEX if not exists ix_corporation_last_ar_filed_dt ON corporation (last_ar_filed_dt); + +CREATE INDEX if not exists ix_corporation_corp_frozen_type_cd ON corporation (corp_frozen_type_cd); + +CREATE INDEX if not exists ix_filing_withdrawn_event_id ON filing (withdrawn_event_id); + +CREATE INDEX if not exists ix_filing_user_event_id ON filing_user (event_id); + +CREATE INDEX if not exists ix_filing_user_last_name ON filing_user (last_name); + +CREATE INDEX if not exists ix_filing_user_middle_name ON filing_user (middle_name); + +CREATE INDEX if not exists ix_filing_user_first_name ON filing_user (first_name); + +CREATE INDEX if not exists ix_filing_user_user_id ON filing_user (user_id); + +CREATE INDEX if not exists ix_filing_user_role_typ_cd ON filing_user (role_typ_cd); + +CREATE INDEX if not exists ix_jurisdiction_corp_num ON jurisdiction (corp_num); + +CREATE INDEX if not exists ix_jurisdiction_start_event_id ON jurisdiction (start_event_id); + +CREATE INDEX if not exists ix_ledger_text_event_id ON ledger_text (event_id); + +CREATE INDEX if not exists ix_office_corp_num ON office (corp_num); + +CREATE INDEX if not exists ix_office_office_typ_cd ON office (office_typ_cd); + +CREATE INDEX if not exists ix_office_start_event_id ON office (start_event_id); + +CREATE INDEX if not exists 
ix_office_end_event_id ON office (end_event_id); + +CREATE INDEX if not exists ix_office_mailing_addr_id ON office (mailing_addr_id); + +CREATE INDEX if not exists ix_office_delivery_addr_id ON office (delivery_addr_id); + +CREATE INDEX if not exists ix_payment_event_id ON payment (event_id); + +CREATE INDEX if not exists ix_resolution_corp_num ON resolution (corp_num); + +CREATE INDEX if not exists ix_resolution_start_event_id ON resolution (start_event_id); + +CREATE INDEX if not exists ix_resolution_end_event_id ON resolution (end_event_id); + +CREATE INDEX if not exists ix_share_series_corp_num ON share_series (corp_num); + +CREATE INDEX if not exists ix_share_series ON share_series (share_class_id); + +CREATE INDEX if not exists ix_share_series_start_event_id ON share_series (start_event_id); + +CREATE INDEX if not exists ix_share_struct_end_event_id ON share_struct (end_event_id); + +CREATE INDEX if not exists ix_share_struct_cls_corp_num ON share_struct_cls (corp_num); + +CREATE INDEX if not exists ix_share_struct_cls_start_event_id ON share_struct_cls (start_event_id); + +CREATE INDEX if not exists ix_share_struct_cls_share_class_id ON share_struct_cls (share_class_id); + +CREATE INDEX if not exists idx_corp_processing_flow_env_status ON corp_processing (flow_name, environment, processed_status, corp_num); + +CREATE INDEX if not exists idx_corp_processing_claim_batch ON corp_processing (environment, flow_name, flow_run_id, processed_status, claimed_at); + +CREATE INDEX if not exists idx_corp_state_active ON corp_state (end_event_id, corp_num); + +CREATE INDEX if not exists idx_corp_state_corp_num_end_event_id ON corp_state (corp_num, end_event_id); diff --git a/data-tool/scripts/transfer_cprd_corps.sql b/data-tool/scripts/transfer_cprd_corps.sql index 4db418dbcd..c0a54fc932 100644 --- a/data-tool/scripts/transfer_cprd_corps.sql +++ b/data-tool/scripts/transfer_cprd_corps.sql @@ -80,7 +80,14 @@ select case when 'N' then 0 when 'Y' then 1 else 1 - end 
SEND_AR_IND + end SEND_AR_IND, + (select + to_number(to_char(max(date_1), 'YYYY')) + from eml_log e, rep_data r + where + e.corp_num=c.corp_num + and e.param_id=r.param_id + and e.corp_num=r.t20_1) as LAST_AR_REMINDER_YEAR from corporation c where corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', '1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') @@ -556,7 +563,11 @@ from (select e.event_id, -- SELECT BY EVENT case when c.CORP_TYP_CD in ('BC', 'ULC', 'CC') then 'BC' || c.CORP_NUM else c.CORP_NUM - end CORP_NUM, + end TED_CORP_NUM, + case + when c2.corp_typ_cd in ('BC', 'ULC', 'CC') then 'BC' || c2.corp_num + else c2.corp_num + end TING_CORP_NUM, ci.CORP_INVOLVE_ID, ci.CAN_JUR_TYP_CD, case ci.ADOPTED_CORP_IND @@ -570,10 +581,12 @@ from (select e.event_id, -- SELECT BY EVENT from event e , CORP_INVOLVED ci , corporation c + , corporation c2 where e.event_id = ci.event_id and c.corp_num = e.corp_num - and corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') + and c.corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and event_typ_cd = 'CONVAMAL' + and c2.corp_num = ci.corp_num -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', '1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') -- and rownum <= 5 UNION ALL @@ -581,7 +594,11 @@ from (select e.event_id, -- SELECT BY EVENT 
case when c.CORP_TYP_CD in ('BC', 'ULC', 'CC') then 'BC' || c.CORP_NUM else c.CORP_NUM - end CORP_NUM, + end TED_CORP_NUM, + case + when c2.corp_typ_cd in ('BC', 'ULC', 'CC') then 'BC' || c2.corp_num + else c2.corp_num + end TING_CORP_NUM, ci.CORP_INVOLVE_ID, ci.CAN_JUR_TYP_CD, case ci.ADOPTED_CORP_IND @@ -595,11 +612,13 @@ from (select e.event_id, -- SELECT BY EVENT from event e , CORP_INVOLVED ci , corporation c + , corporation c2 , filing f where e.event_id = ci.event_id and c.corp_num = e.corp_num + and c2.corp_num = ci.corp_num and e.event_id = f.event_id - and corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') + and c.corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') and filing_typ_cd in ('AMALH', 'AMALV', 'AMALR', 'AMLHU', 'AMLVU', 'AMLRU', 'AMLHC', 'AMLVC', 'AMLRC') -- and c.corp_num in ('1396310', '1396309', '1396308', '1396307', '1396306', '1396890', '1396889', '1396885', '1396883', '1396878','1396597', '1396143', '1395925', '1395116', '1394990', '1246445', '1216743', '1396508', '1396505', '1396488', '1396401', '1396387', '1396957', '1355943', '1340611', '1335427', '1327193', '1393945', '1208648', '1117024', '1120292', '1127373', '1135492') -- and rownum <= 5 @@ -873,6 +892,22 @@ where cp.CORP_PARTY_ID = pn.party_id order by c.corp_num; + +-- payment +transfer public.payment from cprd using +select p.event_id, + p.payment_typ_cd, + p.cc_holder_nme +from payment p + , event e + , corporation c +where p.event_id = e.event_id +and e.corp_num = c.corp_num +and c.corp_typ_cd in ('BC', 'C', 'ULC', 'CUL', 'CC', 'CCC', 'QA', 'QB', 'QC', 'QD', 'QE') +order by e.event_id; + + + -- alter tables alter table corporation alter column send_ar_ind type boolean using send_ar_ind::boolean; alter table filing diff --git a/docs/business.yaml b/docs/business.yaml index 423df3f80c..6fe8d06c2e 100644 --- a/docs/business.yaml +++ b/docs/business.yaml @@ -22,6 +22,7 @@ info: - Consent Continuation Out - Conversion 
- Dissolution + - Notice of Withdrawal - Registration (Sole Proprietorship, General Partnership) - Special Resolution @@ -696,6 +697,37 @@ paths: paymentToken: '12345' status: 'PENDING' submitter: 'mocked submitter' + notice-of-withdrawal-success-response: + summary: Notice of Withdrawal Response + value: + filing: + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. + legalType: BC + header: + affectedFilings: [] + availableOnPaperOnly: false + certifiedBy: Sample Certified Person + colinIds: [] + comments: [] + data: '2024-12-18T00:10:47.042797+00:00' + email: api.specs@example.com + filingId: 654321 + inColinOnly: false + isCorrected: false + isCorrectionPending: false + isPaymentActionRequired: false + name: noticeOfWithdrawal + paymentStatusCode: 'APPROVED' + paymentToken: '12345' + status: 'PENDING' + submitter: 'mocked submitter' + noticeOfWithdrawal: + filingId: 123456 + hasTakenEffect: false + partOfPoa: false voluntary-dissolution-success-response: summary: Voluntary Dissolution Response value: @@ -870,6 +902,12 @@ paths: value: errorMessage: API backend third party service error. rootCause: errors:[error:Can't have new consent for same jurisdiction if an unexpired one already exists,path:/filing/consentContinuationOut/foreignJurisdiction],filing:business:foundingDate:2024-07-08T15:34:57.844764+00:00,identifier:BC0882848,legalName:0882848 B.C. LTD.,legalType:BEN,consentContinuationOut:courtOrder:effectOfOrder:planOfArrangement,fileNumber:12345,foreignJurisdiction:country:CA,region:AB,header:availableOnPaperOnly:false,certifiedBy:Api specs,date:2024-07-10,documentOptionalEmail:Apispecs@email.com,email:Apispecs@gov.bc.ca,inColinOnly:false,name:consentContinuationOut + notice-of-withdrawal-failed-withdrawn-filing-issues-response: + summary: Notice of Withdrawal - invalid withdrawn filing + value: + errors: + - error: Only filings with a future effective date can be withdrawn. 
+ - error: Only paid filings with a future effective date can be withdrawn. voluntary-dissolution-failed-missing-filing-name-response: summary: Voluntary Dissolution - Missing Filing Name Response value: @@ -884,7 +922,21 @@ paths: summary: Consent Continuation Out - Unauthorized Response value: errorMessage: API backend third party service error. - rootCause: message:You are not authorized to submit a filing for BC1218840. + rootCause: message:You are not authorized to submit a filing for BC1218840. + notice-of-withdrawal-failed-not-staff-response: + summary: Notice of Withdrawal - Not a staff + value: + message: You are not authorized to submit a filing for BC1234567. + '404': + description: Cannot found, when a value cannot be found in the records + content: + application/json: + examples: + notice-of-withdrawal-failed-invalid-filing-id-response: + summary: Notice of Withdrawal - withdrawn filing cannot be found + value: + errors: + - error: The filing to be withdrawn cannot be found. '422': description: UNPROCESSABLE ENTITY, in many cases caused by missing one or more required field(s) content: @@ -1436,6 +1488,45 @@ paths: courtOrder: fileNumber: '12345' effectOfOrder: planOfArrangement + notice-of-withdrawal-request: + summary: Notice of Withdrawal Request + value: + filing: + header: + name: noticeOfWithdrawal + certifiedBy: Sample Certified Person + email: api.specs@example.com + date: '2024-12-18' + priority: false + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. + legalType: BC + noticeOfWithdrawal: + filingId: 123456 + notice-of-withdrawal-with-options-request: + summary: Notice of Withdrawal Request with Options + value: + filing: + header: + name: noticeOfWithdrawal + certifiedBy: Sample Certified Person + email: api.specs@example.com + date: '2024-12-18' + priority: false + business: + foundingDate: '2023-07-12T17:31:58.000+00:00' + identifier: BC1234567 + legalName: 1234567 B.C. LTD. 
+ legalType: BC + noticeOfWithdrawal: + filingId: 123456 + courtOrder: + fileNumber: "A12345" + effectOfOrder: planOfArrangement + hasTakenEffect: false + partOfPoa: false voluntary-dissolution-request: summary: Voluntary Dissolution Request value: @@ -5325,6 +5416,7 @@ components: - dissolution - dissolved - incorporationApplication + - noticeOfWithdrawal - putBackOn - registration - restoration @@ -5677,6 +5769,7 @@ components: - $ref: '#/components/schemas/Correction' - $ref: '#/components/schemas/Dissolution' - $ref: '#/components/schemas/Incorporation_application' + - $ref: '#/components/schemas/Notice_of_withdrawal' - $ref: '#/components/schemas/Registrars_notation' - $ref: '#/components/schemas/Registrars_order' - $ref: '#/components/schemas/Registration' @@ -6248,6 +6341,34 @@ components: - DBA required: - name + Notice_of_withdrawal: + type: object + title: Notice of Withdrawal Filing + description: Filing to withdraw future effective filings. + required: + - noticeOfWithdrawal + properties: + noticeOfWithdrawal: + type: object + description: This section contains all the information to withdraw a future effective filing. 
+ required: + - filingId + properties: + filingId: + type: integer + title: ID for the future effective filing + courtOrder: + $ref: '#/components/schemas/Court_order' + hasTakenEffect: + type: boolean + title: One of the terms of arrangement for the FED filing have taken effect + partOfPoa: + type: boolean + title: FED filing is part of a Plan of Arrangement + x-examples: + Example 1: + noticeOfWithdrawal: + filingId: 123456 Office: title: Office Schema type: object diff --git a/jobs/correction-ben-statement/add_corrections.ipynb b/jobs/correction-ben-statement/add_corrections.ipynb new file mode 100644 index 0000000000..63136e0e70 --- /dev/null +++ b/jobs/correction-ben-statement/add_corrections.ipynb @@ -0,0 +1,159 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Add Correction filing for All active existing companies" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + " Purpose: Add Corrections filing for all active existing BENs.\n", + "\n", + "This is a one time (python) script to be run at a given date/time.
\n", + "Set the configuration (client_id, client_secret, url(s)) for a scpecific environment.
\n", + "Get access token for authorization.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Access token returned successfully\n" + ] + } + ], + "source": [ + "import requests\n", + "import os\n", + "from datetime import datetime\n", + "\n", + "# token_url, client_id, client_secret, base_url - update based on environment\n", + "token_url = os.getenv('ACCOUNT_SVC_AUTH_URL')\n", + "client_id = os.getenv('ACCOUNT_SVC_CLIENT_ID')\n", + "client_secret = os.getenv('ACCOUNT_SVC_CLIENT_SECRET')\n", + "base_url = os.getenv('LEGAL_API_BASE_URL')\n", + "\n", + "header = {\n", + " \"Content-Type\": \"application/x-www-form-urlencoded\"\n", + "}\n", + "\n", + "data = 'grant_type=client_credentials'\n", + "\n", + "res = requests.post(token_url, data, auth=(client_id, client_secret), headers=header)\n", + "\n", + "# Check the status code of the response\n", + "if res.status_code == 200:\n", + " print(\"Access token returned successfully\")\n", + " token = res.json()[\"access_token\"]\n", + "else:\n", + " print(f\"Failed to make POST request. Status code: {res.status_code}\")\n", + " print(res.text) # Print the error message if the request fails\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Call API (POST) endpoint to createCorrection filing with details as Ben correction statement for businesses." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Correction created successfully for BC0887594 and correction filing_id is 170235\n" + ] + } + ], + "source": [ + "from urllib.parse import urljoin\n", + "from corrections_output import correction_businesses\n", + "\n", + "current_date = datetime.now().date().isoformat()\n", + "formatted_current_date = datetime.now().date().strftime('%B %d, %Y')\n", + "correction_statement = (\"BC benefit company statement contained in notice of articles as required under section \" \n", + "\"51.992 of the Business Corporations Act corrected from “This company is a benefit company and, as such, has purposes \"\n", + "\"that include conducting its business in a responsible and sustainable manner and promoting one or more public \"\n", + "\"benefits” to “This company is a benefit company and, as such, is committed to conducting its business in a \"\n", + "\"responsible and sustainable manner and promoting one or more public benefits”\")\n", + "\n", + "headers = {\n", + " 'Content-Type': 'application/json',\n", + " 'Authorization': 'Bearer ' + token\n", + "}\n", + "\n", + "# loop through list of businesses to create filing\n", + "for correction_businesse in correction_businesses:\n", + " identifier = correction_businesse[0]\n", + " filind_id = correction_businesse[1]\n", + " correction_filing_data = {\n", + " \"filing\": {\n", + " \"header\": {\n", + " \"name\": \"correction\",\n", + " \"date\": current_date,\n", + " \"certifiedBy\": \"system\"\n", + " },\n", + " \"business\": {\n", + " \"identifier\": identifier,\n", + " \"legalType\": \"BC\"\n", + " },\n", + " \"correction\": {\n", + " \"details\": \"BEN Correction statement\",\n", + " \"correctedFilingId\": filind_id,\n", + " \"correctedFilingType\": \"incorporationApplication\",\n", + " \"comment\": f\"\"\"Correction for Incorporation Application filed on 
{formatted_current_date} \\n{correction_statement}\"\"\"\n", + " }\n", + " }\n", + " }\n", + "\n", + " filing_url = urljoin(base_url, f\"/api/v2/businesses/{identifier}/filings\")\n", + " rv = requests.post(filing_url, headers=headers, json=correction_filing_data)\n", + "\n", + " # Check the status code of the response\n", + " if rv.status_code == 201:\n", + " correction_filing_id = rv.json()[\"filing\"][\"header\"][\"filingId\"]\n", + " print(f\"Correction created successfully for {identifier} and correction filing_id is {correction_filing_id}\")\n", + " else:\n", + " print(f\"Failed to make POST request. Status code: {rv.status_code}: {rv.text}\")\n", + " print(rv.text) # Print the error message if the request fails\n", + " \n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.17" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/jobs/correction-ben-statement/add_registrars_notation.ipynb b/jobs/correction-ben-statement/add_registrars_notation.ipynb index 17d0234a19..0197abf547 100644 --- a/jobs/correction-ben-statement/add_registrars_notation.ipynb +++ b/jobs/correction-ben-statement/add_registrars_notation.ipynb @@ -66,7 +66,7 @@ "outputs": [], "source": [ "from urllib.parse import urljoin\n", - "from data import ben_businesses\n", + "from rn_output import rn_businesses\n", "\n", "current_date = datetime.now().date().isoformat()\n", "headers = {\n", @@ -75,7 +75,7 @@ "}\n", "\n", "# loop through list of businesses to create filing\n", - "for ben in ben_businesses:\n", + "for business in rn_businesses:\n", " filing_data = {\n", " \"filing\": {\n", " \"header\": {\n", @@ -84,7 +84,7 @@ " \"certifiedBy\": \"system\"\n", " },\n", " 
\"business\": {\n", - " \"identifier\": ben,\n", + " \"identifier\": business,\n", " \"legalType\": \"BEN\"\n", " },\n", " \"registrarsNotation\": {\n", @@ -98,16 +98,15 @@ " }\n", " }\n", "\n", - " filing_url = urljoin(base_url, f\"/api/v2/businesses/{ben}/filings\")\n", + " filing_url = urljoin(base_url, f\"/api/v2/businesses/{business}/filings\")\n", " response = requests.post(filing_url, headers=headers, json=filing_data)\n", "\n", " # Check the status code of the response\n", " if response.status_code == 201:\n", - " print(f\"Registrars Notation cretaed successfully for {ben}\")\n", + " print(f\"Registrars Notation created successfully for {business}\")\n", " else:\n", " print(f\"Failed to make POST request. Status code: {response.status_code}\")\n", - " print(response.text) # Print the error message if the request fails\n", - " \n" + " print(response.text) # Print the error message if the request fails\n" ] } ], diff --git a/jobs/correction-ben-statement/convert_corrections_data.py b/jobs/correction-ben-statement/convert_corrections_data.py new file mode 100644 index 0000000000..c364c3afa0 --- /dev/null +++ b/jobs/correction-ben-statement/convert_corrections_data.py @@ -0,0 +1,31 @@ +import pandas as pd + +# Function to convert CSV to array of arrays using pandas +def convert_csv_to_array_of_arrays(csv_filename): + # Read the CSV file into a pandas DataFrame + df = pd.read_csv(csv_filename) + + # Convert the DataFrame to a list of lists (array of arrays) + rows_array = df.values.tolist() + + return rows_array + +# Write the array of arrays to a Python file +def write_array_to_python_file(array, output_filename): + with open(output_filename, 'w') as f: + f.write('correction_businesses = [\n') # Start the Python array + for row in rows_array: + f.write(f' {row},\n') # Write each row as a list + f.write(']\n') # End the Python array + +# Specify your input and output filenames +csv_filename = 'corrections_results.csv' +output_filename = 'corrections_output.py' + 
+# Convert CSV to array of arrays +rows_array = convert_csv_to_array_of_arrays(csv_filename) + +# Write the result to a Python file +write_array_to_python_file(rows_array, output_filename) + +print(f"Data has been written to {output_filename}") diff --git a/jobs/correction-ben-statement/convert_registrar_notation_data.py b/jobs/correction-ben-statement/convert_registrar_notation_data.py new file mode 100644 index 0000000000..8e821038a9 --- /dev/null +++ b/jobs/correction-ben-statement/convert_registrar_notation_data.py @@ -0,0 +1,32 @@ +import csv + +# Function to read CSV and convert to a Python array +def csv_to_python_array(file_path): + array = [] + + # Open the CSV file and read its contents + with open(file_path, mode='r', newline='') as file: + reader = csv.reader(file) + for row in reader: + array.extend(row) # Add each element from the row to the array + + return array + +# Function to write the Python array to a file, with each element on a new row +def write_to_python_file(array, output_file): + with open(output_file, 'w') as file: + file.write('rn_businesses = [\n') # Start the array in Python format + for element in array: + file.write(f" '{element}',\n") # Write each element in the array + file.write(']\n') # End the array in Python format + +input_csv = 'registrar_notation_result.csv' +output_python_file = 'rn_output.py' + +# Convert CSV to Python array +python_array = csv_to_python_array(input_csv) + +# Write the array to a Python file +write_to_python_file(python_array, output_python_file) + +print(f"Python array has been written to {output_python_file}") diff --git a/jobs/correction-ben-statement/corrections_output.py b/jobs/correction-ben-statement/corrections_output.py new file mode 100644 index 0000000000..18a26bcc4a --- /dev/null +++ b/jobs/correction-ben-statement/corrections_output.py @@ -0,0 +1,5 @@ +correction_businesses = [ + ['BC0871147', 131528], + ['BC0871183', 133390], + ['BC0871186', 139687], +] diff --git 
a/jobs/correction-ben-statement/corrections_results.csv b/jobs/correction-ben-statement/corrections_results.csv new file mode 100644 index 0000000000..68832b4eb4 --- /dev/null +++ b/jobs/correction-ben-statement/corrections_results.csv @@ -0,0 +1,4 @@ +"identifier","id" +"BC1218818",110441 +"BC1218819",110445 +"BC1218820",110446 diff --git a/jobs/correction-ben-statement/data.py b/jobs/correction-ben-statement/data.py deleted file mode 100644 index a81110f91e..0000000000 --- a/jobs/correction-ben-statement/data.py +++ /dev/null @@ -1,7 +0,0 @@ -# Populate this list with the existing BEN business identifiers from specified environment -# This works as a data file for Jupyter notebook used to add Registrar's Notation -ben_businesses = [ - "BC0871277", - "BC0871062" -] - diff --git a/jobs/correction-ben-statement/queries.sql b/jobs/correction-ben-statement/queries.sql new file mode 100644 index 0000000000..0cf5992cb0 --- /dev/null +++ b/jobs/correction-ben-statement/queries.sql @@ -0,0 +1,17 @@ +-- query to get all businesses (BENs) for Registrar's Notation +select b.identifier +from businesses b +where b.legal_type = 'BEN' +order by b.identifier asc; + +-- query to get all ACTIVE businesses (BENs) for Corrections +select b.identifier, f.id +from businesses b join filings f on b.id = f.business_id +where b.legal_type = 'BEN' and f.filing_type = 'incorporationApplication' and b.state = 'ACTIVE' +order by b.identifier asc; + +-- query to get all ACTIVE businesses (BENs) which have "in progress" drafts +select b.identifier, f.id, f.filing_type +from businesses b join filings f on b.id = f.business_id +where b.legal_type = 'BEN' and b.state = 'ACTIVE' and f.status = 'DRAFT' +order by b.identifier asc; diff --git a/jobs/correction-ben-statement/registrar_notation_result.csv b/jobs/correction-ben-statement/registrar_notation_result.csv new file mode 100644 index 0000000000..2f58ca3532 --- /dev/null +++ b/jobs/correction-ben-statement/registrar_notation_result.csv @@ -0,0 
+1,4 @@ +"BC1230101" +"BC1230102" +"BC1230104" + diff --git a/jobs/correction-ben-statement/rn_output.py b/jobs/correction-ben-statement/rn_output.py new file mode 100644 index 0000000000..7fe05540b4 --- /dev/null +++ b/jobs/correction-ben-statement/rn_output.py @@ -0,0 +1,5 @@ +rn_businesses = [ + 'BC1230101', + 'BC1230102', + 'BC1230104', +] diff --git a/jobs/email-reminder/config.py b/jobs/email-reminder/config.py index e835295b49..032d9cfefc 100644 --- a/jobs/email-reminder/config.py +++ b/jobs/email-reminder/config.py @@ -44,6 +44,8 @@ def get_named_config(config_name: str = 'production'): class _Config(object): # pylint: disable=too-few-public-methods """Base class configuration.""" + # used to identify versioning flag + SERVICE_NAME = 'emailer-reminder-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SEND_OUTSTANDING_BCOMPS = os.getenv('SEND_OUTSTANDING_BCOMPS', None) diff --git a/jobs/email-reminder/email_reminder.py b/jobs/email-reminder/email_reminder.py index b87d5faab1..b998bdadf4 100644 --- a/jobs/email-reminder/email_reminder.py +++ b/jobs/email-reminder/email_reminder.py @@ -21,6 +21,7 @@ import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from flask import Flask from legal_api.models import Business, Filing, db # noqa: I001 +from legal_api.models.db import init_db from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags from legal_api.services.queue import QueueService @@ -30,6 +31,8 @@ import config # pylint: disable=import-error from utils.logging import setup_logging # pylint: disable=import-error + + # noqa: I003 setup_logging( @@ -46,7 +49,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.CONFIGURATION[run_mode]) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): @@ -143,7 
+146,7 @@ async def find_and_send_ar_reminder(app: Flask, qsm: QueueService): # pylint: d Business.LegalTypes.ULC_CONTINUE_IN.value, Business.LegalTypes.CCC_CONTINUE_IN.value,] # entity types to send ar reminder - if flags.is_on('enable-bc-ccc-ulc'): + if flags.is_on('enable-bc-ccc-ulc-email-reminder'): legal_types.extend( [Business.LegalTypes.COMP.value, Business.LegalTypes.BC_CCC.value, diff --git a/jobs/email-reminder/flags.json b/jobs/email-reminder/flags.json new file mode 100644 index 0000000000..147cc34398 --- /dev/null +++ b/jobs/email-reminder/flags.json @@ -0,0 +1,15 @@ +{ + "flagValues": { + "enable-bc-ccc-ulc": false, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": true, + "emailer-reminder-job": true + } + } +} \ No newline at end of file diff --git a/jobs/email-reminder/requirements.txt b/jobs/email-reminder/requirements.txt index 3dce2c6323..8585b8ce43 100644 --- a/jobs/email-reminder/requirements.txt +++ b/jobs/email-reminder/requirements.txt @@ -7,7 +7,7 @@ Werkzeug==1.0.1 aniso8601==9.0.1 asyncio-nats-client==0.11.4 asyncio-nats-streaming==0.4.0 -attrs==20.3.0 +attrs==23.1.0 blinker==1.4 certifi==2020.12.5 click==7.1.2 @@ -29,4 +29,4 @@ six==1.15.0 urllib3==1.26.11 git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning -git+https://github.com/bcgov/business-schemas.git@2.15.38#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas diff --git a/jobs/expired-limited-restoration/Dockerfile b/jobs/expired-limited-restoration/Dockerfile new file mode 100644 index 0000000000..6dcfb50c59 --- /dev/null +++ b/jobs/expired-limited-restoration/Dockerfile @@ -0,0 +1,25 @@ +# platform=linux/amd64 +FROM python:3.8.5-buster +USER root + +# Create working directory +RUN 
mkdir /opt/app-root && chmod 755 /opt/app-root +WORKDIR /opt/app-root + +# Install the requirements +COPY ./requirements.txt . + +#RUN pip install --upgrade pip +RUN pip install pip==20.1.1 +RUN pip install --no-cache-dir -r requirements.txt + +COPY . . + +USER 1001 + +# Set Python path +ENV PYTHONPATH=/opt/app-root/src + +EXPOSE 8080 + +CMD [ "python", "/opt/app-root/file_expired_limited_restoration.py" ] diff --git a/jobs/expired-limited-restoration/Makefile b/jobs/expired-limited-restoration/Makefile new file mode 100644 index 0000000000..a3b38ce72a --- /dev/null +++ b/jobs/expired-limited-restoration/Makefile @@ -0,0 +1,148 @@ +.PHONY: license +.PHONY: setup +.PHONY: ci cd +.PHONY: run + +MKFILE_PATH:=$(abspath $(lastword $(MAKEFILE_LIST))) +CURRENT_ABS_DIR:=$(patsubst %/,%,$(dir $(MKFILE_PATH))) + +PROJECT_NAME:=expired-limited-restoration +DOCKER_NAME:=expired-limited-restoration + +################################################################################# +# COMMANDS -- Setup # +################################################################################# +setup: install install-dev ## Setup the project + +clean: clean-build clean-pyc clean-test ## Clean the project + rm -rf venv/ + +clean-build: ## Clean build files + rm -fr build/ + rm -fr dist/ + rm -fr .eggs/ + find . -name '*.egg-info' -exec rm -fr {} + + find . -name '*.egg' -exec rm -fr {} + + +clean-pyc: ## Clean cache files + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . -name '__pycache__' -exec rm -fr {} + + +clean-test: ## clean test files + find . -name '.pytest_cache' -exec rm -fr {} + + rm -fr .tox/ + rm -f .coverage + rm -fr htmlcov/ + +build-req: clean ## Upgrade requirements + test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ + . 
venv/bin/activate ;\ + pip install pip==20.1.1 ;\ + pip install -Ur requirements/prod.txt ;\ + pip freeze | sort > requirements.txt ;\ + cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ + pip install -Ur requirements/bcregistry-libraries.txt + +install: clean ## Install python virtual environment + test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ + . venv/bin/activate ;\ + pip install pip==20.1.1 ;\ + pip install -Ur requirements.txt + +install-dev: ## Install local application + . venv/bin/activate ; \ + pip install -Ur requirements/dev.txt; \ + pip install -e . + +################################################################################# +# COMMANDS - CI # +################################################################################# +ci: lint flake8 test ## CI flow + +pylint: ## Linting with pylint + . venv/bin/activate && pylint --rcfile=setup.cfg file_expired_limited_restoration.py + +flake8: ## Linting with flake8 + . venv/bin/activate && flake8 file_expired_limited_restoration.py + +lint: pylint flake8 ## run all lint type scripts + +test: ## Unit testing + . 
venv/bin/activate && pytest + +mac-cov: test ## Run the coverage report and display in a browser window (mac) + @open -a "Google Chrome" htmlcov/index.html + +################################################################################# +# COMMANDS - CD +# expects the terminal to be openshift login +# expects export OPENSHIFT_DOCKER_REGISTRY="" +# expects export OPENSHIFT_SA_NAME="$(oc whoami)" +# expects export OPENSHIFT_SA_TOKEN="$(oc whoami -t)" +# expects export OPENSHIFT_REPOSITORY="" +# expects export TAG_NAME="dev/test/prod" +# expects export OPS_REPOSITORY="" # +################################################################################# +cd: ## CD flow +ifeq ($(TAG_NAME), test) +# cd: update-env +cd: + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):dev $(DOCKER_NAME):$(TAG_NAME) +else ifeq ($(TAG_NAME), prod) +# cd: update-env +cd: + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):$(TAG_NAME) $(DOCKER_NAME):$(TAG_NAME)-$(shell date +%F) + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):test $(DOCKER_NAME):$(TAG_NAME) +else +TAG_NAME=dev +# cd: build update-env tag +cd: build tag +endif + +build: ## Build the docker container + docker build . -t $(DOCKER_NAME) \ + --build-arg VCS_REF=$(shell git rev-parse --short HEAD) \ + --build-arg BUILD_DATE=$(shell date -u +"%Y-%m-%dT%H:%M:%SZ") \ + +build-nc: ## Build the docker container without caching + docker build --no-cache -t $(DOCKER_NAME) . 
+ +REGISTRY_IMAGE=$(OPENSHIFT_DOCKER_REGISTRY)/$(OPENSHIFT_REPOSITORY)-tools/$(DOCKER_NAME) +push: #build ## Push the docker container to the registry & tag latest + @echo "$(OPENSHIFT_SA_TOKEN)" | docker login $(OPENSHIFT_DOCKER_REGISTRY) -u $(OPENSHIFT_SA_NAME) --password-stdin ;\ + docker tag $(DOCKER_NAME) $(REGISTRY_IMAGE):latest ;\ + docker push $(REGISTRY_IMAGE):latest + +# 1Password CLI1 will be deprecated on Oct 1, 2024 +# VAULTS=`cat devops/vaults.json` +# update-env: ## Update env from 1pass +# oc -n "$(OPS_REPOSITORY)-$(TAG_NAME)" exec "dc/vault-service-$(TAG_NAME)" -- ./scripts/1pass.sh \ +# -m "secret" \ +# -e "$(TAG_NAME)" \ +# -a "$(DOCKER_NAME)-$(TAG_NAME)" \ +# -n "$(OPENSHIFT_REPOSITORY)-$(TAG_NAME)" \ +# -v "$(VAULTS)" \ +# -r "false" \ +# -f "false" + +tag: push ## tag image + oc -n "$(OPENSHIFT_REPOSITORY)-tools" tag $(DOCKER_NAME):latest $(DOCKER_NAME):$(TAG_NAME) + +################################################################################# +# COMMANDS - Local # +################################################################################# + +run: ## Run the project in local + . 
venv/bin/activate && python file_expired_limited_restoration.py + +################################################################################# +# Self Documenting Commands # +################################################################################# +.PHONY: help + +.DEFAULT_GOAL := help + +help: + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/jobs/expired-limited-restoration/__init__.py b/jobs/expired-limited-restoration/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jobs/expired-limited-restoration/config.py b/jobs/expired-limited-restoration/config.py new file mode 100644 index 0000000000..57e04c49ad --- /dev/null +++ b/jobs/expired-limited-restoration/config.py @@ -0,0 +1,103 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""All of the configuration for the service is captured here. + +All items are loaded, or have Constants defined here that are loaded into the Flask configuration. +All modules and lookups get their configuration from the Flask config, rather than reading environment variables +directly or by accessing this configuration directly. 
+""" +import os +import random +import sys + +from dotenv import find_dotenv, load_dotenv + + +# this will load all the envars from a .env file located in the project root (api) +load_dotenv(find_dotenv()) + +CONFIGURATION = { + 'development': 'config.DevConfig', + 'testing': 'config.TestConfig', + 'production': 'config.ProdConfig', + 'default': 'config.ProdConfig' +} + + +def get_named_config(config_name: str = 'production'): + """Return the configuration object based on the name. + + :raise: KeyError: if an unknown configuration is requested + """ + if config_name in ['production', 'staging', 'default']: + config = ProdConfig() + elif config_name == 'testing': + config = TestConfig() + elif config_name == 'development': + config = DevConfig() + else: + raise KeyError(f"Unknown configuration '{config_name}'") + return config + + +class _Config(object): # pylint: disable=too-few-public-methods + """Base class configuration that should set reasonable defaults for all the other configurations.""" + + PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) + + LEGAL_API_URL = os.getenv('LEGAL_API_URL', '') + + SENTRY_DSN = os.getenv('SENTRY_DSN') or '' + SENTRY_DSN = '' if SENTRY_DSN.lower() == 'null' else SENTRY_DSN + + ACCOUNT_SVC_AUTH_URL = os.getenv('ACCOUNT_SVC_AUTH_URL', None) + ACCOUNT_SVC_CLIENT_ID = os.getenv('ACCOUNT_SVC_CLIENT_ID', None) + ACCOUNT_SVC_CLIENT_SECRET = os.getenv('ACCOUNT_SVC_CLIENT_SECRET', None) + ACCOUNT_SVC_TIMEOUT = os.getenv('ACCOUNT_SVC_TIMEOUT', 20) + + SECRET_KEY = 'a secret' + + TESTING = False + DEBUG = False + + +class DevConfig(_Config): # pylint: disable=too-few-public-methods + """Config for local development.""" + + TESTING = False + DEBUG = True + + +class TestConfig(_Config): # pylint: disable=too-few-public-methods + """In support of testing only used by the py.test suite.""" + + DEBUG = True + TESTING = True + + LEGAL_API_URL = os.getenv('LEGAL_API_URL_TEST', '') + SENTRY_DSN = os.getenv('SENTRY_DSN_TEST', '') + + +class 
ProdConfig(_Config): # pylint: disable=too-few-public-methods + """Production environment configuration.""" + + SECRET_KEY = os.getenv('SECRET_KEY', None) + + if not SECRET_KEY: + SECRET_KEY = os.urandom(24) + print('WARNING: SECRET_KEY being set as a one-shot', file=sys.stderr) + + TESTING = False + DEBUG = False diff --git a/jobs/expired-limited-restoration/devops/vaults.json b/jobs/expired-limited-restoration/devops/vaults.json new file mode 100644 index 0000000000..ff693d1c2b --- /dev/null +++ b/jobs/expired-limited-restoration/devops/vaults.json @@ -0,0 +1,10 @@ +[ + { + "vault": "entity", + "application": [ + "filings-jobs", + "entity-service-account", + "sentry" + ] + } +] diff --git a/jobs/expired-limited-restoration/file_expired_limited_restoration.py b/jobs/expired-limited-restoration/file_expired_limited_restoration.py new file mode 100644 index 0000000000..c4324575c4 --- /dev/null +++ b/jobs/expired-limited-restoration/file_expired_limited_restoration.py @@ -0,0 +1,175 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Expired Limited Restoration service. + +This module is being used to process businesses with expired limited restorations. 
+""" +import asyncio +import logging +import os +from datetime import datetime + +import requests +import sentry_sdk # noqa: I001; pylint: disable=ungrouped-imports; conflicts with Flake8 +from dotenv import find_dotenv, load_dotenv +from flask import Flask +from sentry_sdk.integrations.logging import LoggingIntegration # noqa: I001 + +import config # pylint: disable=import-error +from utils.logging import setup_logging # pylint: disable=import-error + + +setup_logging(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'logging.conf')) # important to do this first + +# this will load all the envars from a .env file located in the project root +load_dotenv(find_dotenv()) + +SENTRY_LOGGING = LoggingIntegration( + event_level=logging.ERROR # send errors as events +) + + +def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): + """Return a configured Flask App using the Factory method.""" + app = Flask(__name__) + app.config.from_object(config.CONFIGURATION[run_mode]) + # Configure Sentry + if app.config.get('SENTRY_DSN', None): + sentry_sdk.init( + dsn=app.config.get('SENTRY_DSN'), + integrations=[SENTRY_LOGGING] + ) + + return app + + +def get_bearer_token(app: Flask, timeout): + """Get a valid Bearer token for the service to use.""" + token_url = app.config.get('ACCOUNT_SVC_AUTH_URL') + client_id = app.config.get('ACCOUNT_SVC_CLIENT_ID') + client_secret = app.config.get('ACCOUNT_SVC_CLIENT_SECRET') + + data = 'grant_type=client_credentials' + + # get service account token + res = requests.post(url=token_url, + data=data, + headers={'content-type': 'application/x-www-form-urlencoded'}, + auth=(client_id, client_secret), + timeout=timeout) + + try: + return res.json().get('access_token') + except Exception: # pylint: disable=broad-exception-caught; # noqa: B902 + return None + + +def get_businesses_to_process(app: Flask): + """Get list of business identifiers that need processing.""" + timeout = int(app.config.get('ACCOUNT_SVC_TIMEOUT')) + token = 
get_bearer_token(app, timeout) + + response = requests.get( + f'{app.config["LEGAL_API_URL"]}/internal/expired_restoration', + headers={ + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {token}' + }, + timeout=timeout + ) + + if not response or response.status_code != 200: + app.logger.error(f'Failed to get businesses from legal-api. \ + {response} {response.json()} {response.status_code}') + raise Exception # pylint: disable=broad-exception-raised; + + return response.json().get('identifiers', []) + + +def create_put_back_off_filing(app: Flask, identifier: str): + """Create a putBackOff filing for the business.""" + timeout = int(app.config.get('ACCOUNT_SVC_TIMEOUT')) + token = get_bearer_token(app, timeout) + filing_data = { + 'filing': { + 'header': { + 'date': datetime.utcnow().date().isoformat(), + 'name': 'putBackOff', + 'certifiedBy': 'system' + }, + 'business': { + 'identifier': identifier + }, + 'putBackOff': { + 'details': 'Put back off filing due to expired limited restoration.' + } + } + } + + response = requests.post( + f'{app.config["LEGAL_API_URL"]}/businesses/{identifier}/filings', + json=filing_data, + headers={ + 'Content-Type': 'application/json', + 'Authorization': f'Bearer {token}', + 'hide-in-ledger': 'true' # Add this header to hide from ledger + }, + timeout=timeout + ) + + if not response or response.status_code != 201: + app.logger.error(f'Failed to create filing from legal-api. \ + {response} {response.json()} {response.status_code}') + raise Exception # pylint: disable=broad-exception-raised; + + return response.json() + + +async def run(loop, application: Flask): # pylint: disable=redefined-outer-name + """Run the methods for processing expired limited restorations.""" + with application.app_context(): + try: + # 1. 
get businesses that need to be processed + businesses = get_businesses_to_process(application) + + if not businesses: + application.logger.debug('No businesses to process') + return + + application.logger.debug(f'Processing {len(businesses)} businesses') + + # 2. create put back off filing for each business + for identifier in businesses: + try: + # create putBackOff filing via API + filing = create_put_back_off_filing(application, identifier) + filing_id = filing['filing']['header']['filingId'] + application.logger.debug( + f'Successfully created put back off filing {filing_id} for {identifier}' + ) + except Exception as err: # pylint: disable=broad-except; # noqa: B902 + application.logger.error(f'Error processing business {identifier}: {err}') + continue + except Exception as err: # pylint: disable=broad-except; # noqa: B902 + application.logger.error(f'Job failed: {err}') + + +if __name__ == '__main__': + application = create_app() + try: + event_loop = asyncio.get_event_loop() + event_loop.run_until_complete(run(event_loop, application)) + except Exception as err: # pylint: disable=broad-except; # noqa: B902; Catching all errors from the frameworks + application.logger.error(err) # pylint: disable=no-member + raise err diff --git a/jobs/expired-limited-restoration/k8s/Readme.md b/jobs/expired-limited-restoration/k8s/Readme.md new file mode 100644 index 0000000000..d3e4a6c1dc --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/Readme.md @@ -0,0 +1,11 @@ + + +# buildconfig +oc process -f openshift/templates/bc.yaml -o yaml | oc apply -f - -n cc892f-tools +# cronjob +oc process -f openshift/templates/cronjob.yaml -o yaml | oc apply -f - -n cc892f-dev +oc process -f openshift/templates/cronjob.yaml -p TAG=test -o yaml | oc apply -f - -n cc892f-test +oc process -f openshift/templates/cronjob.yaml -p TAG=prod -o yaml | oc apply -f - -n cc892f-prod + +# manually run job +oc create job --from=cronjob/ -n cc892f-prod diff --git 
a/jobs/expired-limited-restoration/k8s/templates/bc.yaml b/jobs/expired-limited-restoration/k8s/templates/bc.yaml new file mode 100644 index 0000000000..97da9e9a13 --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/templates/bc.yaml @@ -0,0 +1,121 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + labels: + app: ${NAME} + name: ${NAME}-build +objects: +- apiVersion: v1 + kind: ImageStream + metadata: + name: ${NAME} + labels: + app: ${NAME} +- apiVersion: v1 + kind: BuildConfig + metadata: + name: ${NAME} + labels: + app: ${NAME} + spec: + output: + to: + kind: ImageStreamTag + name: ${NAME}:${OUTPUT_IMAGE_TAG} + resources: + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} + requests: + cpu: ${CPU_REQUEST} + memory: ${MEMORY_REQUEST} + runPolicy: Serial + source: + contextDir: ${SOURCE_CONTEXT_DIR} + git: + ref: ${GIT_REF} + uri: ${GIT_REPO_URL} + dockerfile: | + FROM docker-remote.artifacts.developer.gov.bc.ca/python:3.8.5-buster + USER root + + # Create working directory + RUN mkdir /opt/app-root && chmod 755 /opt/app-root + WORKDIR /opt/app-root + + # Install the requirements + COPY ./requirements.txt . + + #RUN pip install --upgrade pip + RUN pip install pip==20.1.1 + RUN pip install --no-cache-dir -r requirements.txt + + COPY . . + + USER 1001 + + # Set Python path + ENV PYTHONPATH=/opt/app-root/src + + EXPOSE 8080 + + CMD [ "python", "/opt/app-root/file_expired_limited_restoration.py"" ] + type: Git + strategy: + type: Docker + dockerStrategy: + pullSecret: + name: artifactory-creds + + triggers: + - type: ConfigChange +parameters: +- description: | + The name assigned to all of the objects defined in this template. + You should keep this as default unless your know what your doing. + displayName: Name + name: NAME + required: true + value: expired-limited-restoration +- description: | + The URL to your GIT repo, don't use the this default unless + your just experimenting. 
+ displayName: Git Repo URL + name: GIT_REPO_URL + required: true + value: https://github.com/bcgov/lear.git +- description: The git reference or branch. + displayName: Git Reference + name: GIT_REF + required: true + value: main +- description: The source context directory. + displayName: Source Context Directory + name: SOURCE_CONTEXT_DIR + required: false + value: jobs/expired-limited-restoration +- description: The tag given to the built image. + displayName: Output Image Tag + name: OUTPUT_IMAGE_TAG + required: true + value: latest +- description: The resources CPU limit (in cores) for this build. + displayName: Resources CPU Limit + name: CPU_LIMIT + required: true + value: "2" +- description: The resources Memory limit (in Mi, Gi, etc) for this build. + displayName: Resources Memory Limit + name: MEMORY_LIMIT + required: true + value: 2Gi +- description: The resources CPU request (in cores) for this build. + displayName: Resources CPU Request + name: CPU_REQUEST + required: true + value: "1" +- description: The resources Memory request (in Mi, Gi, etc) for this build. 
+ displayName: Resources Memory Request + name: MEMORY_REQUEST + required: true + value: 2Gi diff --git a/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml b/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml new file mode 100644 index 0000000000..6cce33b2bf --- /dev/null +++ b/jobs/expired-limited-restoration/k8s/templates/cronjob.yaml @@ -0,0 +1,138 @@ +apiVersion: template.openshift.io/v1 +kind: Template +metadata: + labels: + name: ${NAME} + name: ${NAME}-cronjob +objects: +- kind: "CronJob" + apiVersion: "batch/v1beta1" + metadata: + name: "${NAME}-${TAG}" + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + schedule: "${SCHEDULE}" + concurrencyPolicy: "Forbid" + successfulJobsHistoryLimit: "${{SUCCESS_JOBS_HISTORY_LIMIT}}" + failedJobsHistoryLimit: "${{FAILED_JOBS_HISTORY_LIMIT}}" + jobTemplate: + metadata: + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + backoffLimit: ${{JOB_BACKOFF_LIMIT}} + template: + metadata: + labels: + name: ${NAME} + environment: ${TAG} + role: "${ROLE}" + spec: + containers: + - name: "${NAME}-${TAG}" + image: "${IMAGE_REGISTRY}/${IMAGE_NAMESPACE}/${NAME}:${TAG}" + imagePullPolicy: Always + command: + - /bin/sh + - -c + - cd /opt/app-root; ./run.sh + env: + - name: COLIN_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: COLIN_URL + - name: LEGAL_API_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: LEGAL_API_URL + - name: ACCOUNT_SVC_AUTH_URL + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_AUTH_URL + - name: ACCOUNT_SVC_CLIENT_ID + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_CLIENT_ID + - name: ACCOUNT_SVC_CLIENT_SECRET + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: ACCOUNT_SVC_CLIENT_SECRET + - name: SENTRY_DSN + valueFrom: + secretKeyRef: + name: ${NAME}-${TAG}-secret + key: SENTRY_DSN + restartPolicy: "Never" + terminationGracePeriodSeconds: 
30 + activeDeadlineSeconds: 1600 + dnsPolicy: "ClusterFirst" +parameters: + - name: NAME + displayName: Name + description: The name assigned to all of the OpenShift resources associated to the server instance. + required: true + value: expired-limited-restoration + + - name: TAG + displayName: Environment TAG name + description: The TAG name for this environment, e.g., dev, test, prod + value: dev + required: true + + - name: ROLE + displayName: Role + description: Role + required: true + value: job + + - name: NAMESPACE + displayName: Namespace Name + description: The base namespace name for the project. + required: true + value: cc892f + + - name: IMAGE_NAMESPACE + displayName: Image Namespace + required: true + description: The namespace of the OpenShift project containing the imagestream for the application. + value: cc892f-tools + + - name: IMAGE_REGISTRY + displayName: Image Registry + required: true + description: The image registry of the OpenShift project. + value: image-registry.openshift-image-registry.svc:5000 + + - name: "SCHEDULE" + displayName: "Cron Schedule" + description: "Cron Schedule to Execute the Job (using local cluster system TZ)" + value: "59 23 * * *" + required: true + + - name: "SUCCESS_JOBS_HISTORY_LIMIT" + displayName: "Successful Job History Limit" + description: "The number of successful jobs that will be retained" + value: "5" + required: true + + - name: "FAILED_JOBS_HISTORY_LIMIT" + displayName: "Failed Job History Limit" + description: "The number of failed jobs that will be retained" + value: "2" + required: true + + - name: "JOB_BACKOFF_LIMIT" + displayName: "Job Backoff Limit" + description: "The number of attempts to try for a successful job outcome" + value: "0" + required: false diff --git a/jobs/expired-limited-restoration/logging.conf b/jobs/expired-limited-restoration/logging.conf new file mode 100644 index 0000000000..0806a8a2c0 --- /dev/null +++ b/jobs/expired-limited-restoration/logging.conf @@ -0,0 +1,28 @@ 
+[loggers] +keys=root,api + +[handlers] +keys=console + +[formatters] +keys=simple + +[logger_root] +level=DEBUG +handlers=console + +[logger_api] +level=DEBUG +handlers=console +qualname=api +propagate=0 + +[handler_console] +class=StreamHandler +level=DEBUG +formatter=simple +args=(sys.stdout,) + +[formatter_simple] +format=%(asctime)s - %(name)s - %(levelname)s in %(module)s:%(filename)s:%(lineno)d - %(funcName)s: %(message)s +datefmt= diff --git a/jobs/expired-limited-restoration/requirements.txt b/jobs/expired-limited-restoration/requirements.txt new file mode 100644 index 0000000000..45702e705b --- /dev/null +++ b/jobs/expired-limited-restoration/requirements.txt @@ -0,0 +1,30 @@ +Flask-Moment==0.10.0 +Flask-Script==2.0.6 +Flask==1.1.2 +Jinja2==2.11.2 +MarkupSafe==1.1.1 +Werkzeug==0.16.1 +aniso8601==8.1.0 +attrs==20.3.0 +blinker==1.4 +certifi==2020.12.5 +chardet==3.0.4 +click==7.1.2 +ecdsa==0.14.1 +flask-jwt-oidc==0.1.5 +flask-restplus==0.13.0 +gunicorn==20.0.4 +idna==2.10 +itsdangerous==1.1.0 +jsonschema==3.2.0 +pyasn1==0.4.8 +pyrsistent==0.17.3 +python-dateutil==2.8.1 +python-dotenv==0.15.0 +python-jose==3.2.0 +pytz==2020.4 +requests==2.25.0 +rsa==4.6 +sentry-sdk==1.20.0 +six==1.15.0 +urllib3==1.26.11 diff --git a/jobs/expired-limited-restoration/requirements/bcregistry-libraries.txt b/jobs/expired-limited-restoration/requirements/bcregistry-libraries.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/jobs/expired-limited-restoration/requirements/dev.txt b/jobs/expired-limited-restoration/requirements/dev.txt new file mode 100644 index 0000000000..e8f1c165a6 --- /dev/null +++ b/jobs/expired-limited-restoration/requirements/dev.txt @@ -0,0 +1,20 @@ +# Everything the developer needs outside of the production requirements + +# Testing +pytest +pytest-mock +requests +pyhamcrest + +# Lint and code style +flake8 +flake8-blind-except +flake8-debugger +flake8-docstrings +flake8-isort +flake8-quotes +pep8-naming +autopep8 +coverage +pylint 
+pylint-flask diff --git a/jobs/expired-limited-restoration/requirements/prod.txt b/jobs/expired-limited-restoration/requirements/prod.txt new file mode 100644 index 0000000000..c09b53c441 --- /dev/null +++ b/jobs/expired-limited-restoration/requirements/prod.txt @@ -0,0 +1,11 @@ +gunicorn +Flask +Flask-Script +Flask-Moment +Flask-RESTplus +flask-jwt-oidc>=0.1.5 +python-dotenv +requests +sentry-sdk[flask] +python-dateutil +Werkzeug<1 diff --git a/jobs/expired-limited-restoration/run.sh b/jobs/expired-limited-restoration/run.sh new file mode 100755 index 0000000000..bf0a27ab11 --- /dev/null +++ b/jobs/expired-limited-restoration/run.sh @@ -0,0 +1,3 @@ +cd /opt/app-root +echo 'run file_expired_limited_restoration' +python file_expired_limited_restoration.py diff --git a/jobs/expired-limited-restoration/setup.cfg b/jobs/expired-limited-restoration/setup.cfg new file mode 100644 index 0000000000..adfeed4332 --- /dev/null +++ b/jobs/expired-limited-restoration/setup.cfg @@ -0,0 +1,60 @@ +[flake8] +exclude = .git,*migrations* +max-line-length = 120 +docstring-min-length=10 +per-file-ignores = + */__init__.py:F401 + *.py:B902 + +[pycodestyle] +max_line_length = 120 +ignore = E501 +docstring-min-length=10 +notes=FIXME,XXX # TODO is ignored +match_dir = src/legal_api +ignored-modules=flask_sqlalchemy + sqlalchemy +per-file-ignores = + */__init__.py:F401 +good-names= + b, + d, + i, + e, + f, + k, + q, + u, + v, + ar, + id, + rv, + logger, + +[pylint] +ignore=migrations,test +notes=FIXME,XXX,TODO +ignored-modules=flask_sqlalchemy,sqlalchemy,SQLAlchemy,alembic,scoped_session +ignored-classes=scoped_session +disable=C0209,C0301,W0511,W0613,W0703,W1514,R0801,R0902,R0903,R0911,R0401,R1705,R1718,W3101 +good-names= + b, + d, + i, + e, + f, + k, + q, + u, + v, + ar, + id, + rv, + logger, + +[isort] +line_length = 120 +indent = 4 +multi_line_output = 3 +lines_after_imports = 2 +include_trailing_comma = True diff --git a/jobs/expired-limited-restoration/setup.py 
b/jobs/expired-limited-restoration/setup.py new file mode 100644 index 0000000000..6c75af7455 --- /dev/null +++ b/jobs/expired-limited-restoration/setup.py @@ -0,0 +1,22 @@ +# Copyright © 2025 Province of British Columbia. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Installer and setup for this module.""" + +from setuptools import find_packages, setup + + +setup( + name='expired-limited-restoration', + packages=find_packages() +) diff --git a/jobs/expired-limited-restoration/utils/__init__.py b/jobs/expired-limited-restoration/utils/__init__.py new file mode 100644 index 0000000000..82c0485dc3 --- /dev/null +++ b/jobs/expired-limited-restoration/utils/__init__.py @@ -0,0 +1,13 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
diff --git a/python/common/sql-versioning/sql_versioning/debugging.py b/jobs/expired-limited-restoration/utils/logging.py similarity index 51% rename from python/common/sql-versioning/sql_versioning/debugging.py rename to jobs/expired-limited-restoration/utils/logging.py index 5909fd9aaa..9e2f456995 100644 --- a/python/common/sql-versioning/sql_versioning/debugging.py +++ b/jobs/expired-limited-restoration/utils/logging.py @@ -1,4 +1,4 @@ -# Copyright © 2024 Province of British Columbia +# Copyright © 2025 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,17 +11,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Utilities used for debugging.""" -# TODO: remove this debugging utility file -import functools +"""Centralized setup of logging for the service.""" +import logging.config +import sys +from os import path -def debug(func): - """A decorator to print a message before and after a function call.""" - @functools.wraps(func) - def wrapper(*args, **kwargs): - print(f'\033[34m--> Entering {func.__qualname__}()\033[0m') - ret = func(*args, **kwargs) - print(f'\033[34m<-- Exiting {func.__qualname__}()\033[0m') - return ret - return wrapper +def setup_logging(conf): + """Create the services logger.""" + if conf and path.isfile(conf): + logging.config.fileConfig(conf) + print('Configure logging, from conf:{}'.format(conf), file=sys.stdout) + else: + print('Unable to configure logging, attempted conf:{}'.format(conf), file=sys.stderr) diff --git a/jobs/furnishings/flags.json b/jobs/furnishings/flags.json index 72c489d6cf..fbca79a275 100644 --- a/jobs/furnishings/flags.json +++ b/jobs/furnishings/flags.json @@ -2,6 +2,17 @@ "flagValues": { "enable-involuntary-dissolution": true, "disable-dissolution-sftp-bcmail": true, 
- "disable-dissolution-sftp-bclaws": false + "disable-dissolution-sftp-bclaws": false, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": true, + "emailer-reminder-job": true, + "future-effective-job": false + } } } diff --git a/jobs/furnishings/src/furnishings/config.py b/jobs/furnishings/src/furnishings/config.py index 2056481993..9100da6bc4 100644 --- a/jobs/furnishings/src/furnishings/config.py +++ b/jobs/furnishings/src/furnishings/config.py @@ -45,6 +45,8 @@ def get_named_config(config_name: str = 'production'): class _Config: # pylint: disable=too-few-public-methods """Base class configuration.""" + # used to identify versioning flag + SERVICE_NAME = 'furnishings-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/jobs/furnishings/src/furnishings/worker.py b/jobs/furnishings/src/furnishings/worker.py index e10d4e267b..eae49e1718 100644 --- a/jobs/furnishings/src/furnishings/worker.py +++ b/jobs/furnishings/src/furnishings/worker.py @@ -20,7 +20,8 @@ import sentry_sdk # noqa: I001, E501; pylint: disable=ungrouped-imports; conflicts with Flake8 from croniter import croniter from flask import Flask -from legal_api.models import Configuration, db +from legal_api.models import Configuration +from legal_api.models.db import init_db from legal_api.services.flags import Flags from legal_api.services.queue import QueueService from sentry_sdk.integrations.logging import LoggingIntegration @@ -44,7 +45,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(get_named_config(run_mode)) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): diff --git a/jobs/furnishings/tests/unit/__init__.py 
b/jobs/furnishings/tests/unit/__init__.py index 3c06391fa8..3c8e4a0701 100644 --- a/jobs/furnishings/tests/unit/__init__.py +++ b/jobs/furnishings/tests/unit/__init__.py @@ -21,7 +21,7 @@ from legal_api.models import Address, Batch, BatchProcessing, Business, Filing, Furnishing, db from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=datetime.timezone.utc) @@ -115,9 +115,8 @@ def factory_completed_filing(business, filing._filing_sub_type = filing_sub_type filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date filing.payment_completion_date = filing_date diff --git a/jobs/involuntary-dissolutions/config.py b/jobs/involuntary-dissolutions/config.py index 6eda133991..3d31862e48 100644 --- a/jobs/involuntary-dissolutions/config.py +++ b/jobs/involuntary-dissolutions/config.py @@ -45,6 +45,7 @@ def get_named_config(config_name: str = 'production'): class _Config(object): # pylint: disable=too-few-public-methods """Base class configuration.""" + SERVICE_NAME = 'dissolutions-job' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/jobs/involuntary-dissolutions/flags.json b/jobs/involuntary-dissolutions/flags.json index 28dff8c224..75f4418e0b 100644 --- a/jobs/involuntary-dissolutions/flags.json +++ b/jobs/involuntary-dissolutions/flags.json @@ -2,6 +2,17 @@ "flagValues": { "enable-involuntary-dissolution": true, "disable-dissolution-sftp-bcmail": false, - "disable-dissolution-sftp-bclaws": false + "disable-dissolution-sftp-bclaws": false, + "db-versioning": { 
+ "legal-api": true, + "emailer": false, + "filer": false, + "entity-bn": false, + "digital-credentials": false, + "dissolutions-job": true, + "furnishings-job": false, + "emailer-reminder-job": false, + "update-colin-filings-job": false + } } } diff --git a/jobs/involuntary-dissolutions/involuntary_dissolutions.py b/jobs/involuntary-dissolutions/involuntary_dissolutions.py index caeb5fe743..fdaca86509 100644 --- a/jobs/involuntary-dissolutions/involuntary_dissolutions.py +++ b/jobs/involuntary-dissolutions/involuntary_dissolutions.py @@ -23,6 +23,7 @@ from flask import Flask from legal_api.core.filing import Filing as CoreFiling from legal_api.models import Batch, BatchProcessing, Business, Configuration, Filing, Furnishing, db # noqa: I001 +from legal_api.models.db import init_db from legal_api.services.filings.validations.dissolution import DissolutionTypes from legal_api.services.flags import Flags from legal_api.services.involuntary_dissolution import InvoluntaryDissolutionService @@ -52,7 +53,7 @@ def create_app(run_mode=os.getenv('FLASK_ENV', 'production')): """Return a configured Flask App using the Factory method.""" app = Flask(__name__) app.config.from_object(config.CONFIGURATION[run_mode]) - db.init_app(app) + init_db(app) # Configure Sentry if app.config.get('SENTRY_DSN', None): @@ -106,6 +107,7 @@ def create_invountary_dissolution_filing(business_id: int): } } + filing.hide_in_ledger = True filing.save() return filing @@ -199,7 +201,7 @@ def stage_1_process(app: Flask): # pylint: disable=redefined-outer-name,too-man step=BatchProcessing.BatchProcessingStep.WARNING_LEVEL_1, status=BatchProcessing.BatchProcessingStatus.PROCESSING, created_date=datetime.utcnow(), - trigger_date=datetime.utcnow()+stage_1_delay, + trigger_date=datetime.utcnow() + stage_1_delay, batch_id=batch.id, business_id=business.id) @@ -222,10 +224,10 @@ def _check_stage_1_furnishing_entries(furnishings): 2. only available to send mail out, and it's processed. 
""" email_processed = any( - furnishing.furnishing_type == Furnishing.FurnishingType.EMAIL - and furnishing.status == Furnishing.FurnishingStatus.PROCESSED - for furnishing in furnishings - ) + furnishing.furnishing_type == Furnishing.FurnishingType.EMAIL + and furnishing.status == Furnishing.FurnishingStatus.PROCESSED + for furnishing in furnishings + ) expected_mail_status = [Furnishing.FurnishingStatus.PROCESSED] # if SFTP function is off, we expect the mail status will be QUEUED or PROCESSED diff --git a/jobs/update-legal-filings/Makefile b/jobs/update-legal-filings/Makefile index 10a2e9baac..5510d8b979 100644 --- a/jobs/update-legal-filings/Makefile +++ b/jobs/update-legal-filings/Makefile @@ -39,7 +39,7 @@ clean-test: ## clean test files build-req: clean ## Upgrade requirements test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ + pip install --upgrade pip ;\ pip install -Ur requirements/prod.txt ;\ pip freeze | sort > requirements.txt ;\ cat requirements/bcregistry-libraries.txt >> requirements.txt ;\ @@ -48,7 +48,7 @@ build-req: clean ## Upgrade requirements install: clean ## Install python virtual environment test -f venv/bin/activate || python3.8 -m venv $(CURRENT_ABS_DIR)/venv ;\ . venv/bin/activate ;\ - pip install pip==20.1.1 ;\ + pip install --upgrade pip ;\ pip install -Ur requirements.txt install-dev: ## Install local application @@ -135,7 +135,7 @@ tag: push ## tag image ################################################################################# run: ## Run the project in local - . venv/bin/activate && python notebookreport.py + . 
venv/bin/activate && python update_legal_filings.py ################################################################################# # Self Documenting Commands # diff --git a/jobs/update-legal-filings/requirements.txt b/jobs/update-legal-filings/requirements.txt index 882d9aeca9..a6922ab6e2 100644 --- a/jobs/update-legal-filings/requirements.txt +++ b/jobs/update-legal-filings/requirements.txt @@ -5,20 +5,20 @@ Jinja2==2.11.3 MarkupSafe==1.1.1 Werkzeug==1.0.1 aniso8601==9.0.1 -attrs==20.3.0 +attrs==23.1.0 blinker==1.4 certifi==2020.12.5 -click==7.1.2 +click==8.1.3 ecdsa==0.14.1 flask-jwt-oidc==0.3.0 gunicorn==20.1.0 itsdangerous==1.1.0 -jsonschema==3.2.0 +jsonschema==4.19.0 pyasn1==0.4.8 pyrsistent==0.17.3 python-dotenv==0.17.1 python-jose==3.2.0 -pytz==2021.1 +pytz==2024.1 rsa==4.7.2 sentry-sdk==1.20.0 six==1.15.0 @@ -30,4 +30,4 @@ protobuf==3.15.8 git+https://github.com/bcgov/lear.git#subdirectory=colin-api git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning -git+https://github.com/bcgov/business-schemas.git@2.5.12#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas diff --git a/legal-api/flags.json b/legal-api/flags.json index 6f0d1d71ca..35669843b1 100644 --- a/legal-api/flags.json +++ b/legal-api/flags.json @@ -5,6 +5,7 @@ "integer-flag": 10, "enable-legal-name-fix": true, "disable-nr-check": false, + "enable-business-summary-for-migrated-corps": true, "enable-involuntary-dissolution-filter": false, "enable-new-ben-statements": false, "involuntary-dissolution-filter": { @@ -12,17 +13,14 @@ "exclude-accounts": [] }, "db-versioning": { - "legal-api": false, - "emailer": false, - "filer": false, - "entity-bn": false, - "digital-credentials": false, - "dissolutions-job": false, - "furnishings-job": false, - "emailer-reminder-job": false, - "future-effective-job": false, - 
"update-colin-filings-job": false, - "update-legal-filings-job": false + "legal-api": true, + "emailer": true, + "filer": true, + "entity-bn": true, + "digital-credentials": true, + "dissolutions-job": true, + "furnishings-job": true, + "emailer-reminder-job": true } } } diff --git a/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py b/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py new file mode 100644 index 0000000000..bb9907d610 --- /dev/null +++ b/legal-api/migrations/versions/24b59f535ec3_add_currency_additional_to_share_classes.py @@ -0,0 +1,26 @@ +"""add_currency_additional_to_share_classes + +Revision ID: 24b59f535ec3 +Revises: f1d010259785 +Create Date: 2025-02-28 23:28:54.053129 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '24b59f535ec3' +down_revision = 'f1d010259785' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('share_classes', sa.Column('currency_additional', sa.String(length=40))) + op.add_column('share_classes_version', sa.Column('currency_additional', sa.String(length=40))) + + +def downgrade(): + op.drop_column('share_classes', 'currency_additional') + op.drop_column('share_classes_version', 'currency_additional') diff --git a/legal-api/migrations/versions/ad21c1ed551e_.py b/legal-api/migrations/versions/ad21c1ed551e_.py new file mode 100644 index 0000000000..47c297e21d --- /dev/null +++ b/legal-api/migrations/versions/ad21c1ed551e_.py @@ -0,0 +1,30 @@ +"""empty message + +Revision ID: ad21c1ed551e +Revises: d9254d3cbbf4 +Create Date: 2025-02-21 14:05:14.971210 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = 'ad21c1ed551e' +down_revision = 'd0b10576924c' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('businesses', sa.Column('last_tr_year', sa.Integer(), nullable=True)) + op.add_column('businesses_version', sa.Column('last_tr_year', sa.Integer(), autoincrement=False, nullable=True)) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('businesses_version', 'last_tr_year') + op.drop_column('businesses', 'last_tr_year') + # ### end Alembic commands ### diff --git a/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py b/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py new file mode 100644 index 0000000000..54de8ea2b8 --- /dev/null +++ b/legal-api/migrations/versions/b0937b915e6b_add_offices_held_table.py @@ -0,0 +1,54 @@ +"""“add_offices_held_table” + +Revision ID: b0937b915e6b +Revises: ad21c1ed551e +Create Date: 2025-02-28 14:30:31.105670 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + + +# revision identifiers, used by Alembic. 
+revision = 'b0937b915e6b' +down_revision = 'ad21c1ed551e' +branch_labels = None +depends_on = None + +titles_enum = postgresql.ENUM('CEO', 'CFO', 'CHAIR', 'OTHER_OFFICES', 'TREASURER', 'VICE_PRESIDENT', + 'PRESIDENT', 'SECRETARY', 'ASSISTANT_SECRETARY', + name='titles_enum') + +def upgrade(): + titles_enum.create(op.get_bind(), checkfirst=True) + + op.create_table( + 'offices_held', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('party_role_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['party_role_id'], ['party_roles.id']), + sa.PrimaryKeyConstraint('id')) + + op.add_column('offices_held', sa.Column('title', titles_enum, nullable=False)) + + op.create_table( + 'offices_held_version', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('party_role_id', sa.Integer(), nullable=False), + sa.Column('transaction_id', sa.BigInteger(), autoincrement=False, nullable=False), + sa.Column('end_transaction_id', sa.BigInteger(), nullable=True), + sa.Column('operation_type', sa.SmallInteger(), nullable=False), + sa.ForeignKeyConstraint(['party_role_id'], ['party_roles.id']), + sa.PrimaryKeyConstraint('id', 'transaction_id') + ) + + op.add_column('offices_held_version', sa.Column('title', titles_enum, nullable=False)) + + +def downgrade(): + op.drop_table('offices_held_version') + op.drop_table('offices_held') + titles_enum.drop(op.get_bind(), checkfirst=True) + + diff --git a/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py b/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py new file mode 100644 index 0000000000..eeb169f797 --- /dev/null +++ b/legal-api/migrations/versions/d0b10576924c_alter_amalgamation_type_enum.py @@ -0,0 +1,50 @@ +"""alter_amalgamation_type_enum + +Revision ID: d0b10576924c +Revises: d9254d3cbbf4 +Create Date: 2025-02-03 21:47:05.061172 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by 
Alembic. +revision = 'd0b10576924c' +down_revision = 'd9254d3cbbf4' +branch_labels = None +depends_on = None + + +amalgamation_type_old_enum = postgresql.ENUM('regular', + 'vertical', + 'horizontal', + name='amalgamation_type_old') + + +def upgrade(): + with op.get_context().autocommit_block(): + op.execute("ALTER TYPE amalgamation_type ADD VALUE 'unknown'") + + +def downgrade(): + op.execute("UPDATE amalgamations SET amalgamation_type = 'regular' WHERE amalgamation_type = 'unknown'") + op.execute("UPDATE amalgamations_version SET amalgamation_type = 'regular' WHERE amalgamation_type = 'unknown'") + + amalgamation_type_old_enum.create(op.get_bind(), checkfirst=True) + + op.execute(""" + ALTER TABLE amalgamations + ALTER COLUMN amalgamation_type + TYPE amalgamation_type_old + USING amalgamation_type::text::amalgamation_type_old + """) + op.execute(""" + ALTER TABLE amalgamations_version + ALTER COLUMN amalgamation_type + TYPE amalgamation_type_old + USING amalgamation_type::text::amalgamation_type_old + """) + + op.execute("DROP TYPE amalgamation_type") + op.execute("ALTER TYPE amalgamation_type_old RENAME TO amalgamation_type") diff --git a/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py b/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py new file mode 100644 index 0000000000..80cd0a883e --- /dev/null +++ b/legal-api/migrations/versions/d9254d3cbbf4_add_now_properties_to_filing_table.py @@ -0,0 +1,27 @@ +"""add_NoW_properties_to_filing_table + +Revision ID: d9254d3cbbf4 +Revises: f99e7bda56bb +Create Date: 2025-01-02 16:52:38.449590 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision = 'd9254d3cbbf4' +down_revision = 'f99e7bda56bb' +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column('filings', sa.Column('withdrawn_filing_id', sa.Integer(), nullable=True)) + op.create_foreign_key('filings_withdrawn_filing_id_fkey', 'filings', 'filings', ['withdrawn_filing_id'], ['id']) + op.add_column('filings', sa.Column('withdrawal_pending', sa.Boolean(), nullable=False, server_default='False')) + +def downgrade(): + op.drop_constraint('filings_withdrawn_filing_id_fkey', 'filings', type_='foreignkey') + op.drop_column('filings', 'withdrawn_filing_id') + op.drop_column('filings', 'withdrawal_pending') diff --git a/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py b/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py new file mode 100644 index 0000000000..e51071d42e --- /dev/null +++ b/legal-api/migrations/versions/f1d010259785_modify_data_type_for_max_shares.py @@ -0,0 +1,42 @@ +"""modify_data_type_for_max_shares + +Revision ID: f1d010259785 +Revises: b0937b915e6b +Create Date: 2025-02-28 22:29:38.543965 + +""" +from alembic import op + + +# revision identifiers, used by Alembic. 
+revision = 'f1d010259785' +down_revision = 'b0937b915e6b' +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute('ALTER TABLE share_classes ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + op.execute('ALTER TABLE share_series ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + + op.execute('ALTER TABLE share_classes_version ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + op.execute('ALTER TABLE share_series_version ALTER COLUMN max_shares TYPE NUMERIC(20) USING max_shares::NUMERIC(20);') + + +def downgrade(): + op.execute("UPDATE share_classes SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_classes SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + op.execute("UPDATE share_classes_version SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_classes_version SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + + op.execute("UPDATE share_series SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_series SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + op.execute("UPDATE share_series_version SET max_shares = 2147483647 WHERE max_shares > 2147483647;") + op.execute("UPDATE share_series_version SET max_shares = -2147483648 WHERE max_shares < -2147483648;") + + op.execute("ALTER TABLE share_classes ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_series ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_classes_version ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + op.execute("ALTER TABLE share_series_version ALTER COLUMN max_shares TYPE INTEGER USING max_shares::INTEGER;") + + diff --git a/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py 
b/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py new file mode 100644 index 0000000000..4c7e87381a --- /dev/null +++ b/legal-api/migrations/versions/f99e7bda56bb_hide_in_ledger.py @@ -0,0 +1,29 @@ +"""hide-in-ledger + +Revision ID: f99e7bda56bb +Revises: f3b30f43aa86 +Create Date: 2024-12-20 13:59:15.359911 + +""" +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision = 'f99e7bda56bb' +down_revision = 'f3b30f43aa86' +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('filings', sa.Column('hide_in_ledger', sa.Boolean(), nullable=False, server_default='False')) + op.execute("UPDATE filings SET hide_in_ledger = true WHERE filing_type = 'adminFreeze'") + op.execute("UPDATE filings SET hide_in_ledger = true WHERE filing_type = 'dissolution' and filing_sub_type = 'involuntary'") + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('filings', 'hide_in_ledger') + # ### end Alembic commands ### diff --git a/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py b/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py new file mode 100644 index 0000000000..76b04425be --- /dev/null +++ b/legal-api/migrations/versions/fe158a53151f_amalgamation_out.py @@ -0,0 +1,38 @@ +"""amalgamation_out + +Revision ID: fe158a53151f +Revises: 24b59f535ec3 +Create Date: 2025-03-14 11:34:01.606149 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
+revision = 'fe158a53151f' +down_revision = '24b59f535ec3' +branch_labels = None +depends_on = None + +consent_out_types_enum = postgresql.ENUM('continuation_out', 'amalgamation_out', name='consent_out_types') + + +def upgrade(): + # add enum values + consent_out_types_enum.create(op.get_bind(), checkfirst=True) + + # ### commands auto generated by Alembic - please adjust! ### + op.add_column('businesses', sa.Column('amalgamation_out_date', sa.DateTime(timezone=True), nullable=True)) + op.add_column('businesses_version', sa.Column('amalgamation_out_date', sa.DateTime(timezone=True), autoincrement=False, nullable=True)) + op.add_column('consent_continuation_outs', sa.Column('consent_type', consent_out_types_enum, nullable=False, server_default='continuation_out')) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column('consent_continuation_outs', 'consent_type') + consent_out_types_enum.drop(op.get_bind(), checkfirst=True) + op.drop_column('businesses_version', 'amalgamation_out_date') + op.drop_column('businesses', 'amalgamation_out_date') + # ### end Alembic commands ### diff --git a/legal-api/report-templates/correction.html b/legal-api/report-templates/correction.html index 2f70471ee2..ab83882654 100644 --- a/legal-api/report-templates/correction.html +++ b/legal-api/report-templates/correction.html @@ -51,5 +51,10 @@ [[correction/rulesMemorandum.html]] [[correction/resolution.html]] +

+
Correction DetailADDED
+
Correction filed by Registry Staff on {{effective_date_time}}
+
{{correction.comment}}
+
diff --git a/legal-api/report-templates/noticeOfWithdrawal.html b/legal-api/report-templates/noticeOfWithdrawal.html new file mode 100644 index 0000000000..59aaa17815 --- /dev/null +++ b/legal-api/report-templates/noticeOfWithdrawal.html @@ -0,0 +1,33 @@ +[[macros.html]] + + + + Notice of Withdrawal + + + [[common/style.html]] + + +
+ + + + + + +
+ +
+
+ [[common/businessDetails.html]] +
NOTICE OF WITHDRAWAL
+
+ [[notice-of-withdrawal/recordToBeWithdrawn.html]] +
+ + \ No newline at end of file diff --git a/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html b/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html index ae4a756f8e..21993fc5d2 100644 --- a/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html +++ b/legal-api/report-templates/template-parts/amalgamation/amalgamationStmt.html @@ -4,8 +4,13 @@ Amalgamation Statement
- This amalgamation has been effected without court approval. A copy of all of the required affidavits under section 277(1) have been obtained and the affidavit + This amalgamation has been effected + {% if amalgamationApplication.courtApproval %} + with + {% else %} + without + {% endif %} + court approval. A copy of all of the required affidavits under section 277(1) have been obtained and the affidavit obtained from each amalgamating company has been deposited in that company's records office.
- - \ No newline at end of file + diff --git a/legal-api/report-templates/template-parts/business-summary/stateTransition.html b/legal-api/report-templates/template-parts/business-summary/stateTransition.html index b7a6c3d3cb..a9556044f3 100644 --- a/legal-api/report-templates/template-parts/business-summary/stateTransition.html +++ b/legal-api/report-templates/template-parts/business-summary/stateTransition.html @@ -35,9 +35,12 @@
Incorporation Number: {{ filing.identifier }}
- {% elif filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary' %} + {% elif (filing.filingType == 'dissolution' and filing.filingSubType == 'involuntary') %} Effective Date: {{ filing.effectiveDateTime }} + {% elif filing.filingType == 'putBackOff' %} + Effective Date: + {{ filing.historicalDate }} {% else %} Filing Date: {{filing.filingDateTime}} diff --git a/legal-api/report-templates/template-parts/common/businessDetails.html b/legal-api/report-templates/template-parts/common/businessDetails.html index f26c60eaac..7db81a2f82 100644 --- a/legal-api/report-templates/template-parts/common/businessDetails.html +++ b/legal-api/report-templates/template-parts/common/businessDetails.html @@ -42,16 +42,24 @@
{{report_date_time}}
- {{business.state}} - {% if business.state in ('HISTORICAL', 'LIQUIDATION') %} - - - {% if business.legalType in ['GP', 'SP'] and business.state == 'HISTORICAL' %} - Dissolved - {% else %} - {{stateFilings[0].filingName}} - {% endif %} - - Effective {{stateFilings[0].effectiveDateTime}} - {% endif %} + {{business.state}} + {% if business.state in ('HISTORICAL', 'LIQUIDATION') and stateFilings %} + - + {% if business.state == 'HISTORICAL' %} + {% if business.legalType in ['GP', 'SP'] %} + Dissolved + {% else %} + {% set filing = stateFilings[0] %} + {% if filing.filingType == 'putBackOff' %} + {{filing.reason}} on {{filing.expiryDate}} + {% else %} + {{filing.filingName}} - Effective {{filing.effectiveDateTime}} + {% endif %} + {% endif %} + {% endif %} + {% endif %} +
+ {% elif header.reportType == 'amalgamationApplication' %} @@ -70,8 +78,8 @@
{{business.identifier}}
{% endif %}
{{effective_date_time}}
- {% if header.isFutureEffective %} -
{{effective_date_time}}
+ {% if not business or business.identifier.startswith('T') %} +
 
{% elif header.status == 'COMPLETED' %}
{{recognition_date_time}}
{% endif %} @@ -132,8 +140,8 @@
{{business.identifier}}
{% endif %}
{{filing_date_time}}
- {% if header.isFutureEffective %} -
{{effective_date_time}}
+ {% if not business or business.identifier.startswith('T') %} +
 
{% elif header.status == 'COMPLETED' %}
{{recognition_date_time}}
{% endif %} @@ -328,6 +336,28 @@ {% endif %}
{{report_date_time}}
+ {% elif header.name == 'noticeOfWithdrawal' %} + + {% if not business or business.identifier.startswith('T') %} +
Filed Date and Time:
+ {% else %} +
Incorporation Number:
+
Filed Date and Time:
+ {% endif %} +
Recognition Date and Time:
+
Retrieved Date and Time:
+ + + {% if not business or business.identifier.startswith('T') %} +
{{ filing_date_time }}
+
 
+ {% else %} +
{{business.identifier}}
+
{{ filing_date_time }}
+
{{ recognition_date_time }}
+ {% endif %} +
{{ report_date_time }}
+ {% endif %} {% if reportType != 'summary' %} @@ -359,8 +389,8 @@ {% if business.legalType in ['SP', 'GP'] %} | Registration #{{business.identifier}} {% else %} - {% if header.name != 'incorporationApplication' %} - + {% if header.name not in ['incorporationApplication', 'noticeOfWithdrawal'] %} + | Incorporation # {% if not business or business.identifier.startswith('T') %} Pending diff --git a/legal-api/report-templates/template-parts/common/style.html b/legal-api/report-templates/template-parts/common/style.html index 08c373d572..92c3e013f3 100644 --- a/legal-api/report-templates/template-parts/common/style.html +++ b/legal-api/report-templates/template-parts/common/style.html @@ -301,6 +301,20 @@ font-family: 'BCSans-Bold', sans-serif !important; text-align: center } + + .details-header-text { + font-size: 14px; + color: #234075; + font-family: 'BCSans-Bold', sans-serif !important; + text-align: left; + margin-right: 1rem; + } + + .preserve-line-breaks { + white-space: pre-wrap; + word-wrap: break-word; + line-height: 1.5rem; + } .doc-description, .registrar-title { @@ -329,6 +343,15 @@ text-align: center } + .record-to-be-withdrawn-table { + width: 100%; + font-family: 'BCSans-Regular', sans-serif !important; + color: #313132; + font-size: 13px; + line-height: 16px; + text-align: left; + } + .share-structure-table { width: 100%; border-collapse: collapse; @@ -568,4 +591,15 @@ margin: 0 0.25rem; text-align: center; } + .correction-label-added { + font-family: 'BCSans-Bold', sans-serif !important; + color: #313132; + background: #E2E7E7; + font-size: 8px; + padding: 4px 7px; + margin: 0 0.25rem; + text-align: center; + border-radius: 0.25rem; + } + diff --git a/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html new file mode 100644 index 0000000000..4e78cb1674 --- /dev/null +++ 
b/legal-api/report-templates/template-parts/notice-of-withdrawal/recordToBeWithdrawn.html @@ -0,0 +1,17 @@ +
+
Withdrawn Record
+ + + + + + +
diff --git a/legal-api/requirements.txt b/legal-api/requirements.txt index 80dfadb843..b4d90b5f84 100755 --- a/legal-api/requirements.txt +++ b/legal-api/requirements.txt @@ -59,5 +59,5 @@ PyPDF2==1.26.0 reportlab==3.6.12 html-sanitizer==2.4.1 lxml==5.2.2 -git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.39#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/requirements/bcregistry-libraries.txt b/legal-api/requirements/bcregistry-libraries.txt index 260b5b725c..a64a9a57ae 100644 --- a/legal-api/requirements/bcregistry-libraries.txt +++ b/legal-api/requirements/bcregistry-libraries.txt @@ -1,2 +1,2 @@ -git+https://github.com/bcgov/business-schemas.git@2.18.31#egg=registry_schemas -git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning \ No newline at end of file +git+https://github.com/bcgov/business-schemas.git@2.18.39#egg=registry_schemas +git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/legal-api/src/legal_api/config.py b/legal-api/src/legal_api/config.py index 0b35323e3a..0c3a0b30c0 100644 --- a/legal-api/src/legal_api/config.py +++ b/legal-api/src/legal_api/config.py @@ -182,6 +182,13 @@ class _Config(): # pylint: disable=too-few-public-methods STAGE_1_DELAY = int(os.getenv('STAGE_1_DELAY', '42')) STAGE_2_DELAY = int(os.getenv('STAGE_2_DELAY', '30')) + # Transparency Register + TR_START_DATE = os.getenv('TR_START_DATE', '').strip() # i.e. 
'2025-02-01' + # Document Record Service Settings + DOC_API_URL = os.getenv('DOC_API_URL', '') + DOC_API_ACCOUNT_ID = os.getenv('DOC_API_ACCOUNT_ID', '') + DOC_API_KEY = os.getenv('DOC_API_KEY', '') + TESTING = False DEBUG = False @@ -216,6 +223,9 @@ class TestConfig(_Config): # pylint: disable=too-few-public-methods # URLs AUTH_SVC_URL = os.getenv('AUTH_SVC_URL', 'http://test-auth-url') + # Transparency Register - test cases set this explicitly as needed + TR_START_DATE = '' + # JWT OIDC settings # JWT_OIDC_TEST_MODE will set jwt_manager to use JWT_OIDC_TEST_MODE = True diff --git a/legal-api/src/legal_api/constants.py b/legal-api/src/legal_api/constants.py index 92e7394c79..3863acf2d9 100644 --- a/legal-api/src/legal_api/constants.py +++ b/legal-api/src/legal_api/constants.py @@ -13,4 +13,167 @@ # limitations under the License. """Constants for legal api.""" +from enum import Enum + + BOB_DATE = '2019-03-08' + +class DocumentClasses(Enum): + """Render an Enum of the document service document classes.""" + + COOP = "COOP" + CORP = "CORP" + DELETED = "DELETED" + FIRM = "FIRM" + LP_LLP = "LP_LLP" + MHR = "MHR" + NR = "NR" + OTHER = "OTHER" + PPR = "PPR" + SOCIETY = "SOCIETY" + XP = "XP" + + +class DocumentTypes(Enum): + """Render an Enum of the document service document types.""" + + REG_101 = "REG_101" + REG_102 = "REG_102" + REG_103 = "REG_103" + ABAN = "ABAN" + ADDI = "ADDI" + AFFE = "AFFE" + ATTA = "ATTA" + BANK = "BANK" + BCLC = "BCLC" + CAU = "CAU" + CAUC = "CAUC" + CAUE = "CAUE" + COMP = "COMP" + COUR = "COUR" + DEAT = "DEAT" + DNCH = "DNCH" + EXMN = "EXMN" + EXNR = "EXNR" + EXRE = "EXRE" + EXRS = "EXRS" + FORE = "FORE" + FZE = "FZE" + GENT = "GENT" + LETA = "LETA" + MAID = "MAID" + MAIL = "MAIL" + MARR = "MARR" + NAMV = "NAMV" + NCAN = "NCAN" + NCON = "NCON" + NPUB = "NPUB" + NRED = "NRED" + PDEC = "PDEC" + PUBA = "PUBA" + REBU = "REBU" + REGC = "REGC" + REIV = "REIV" + REPV = "REPV" + REST = "REST" + STAT = "STAT" + SZL = "SZL" + TAXN = "TAXN" + TAXS = 
"TAXS" + THAW = "THAW" + TRAN = "TRAN" + VEST = "VEST" + WHAL = "WHAL" + WILL = "WILL" + XP_MISC = "XP_MISC" + COFI = "COFI" + DISS = "DISS" + DISD = "DISD" + ATTN = "ATTN" + FRMA = "FRMA" + AMLO = "AMLO" + CNTA = "CNTA" + CNTI = "CNTI" + CNTO = "CNTO" + COFF = "COFF" + COSD = "COSD" + AMLG = "AMLG" + AMAL = "AMAL" + RSRI = "RSRI" + ASNU = "ASNU" + LPRG = "LPRG" + FILE = "FILE" + CNVF = "CNVF" + COPN = "COPN" + MHSP = "MHSP" + FNCH = "FNCH" + CONS = "CONS" + PPRS = "PPRS" + PPRC = "PPRC" + ADDR = "ADDR" + ANNR = "ANNR" + CORR = "CORR" + DIRS = "DIRS" + CORC = "CORC" + SOCF = "SOCF" + CERT = "CERT" + LTR = "LTR" + CLW = "CLW" + BYLW = "BYLW" + CNST = "CNST" + CONT = "CONT" + SYSR = "SYSR" + ADMN = "ADMN" + RSLN = "RSLN" + AFDV = "AFDV" + SUPP = "SUPP" + MNOR = "MNOR" + FINM = "FINM" + APCO = "APCO" + RPTP = "RPTP" + DAT = "DAT" + BYLT = "BYLT" + CNVS = "CNVS" + CRTO = "CRTO" + MEM = "MEM" + PRE = "PRE" + REGO = "REGO" + PLNA = "PLNA" + REGN = "REGN" + FINC = "FINC" + BCGT = "BCGT" + CHNM = "CHNM" + OTP = "OTP" + PPR = "PPR" + LHS = "LHS" + RGS = "RGS" + HSR = "HSR" + RPL = "RPL" + FINS = "FINS" + DELETED = "DELETED" + COOP_RULES = "COOP_RULES" + COOP_MEMORANDUM = "COOP_MEMORANDUM" + CORP_AFFIDAVIT = "CORP_AFFIDAVIT" + DIRECTOR_AFFIDAVIT = "DIRECTOR_AFFIDAVIT" + PART = "PART" + REG_103E = "REG_103E" + AMEND_PERMIT = "AMEND_PERMIT" + CANCEL_PERMIT = "CANCEL_PERMIT" + REREGISTER_C = "REREGISTER_C" + MEAM = "MEAM" + COU = "COU" + CRT = "CRT" + INV = "INV" + NATB = "NATB" + NWP = "NWP" + +DOCUMENT_TYPES = { + 'coopMemorandum': { + 'class': DocumentClasses.COOP.value, + 'type': DocumentTypes.COOP_MEMORANDUM.value + }, + 'coopRules': { + 'class': DocumentClasses.COOP.value, + 'type': DocumentTypes.COOP_RULES.value + }, +} diff --git a/legal-api/src/legal_api/core/filing.py b/legal-api/src/legal_api/core/filing.py index ce305284ef..b818fb7020 100644 --- a/legal-api/src/legal_api/core/filing.py +++ b/legal-api/src/legal_api/core/filing.py @@ -37,7 +37,7 @@ # 
@dataclass(init=False, repr=False) -class Filing: +class Filing: # pylint: disable=too-many-public-methods """Domain class for Filings.""" class Status(str, Enum): @@ -68,15 +68,18 @@ class FilingTypes(str, Enum): AGMLOCATIONCHANGE = 'agmLocationChange' ALTERATION = 'alteration' AMALGAMATIONAPPLICATION = 'amalgamationApplication' + AMALGAMATIONOUT = 'amalgamationOut' AMENDEDAGM = 'amendedAGM' AMENDEDANNUALREPORT = 'amendedAnnualReport' AMENDEDCHANGEOFDIRECTORS = 'amendedChangeOfDirectors' ANNUALREPORT = 'annualReport' APPOINTRECEIVER = 'appointReceiver' + CEASERECEIVER = 'ceaseReceiver' CHANGEOFADDRESS = 'changeOfAddress' CHANGEOFDIRECTORS = 'changeOfDirectors' CHANGEOFNAME = 'changeOfName' CHANGEOFREGISTRATION = 'changeOfRegistration' + CONSENTAMALGAMATIONOUT = 'consentAmalgamationOut' CONSENTCONTINUATIONOUT = 'consentContinuationOut' CONTINUATIONIN = 'continuationIn' CONTINUATIONOUT = 'continuationOut' @@ -88,6 +91,7 @@ class FilingTypes(str, Enum): DISSOLVED = 'dissolved' INCORPORATIONAPPLICATION = 'incorporationApplication' NOTICEOFWITHDRAWAL = 'noticeOfWithdrawal' + PUTBACKOFF = 'putBackOff' PUTBACKON = 'putBackOn' REGISTRARSNOTATION = 'registrarsNotation' REGISTRARSORDER = 'registrarsOrder' @@ -96,6 +100,7 @@ class FilingTypes(str, Enum): RESTORATIONAPPLICATION = 'restorationApplication' SPECIALRESOLUTION = 'specialResolution' TRANSITION = 'transition' + TRANSPARENCY_REGISTER = 'transparencyRegister' class FilingTypesCompact(str, Enum): """Render enum for filing types with sub-types.""" @@ -109,6 +114,9 @@ class FilingTypesCompact(str, Enum): AMALGAMATION_APPLICATION_REGULAR = 'amalgamationApplication.regular' AMALGAMATION_APPLICATION_VERTICAL = 'amalgamationApplication.vertical' AMALGAMATION_APPLICATION_HORIZONTAL = 'amalgamationApplication.horizontal' + TRANSPARENCY_REGISTER_ANNUAL = 'transparencyRegister.annual' + TRANSPARENCY_REGISTER_CHANGE = 'transparencyRegister.change' + TRANSPARENCY_REGISTER_INITIAL = 'transparencyRegister.initial' 
NEW_BUSINESS_FILING_TYPES: Final = [ FilingTypes.AMALGAMATIONAPPLICATION, @@ -272,7 +280,7 @@ def validate(): def get(identifier, filing_id=None) -> Optional[Filing]: """Return a Filing domain by the id.""" if identifier.startswith('T'): - storage = FilingStorage.get_temp_reg_filing(identifier) + storage = FilingStorage.get_temp_reg_filing(identifier, filing_id) else: storage = Business.get_filing_by_id(identifier, filing_id) @@ -283,6 +291,18 @@ def get(identifier, filing_id=None) -> Optional[Filing]: return None + @staticmethod + def get_by_withdrawn_filing_id(filing_id, withdrawn_filing_id, filing_type: str = None) -> Optional[Filing]: + """Return a Filing domain by the id, withdrawn_filing_id and filing_type.""" + storage = FilingStorage.get_temp_reg_filing_by_withdrawn_filing(filing_id, withdrawn_filing_id, filing_type) + + if storage: + filing = Filing() + filing._storage = storage # pylint: disable=protected-access + return filing + + return None + @staticmethod def find_by_id(filing_id) -> Optional[Filing]: """Return a Filing domain by the id.""" @@ -308,7 +328,7 @@ def get_filings_by_status(business_id: int, status: list, after_date: date = Non @staticmethod def get_most_recent_filing_json(business_id: str, filing_type: str = None, jwt: JwtManager = None): """Return the most recent filing json.""" - if storage := FilingStorage.get_most_recent_legal_filing(business_id, filing_type): + if storage := FilingStorage.get_most_recent_filing(business_id, filing_type): submitter_displayname = REDACTED_STAFF_SUBMITTER if (submitter := storage.filing_submitter) \ and submitter.username and jwt \ @@ -434,13 +454,14 @@ def common_ledger_items(business_identifier: str, filing_storage: FilingStorage) filing = Filing() filing._storage = filing_storage # pylint: disable=protected-access return { - 'displayLedger': Filing._is_display_ledger(filing_storage), + 'displayLedger': not filing_storage.hide_in_ledger, 'commentsCount': filing_storage.comments_count, 'commentsLink': 
f'{base_url}/{business_identifier}/filings/{filing_storage.id}/comments', 'documentsLink': f'{base_url}/{business_identifier}/filings/{filing_storage.id}/documents' if filing_storage.filing_type not in no_output_filing_types else None, 'filingLink': f'{base_url}/{business_identifier}/filings/{filing_storage.id}', 'isFutureEffective': filing.is_future_effective, + 'withdrawalPending': filing_storage.withdrawal_pending } @staticmethod @@ -457,18 +478,6 @@ def _add_ledger_order(filing: FilingStorage, ledger_filing: dict) -> dict: ledger_filing['data'] = {} ledger_filing['data']['order'] = court_order_data - @staticmethod - def _is_display_ledger(filing: FilingStorage) -> bool: - """Return boolean that display the ledger.""" - # If filing is NOT an admin freeze or involuntary dissolution, we will display it on ledger - return not ( - filing.filing_type == Filing.FilingTypes.ADMIN_FREEZE or - ( - filing.filing_type == Filing.FilingTypes.DISSOLUTION and - filing.filing_sub_type == 'involuntary' - ) - ) - @staticmethod def get_document_list(business, # pylint: disable=too-many-locals disable=too-many-branches filing, @@ -476,7 +485,7 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m """Return a list of documents for a particular filing.""" no_output_filings = [ Filing.FilingTypes.CONVERSION.value, - Filing.FilingTypes.COURTORDER.value, + Filing.FilingTypes.PUTBACKOFF.value, Filing.FilingTypes.PUTBACKON.value, Filing.FilingTypes.REGISTRARSNOTATION.value, Filing.FilingTypes.REGISTRARSORDER.value, @@ -497,6 +506,10 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m base_url = current_app.config.get('LEGAL_API_BASE_URL') base_url = base_url[:base_url.find('/api')] identifier = business.identifier if business else filing.storage.temp_reg + if not identifier and filing.storage.withdrawn_filing_id: + withdrawn_filing = Filing.find_by_id(filing.storage.withdrawn_filing_id) + identifier = 
withdrawn_filing.storage.temp_reg + doc_url = url_for('API2.get_documents', **{'identifier': identifier, 'filing_id': filing.id, 'legal_filing_name': None}) @@ -507,26 +520,29 @@ def get_document_list(business, # pylint: disable=too-many-locals disable=too-m return documents if filing.storage and filing.storage.filing_type in no_output_filings: - if filing.filing_type == 'courtOrder' and \ - (filing.storage.documents.filter( - Document.type == DocumentType.COURT_ORDER.value).one_or_none()): - documents['documents']['uploadedCourtOrder'] = f'{base_url}{doc_url}/uploadedCourtOrder' - return documents # return a receipt for filings completed in our system if filing.storage and filing.storage.payment_completion_date: + if filing.filing_type == 'courtOrder' and \ + (filing.storage.documents.filter( + Document.type == DocumentType.COURT_ORDER.value).one_or_none()): + documents['documents']['uploadedCourtOrder'] = f'{base_url}{doc_url}/uploadedCourtOrder' documents['documents']['receipt'] = f'{base_url}{doc_url}/receipt' - no_legal_filings_in_paid_status = [ + no_legal_filings_in_paid_withdrawn_status = [ Filing.FilingTypes.REGISTRATION.value, Filing.FilingTypes.CONSENTCONTINUATIONOUT.value, + Filing.FilingTypes.COURTORDER.value, Filing.FilingTypes.CONTINUATIONOUT.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, + Filing.FilingTypes.TRANSPARENCY_REGISTER.value, ] - if filing.status == Filing.Status.PAID and \ - not (filing.filing_type in no_legal_filings_in_paid_status + if (filing.status in (Filing.Status.PAID, Filing.Status.WITHDRAWN) or + (filing.status == Filing.Status.COMPLETED and + filing.filing_type == Filing.FilingTypes.NOTICEOFWITHDRAWAL.value)) and \ + not (filing.filing_type in no_legal_filings_in_paid_withdrawn_status or (filing.filing_type == Filing.FilingTypes.DISSOLUTION.value and business.legal_type in [ Business.LegalTypes.SOLE_PROP.value, @@ -557,8 +573,10 @@ def get_document_list(business, # pylint: 
disable=too-many-locals disable=too-m no_legal_filings = [ Filing.FilingTypes.CONSENTCONTINUATIONOUT.value, Filing.FilingTypes.CONTINUATIONOUT.value, + Filing.FilingTypes.COURTORDER.value, Filing.FilingTypes.AGMEXTENSION.value, Filing.FilingTypes.AGMLOCATIONCHANGE.value, + Filing.FilingTypes.TRANSPARENCY_REGISTER.value, ] if filing.filing_type not in no_legal_filings: documents['documents']['legalFilings'] = \ diff --git a/legal-api/src/legal_api/core/meta/filing.py b/legal-api/src/legal_api/core/meta/filing.py index 20c29cdf3b..5437743cc1 100644 --- a/legal-api/src/legal_api/core/meta/filing.py +++ b/legal-api/src/legal_api/core/meta/filing.py @@ -137,7 +137,8 @@ class FilingTitles(str, Enum): 'CBEN': 'ALTER', 'CUL': 'ALTER', 'CCC': 'ALTER', - 'BC_TO_ULC': 'NOALU' + 'BC_TO_ULC': 'NOALU', + 'C_TO_CUL': 'NOALU' }, 'additional': [ { @@ -188,6 +189,21 @@ class FilingTitles(str, Enum): } } }, + 'amalgamationOut': { + 'name': 'amalgamationOut', + 'title': 'Amalgamation Out', + 'displayName': 'Amalgamation Out', + 'codes': { + 'BC': 'AMALO', + 'BEN': 'AMALO', + 'ULC': 'AMALO', + 'CC': 'AMALO', + 'C': 'AMALO', + 'CBEN': 'AMALO', + 'CUL': 'AMALO', + 'CCC': 'AMALO' + } + }, 'annualReport': { 'name': 'annualReport', 'title': 'Annual Report Filing', @@ -204,6 +220,36 @@ class FilingTitles(str, Enum): 'CCC': 'BCANN' } }, + 'appointReceiver': { + 'name': 'appointReceiver', + 'title': 'Appoint Receiver Filing', + 'displayName': 'Appoint Receiver', + 'codes': { + 'BEN': 'NOARM', + 'BC': 'NOARM', + 'ULC': 'NOARM', + 'CC': 'NOARM', + 'CBEN': 'NOARM', + 'C': 'NOARM', + 'CUL': 'NOARM', + 'CCC': 'NOARM' + } + }, + 'ceaseReceiver': { + 'name': 'ceaseReceiver', + 'title': 'Cease Receiver Filing', + 'displayName': 'Cease Receiver', + 'codes': { + 'BEN': 'NOCER', + 'BC': 'NOCER', + 'ULC': 'NOCER', + 'CC': 'NOCER', + 'CBEN': 'NOCER', + 'C': 'NOCER', + 'CUL': 'NOCER', + 'CCC': 'NOCER' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', @@ -287,6 
+333,27 @@ class FilingTitles(str, Enum): }, ] }, + 'consentAmalgamationOut': { + 'name': 'consentAmalgamationOut', + 'title': 'Consent Amalgamation Out', + 'displayName': '6-Month Consent to Amalgamate Out', + 'codes': { + 'BC': 'IAMGO', + 'BEN': 'IAMGO', + 'ULC': 'IAMGO', + 'CC': 'IAMGO', + 'C': 'IAMGO', + 'CBEN': 'IAMGO', + 'CUL': 'IAMGO', + 'CCC': 'IAMGO' + }, + 'additional': [ + { + 'types': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CCC', 'CUL'], + 'outputs': ['letterOfConsent'] + }, + ] + }, 'consentContinuationOut': { 'name': 'consentContinuationOut', 'title': 'Consent Continuation Out', @@ -390,8 +457,20 @@ class FilingTitles(str, Enum): 'name': 'courtOrder', 'title': 'Court Order', 'displayName': 'Court Order', - 'code': 'NOFEE' - }, + 'codes': { + 'SP': 'COURT', + 'GP': 'COURT', + 'CP': 'COURT', + 'BC': 'COURT', + 'BEN': 'COURT', + 'CC': 'COURT', + 'ULC': 'COURT', + 'C': 'COURT', + 'CBEN': 'COURT', + 'CUL': 'COURT', + 'CCC': 'COURT', + } + }, 'dissolution': { 'name': 'dissolution', 'additional': [ @@ -506,6 +585,12 @@ class FilingTitles(str, Enum): 'CCC': 'NWITH' } }, + 'putBackOff': { + 'name': 'putBackOff', + 'title': 'Put Back Off', + 'displayName': 'Correction - Put Back Off', + 'code': 'NOFEE' + }, 'putBackOn': { 'name': 'putBackOn', 'title': 'Put Back On', @@ -639,7 +724,55 @@ class FilingTitles(str, Enum): 'outputs': ['noticeOfArticles'] }, ] - } + }, + 'transparencyRegister': { + 'name': 'transparencyRegister', + 'annual': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register - Annual Filing', + 'displayName': 'Transparency Register - Annual Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'change': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'displayName': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 
'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'initial': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'displayName': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + } + }, } @@ -649,6 +782,10 @@ class FilingMeta: # pylint: disable=too-few-public-methods @staticmethod def display_name(business: Business, filing: FilingStorage) -> Optional[str]: """Return the name of the filing to display on outputs.""" + # if filing is imported from COLIN and has custom display name + if filing.meta_data and\ + (display_name := filing.meta_data.get('colinDisplayName')): + return display_name # if there is no lookup if not (names := FILINGS.get(filing.filing_type, {}).get('displayName')): if not (filing.filing_sub_type and
name = FilingMeta.get_corrected_filing_name(filing, business_revision, name) - elif filing.filing_type in ('dissolution') and filing.meta_data: - if filing.meta_data['dissolution'].get('dissolutionType') == 'administrative': + elif filing.filing_type in ('dissolution'): + dissolution_data = filing.meta_data.get('dissolution') if filing.meta_data else None + if dissolution_data and dissolution_data.get('dissolutionType') == 'administrative': name = 'Administrative Dissolution' elif filing.filing_type in ('adminFreeze') and filing.meta_data: diff --git a/legal-api/src/legal_api/errorhandlers.py b/legal-api/src/legal_api/errorhandlers.py index f3335c4ccd..3ea6a9b0cf 100644 --- a/legal-api/src/legal_api/errorhandlers.py +++ b/legal-api/src/legal_api/errorhandlers.py @@ -20,9 +20,10 @@ """ import logging +import re import sys -from flask import jsonify +from flask import jsonify, request from werkzeug.exceptions import HTTPException from werkzeug.routing import RoutingException @@ -47,6 +48,11 @@ def handle_http_error(error): if isinstance(error, RoutingException): return error + app_name = request.headers.get('App-Name', 'unknown') + if not re.match(r'^[a-zA-Z0-9_-]+$', app_name): + app_name = 'invalid app name' + logger.error('HTTP error from app: %s', app_name, exc_info=sys.exc_info()) + response = jsonify({'message': error.description}) response.status_code = error.code return response @@ -58,7 +64,11 @@ def handle_uncaught_error(error: Exception): # pylint: disable=unused-argument Since the handler suppresses the actual exception, log it explicitly to ensure it's logged and recorded in Sentry. 
""" - logger.error('Uncaught exception', exc_info=sys.exc_info()) + app_name = request.headers.get('App-Name', 'unknown') + if not re.match(r'^[a-zA-Z0-9_-]+$', app_name): + app_name = 'invalid app name' + logger.error('Uncaught exception from app: %s', app_name, exc_info=sys.exc_info()) + response = jsonify({'message': 'Internal server error'}) response.status_code = 500 return response diff --git a/legal-api/src/legal_api/models/amalgamating_business.py b/legal-api/src/legal_api/models/amalgamating_business.py index aea42243b7..97d1b08213 100644 --- a/legal-api/src/legal_api/models/amalgamating_business.py +++ b/legal-api/src/legal_api/models/amalgamating_business.py @@ -20,10 +20,9 @@ from sql_versioning import Versioned from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from ..utils.base import BaseEnum -from .db import db +from .db import db, VersioningProxy # noqa: I001 class AmalgamatingBusiness(db.Model, Versioned): # pylint: disable=too-many-instance-attributes @@ -62,7 +61,7 @@ def save(self): def get_revision(cls, transaction_id, amalgamation_id): """Get amalgamating businesses for the given transaction id.""" # pylint: disable=singleton-comparison; - amalgamating_businesses_version = version_class(AmalgamatingBusiness) + amalgamating_businesses_version = VersioningProxy.version_class(db.session(), AmalgamatingBusiness) amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ .filter(amalgamating_businesses_version.transaction_id <= transaction_id) \ .filter(amalgamating_businesses_version.operation_type == 0) \ @@ -73,7 +72,7 @@ def get_revision(cls, transaction_id, amalgamation_id): return amalgamating_businesses @classmethod - def get_all_revision(cls, business_id): + def get_all_revision(cls, business_id, tombstone=False): """ Get all amalgamating businesses for the given business id. @@ -83,10 +82,19 @@ def get_all_revision(cls, business_id): 3. 
Business T1 is dissolved as part of another amalgamation In this case T1 is involved in 2 amalgamation + + If tombstone is True, get all non-versioned amalgamating businesses + for the given business id. """ - amalgamating_businesses_version = version_class(AmalgamatingBusiness) - amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ - .filter(amalgamating_businesses_version.operation_type == 0) \ - .filter(amalgamating_businesses_version.business_id == business_id) \ - .order_by(amalgamating_businesses_version.transaction_id).all() + if tombstone: + amalgamating_businesses = db.session.query(AmalgamatingBusiness) \ + .filter(AmalgamatingBusiness.business_id == business_id) \ + .all() + else: + amalgamating_businesses_version = VersioningProxy.version_class(db.session(), AmalgamatingBusiness) + amalgamating_businesses = db.session.query(amalgamating_businesses_version) \ + .filter(amalgamating_businesses_version.operation_type == 0) \ + .filter(amalgamating_businesses_version.business_id == business_id) \ + .order_by(amalgamating_businesses_version.transaction_id).all() + return amalgamating_businesses diff --git a/legal-api/src/legal_api/models/amalgamation.py b/legal-api/src/legal_api/models/amalgamation.py index 40650a26c0..7f024d7e8c 100644 --- a/legal-api/src/legal_api/models/amalgamation.py +++ b/legal-api/src/legal_api/models/amalgamation.py @@ -21,10 +21,9 @@ from sql_versioning import Versioned from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from ..utils.base import BaseEnum -from .db import db +from .db import db, VersioningProxy # noqa: I001 class Amalgamation(db.Model, Versioned): # pylint: disable=too-many-instance-attributes @@ -37,6 +36,7 @@ class AmalgamationTypes(BaseEnum): regular = auto() vertical = auto() horizontal = auto() + unknown = auto() __versioned__ = {} __tablename__ = 'amalgamations' @@ -80,24 +80,32 @@ def json(self): } @classmethod - def get_revision_by_id(cls, transaction_id, 
amalgamation_id): - """Get amalgamation for the given id.""" + def get_revision_by_id(cls, amalgamation_id, transaction_id=None, tombstone=False): + """Get amalgamation for the given id. + + If tombstone is True, get all non-versioned amalgamating for the given id. + """ # pylint: disable=singleton-comparison; - amalgamation_version = version_class(Amalgamation) - amalgamation = db.session.query(amalgamation_version) \ - .filter(amalgamation_version.transaction_id <= transaction_id) \ - .filter(amalgamation_version.operation_type == 0) \ - .filter(amalgamation_version.id == amalgamation_id) \ - .filter(or_(amalgamation_version.end_transaction_id == None, # noqa: E711; - amalgamation_version.end_transaction_id > transaction_id)) \ - .order_by(amalgamation_version.transaction_id).one_or_none() + if tombstone: + amalgamation = db.session.query(Amalgamation) \ + .filter(Amalgamation.id == amalgamation_id) \ + .one_or_none() + else: + amalgamation_version = VersioningProxy.version_class(db.session(), Amalgamation) + amalgamation = db.session.query(amalgamation_version) \ + .filter(amalgamation_version.transaction_id <= transaction_id) \ + .filter(amalgamation_version.operation_type == 0) \ + .filter(amalgamation_version.id == amalgamation_id) \ + .filter(or_(amalgamation_version.end_transaction_id == None, # noqa: E711; + amalgamation_version.end_transaction_id > transaction_id)) \ + .order_by(amalgamation_version.transaction_id).one_or_none() return amalgamation @classmethod def get_revision(cls, transaction_id, business_id): """Get amalgamation for the given transaction id.""" # pylint: disable=singleton-comparison; - amalgamation_version = version_class(Amalgamation) + amalgamation_version = VersioningProxy.version_class(db.session(), Amalgamation) amalgamation = db.session.query(amalgamation_version) \ .filter(amalgamation_version.transaction_id <= transaction_id) \ .filter(amalgamation_version.operation_type == 0) \ @@ -108,8 +116,18 @@ def get_revision(cls, 
transaction_id, business_id): return amalgamation @classmethod - def get_revision_json(cls, transaction_id, business_id): - """Get amalgamation json for the given transaction id.""" + def get_revision_json(cls, transaction_id, business_id, tombstone=False): + """Get amalgamation json for the given transaction id. + + If tombstone is True, return placeholder amalgamation json. + """ + if tombstone: + return { + 'identifier': 'Not Available', + 'legalName': 'Not Available', + 'amalgamationDate': 'Not Available' + } + amalgamation = Amalgamation.get_revision(transaction_id, business_id) from .business import Business # pylint: disable=import-outside-toplevel business = Business.find_by_internal_id(amalgamation.business_id) diff --git a/legal-api/src/legal_api/models/business.py b/legal-api/src/legal_api/models/business.py index 2b3fb570a1..be96c25552 100644 --- a/legal-api/src/legal_api/models/business.py +++ b/legal-api/src/legal_api/models/business.py @@ -27,7 +27,6 @@ from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import aliased, backref from sqlalchemy.sql import and_, exists, func, not_, text -from sqlalchemy_continuum import version_class from legal_api.exceptions import BusinessException from legal_api.utils.base import BaseEnum @@ -37,7 +36,7 @@ from .amalgamation import Amalgamation # noqa: F401, I001, I003 pylint: disable=unused-import from .batch import Batch # noqa: F401, I001, I003 pylint: disable=unused-import from .batch_processing import BatchProcessing # noqa: F401, I001, I003 pylint: disable=unused-import -from .db import db # noqa: I001 +from .db import db, VersioningProxy # noqa: I001 from .party import Party from .share_class import ShareClass # noqa: F401,I001,I003 pylint: disable=unused-import @@ -190,11 +189,16 @@ class AssociationTypes(Enum): 'include_properties': [ 'id', 'admin_freeze', + 'amalgamation_out_date', 'association_type', + 'continuation_out_date', 'dissolution_date', 'fiscal_year_end_date', + 
'foreign_jurisdiction_region', + 'foreign_legal_name', 'founding_date', 'identifier', + 'jurisdiction', 'last_agm_date', 'last_ar_date', 'last_ar_year', @@ -205,24 +209,21 @@ class AssociationTypes(Enum): 'last_ledger_timestamp', 'last_modified', 'last_remote_ledger_id', + 'last_tr_year', 'legal_name', 'legal_type', + 'naics_code', + 'naics_description', + 'naics_key', + 'no_dissolution', + 'restoration_expiry_date', 'restriction_ind', + 'send_ar_ind', 'state', + 'start_date', 'state_filing_id', 'submitter_userid', 'tax_id', - 'naics_key', - 'naics_code', - 'naics_description', - 'no_dissolution', - 'start_date', - 'jurisdiction', - 'foreign_jurisdiction_region', - 'foreign_legal_name', - 'send_ar_ind', - 'restoration_expiry_date', - 'continuation_out_date' ] } @@ -242,12 +243,14 @@ class AssociationTypes(Enum): restoration_expiry_date = db.Column('restoration_expiry_date', db.DateTime(timezone=True)) dissolution_date = db.Column('dissolution_date', db.DateTime(timezone=True), default=None) continuation_out_date = db.Column('continuation_out_date', db.DateTime(timezone=True)) + amalgamation_out_date = db.Column('amalgamation_out_date', db.DateTime(timezone=True)) _identifier = db.Column('identifier', db.String(10), index=True) tax_id = db.Column('tax_id', db.String(15), index=True) fiscal_year_end_date = db.Column('fiscal_year_end_date', db.DateTime(timezone=True), default=datetime.utcnow) restriction_ind = db.Column('restriction_ind', db.Boolean, unique=False, default=False) last_ar_year = db.Column('last_ar_year', db.Integer) last_ar_reminder_year = db.Column('last_ar_reminder_year', db.Integer) + last_tr_year = db.Column('last_tr_year', db.Integer) association_type = db.Column('association_type', db.String(50)) state = db.Column('state', db.Enum(State), default=State.ACTIVE.value) state_filing_id = db.Column('state_filing_id', db.Integer) @@ -267,9 +270,10 @@ class AssociationTypes(Enum): # relationships filings = db.relationship('Filing', lazy='dynamic') - 
offices = db.relationship('Office', lazy='dynamic', cascade='all, delete, delete-orphan') + offices = db.relationship('Office', backref='business', lazy='dynamic', cascade='all, delete, delete-orphan') party_roles = db.relationship('PartyRole', lazy='dynamic') - share_classes = db.relationship('ShareClass', lazy='dynamic', cascade='all, delete, delete-orphan') + share_classes = db.relationship('ShareClass', backref='business', lazy='dynamic', + cascade='all, delete, delete-orphan') aliases = db.relationship('Alias', lazy='dynamic') resolutions = db.relationship('Resolution', lazy='dynamic') documents = db.relationship('Document', lazy='dynamic') @@ -327,12 +331,92 @@ def business_legal_name(self): @property def next_anniversary(self): """Retrieve the next anniversary date for which an AR filing is due.""" + if not self.founding_date and not self.last_ar_date: + return None last_anniversary = self.founding_date if self.last_ar_date: last_anniversary = self.last_ar_date return last_anniversary + datedelta.datedelta(years=1) + @property + def next_annual_tr_due_datetime(self) -> datetime: + """Retrieve the next annual TR filing due datetime for the business.""" + due_year_offset = 1 + # NOTE: Converting to pacific time to ensure we get the right date + # for comparisons and when replacing time at the end + founding_datetime = LegislationDatetime.as_legislation_timezone(self.founding_date) + + tr_start_datetime = None + if tr_start_date := current_app.config.get('TR_START_DATE', None): + tr_start_datetime = LegislationDatetime.as_legislation_timezone_from_date( + datetime.fromisoformat(tr_start_date)) + + last_restoration_datetime = None + if restoration_filing := Filing.get_most_recent_filing(self.id, 'restoration'): + if restoration_filing.effective_date: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.effective_date) + else: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + 
restoration_filing.filing_date) + + if ( + last_restoration_datetime and + last_restoration_datetime.year > (self.last_tr_year or tr_start_datetime.year or 0) + ): + # Set offset based on the year of the restoration + # NOTE: Currently could end up being due before the initial filing - policy still getting worked out + due_year_offset = last_restoration_datetime.year - founding_datetime.year + if ( + last_restoration_datetime.month > founding_datetime.month or + ( + last_restoration_datetime.month == founding_datetime.month and + last_restoration_datetime.day >= founding_datetime.day + ) + ): + # Month/day of the founding date has already passed for this year so add 1 + due_year_offset += 1 + + elif self.last_tr_year: + # i.e. founding_date.year=2023, last_tr_year=2024, then due_year_offset=2 and next due date for 2025 + due_year_offset = (self.last_tr_year - founding_datetime.year) + 1 + + elif tr_start_datetime: + # Case examples: + # ---- Founded before TR start, month/day are earlier or the same + # -> tr_start_date=2025-02-01, founding_date=2023-01-01.., + # then due_year_offset=3 and next due date for 2026 + # -> tr_start_date=2025-02-01, founding_date=2024-01-01.., + # then due_year_offset=2 and next due date for 2026 + # ---- Founded before TR start, month/day are after + # -> tr_start_date=2025-02-01, founding_date=2023-02-02.., + # then due_year_offset=2 and next due date for 2025 + # -> tr_start_date=2025-02-01, founding_date=2024-02-02.., + # then due_year_offset=1 and next due date for 2025 + # ---- Founded after TR start, nothing needed + # -> tr_start_date=2025-02-01, founding_date=2025-02-02.., + # then due_year_offset=1 and next due date for 2026 (regular) + # -> tr_start_date=2025-02-01, founding_date=2026-02-02.., + # then due_year_offset=1 and next due date for 2027 (regular) + if tr_start_datetime > founding_datetime: + # Set offset based on the year of the tr start + due_year_offset = tr_start_datetime.year - founding_datetime.year + if ( + 
tr_start_datetime.month > founding_datetime.month or + ( + tr_start_datetime.month == founding_datetime.month and + tr_start_datetime.day >= founding_datetime.day + ) + ): + # Month/day of the founding date had already passed for that year so add 1 + due_year_offset += 1 + + due_datetime = founding_datetime + datedelta.datedelta(years=due_year_offset, months=2) + + # return as this date at 23:59:59 + return due_datetime.replace(hour=23, minute=59, second=59, microsecond=0) + def get_ar_dates(self, next_ar_year): """Get ar min and max date for the specific year.""" ar_min_date = datetime(next_ar_year, 1, 1).date() @@ -443,10 +527,10 @@ def _has_no_transition_filed_after_restoration(self) -> bool: exists().where( and_( transition_filing.business_id == self.id, - transition_filing._filing_type == \ - CoreFiling.FilingTypes.TRANSITION.value, # pylint: disable=protected-access - transition_filing._status == \ - Filing.Status.COMPLETED.value, # pylint: disable=protected-access + (transition_filing._filing_type == # pylint: disable=protected-access + CoreFiling.FilingTypes.TRANSITION.value), + (transition_filing._status == # pylint: disable=protected-access + Filing.Status.COMPLETED.value), transition_filing.effective_date.between( restoration_filing.effective_date, restoration_filing_effective_cutoff @@ -472,6 +556,12 @@ def in_dissolution(self): one_or_none() return find_in_batch_processing is not None + @property + def is_tombstone(self): + """Return True if it's a tombstone business, otherwise False.""" + tombstone_filing = Filing.get_filings_by_status(self.id, [Filing.Status.TOMBSTONE]) + return bool(tombstone_filing) + def save(self): """Render a Business to the local cache.""" db.session.add(self) @@ -495,29 +585,31 @@ def json(self, slim=False): if slim: return slim_json - ar_min_date, ar_max_date = self.get_ar_dates( - (self.last_ar_year if self.last_ar_year else self.founding_date.year) + 1 - ) + ar_min_date = None + ar_max_date = None + if self.last_ar_year or 
self.founding_date: + ar_min_date, ar_max_date = self.get_ar_dates( + (self.last_ar_year if self.last_ar_year else self.founding_date.year) + 1 + ) + d = { **slim_json, - 'arMinDate': ar_min_date.isoformat(), - 'arMaxDate': ar_max_date.isoformat(), - 'foundingDate': self.founding_date.isoformat(), + 'arMinDate': ar_min_date.isoformat() if ar_min_date else '', + 'arMaxDate': ar_max_date.isoformat() if ar_max_date else '', 'hasRestrictions': self.restriction_ind, 'complianceWarnings': self.compliance_warnings, 'warnings': self.warnings, 'lastAnnualGeneralMeetingDate': datetime.date(self.last_agm_date).isoformat() if self.last_agm_date else '', 'lastAnnualReportDate': datetime.date(self.last_ar_date).isoformat() if self.last_ar_date else '', - 'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat(), + 'lastLedgerTimestamp': self.last_ledger_timestamp.isoformat() if self.last_ledger_timestamp else '', 'lastAddressChangeDate': '', 'lastDirectorChangeDate': '', - 'lastModified': self.last_modified.isoformat(), 'naicsKey': self.naics_key, 'naicsCode': self.naics_code, 'naicsDescription': self.naics_description, 'nextAnnualReport': LegislationDatetime.as_legislation_timezone_from_date( self.next_anniversary - ).astimezone(timezone.utc).isoformat(), + ).astimezone(timezone.utc).isoformat() if self.next_anniversary else '', 'noDissolution': self.no_dissolution, 'associationType': self.association_type, 'allowedActions': self.allowable_actions, @@ -531,12 +623,14 @@ def _slim_json(self): """Return a smaller/faster version of the business json.""" d = { 'adminFreeze': self.admin_freeze or False, + 'foundingDate': self.founding_date.isoformat() if self.founding_date else '', 'goodStanding': self.good_standing, 'identifier': self.identifier, 'inDissolution': self.in_dissolution, 'legalName': self.business_legal_name, 'legalType': self.legal_type, - 'state': self.state.name if self.state else Business.State.ACTIVE.name + 'state': self.state.name if self.state else 
Business.State.ACTIVE.name, + 'lastModified': self.last_modified.isoformat() } if self.tax_id: @@ -571,6 +665,8 @@ def _extend_json(self, d): d['restorationExpiryDate'] = LegislationDatetime.format_as_legislation_date(self.restoration_expiry_date) if self.continuation_out_date: d['continuationOutDate'] = LegislationDatetime.format_as_legislation_date(self.continuation_out_date) + if self.amalgamation_out_date: + d['amalgamationOutDate'] = LegislationDatetime.format_as_legislation_date(self.amalgamation_out_date) if self.jurisdiction: d['jurisdiction'] = self.jurisdiction @@ -672,6 +768,14 @@ def get_all_by_no_tax_id(cls): .all()) return businesses + @classmethod + def get_expired_restoration(cls): + """Return all identifier with an expired restoration_expiry_date.""" + businesses = (db.session.query(Business.identifier) + .filter(Business.restoration_expiry_date <= datetime.utcnow()) + .all()) + return businesses + @classmethod def get_filing_by_id(cls, business_identifier: int, filing_id: str): """Return the filings for a specific business and filing_id.""" @@ -693,7 +797,7 @@ def get_alternate_names(self) -> dict: alternate_names = [] # Fetch aliases and related filings in a single query - alias_version = version_class(Alias) + alias_version = VersioningProxy.version_class(db.session(), Alias) filing_alias = aliased(Filing) aliases_query = db.session.query( alias_version.alias, @@ -771,7 +875,14 @@ def get_amalgamated_into(self) -> dict: self.state_filing_id and (state_filing := Filing.find_by_id(self.state_filing_id)) and state_filing.is_amalgamation_application): - return Amalgamation.get_revision_json(state_filing.transaction_id, state_filing.business_id) + if not self.is_tombstone: + return Amalgamation.get_revision_json(state_filing.transaction_id, state_filing.business_id) + else: + return { + 'identifier': 'Not Available', + 'legalName': 'Not Available', + 'amalgamationDate': 'Not Available' + } return None diff --git 
a/legal-api/src/legal_api/models/comment.py b/legal-api/src/legal_api/models/comment.py index a48da5ba6a..de2da52da2 100644 --- a/legal-api/src/legal_api/models/comment.py +++ b/legal-api/src/legal_api/models/comment.py @@ -52,15 +52,16 @@ class Comment(db.Model): @property def json(self): """Return the json repressentation of a comment.""" + from legal_api.core.constants import REDACTED_STAFF_SUBMITTER # pylint: disable=import-outside-toplevel user = User.find_by_id(self.staff_id) return { 'comment': { 'id': self.id, - 'submitterDisplayName': user.display_name if user else None, + 'submitterDisplayName': user.display_name if user else REDACTED_STAFF_SUBMITTER, 'comment': self.comment, 'filingId': self.filing_id, 'businessId': self.business_id, - 'timestamp': self.timestamp.isoformat() + 'timestamp': self.timestamp.isoformat() if self.timestamp else None } } diff --git a/legal-api/src/legal_api/models/consent_continuation_out.py b/legal-api/src/legal_api/models/consent_continuation_out.py index 4247601dc4..fab346af64 100644 --- a/legal-api/src/legal_api/models/consent_continuation_out.py +++ b/legal-api/src/legal_api/models/consent_continuation_out.py @@ -14,19 +14,29 @@ """This model holds data for consent continuation out.""" from __future__ import annotations +from enum import auto from typing import Optional from sqlalchemy.orm import backref +from ..utils.base import BaseEnum from .db import db class ConsentContinuationOut(db.Model): # pylint: disable=too-few-public-methods """This class manages the consent continuation out for businesses.""" + # pylint: disable=invalid-name + class ConsentTypes(BaseEnum): + """Enum for the consent type.""" + + continuation_out = auto() + amalgamation_out = auto() + __tablename__ = 'consent_continuation_outs' id = db.Column('id', db.Integer, unique=True, primary_key=True) + consent_type = db.Column('consent_type', db.Enum(ConsentTypes), nullable=False) foreign_jurisdiction = db.Column('foreign_jurisdiction', db.String(10)) 
foreign_jurisdiction_region = db.Column('foreign_jurisdiction_region', db.String(10)) expiry_date = db.Column('expiry_date', db.DateTime(timezone=True)) @@ -46,11 +56,13 @@ def save(self): def get_active_cco(business_id, expiry_date, foreign_jurisdiction=None, - foreign_jurisdiction_region=None) -> list[ConsentContinuationOut]: + foreign_jurisdiction_region=None, + consent_type=ConsentTypes.continuation_out) -> list[ConsentContinuationOut]: """Get a list of active consent_continuation_outs linked to the given business_id.""" - query = db.session.query(ConsentContinuationOut). \ - filter(ConsentContinuationOut.business_id == business_id). \ - filter(ConsentContinuationOut.expiry_date >= expiry_date) + query = (db.session.query(ConsentContinuationOut). + filter(ConsentContinuationOut.business_id == business_id). + filter(ConsentContinuationOut.consent_type == consent_type). + filter(ConsentContinuationOut.expiry_date >= expiry_date)) if foreign_jurisdiction: query = query.filter(ConsentContinuationOut.foreign_jurisdiction == foreign_jurisdiction.upper()) diff --git a/legal-api/src/legal_api/models/db.py b/legal-api/src/legal_api/models/db.py index 67b3916632..e8c4007d5e 100644 --- a/legal-api/src/legal_api/models/db.py +++ b/legal-api/src/legal_api/models/db.py @@ -11,7 +11,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -"""Create SQLAlchenmy and Schema managers. +"""Create SQLAlchemy and Schema managers. 
These will get initialized by the application using the models """ @@ -19,7 +19,7 @@ from flask import current_app from flask_sqlalchemy import SignallingSession, SQLAlchemy -from sql_versioning import TransactionManager, debug +from sql_versioning import TransactionManager from sql_versioning import disable_versioning as _new_disable_versioning from sql_versioning import enable_versioning as _new_enable_versioning from sql_versioning import version_class as _new_version_class @@ -50,6 +50,26 @@ class Transaction(db.Model): issued_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=True) +def print_versioning_info(): + """ + Print the current versioning status if not already printed. + + This should only be called within an application context. + """ + try: + from legal_api.services import flags as flag_service # pylint: disable=import-outside-toplevel + + current_service = current_app.config.get('SERVICE_NAME') + if current_service: + db_versioning = flag_service.value('db-versioning') + use_new_versioning = (bool(db_versioning) and bool(db_versioning.get(current_service))) + current_versioning = 'new' if use_new_versioning else 'old' + current_app.logger.info(f'\033[31mService: {current_service}, db versioning={current_versioning}\033[0m') + except Exception as err: + # Don't crash if something goes wrong + current_app.logger.error('Unable to read flags: %s' % repr(err), exc_info=True) + + def init_db(app): """Initialize database using flask app and configure db mappers. 
@@ -59,6 +79,9 @@ def init_db(app): db.init_app(app) orm.configure_mappers() + with app.app_context(): + print_versioning_info() + # TODO: remove versioning switching logic # TODO: remove debugging variables, messages, and decorators @@ -136,7 +159,6 @@ def _check_versioning(cls): db_versioning = flags.value('db-versioning') use_new_versioning = (bool(db_versioning) and bool(db_versioning.get(current_service))) cls._current_versioning = 'new' if use_new_versioning else 'old' - print(f'\033[31mCurrent versioning={cls._current_versioning}\033[0m') @classmethod def _initialize_versioning(cls): @@ -159,9 +181,10 @@ def _switch_versioning(cls, previous, current): """ cls._versioning_control[previous]['disable']() cls._versioning_control[current]['enable']() + # Print when versioning changes + current_app.logger.info(f'\033[31mVersioning changed: {previous} -> {current}\033[0m') @classmethod - @debug def lock_versioning(cls, session, transaction): """Lock versioning for the session. @@ -171,37 +194,22 @@ def lock_versioning(cls, session, transaction): :param transaction: The transaction associated with the session. 
:return: None """ - print(f"\033[32mCurrent service={current_app.config['SERVICE_NAME']}, session={session}," - f' transaction={transaction}\033[0m') if '_versioning_locked' not in session.info: if not cls._is_initialized: cls._initialize_versioning() - print(f'\033[31mVersioning locked, current versioning type={cls._current_versioning}' - '(initialized)\033[0m') else: previous_versioning = cls._current_versioning cls._check_versioning() - # TODO: remove debug - lock_type - lock_type = 'unchanged' if cls._current_versioning != previous_versioning: cls._switch_versioning(previous_versioning, cls._current_versioning) - lock_type = 'switched' - - print(f'\033[31mVersioning locked, current versioning type={cls._current_versioning}' - f'({lock_type})\033[0m') session.info['_versioning_locked'] = cls._current_versioning session.info['_transactions_locked'] = [] - # TODO: remove debug - else statement - else: - print('\033[31mVersioning already set for this session, skip\033[0m') - session.info['_transactions_locked'].append(transaction) @classmethod - @debug def unlock_versioning(cls, session, transaction): """Unlock versioning for the session. @@ -211,27 +219,14 @@ def unlock_versioning(cls, session, transaction): :param transaction: The transaction associated with the session. 
:return: None """ - print(f'\033[32mSession={session}, transaction={transaction}\033[0m') - if '_versioning_locked' in session.info and '_transactions_locked' in session.info: session.info['_transactions_locked'].remove(transaction) - print('\033[31mTransaction unlocked\033[0m') if not session.info['_transactions_locked']: session.info.pop('_versioning_locked', None) session.info.pop('_transactions_locked', None) - print('\033[31mVersioning unlocked\033[0m') - - # TODO: remove debug - else statement - else: - print("\033[32mThis session has active transaction, can't be unlocked\033[0m") - - # TODO: remove debug - else statement - else: - print("\033[32mVersioning/Transaction lock doesn't exist, skip\033[0m") @classmethod - @debug def get_transaction_id(cls, session): """Get the transaction ID for the session. @@ -241,14 +236,11 @@ def get_transaction_id(cls, session): transaction_id = None current_versioning = session.info['_versioning_locked'] - print(f'\033[31mCurrent versioning type={current_versioning}\033[0m') transaction_id = cls._versioning_control[current_versioning]['get_transaction_id'](session) - print(f'\033[31mUsing transaction_id = {transaction_id}\033[0m') return transaction_id @classmethod - @debug def version_class(cls, session, obj): """Return version class for an object based in the session. @@ -260,25 +252,21 @@ def version_class(cls, session, obj): session.begin() current_versioning = session.info['_versioning_locked'] - print(f'\033[31mCurrent versioning type={current_versioning}\033[0m') return cls._versioning_control[current_versioning]['version_class'](obj) -@debug def setup_versioning(): - """Set up and initialize versioining switching. + """Set up and initialize versioning switching. 
:return: None """ # use SignallingSession to skip events for continuum's internal session/txn operations @event.listens_for(SignallingSession, 'after_transaction_create') - @debug def after_transaction_create(session, transaction): VersioningProxy.lock_versioning(session, transaction) @event.listens_for(SignallingSession, 'after_transaction_end') - @debug def clear_transaction(session, transaction): VersioningProxy.unlock_versioning(session, transaction) @@ -287,7 +275,8 @@ def clear_transaction(session, transaction): # TODO: enable versioning switching -# it should be called before data model initialzed, otherwise, old versioning doesn't work properly -# setup_versioning() +# it should be called before data model initialized, otherwise, old versioning doesn't work properly +setup_versioning() + -make_versioned(user_cls=None, manager=versioning_manager) +# make_versioned(user_cls=None, manager=versioning_manager) diff --git a/legal-api/src/legal_api/models/filing.py b/legal-api/src/legal_api/models/filing.py index bd94a0cccb..8996ac1099 100644 --- a/legal-api/src/legal_api/models/filing.py +++ b/legal-api/src/legal_api/models/filing.py @@ -11,6 +11,7 @@ """Filings are legal documents that alter the state of a business.""" # pylint: disable=too-many-lines import copy +from contextlib import suppress from datetime import date, datetime, timezone from enum import Enum from http import HTTPStatus @@ -52,6 +53,8 @@ class Status(str, Enum): PENDING_CORRECTION = 'PENDING_CORRECTION' WITHDRAWN = 'WITHDRAWN' + TOMBSTONE = 'TOMBSTONE' + # filings with staff review APPROVED = 'APPROVED' AWAITING_REVIEW = 'AWAITING_REVIEW' @@ -61,10 +64,12 @@ class Status(str, Enum): class Source(Enum): """Render an Enum of the Filing Sources.""" + BTR = 'BTR' COLIN = 'COLIN' LEAR = 'LEAR' # TODO: get legal types from defined class once table is made (getting it from Business causes circ import) + # TODO: add filing types for btr FILINGS = { 'affidavit': { 'name': 'affidavit', @@ -113,7 +118,8 
@@ class Source(Enum): 'CBEN': 'ALTER', 'CUL': 'ALTER', 'CCC': 'ALTER', - 'BC_TO_ULC': 'NOALU' + 'BC_TO_ULC': 'NOALU', + 'C_TO_CUL': 'NOALU' } }, 'amalgamationApplication': { @@ -150,6 +156,20 @@ class Source(Enum): }, } }, + 'amalgamationOut': { + 'name': 'amalgamationOut', + 'title': 'Amalgamation Out', + 'codes': { + 'BC': 'AMALO', + 'BEN': 'AMALO', + 'ULC': 'AMALO', + 'CC': 'AMALO', + 'C': 'AMALO', + 'CBEN': 'AMALO', + 'CUL': 'AMALO', + 'CCC': 'AMALO' + } + }, 'annualReport': { 'name': 'annualReport', 'title': 'Annual Report Filing', @@ -165,6 +185,35 @@ class Source(Enum): 'CCC': 'BCANN' } }, + 'appointReceiver': { + 'name': 'appointReceiver', + 'title': 'Appoint Receiver Filing', + 'codes': { + 'BEN': 'NOARM', + 'BC': 'NOARM', + 'ULC': 'NOARM', + 'CC': 'NOARM', + 'CBEN': 'NOARM', + 'C': 'NOARM', + 'CUL': 'NOARM', + 'CCC': 'NOARM' + } + }, + 'ceaseReceiver': { + 'name': 'ceaseReceiver', + 'title': 'Cease Receiver Filing', + 'displayName': 'Cease Receiver', + 'codes': { + 'BEN': 'NOCER', + 'BC': 'NOCER', + 'ULC': 'NOCER', + 'CC': 'NOCER', + 'CBEN': 'NOCER', + 'C': 'NOCER', + 'CUL': 'NOCER', + 'CCC': 'NOCER' + } + }, 'changeOfAddress': { 'name': 'changeOfAddress', 'title': 'Change of Address Filing', @@ -223,6 +272,20 @@ class Source(Enum): 'GP': 'FMCHANGE' } }, + 'consentAmalgamationOut': { + 'name': 'consentAmalgamationOut', + 'title': 'Consent Amalgamation Out', + 'codes': { + 'BC': 'IAMGO', + 'BEN': 'IAMGO', + 'ULC': 'IAMGO', + 'CC': 'IAMGO', + 'C': 'IAMGO', + 'CBEN': 'IAMGO', + 'CUL': 'IAMGO', + 'CCC': 'IAMGO' + } + }, 'consentContinuationOut': { 'name': 'consentContinuationOut', 'title': 'Consent Continuation Out', @@ -288,6 +351,24 @@ class Source(Enum): 'CCC': 'CRCTN', } }, + 'courtOrder': { + 'name': 'courtOrder', + 'title': 'Court Order', + 'displayName': 'Court Order', + 'codes': { + 'SP': 'COURT', + 'GP': 'COURT', + 'CP': 'COURT', + 'BC': 'COURT', + 'BEN': 'COURT', + 'CC': 'COURT', + 'ULC': 'COURT', + 'C': 'COURT', + 'CBEN': 'COURT', + 'CUL': 
'COURT', + 'CCC': 'COURT', + } + }, 'dissolution': { 'name': 'dissolution', 'voluntary': { @@ -443,11 +524,56 @@ class Source(Enum): 'CCC': 'TRANS' } }, + 'transparencyRegister': { + 'name': 'transparencyRegister', + 'annual': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register - Annual Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'change': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + }, + 'initial': { + 'name': 'transparencyRegister', + 'title': 'Transparency Register Filing', + 'codes': { + 'BC': 'REGSIGIN', + 'BEN': 'REGSIGIN', + 'ULC': 'REGSIGIN', + 'CC': 'REGSIGIN', + 'C': 'REGSIGIN', + 'CBEN': 'REGSIGIN', + 'CUL': 'REGSIGIN', + 'CCC': 'REGSIGIN' + } + } + }, # changing the structure of fee code in courtOrder/registrarsNotation/registrarsOrder # for all the business the fee code remain same as NOFEE (Staff) 'adminFreeze': {'name': 'adminFreeze', 'title': 'Admin Freeze', 'code': 'NOFEE'}, - 'courtOrder': {'name': 'courtOrder', 'title': 'Court Order', 'code': 'NOFEE'}, + 'putBackOff': {'name': 'putBackOff', 'title': 'Put Back Off', 'code': 'NOFEE'}, 'putBackOn': {'name': 'putBackOn', 'title': 'Put Back On', 'code': 'NOFEE'}, 'registrarsNotation': {'name': 'registrarsNotation', 'title': 'Registrars Notation', 'code': 'NOFEE'}, 'registrarsOrder': {'name': 'registrarsOrder', 'title': 'Registrars Order', 'code': 'NOFEE'} @@ -458,7 +584,8 @@ class Source(Enum): # breaking and more testing was req'd so did not make refactor when introducing this dictionary. 
'dissolution': 'dissolutionType', 'restoration': 'type', - 'amalgamationApplication': 'type' + 'amalgamationApplication': 'type', + 'transparencyRegister': 'type' } __tablename__ = 'filings' @@ -492,6 +619,7 @@ class Source(Enum): 'court_order_effect_of_order', 'court_order_file_number', 'deletion_locked', + 'hide_in_ledger', 'effective_date', 'order_details', 'paper_only', @@ -504,7 +632,9 @@ class Source(Enum): 'transaction_id', 'approval_type', 'application_date', - 'notice_date' + 'notice_date', + 'withdrawal_pending', + 'withdrawn_filing_id' ] } @@ -535,6 +665,8 @@ class Source(Enum): application_date = db.Column('application_date', db.DateTime(timezone=True)) notice_date = db.Column('notice_date', db.DateTime(timezone=True)) resubmission_date = db.Column('resubmission_date', db.DateTime(timezone=True)) + hide_in_ledger = db.Column('hide_in_ledger', db.Boolean, unique=False, default=False) + withdrawal_pending = db.Column('withdrawal_pending', db.Boolean, unique=False, default=False) # # relationships transaction_id = db.Column('transaction_id', db.BigInteger, @@ -558,7 +690,16 @@ class Source(Enum): review = db.relationship('Review', lazy='dynamic') parent_filing_id = db.Column(db.Integer, db.ForeignKey('filings.id')) - parent_filing = db.relationship('Filing', remote_side=[id], backref=backref('children')) + parent_filing = db.relationship('Filing', + remote_side=[id], + backref=backref('children', uselist=True), + foreign_keys=[parent_filing_id]) + + withdrawn_filing_id = db.Column('withdrawn_filing_id', db.Integer, + db.ForeignKey('filings.id')) + withdrawn_filing = db.relationship('Filing', + remote_side=[id], + foreign_keys=[withdrawn_filing_id]) # properties @property @@ -743,7 +884,8 @@ def set_processed(self, business_type): def effective_date_can_be_before_payment_completion_date(self, business_type): """For AR or COD filings then the effective date can be before the payment date.""" return self.filing_type in 
(Filing.FILINGS['annualReport'].get('name'), - Filing.FILINGS['changeOfDirectors'].get('name')) + Filing.FILINGS['changeOfDirectors'].get('name'), + Filing.FILINGS['transparencyRegister'].get('name')) @staticmethod def _raise_default_lock_exception(): @@ -752,6 +894,14 @@ def _raise_default_lock_exception(): status_code=HTTPStatus.FORBIDDEN ) + @property + def is_future_effective(self) -> bool: + """Return True if the effective date is in the future.""" + with suppress(AttributeError, TypeError): + if self.effective_date > self.payment_completion_date: + return True + return False + @property def is_corrected(self): """Has this filing been corrected.""" @@ -844,11 +994,36 @@ def find_by_id(cls, filing_id: str = None): @staticmethod def get_temp_reg_filing(temp_reg_id: str, filing_id: str = None): - """Return a Filing by it's payment token.""" - q = db.session.query(Filing).filter(Filing.temp_reg == temp_reg_id) + """Return a filing by the temp id and filing id (if applicable).""" + if not filing_id: + return db.session.query(Filing).filter(Filing.temp_reg == temp_reg_id).one_or_none() + + return ( + db.session.query(Filing).filter( + db.or_( + db.and_( + Filing.id == filing_id, + Filing.temp_reg == temp_reg_id + ), + db.and_( # special case for NoW + Filing.id == filing_id, + Filing._filing_type == 'noticeOfWithdrawal', + Filing.withdrawn_filing_id == db.session.query(Filing.id) + .filter(Filing.temp_reg == temp_reg_id) + .scalar_subquery() + ) + ) + ).one_or_none()) - if filing_id: - q.filter(Filing.id == filing_id) + @staticmethod + def get_temp_reg_filing_by_withdrawn_filing(filing_id: str, withdrawn_filing_id: str, filing_type: str = None): + """Return an temp reg Filing by withdrawn filing.""" + q = db.session.query(Filing). \ + filter(Filing.withdrawn_filing_id == withdrawn_filing_id). 
\ + filter(Filing.id == filing_id) + + if filing_type: + q = q.filter(Filing._filing_type == filing_type) filing = q.one_or_none() return filing @@ -881,7 +1056,7 @@ def get_incomplete_filings_by_type(business_id: int, filing_type: str): filings = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ filter(Filing._filing_type == filing_type). \ - filter(Filing._status != Filing.Status.COMPLETED.value). \ + filter(not_(Filing._status.in_([Filing.Status.COMPLETED.value, Filing.Status.WITHDRAWN.value]))). \ order_by(desc(Filing.filing_date)). \ all() return filings @@ -893,8 +1068,25 @@ def get_filings_by_types(business_id: int, filing_types): filter(Filing.business_id == business_id). \ filter(Filing._filing_type.in_(filing_types)). \ filter(Filing._status == Filing.Status.COMPLETED.value). \ - order_by(desc(Filing.effective_date)). \ + order_by(desc(Filing.transaction_id)). \ + all() + return filings + + @staticmethod + def get_conversion_filings_by_conv_types(business_id: int, filing_types: list): + """Return the conversion filings of a particular conv type. + + Records only exist in some legacy corps imported from COLIN. + """ + filings = db.session.query(Filing). \ + filter(Filing.business_id == business_id). \ + filter(Filing._filing_type == 'conversion'). \ + filter( + Filing._meta_data.op('->')('conversion').op('->>')('convFilingType').in_(filing_types) + ). \ + order_by(desc(Filing.transaction_id)). \ all() + return filings @staticmethod @@ -908,7 +1100,7 @@ def get_incomplete_filings_by_types(business_id: int, filing_types: list, exclud filings = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ filter(Filing._filing_type.in_(filing_types)). \ - filter(Filing._status != Filing.Status.COMPLETED.value). \ + filter(not_(Filing._status.in_([Filing.Status.COMPLETED.value, Filing.Status.WITHDRAWN.value]))). \ filter(not_(Filing._status.in_(excluded_statuses))). \ order_by(desc(Filing.effective_date)). 
\ all() @@ -953,23 +1145,21 @@ def get_filings_by_type_pairs(business_id: int, filing_type_pairs: list, status: return filings @staticmethod - def get_a_businesses_most_recent_filing_of_a_type(business_id: int, filing_type: str, filing_sub_type: str = None): - """Return the filings of a particular type.""" - max_filing = db.session.query(db.func.max(Filing._filing_date).label('last_filing_date')).\ - filter(Filing._filing_type == filing_type). \ - filter(Filing.business_id == business_id) - if filing_sub_type: - max_filing = max_filing.filter(Filing._filing_sub_type == filing_sub_type) - max_filing = max_filing.subquery() + def get_most_recent_filing(business_id: str, filing_type: str = None, filing_sub_type: str = None): + """Return the most recent filing. - filing = Filing.query.join(max_filing, Filing._filing_date == max_filing.c.last_filing_date). \ + filing_type is required, if filing_sub_type is provided, it will be used to filter the query. + """ + query = db.session.query(Filing). \ filter(Filing.business_id == business_id). \ - filter(Filing._filing_type == filing_type). 
\ filter(Filing._status == Filing.Status.COMPLETED.value) - if filing_sub_type: - filing = filing.filter(Filing._filing_sub_type == filing_sub_type) + if filing_type: + query = query.filter(Filing._filing_type == filing_type) + if filing_sub_type: + query = query.filter(Filing._filing_sub_type == filing_sub_type) - return filing.one_or_none() + query = query.order_by(Filing.transaction_id.desc()) + return query.first() @staticmethod def get_most_recent_legal_filing(business_id: str, filing_type: str = None): @@ -1002,7 +1192,14 @@ def get_most_recent_legal_filing(business_id: str, filing_type: str = None): def get_completed_filings_for_colin(limit=20, offset=0): """Return the filings based on limit and offset.""" from .business import Business # noqa: F401; pylint: disable=import-outside-toplevel - excluded_filings = ['lear_epoch', 'adminFreeze', 'courtOrder', 'registrarsNotation', 'registrarsOrder'] + excluded_filings = [ + 'lear_epoch', + 'adminFreeze', + 'courtOrder', + 'registrarsNotation', + 'registrarsOrder', + 'transparencyRegister' + ] excluded_businesses = [Business.LegalTypes.SOLE_PROP.value, Business.LegalTypes.PARTNERSHIP.value] filings = db.session.query(Filing).join(Business). \ filter( @@ -1034,15 +1231,14 @@ def get_all_filings_by_status(status): @staticmethod def get_previous_completed_filing(filing): """Return the previous completed filing.""" - filings = db.session.query(Filing). \ + query = db.session.query(Filing). \ filter(Filing.business_id == filing.business_id). \ - filter(Filing._status == Filing.Status.COMPLETED.value). \ - filter(Filing.id < filing.id). \ - filter(Filing.effective_date < filing.effective_date). 
\ - order_by(Filing.effective_date.desc()).all() - if filings: - return filings[0] - return None + filter(Filing._status == Filing.Status.COMPLETED.value) + + if filing.transaction_id: # transaction_id will be None for the pending filings (intermediate state) + query = query.filter(Filing.transaction_id < filing.transaction_id) + + return query.order_by(Filing.transaction_id.desc()).first() @staticmethod def has_completed_filing(business_id: int, filing_type: str) -> bool: @@ -1171,7 +1367,8 @@ def receive_before_change(mapper, connection, target): # pylint: disable=unused # pylint: disable=protected-access if (filing._status in [Filing.Status.AWAITING_REVIEW.value, Filing.Status.CHANGE_REQUESTED.value, - Filing.Status.REJECTED.value] or + Filing.Status.REJECTED.value, + Filing.Status.WITHDRAWN.value] or (filing._status == Filing.Status.APPROVED.value and not filing.payment_token)): return # should not override status in the review process diff --git a/legal-api/src/legal_api/models/jurisdiction.py b/legal-api/src/legal_api/models/jurisdiction.py index 1705fc4188..b7f7934d65 100644 --- a/legal-api/src/legal_api/models/jurisdiction.py +++ b/legal-api/src/legal_api/models/jurisdiction.py @@ -15,6 +15,7 @@ from __future__ import annotations from sql_versioning import Versioned +from sqlalchemy import and_, or_ from .db import db from .filing import Filing @@ -61,6 +62,15 @@ def get_continuation_in_jurisdiction(cls, business_id) -> Jurisdiction: # pylint: disable=protected-access jurisdiction = (db.session.query(Jurisdiction).join(Filing). filter(Jurisdiction.business_id == business_id). - filter(Filing._filing_type == 'continuationIn'). + filter( + or_( + Filing._filing_type == 'continuationIn', + and_( + Filing._filing_type == 'conversion', + Filing._meta_data.op('->')('conversion'). + op('->>')('convFilingType') == 'continuationIn' + ) + ) + ). 
one_or_none()) return jurisdiction diff --git a/legal-api/src/legal_api/models/office.py b/legal-api/src/legal_api/models/office.py index f04d16e37d..974f2dad9e 100644 --- a/legal-api/src/legal_api/models/office.py +++ b/legal-api/src/legal_api/models/office.py @@ -34,7 +34,7 @@ class Office(db.Model, Versioned): # pylint: disable=too-few-public-methods id = db.Column(db.Integer, primary_key=True) office_type = db.Column('office_type', db.String(75), db.ForeignKey('office_types.identifier')) business_id = db.Column('business_id', db.Integer, db.ForeignKey('businesses.id'), index=True) - addresses = db.relationship('Address', lazy='dynamic', cascade='all, delete, delete-orphan') + addresses = db.relationship('Address', backref='office', lazy='dynamic', cascade='all, delete, delete-orphan') deactivated_date = db.Column('deactivated_date', db.DateTime(timezone=True), default=None) # relationships diff --git a/legal-api/src/legal_api/models/party_role.py b/legal-api/src/legal_api/models/party_role.py index 2d68828387..e2fcd1b949 100644 --- a/legal-api/src/legal_api/models/party_role.py +++ b/legal-api/src/legal_api/models/party_role.py @@ -38,6 +38,7 @@ class RoleTypes(Enum): LIQUIDATOR = 'liquidator' PROPRIETOR = 'proprietor' PARTNER = 'partner' + RECEIVER = 'receiver' __versioned__ = {} __tablename__ = 'party_roles' diff --git a/legal-api/src/legal_api/models/share_class.py b/legal-api/src/legal_api/models/share_class.py index 43ecc5da28..8e48b749a2 100644 --- a/legal-api/src/legal_api/models/share_class.py +++ b/legal-api/src/legal_api/models/share_class.py @@ -35,10 +35,11 @@ class ShareClass(db.Model, Versioned): # pylint: disable=too-many-instance-attr name = db.Column('name', db.String(1000), index=True) priority = db.Column('priority', db.Integer, nullable=True) max_share_flag = db.Column('max_share_flag', db.Boolean, unique=False, default=False) - max_shares = db.Column('max_shares', db.Integer, nullable=True) + max_shares = db.Column('max_shares', 
db.Numeric(20), nullable=True) par_value_flag = db.Column('par_value_flag', db.Boolean, unique=False, default=False) par_value = db.Column('par_value', db.Float, nullable=True) currency = db.Column('currency', db.String(10), nullable=True) + currency_additional = db.Column('currency_additional', db.String(40), nullable=True) special_rights_flag = db.Column('special_rights_flag', db.Boolean, unique=False, default=False) # parent keys @@ -62,7 +63,7 @@ def json(self): 'name': self.name, 'priority': self.priority, 'hasMaximumShares': self.max_share_flag, - 'maxNumberOfShares': self.max_shares, + 'maxNumberOfShares': int(self.max_shares) if self.max_shares else None, 'hasParValue': self.par_value_flag, 'parValue': self.par_value, 'currency': self.currency, @@ -121,3 +122,4 @@ def receive_before_change(mapper, connection, target): # pylint: disable=unused else: share_class.par_value = None share_class.currency = None + share_class.currency_additional = None diff --git a/legal-api/src/legal_api/models/share_series.py b/legal-api/src/legal_api/models/share_series.py index 029bd6069c..de32a0940c 100644 --- a/legal-api/src/legal_api/models/share_series.py +++ b/legal-api/src/legal_api/models/share_series.py @@ -33,7 +33,7 @@ class ShareSeries(db.Model, Versioned): # pylint: disable=too-many-instance-att name = db.Column('name', db.String(1000), index=True) priority = db.Column('priority', db.Integer, nullable=True) max_share_flag = db.Column('max_share_flag', db.Boolean, unique=False, default=False) - max_shares = db.Column('max_shares', db.Integer, nullable=True) + max_shares = db.Column('max_shares', db.Numeric(20), nullable=True) special_rights_flag = db.Column('special_rights_flag', db.Boolean, unique=False, default=False) # parent keys @@ -52,7 +52,7 @@ def json(self): 'name': self.name, 'priority': self.priority, 'hasMaximumShares': self.max_share_flag, - 'maxNumberOfShares': self.max_shares, + 'maxNumberOfShares': int(self.max_shares) if self.max_shares else None, 
'hasRightsOrRestrictions': self.special_rights_flag } return share_series diff --git a/legal-api/src/legal_api/reports/business_document.py b/legal-api/src/legal_api/reports/business_document.py index 8d8c707050..0de9354e97 100644 --- a/legal-api/src/legal_api/reports/business_document.py +++ b/legal-api/src/legal_api/reports/business_document.py @@ -42,6 +42,7 @@ def __init__(self, business, document_key): self._document_key = document_key self._report_date_time = LegislationDatetime.now() self._epoch_filing_date = None + self._tombstone_filing_date = None def get_pdf(self): """Render the business document pdf response.""" @@ -131,6 +132,7 @@ def _get_template_data(self, get_json=False): business_json['registrarInfo'] = {**RegistrarInfo.get_registrar_info(self._report_date_time)} self._set_description(business_json) self._set_epoch_date(business_json) + self._set_tombstone_date() if self._document_key in ['lseal', 'summary']: self._set_addresses(business_json) @@ -186,6 +188,12 @@ def _set_epoch_date(self, business: dict): self._epoch_filing_date = epoch_filing[0].effective_date business['business']['epochFilingDate'] = self._epoch_filing_date.isoformat() + def _set_tombstone_date(self): + """Set the tombstone filing date if the business is tombstone.""" + tombstone_filing = Filing.get_filings_by_status(self._business.id, [Filing.Status.TOMBSTONE]) + if tombstone_filing: + self._tombstone_filing_date = tombstone_filing[0].effective_date + def _set_description(self, business: dict): """Set business descriptors used by json and pdf template.""" legal_type = self._business.legal_type @@ -256,9 +264,13 @@ def _set_dates(self, business: dict): # pylint: disable=too-many-branches business['formatted_registration_date'] = LegislationDatetime.\ format_as_report_string(datetime.fromisoformat(registration_datetime_str)) # founding dates - founding_datetime = LegislationDatetime.as_legislation_timezone(self._business.founding_date) - business['formatted_founding_date_time'] 
= LegislationDatetime.format_as_report_string(founding_datetime) - business['formatted_founding_date'] = founding_datetime.strftime(OUTPUT_DATE_FORMAT) + if self._business.founding_date: + founding_datetime = LegislationDatetime.as_legislation_timezone(self._business.founding_date) + business['formatted_founding_date_time'] = LegislationDatetime.format_as_report_string(founding_datetime) + business['formatted_founding_date'] = founding_datetime.strftime(OUTPUT_DATE_FORMAT) + else: + business['formatted_founding_date_time'] = 'Not Available' + business['formatted_founding_date'] = 'Not Available' # dissolution dates if self._business.dissolution_date: dissolution_datetime = LegislationDatetime.as_legislation_timezone(self._business.dissolution_date) @@ -315,18 +327,31 @@ def _set_business_state_changes(self, business: dict): 'dissolved', 'restoration', 'voluntaryDissolution', 'Involuntary Dissolution', - 'voluntaryLiquidation', 'putBackOn', + 'voluntaryLiquidation', 'putBackOn', 'putBackOff', 'continuationOut']): state_filings.append(self._format_state_filing(filing)) - # If it has amalgamating businesses - if (amalgamating_businesses := AmalgamatingBusiness.get_all_revision(self._business.id)): + # TODO: add conv liquidation etc. 
in the future work + for filing in Filing.get_conversion_filings_by_conv_types(self._business.id, ['dissolution', + 'continuationOut', + 'putBackOn', + 'restoration']): + state_filings.append(self._format_state_filing(filing)) + + # If it has linked amalgamating businesses + # set placeholder info if this business is tombstone + tombstone = self._business.is_tombstone + if (amalgamating_businesses := AmalgamatingBusiness.get_all_revision(self._business.id, tombstone)): for amalgamating_business in amalgamating_businesses: - amalgamation = Amalgamation.get_revision_by_id(amalgamating_business.transaction_id, - amalgamating_business.amalgamation_id) + if tombstone: + amalgamation = Amalgamation.get_revision_by_id( + amalgamating_business.amalgamation_id, tombstone=True) + else: + amalgamation = Amalgamation.get_revision_by_id( + amalgamating_business.amalgamation_id, amalgamating_business.transaction_id) filing = Filing.find_by_id(amalgamation.filing_id) state_filing = self._format_state_filing(filing) - amalgamation_json = Amalgamation.get_revision_json(filing.transaction_id, filing.business_id) + amalgamation_json = Amalgamation.get_revision_json(filing.transaction_id, filing.business_id, tombstone) state_filings.append({ **state_filing, **amalgamation_json @@ -390,40 +415,61 @@ def _set_business_changes(self, business: dict): 'Not Available') name_change_info['filingDateTime'] = filing.filing_date.isoformat() name_changes.append(name_change_info) + + # get name change info from conversion filing + for filing in Filing.get_conversion_filings_by_conv_types(self._business.id, ['changeOfName']): + filing_meta = filing.meta_data + name_change_info = {} + name_change_info['fromLegalName'] = filing_meta.get('changeOfName').get('fromLegalName', + 'Not Available') + name_change_info['toLegalName'] = filing_meta.get('changeOfName').get('toLegalName', + 'Not Available') + name_change_info['filingDateTime'] = filing.filing_date.isoformat() + 
name_changes.append(name_change_info) + business['nameChanges'] = name_changes business['alterations'] = alterations def _format_state_filing(self, filing: Filing) -> dict: """Format state change filing data.""" filing_info = {} + filing_meta = filing.meta_data + if filing.filing_type == 'conversion': + filing_type = filing_meta.get('conversion').get('convFilingType') + filing_sub_type = filing_meta.get('conversion').get('convFilingSubType') + else: + filing_type = filing.filing_type + filing_sub_type = filing.filing_sub_type - filing_info['filingType'] = filing.filing_type - filing_info['filingSubType'] = filing.filing_sub_type + filing_info['filingType'] = filing_type + filing_info['filingSubType'] = filing_sub_type filing_info['filingDateTime'] = filing.filing_date.isoformat() filing_info['effectiveDateTime'] = filing.effective_date.isoformat() - filing_meta = filing.meta_data - if filing.filing_type == 'dissolution': + if filing_type == 'dissolution': filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, + _get_summary_display_name(filing_type, filing_meta['dissolution']['dissolutionType'], - self._business.legal_type) + self._business.legal_type, + None) if self._business.legal_type in ['SP', 'GP'] and filing_meta['dissolution']['dissolutionType'] == \ 'voluntary': filing_info['dissolution_date_str'] = LegislationDatetime.as_legislation_timezone_from_date_str( filing.filing_json['filing']['dissolution']['dissolutionDate']).strftime(OUTPUT_DATE_FORMAT) - elif filing.filing_type == 'restoration': + elif filing_type == 'restoration': filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, - filing.filing_sub_type, - self._business.legal_type) - if filing.filing_sub_type in ['limitedRestoration', 'limitedRestorationExtension']: + _get_summary_display_name(filing_type, + filing_sub_type, + self._business.legal_type, + None) + if filing_sub_type in ['limitedRestoration', 
'limitedRestorationExtension']: expiry_date = filing_meta['restoration']['expiry'] expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date) expiry_date = expiry_date.replace(minute=1) - filing_info['limitedRestorationExpiryDate'] = LegislationDatetime.format_as_report_string(expiry_date) - elif filing.filing_type == 'continuationOut': - filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing.filing_type, None, None) + filing_info['limitedRestorationExpiryDate'] = LegislationDatetime.\ + format_as_report_expiry_string_1159(expiry_date) + elif filing_type == 'continuationOut': + filing_info['filingName'] = BusinessDocument._get_summary_display_name(filing_type, None, None, None) country_code = filing_meta['continuationOut']['country'] region_code = filing_meta['continuationOut']['region'] @@ -437,19 +483,33 @@ def _format_state_filing(self, filing: Filing) -> dict: continuation_out_date = LegislationDatetime.as_legislation_timezone_from_date_str( filing_meta['continuationOut']['continuationOutDate']) filing_info['continuationOutDate'] = continuation_out_date.strftime(OUTPUT_DATE_FORMAT) + elif filing_type == 'putBackOff': + put_back_off = filing_meta.get('putBackOff') + reason = put_back_off.get('reason') + expiry_date_str = put_back_off.get('expiryDate') + filing_info['filingName'] = BusinessDocument.\ + _get_summary_display_name(filing_type, None, None, reason) + filing_info['reason'] = reason + expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date_str) + filing_info['expiryDate'] = expiry_date.strftime(OUTPUT_DATE_FORMAT) + filing_info['historicalDate'] = LegislationDatetime.format_as_next_legislation_day(expiry_date_str) else: filing_info['filingName'] = BusinessDocument.\ - _get_summary_display_name(filing.filing_type, None, None) + _get_summary_display_name(filing_type, None, None, None) return filing_info def _set_amalgamation_details(self, business: dict): """Set amalgamation 
filing data.""" amalgamated_businesses = [] - filings = Filing.get_filings_by_types(self._business.id, ['amalgamationApplication']) + # get amalgamation info from either general filing or conversion filing + filings = Filing.get_filings_by_types(self._business.id, ['amalgamationApplication']) or \ + Filing.get_conversion_filings_by_conv_types(self._business.id, ['amalgamationApplication']) if filings: amalgamation_application = filings[0] business['business']['amalgamatedEntity'] = True - if self._epoch_filing_date and amalgamation_application.effective_date < self._epoch_filing_date: + if (self._epoch_filing_date and amalgamation_application.effective_date < self._epoch_filing_date) or\ + (self._tombstone_filing_date and + amalgamation_application.effective_date < self._tombstone_filing_date): # imported from COLIN amalgamated_businesses_info = { 'legalName': 'Not Available', @@ -503,48 +563,65 @@ def _set_liquidation_details(self, business: dict): def _set_continuation_in_details(self, business: dict): """Set continuation in filing data.""" continuation_in_info = {} - continuation_in_filing = Filing.get_filings_by_types(self._business.id, ['continuationIn']) + # get continuation in info from either general filing or conversion filing + continuation_in_filing = Filing.get_filings_by_types(self._business.id, ['continuationIn']) or \ + Filing.get_conversion_filings_by_conv_types(self._business.id, ['continuationIn']) if continuation_in_filing: continuation_in_filing = continuation_in_filing[0] jurisdiction = Jurisdiction.get_continuation_in_jurisdiction(continuation_in_filing.business_id) + if not jurisdiction: + return + # Format country and region region_code = jurisdiction.region country_code = jurisdiction.country - country = pycountry.countries.get(alpha_2=country_code) - region = None - if region_code and region_code.upper() != 'FEDERAL': - region = pycountry.subdivisions.get(code=f'{country_code}-{region_code}') - location_jurisdiction = f'{region.name}, 
{country.name}' if region else country.name + location_jurisdiction = 'Not Available' + if country_code and country_code.upper() != 'UNKNOWN': + country = pycountry.countries.get(alpha_2=country_code) + region = None + if region_code and region_code.upper() != 'FEDERAL': + region = pycountry.subdivisions.get(code=f'{country_code}-{region_code}') + location_jurisdiction = f'{region.name}, {country.name}' if region else country.name # Format incorporation date - incorp_date = LegislationDatetime.as_legislation_timezone(jurisdiction.incorporation_date) - formatted_incorporation_date = incorp_date.strftime(OUTPUT_DATE_FORMAT) + if jurisdiction.incorporation_date: + incorp_date = LegislationDatetime.as_legislation_timezone(jurisdiction.incorporation_date) + formatted_incorporation_date = incorp_date.strftime(OUTPUT_DATE_FORMAT) + else: + formatted_incorporation_date = 'Not Available' # Format Jurisdiction data jurisdiction_info = { 'id': jurisdiction.id, 'jurisdiction': location_jurisdiction, - 'identifier': jurisdiction.identifier, - 'legal_name': jurisdiction.legal_name, + 'identifier': jurisdiction.identifier or 'Not Available', + 'legal_name': jurisdiction.legal_name or 'Not Available', 'tax_id': jurisdiction.tax_id, 'incorporation_date': formatted_incorporation_date, - 'expro_identifier': jurisdiction.expro_identifier, - 'expro_legal_name': jurisdiction.expro_legal_name, + 'expro_identifier': jurisdiction.expro_identifier or 'Not Available', + 'expro_legal_name': jurisdiction.expro_legal_name or 'Not Available', 'business_id': jurisdiction.business_id, 'filing_id': jurisdiction.filing_id, } + continuation_in_info['foreignJurisdiction'] = jurisdiction_info business['continuationIn'] = continuation_in_info @staticmethod def _format_address(address): + address['streetAddress'] = address.get('streetAddress') or '' address['streetAddressAdditional'] = address.get('streetAddressAdditional') or '' + address['addressCity'] = address.get('addressCity') or '' 
address['addressRegion'] = address.get('addressRegion') or '' address['deliveryInstructions'] = address.get('deliveryInstructions') or '' + address['postalCode'] = address.get('postalCode') or '' country = address['addressCountry'] - country = pycountry.countries.search_fuzzy(country)[0].name + if country: + country = pycountry.countries.search_fuzzy(country)[0].name + else: + country = '' address['addressCountry'] = country address['addressCountryDescription'] = country return address @@ -566,7 +643,9 @@ def _get_environment(): @staticmethod def _get_summary_display_name(filing_type: str, filing_sub_type: Optional[str], - legal_type: Optional[str]) -> str: + legal_type: Optional[str], + reason: Optional[str] + ) -> str: if filing_type == 'dissolution': if filing_sub_type == 'voluntary': if legal_type in ['SP', 'GP']: @@ -577,6 +656,8 @@ def _get_summary_display_name(filing_type: str, return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][filing_sub_type] elif filing_type == 'restoration': return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][filing_sub_type] + elif filing_type == 'putBackOff': + return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type][reason] else: return BusinessDocument.FILING_SUMMARY_DISPLAY_NAME[filing_type] @@ -594,7 +675,8 @@ def _get_legal_type_description(legal_type: str) -> str: 'GP': 'Dissolution Application' }, 'involuntary': 'Involuntary Dissolution', - 'administrative': 'Administrative Dissolution' + 'administrative': 'Administrative Dissolution', + 'unknown': 'Dissolved' }, 'restorationApplication': 'Restoration Application', 'restoration': { @@ -608,6 +690,9 @@ def _get_legal_type_description(legal_type: str) -> str: 'Involuntary Dissolution': 'Involuntary Dissolution', 'voluntaryLiquidation': 'Voluntary Liquidation', 'putBackOn': 'Correction - Put Back On', + 'putBackOff': { + 'Limited Restoration Expired': 'Dissolved due to expired Limited Restoration' + }, 'continuationOut': 'Continuation Out' } diff 
--git a/legal-api/src/legal_api/reports/report.py b/legal-api/src/legal_api/reports/report.py index facc8af50c..527135d8a0 100644 --- a/legal-api/src/legal_api/reports/report.py +++ b/legal-api/src/legal_api/reports/report.py @@ -24,7 +24,7 @@ from dateutil.relativedelta import relativedelta from flask import current_app, jsonify -from legal_api.core.meta.filing import FILINGS +from legal_api.core.meta.filing import FILINGS, FilingMeta from legal_api.models import ( AmalgamatingBusiness, Amalgamation, @@ -38,10 +38,16 @@ ) from legal_api.models.business import ASSOCIATION_TYPE_DESC from legal_api.reports.registrar_meta import RegistrarInfo -from legal_api.services import MinioService, VersionedBusinessDetailsService, flags +from legal_api.services import ( + MinioService, + VersionedBusinessDetailsService, + DocumentRecordService, + flags +) from legal_api.utils.auth import jwt from legal_api.utils.formatting import float_to_str from legal_api.utils.legislation_datetime import LegislationDatetime +from legal_api.constants import DocumentClasses OUTPUT_DATE_FORMAT: Final = '%B %-d, %Y' @@ -66,9 +72,18 @@ def get_pdf(self, report_type=None): return self._get_report() def _get_static_report(self): + document_type = ReportMeta.static_reports[self._report_key]['documentType'] + document_class = ReportMeta.static_reports[self._report_key]['documentClass'] + document: Document = self._filing.documents.filter(Document.type == document_type).first() - response = MinioService.get_file(document.file_key) + if flags.is_on('enable-document-records'): + response = DocumentRecordService.download_document( + document_class, + document.file_key + ) + else: + response = MinioService.get_file(document.file_key) return current_app.response_class( response=response.data, status=response.status, @@ -174,6 +189,7 @@ def _substitute_template_parts(template_code): 'change-of-registration/addresses', 'change-of-registration/proprietor', 'change-of-registration/partner', +
'notice-of-withdrawal/recordToBeWithdrawn', 'incorporation-application/benefitCompanyStmt', 'incorporation-application/completingParty', 'incorporation-application/effectiveDate', @@ -306,6 +322,8 @@ def _format_filing_json(self, filing): # pylint: disable=too-many-branches, too self._format_continuation_in_data(filing) elif self._report_key == 'certificateOfContinuation': self._format_certificate_of_continuation_in_data(filing) + elif self._report_key == 'noticeOfWithdrawal': + self._format_notice_of_withdrawal_data(filing) else: # set registered office address from either the COA filing or status quo data in AR filing with suppress(KeyError): @@ -351,15 +369,20 @@ def _set_tax_id(self, filing): def _set_description(self, filing): legal_type = None - if self._filing.filing_type == 'alteration': - legal_type = self._filing.filing_json.get('filing').get('alteration').get('business', {}).get('legalType') + filing_json = self._filing.filing_json.get('filing', {}) + filing_type = self._filing.filing_type + + # Check for alteration filing type + if filing_type == 'alteration': + legal_type = filing_json.get('alteration', {}).get('business', {}).get('legalType') else: - legal_type = (self._filing.filing_json - .get('filing') - .get(self._filing.filing_type) - .get('nameRequest', {}) - .get('legalType')) + legal_type = filing_json.get(filing_type, {}).get('nameRequest', {}).get('legalType') + + # Fallback: Check the general business section + if not legal_type: + legal_type = filing_json.get('business', {}).get('legalType') + # Final fallback: Check the _business object if not legal_type and self._business: legal_type = self._business.legal_type @@ -586,8 +609,10 @@ def _format_restoration_data(self, filing): filing['nameRequest'] = filing['restoration'].get('nameRequest') filing['parties'] = filing['restoration'].get('parties') filing['offices'] = filing['restoration']['offices'] - meta_data = self._filing.meta_data or {} - filing['fromLegalName'] = 
meta_data.get('restoration', {}).get('fromLegalName') + if self._filing.meta_data: # available when filing is COMPLETED + filing['fromLegalName'] = self._filing.meta_data.get('restoration', {}).get('fromLegalName') + else: + filing['fromLegalName'] = self._business.legal_name if relationships := filing['restoration'].get('relationships'): filing['relationshipsDesc'] = ', '.join(relationships) @@ -600,14 +625,16 @@ def _format_restoration_data(self, filing): filing['applicationDate'] = filing['restoration'].get('applicationDate', 'Not Applicable') filing['noticeDate'] = filing['restoration'].get('noticeDate', 'Not Applicable') - business_dissolution = VersionedBusinessDetailsService.find_last_value_from_business_revision( - self._filing.transaction_id, self._business.id, is_dissolution_date=True) - filing['dissolutionLegalName'] = business_dissolution.legal_name + if self._filing.transaction_id: # available when filing is COMPLETED + business_dissolution = VersionedBusinessDetailsService.find_last_value_from_business_revision( + self._filing.transaction_id, self._business.id, is_dissolution_date=True) + filing['dissolutionLegalName'] = business_dissolution.legal_name + else: + filing['dissolutionLegalName'] = self._business.legal_name - if expiry_date := meta_data.get('restoration', {}).get('expiry'): + if expiry_date := filing['restoration'].get('expiry'): expiry_date = LegislationDatetime.as_legislation_timezone_from_date_str(expiry_date) - expiry_date = expiry_date.replace(minute=1) - filing['restoration_expiry_date'] = LegislationDatetime.format_as_report_string(expiry_date) + filing['restoration_expiry_date'] = LegislationDatetime.format_as_report_expiry_string_1159(expiry_date) def _format_consent_continuation_out_data(self, filing): cco = ConsentContinuationOut.get_by_filing_id(self._filing.id) @@ -677,9 +704,14 @@ def _format_alteration_data(self, filing): # Get current list of translations in alteration. 
None if it is deletion if 'nameTranslations' in filing['alteration']: filing['listOfTranslations'] = filing['alteration'].get('nameTranslations', []) - # Get previous translations for deleted translations. No record created in aliases version for deletions - filing['previousNameTranslations'] = VersionedBusinessDetailsService.get_name_translations_before_revision( - self._filing.transaction_id, self._business.id) + if self._filing.transaction_id: + # Get previous translations for deleted translations. No record created in version for deletions + filing['previousNameTranslations'] = ( + VersionedBusinessDetailsService.get_name_translations_before_revision( + self._filing.transaction_id, + self._business.id)) + else: + filing['previousNameTranslations'] = [alias.json for alias in self._business.aliases.all()] if filing['alteration'].get('shareStructure', None): filing['shareClasses'] = filing['alteration']['shareStructure'].get('shareClasses', []) dates = filing['alteration']['shareStructure'].get('resolutionDates', []) @@ -748,6 +780,18 @@ def _format_amalgamation_data(self, filing): def _format_certificate_of_amalgamation_data(self, filing): self._set_amalgamating_businesses(filing) + def _format_notice_of_withdrawal_data(self, filing): + withdrawn_filing_id = filing['noticeOfWithdrawal']['filingId'] + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + formatted_withdrawn_filing_type = FilingMeta.get_display_name( + withdrawn_filing.filing_json['filing']['business']['legalType'], + withdrawn_filing.filing_type, + withdrawn_filing.filing_sub_type + ) + filing['withdrawnFilingType'] = formatted_withdrawn_filing_type + withdrawn_filing_date = LegislationDatetime.as_legislation_timezone(withdrawn_filing.effective_date) + filing['withdrawnFilingEffectiveDate'] = LegislationDatetime.format_as_report_string(withdrawn_filing_date) + def _set_amalgamating_businesses(self, filing): amalgamating_businesses = [] business_legal_name = None @@ -1058,16 +1102,17 @@ def 
_format_correction_data(self, filing): def _format_name_request_data(self, filing, versioned_business: Business): name_request_json = filing.get('correction').get('nameRequest', {}) - filing['nameRequest'] = name_request_json - prev_legal_name = versioned_business.legal_name + if name_request_json: + filing['nameRequest'] = name_request_json + prev_legal_name = versioned_business.legal_name - if name_request_json and not (new_legal_name := name_request_json.get('legalName')): - new_legal_name = Business.generate_numbered_legal_name(name_request_json['legalType'], - versioned_business.identifier) + if name_request_json and not (new_legal_name := name_request_json.get('legalName')): + new_legal_name = Business.generate_numbered_legal_name(name_request_json['legalType'], + versioned_business.identifier) - if new_legal_name and prev_legal_name != new_legal_name: - filing['previousLegalName'] = prev_legal_name - filing['newLegalName'] = new_legal_name + if new_legal_name and prev_legal_name != new_legal_name: + filing['previousLegalName'] = prev_legal_name + filing['newLegalName'] = new_legal_name def _format_name_translations_data(self, filing, prev_completed_filing: Filing): filing['listOfTranslations'] = filing['correction'].get('nameTranslations', []) @@ -1150,6 +1195,8 @@ def _format_party_data(self, filing, prev_completed_filing: Filing): filing['ceasedParties'] = parties_deleted def _format_share_class_data(self, filing, prev_completed_filing: Filing): # pylint: disable=too-many-locals; # noqa: E501; + if filing.get('correction').get('shareStructure') is None: + return filing['shareClasses'] = filing.get('correction').get('shareStructure', {}).get('shareClasses') dates = filing['correction']['shareStructure'].get('resolutionDates', []) formatted_dates = [ @@ -1460,20 +1507,28 @@ class ReportMeta: # pylint: disable=too-few-public-methods 'certificateOfContinuation': { 'filingDescription': 'Certificate of Continuation', 'fileName': 'certificateOfContinuation' + }, 
+ 'noticeOfWithdrawal': { + 'filingDescription': 'Notice of Withdrawal', + 'fileName': 'noticeOfWithdrawal' } } static_reports = { 'certifiedRules': { + 'documentClass': DocumentClasses.COOP.value, 'documentType': 'coop_rules' }, 'certifiedMemorandum': { + 'documentClass': DocumentClasses.COOP.value, 'documentType': 'coop_memorandum' }, 'affidavit': { + 'documentClass': DocumentClasses.CORP.value, 'documentType': 'affidavit' }, 'uploadedCourtOrder': { + 'documentClass': DocumentClasses.CORP.value, 'documentType': 'court_order' } } diff --git a/legal-api/src/legal_api/resources/endpoints.py b/legal-api/src/legal_api/resources/endpoints.py index 4f3963a71a..2a0521e72c 100644 --- a/legal-api/src/legal_api/resources/endpoints.py +++ b/legal-api/src/legal_api/resources/endpoints.py @@ -107,7 +107,7 @@ def _redirect(self, path, code=302): def _set_access_control_header(self, response): # pylint: disable=unused-variable response.headers['Access-Control-Allow-Origin'] = '*' - response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type' + response.headers['Access-Control-Allow-Headers'] = 'Authorization, Content-Type, App-Name' def _mount_endpoints(self): """Mount the endpoints of the system.""" diff --git a/legal-api/src/legal_api/resources/v1/business/business_filings.py b/legal-api/src/legal_api/resources/v1/business/business_filings.py index 5057b89cde..768916db10 100644 --- a/legal-api/src/legal_api/resources/v1/business/business_filings.py +++ b/legal-api/src/legal_api/resources/v1/business/business_filings.py @@ -40,9 +40,11 @@ DocumentMetaService, MinioService, RegistrationBootstrapService, + DocumentRecordService, authorized, namex, queue, + flags ) from legal_api.services.authz import is_allowed from legal_api.services.filings import validate @@ -244,7 +246,10 @@ def delete(identifier, filing_id=None): # pylint: disable=too-many-branches return ListFilingResource._create_deletion_locked_response(identifier, filing) try: - 
ListFilingResource._delete_from_minio(filing) + if flags.is_on('enable-document-records'): + ListFilingResource._delete_from_drs(filing) + else: + ListFilingResource._delete_from_minio(filing) filing.delete() except BusinessException as err: return jsonify({'errors': [{'error': err.error}, ]}), err.status_code @@ -287,6 +292,36 @@ def _delete_from_minio(filing): .get('fileKey', None)): MinioService.delete_file(file_key) + @staticmethod + def _delete_from_drs(filing): + document_service_id = '' + if (filing.filing_type == Filing.FILINGS['incorporationApplication'].get('name') + and (cooperative := filing.filing_json + .get('filing', {}) + .get('incorporationApplication', {}) + .get('cooperative', None))) or \ + (filing.filing_type == Filing.FILINGS['alteration'].get('name') + and (cooperative := filing.filing_json + .get('filing', {}) + .get('alteration', {}))): + if rules_file_key := cooperative.get('rulesFileKey', None): + document_service_id = rules_file_key + if memorandum_file_key := cooperative.get('memorandumFileKey', None): + document_service_id = memorandum_file_key + elif filing.filing_type == Filing.FILINGS['dissolution'].get('name') \ + and (affidavit_file_key := filing.filing_json + .get('filing', {}) + .get('dissolution', {}) + .get('affidavitFileKey', None)): + document_service_id = affidavit_file_key + elif filing.filing_type == Filing.FILINGS['courtOrder'].get('name') \ + and (file_key := filing.filing_json + .get('filing', {}) + .get('courtOrder', {}) + .get('fileKey', None)): + document_service_id = file_key + DocumentRecordService.delete_document(document_service_id) + @staticmethod def _create_deletion_locked_response(identifier, filing): business = Business.find_by_identifier(identifier) diff --git a/legal-api/src/legal_api/resources/v2/business/business.py b/legal-api/src/legal_api/resources/v2/business/business.py index d6cfc10c42..95d2dfa310 100644 --- a/legal-api/src/legal_api/resources/v2/business/business.py +++ 
b/legal-api/src/legal_api/resources/v2/business/business.py @@ -80,7 +80,6 @@ def get_businesses(identifier: str): recent_filing_json = CoreFiling.get_most_recent_filing_json(business.id, None, jwt) if recent_filing_json: business_json['submitter'] = recent_filing_json['filing']['header']['submitter'] - business_json['lastModified'] = recent_filing_json['filing']['header']['date'] allowed_filings = str(request.args.get('allowed_filings', None)).lower() == 'true' if allowed_filings: diff --git a/legal-api/src/legal_api/resources/v2/business/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_documents.py index 4563a8a4bb..29a16273ff 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_documents.py @@ -21,7 +21,7 @@ from legal_api.models import Business, Filing from legal_api.models.document import Document, DocumentType from legal_api.reports.business_document import BusinessDocument -from legal_api.services import authorized +from legal_api.services import authorized, flags from legal_api.services.business import validate_document_request from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime @@ -56,6 +56,15 @@ def get_business_documents(identifier: str, document_name: str = None): response_message = {'errors': err.msg} return jsonify(response_message), err.code + # Hide business summary for tombstone corps + if ( + not flags.is_on('enable-business-summary-for-migrated-corps') and + business.is_tombstone and + business.legal_type in Business.CORPS and + document_name == 'summary' + ): + return {}, HTTPStatus.NOT_FOUND + if document_name: if 'application/pdf' in request.accept_mimetypes: return BusinessDocument(business, document_name).get_pdf() @@ -70,9 +79,17 @@ def _get_document_list(business): base_url = base_url[:base_url.find('/api')] doc_url = url_for('API2.get_business_documents', 
**{'identifier': business.identifier, 'document_name': None}) - business_documents = ['summary'] documents = {'documents': {}} + # Hide business summary for tombstone corps + if ( + not flags.is_on('enable-business-summary-for-migrated-corps') and + business.is_tombstone and + business.legal_type in Business.CORPS + ): + return jsonify(documents), HTTPStatus.OK + + business_documents = ['summary'] for doc in business_documents: documents['documents'][doc] = f'{base_url}{doc_url}/{doc}' diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py index 5b62e18a3e..a5663e8ce5 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_documents.py @@ -26,7 +26,7 @@ from legal_api.exceptions import ErrorCode, get_error_message from legal_api.models import Business, Document, Filing as FilingModel # noqa: I001 from legal_api.reports import get_pdf -from legal_api.services import MinioService, authorized +from legal_api.services import MinioService, authorized, DocumentRecordService from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime from legal_api.utils.util import cors_preflight @@ -45,6 +45,7 @@ @cross_origin(origin='*') @jwt.requires_auth def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None, file_key: str = None): + # pylint: disable=too-many-branches """Return a JSON object with meta information about the Service.""" # basic checks if not authorized(identifier, jwt, ['view', ]): @@ -63,14 +64,23 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None message=get_error_message(ErrorCode.MISSING_BUSINESS, **{'identifier': identifier}) ), HTTPStatus.NOT_FOUND - if not (filing := Filing.get(identifier, filing_id)): + filing = 
Filing.get(identifier, filing_id) + if filing and identifier.startswith('T') and filing.id != filing_id: + withdrawn_filing = Filing.get_by_withdrawn_filing_id(filing_id=filing_id, + withdrawn_filing_id=filing.id, + filing_type=Filing.FilingTypes.NOTICEOFWITHDRAWAL) + if withdrawn_filing: + filing = withdrawn_filing + + if not filing: return jsonify( message=get_error_message(ErrorCode.FILING_NOT_FOUND, **{'filing_id': filing_id, 'identifier': identifier}) ), HTTPStatus.NOT_FOUND if not legal_filing_name and not file_key: - if identifier.startswith('T') and filing.status == Filing.Status.COMPLETED: + if identifier.startswith('T') and filing.status == Filing.Status.COMPLETED and \ + filing.filing_type != Filing.FilingTypes.NOTICEOFWITHDRAWAL: return {'documents': {}}, HTTPStatus.OK return _get_document_list(business, filing) @@ -82,6 +92,13 @@ def get_documents(identifier: str, filing_id: int, legal_filing_name: str = None return get_pdf(filing.storage, legal_filing_name) elif file_key and (document := Document.find_by_file_key(file_key)): if document.filing_id == filing.id: # make sure the file belongs to this filing + if document.file_key.startswith('DS'): # docID from DRS + response = DocumentRecordService.download_document('CORP', document.file_key) + return current_app.response_class( + response=response, + status=HTTPStatus.OK, + mimetype='application/pdf' + ) response = MinioService.get_file(document.file_key) return current_app.response_class( response=response.data, @@ -106,11 +123,13 @@ def _get_receipt(business: Business, filing: Filing, token): Filing.Status.COMPLETED, Filing.Status.CORRECTED, Filing.Status.PAID, + Filing.Status.WITHDRAWN ): return {}, HTTPStatus.BAD_REQUEST effective_date = None - if filing.storage.effective_date.date() != filing.storage.filing_date.date(): + if filing.storage.effective_date.date() != filing.storage.filing_date.date() \ + or filing.filing_type == 'noticeOfWithdrawal': effective_date = 
LegislationDatetime.format_as_report_string(filing.storage.effective_date) headers = {'Authorization': 'Bearer ' + token} @@ -141,15 +160,15 @@ def _get_corp_name(business, filing): if business: return business.legal_name - name_request = (filing.filing_json - .get('filing') - .get(filing.filing_type) - .get('nameRequest', {})) - if name_request.get('legalName'): - return name_request.get('legalName') + filing_json = filing.filing_json.get('filing', {}) + name_request = filing_json.get(filing.filing_type, {}).get('nameRequest', {}) + + legal_name = name_request.get('legalName') or filing_json.get('business', {}).get('legalName') + if legal_name: + return legal_name - legal_type = name_request.get('legalType') + legal_type = name_request.get('legalType') or filing_json.get('business', {}).get('legalType') if legal_type: - return Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') + return Business.BUSINESSES.get(legal_type, {}).get('numberedDescription', '') return '' diff --git a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py index 2a9a9821d9..808bdc7ca2 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py +++ b/legal-api/src/legal_api/resources/v2/business/business_filings/business_filings.py @@ -37,6 +37,7 @@ import legal_api.reports from legal_api.constants import BOB_DATE from legal_api.core import Filing as CoreFiling +from legal_api.core.constants import REDACTED_STAFF_SUBMITTER from legal_api.exceptions import BusinessException from legal_api.models import ( Address, @@ -57,7 +58,9 @@ SYSTEM_ROLE, MinioService, RegistrationBootstrapService, + DocumentRecordService, authorized, + flags, namex, queue, ) @@ -77,6 +80,7 @@ class QueryModel(BaseModel): draft: Optional[bool] only_validate: Optional[bool] + document_id: Optional[bool] FilingT = TypeVar('FilingT') @@ -198,14 +202,20 @@ def
delete_filings(identifier, filing_id=None): if err_code: return jsonify({'message': _(err_message)}), err_code + if filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + ListFilingResource.unlink_now_and_withdrawn_filing(filing) + filing_type = filing.filing_type filing_json = filing.filing_json filing.delete() with suppress(Exception): - ListFilingResource.delete_from_minio(filing_type, filing_json) + if flags.is_on('enable-document-records'): + ListFilingResource.delete_from_drs(filing_type, filing_json) + else: + ListFilingResource.delete_from_minio(filing_type, filing_json) - if identifier.startswith('T'): + if identifier.startswith('T') and filing_type != Filing.FILINGS['noticeOfWithdrawal']['name']: bootstrap = RegistrationBootstrap.find_by_identifier(identifier) if bootstrap: deregister_status = RegistrationBootstrapService.deregister_bootstrap(bootstrap) @@ -304,6 +314,19 @@ def get_single_filing(identifier: str, filing_id: int): filing_json = rv.json if rv.status == Filing.Status.PENDING.value: ListFilingResource.get_payment_update(filing_json) + if (rv.status == Filing.Status.WITHDRAWN.value or rv.storage.withdrawal_pending) and identifier.startswith('T'): + now_filing = ListFilingResource.get_notice_of_withdrawal(filing_json['filing']['header']['filingId']) + filing_json['filing']['noticeOfWithdrawal'] = now_filing.json + + submitter = now_filing.filing_submitter + if submitter and submitter.username and jwt: + if rv.redact_submitter(now_filing.submitter_roles, jwt): + submitter_displayname = REDACTED_STAFF_SUBMITTER + else: + submitter_displayname = submitter.display_name or submitter.username + + filing_json['filing']['noticeOfWithdrawal']['filing']['header']['submitter'] = submitter_displayname + elif (rv.status in [Filing.Status.CHANGE_REQUESTED.value, Filing.Status.APPROVED.value, Filing.Status.REJECTED.value] and @@ -365,7 +388,8 @@ def get_ledger_listing(identifier: str, user_jwt: JwtManager): filings =
CoreFiling.ledger(business.id, jwt=user_jwt, - statuses=[Filing.Status.COMPLETED.value, Filing.Status.PAID.value], + statuses=[Filing.Status.COMPLETED.value, Filing.Status.PAID.value, + Filing.Status.WITHDRAWN.value], start=ledger_start, size=ledger_size, effective_date=effective_date) @@ -461,6 +485,14 @@ def get_business_and_filing(identifier, filing_id=None) -> Tuple[Optional[Busine business = Business.find_by_identifier(identifier) return business, filing + @staticmethod + def get_notice_of_withdrawal(filing_id: str = None): + """Return a NoW by the withdrawn filing id.""" + filing = db.session.query(Filing). \ + filter(Filing.withdrawn_filing_id == filing_id).one_or_none() + + return filing + @staticmethod def put_basic_checks(identifier, filing, client_request, business) -> Tuple[dict, int]: """Perform basic checks to ensure put can do something.""" @@ -479,7 +511,8 @@ def put_basic_checks(identifier, filing, client_request, business) -> Tuple[dict if not filing_type: return ({'message': 'filing/header/name is a required property'}, HTTPStatus.BAD_REQUEST) - if filing_type not in CoreFiling.NEW_BUSINESS_FILING_TYPES and business is None: + if filing_type not in CoreFiling.NEW_BUSINESS_FILING_TYPES + [CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL] \ + and business is None: return ({'message': 'A valid business is required.'}, HTTPStatus.BAD_REQUEST) if client_request.method == 'PUT' and not filing: @@ -497,9 +530,14 @@ def check_authorization(identifier, filing_json: dict, # While filing IA business object will be None. Setting default values in that case. 
state = business.state if business else Business.State.ACTIVE + if business: + legal_type = business.legal_type + # for temporary business notice of withdraw, get legalType from filing json + elif filing_type == CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL.value: + legal_type = filing_json['filing'].get('business', {}).get('legalType') # for incorporationApplication and registration, get legalType from nameRequest - legal_type = business.legal_type if business else \ - filing_json['filing'][filing_type]['nameRequest'].get('legalType') + else: + legal_type = filing_json['filing'][filing_type]['nameRequest'].get('legalType') if not authorized(identifier, jwt, action=['edit']) or \ not is_allowed(business, state, filing_type, legal_type, jwt, filing_sub_type, filing): @@ -604,6 +642,7 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s filing = Filing() filing.business_id = business.id + try: filing.submitter_id = user.id filing.filing_json = ListFilingResource.sanitize_html_fields(json_input) @@ -620,12 +659,29 @@ def save_filing(client_request: LocalProxy, # pylint: disable=too-many-return-s datetime.datetime.fromisoformat(filing.filing_json['filing']['header']['effectiveDate']) \ if filing.filing_json['filing']['header'].get('effectiveDate', None) else datetime.datetime.utcnow() + filing.hide_in_ledger = ListFilingResource._hide_in_ledger(filing) + + if filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + ListFilingResource.link_now_and_withdrawn_filing(filing) + if business_identifier.startswith('T'): + filing.temp_reg = None filing.save() except BusinessException as err: return None, None, {'error': err.error}, err.status_code return business or bootstrap, filing, None, None + @staticmethod + def _hide_in_ledger(filing: Filing) -> bool: + """Hide the filing in the ledger.""" + hide_in_ledger = str(request.headers.get('hide-in-ledger', None)).lower() + if (filing.filing_type == 'adminFreeze' or + (filing.filing_type
== 'dissolution' and filing.filing_sub_type == 'involuntary') or + (jwt.validate_roles([SYSTEM_ROLE]) and hide_in_ledger == 'true')): + return True + + return False + @staticmethod def _save_colin_event_ids(filing: Filing, business: Union[Business, RegistrationBootstrap]): try: @@ -668,6 +724,9 @@ def get_filing_types(business: Business, filing_json: dict): # pylint: disable= if filing_type in CoreFiling.NEW_BUSINESS_FILING_TYPES: legal_type = filing_json['filing'][filing_type]['nameRequest']['legalType'] + elif business.identifier.startswith('T') and \ + filing_type == CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL: + legal_type = filing_json['filing'].get('business', None).get('legalType') else: legal_type = business.legal_type @@ -683,7 +742,8 @@ def get_filing_types(business: Business, filing_json: dict): # pylint: disable= legal_type, priority_flag, waive_fees_flag)) - elif filing_type in ['courtOrder', 'registrarsNotation', 'registrarsOrder', 'putBackOn', 'adminFreeze']: + elif filing_type in ('adminFreeze', 'putBackOff', 'putBackOn', + 'registrarsNotation', 'registrarsOrder'): filing_type_code = Filing.FILINGS.get(filing_type, {}).get('code') filing_types.append({ 'filingTypeCode': filing_type_code, @@ -785,6 +845,28 @@ def get_filing_types_for_dissolution(filing_json: dict, legal_type: str, priorit }) return filing_types + @staticmethod + def get_withdrawn_filing(filing: Filing) -> Filing: + """Get withdrawn filing from NoW filing ID.""" + withdrawn_filing_id = filing.filing_json['filing']['noticeOfWithdrawal']['filingId'] + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + return withdrawn_filing + + @staticmethod + def link_now_and_withdrawn_filing(filing: Filing): + """Add withdrawn filing ID to the NoW and set the withdrawal pending flag to True on the withdrawn filing.""" + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) + withdrawn_filing.withdrawal_pending = True + withdrawn_filing.save() + filing.withdrawn_filing_id = 
withdrawn_filing.id + + @staticmethod + def unlink_now_and_withdrawn_filing(filing: Filing): + """Set the withdrawal pending flag to False when a NoW is deleted.""" + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) + withdrawn_filing.withdrawal_pending = False + withdrawn_filing.save() + @staticmethod def create_invoice(business: Business, # pylint: disable=too-many-locals,too-many-branches,too-many-statements filing: Filing, @@ -826,11 +908,20 @@ def create_invoice(business: Business, # pylint: disable=too-many-locals,too-ma mailing_address = business.mailing_address.one_or_none() corp_type = business.legal_type if business.legal_type else \ filing.json['filing']['business'].get('legalType') + # deal with withdrawing a new business filing + elif business.identifier.startswith('T') and \ + filing.filing_type == Filing.FILINGS['noticeOfWithdrawal']['name']: + mailing_address, corp_type, legal_name = \ + ListFilingResource._get_address_from_withdrawn_new_business_filing(business, filing) + business.legal_name = legal_name else: mailing_address = business.mailing_address.one_or_none() corp_type = business.legal_type if business.legal_type else \ filing.json['filing']['business'].get('legalType') + if filing.filing_type == Filing.FILINGS['transparencyRegister']['name']: + corp_type = 'BTR' + payload = { 'businessInfo': { 'businessIdentifier': f'{business.identifier}', @@ -960,6 +1051,37 @@ def delete_from_minio(filing_type: str, filing_json: dict): elif filing_type == Filing.FILINGS['continuationIn'].get('name'): ListFilingResource.delete_continuation_in_files(filing_json) + @staticmethod + def delete_from_drs(filing_type: str, filing_json: dict): + """Delete file from Document Record Service.""" + if (filing_type == Filing.FILINGS['incorporationApplication'].get('name') + and (cooperative := filing_json + .get('filing', {}) + .get('incorporationApplication', {}) + .get('cooperative', None))) or \ + (filing_type == 
Filing.FILINGS['alteration'].get('name') + and (cooperative := filing_json + .get('filing', {}) + .get('alteration', {}))): + if rules_file_key := cooperative.get('rulesFileKey', None): + DocumentRecordService.delete_document(rules_file_key) + if memorandum_file_key := cooperative.get('memorandumFileKey', None): + DocumentRecordService.delete_document(memorandum_file_key) + elif filing_type == Filing.FILINGS['dissolution'].get('name') \ + and (affidavit_file_key := filing_json + .get('filing', {}) + .get('dissolution', {}) + .get('affidavitFileKey', None)): + DocumentRecordService.delete_document(affidavit_file_key) + elif filing_type == Filing.FILINGS['courtOrder'].get('name') \ + and (file_key := filing_json + .get('filing', {}) + .get('courtOrder', {}) + .get('fileKey', None)): + DocumentRecordService.delete_document(file_key) + elif filing_type == Filing.FILINGS['continuationIn'].get('name'): + ListFilingResource.delete_continuation_in_files(filing_json) + @staticmethod def delete_continuation_in_files(filing_json: dict): """Delete continuation in files from minio.""" @@ -967,13 +1089,19 @@ def delete_continuation_in_files(filing_json: dict): # Delete affidavit file if affidavit_file_key := continuation_in.get('foreignJurisdiction', {}).get('affidavitFileKey', None): - MinioService.delete_file(affidavit_file_key) + if flags.is_on('enable-document-records'): + DocumentRecordService.delete_document(affidavit_file_key) + else: + MinioService.delete_file(affidavit_file_key) # Delete authorization file(s) authorization_files = continuation_in.get('authorization', {}).get('files', []) for file in authorization_files: if auth_file_key := file.get('fileKey', None): - MinioService.delete_file(auth_file_key) + if flags.is_on('enable-document-records'): + DocumentRecordService.delete_document(auth_file_key) + else: + MinioService.delete_file(auth_file_key) @staticmethod def details_for_invoice(business_identifier: str, corp_type: str): @@ -1044,3 +1172,26 @@ def 
submit_filing_for_review(filing: Filing): {'email': {'filingId': filing.id, 'type': filing.filing_type, 'option': review.status}}, current_app.config.get('NATS_EMAILER_SUBJECT') ) + + @staticmethod + def _get_address_from_withdrawn_new_business_filing(business: Business, filing: Filing): + if filing.filing_type != CoreFiling.FilingTypes.NOTICEOFWITHDRAWAL.value: + return None, None, None + withdrawn_filing = ListFilingResource.get_withdrawn_filing(filing) + if withdrawn_filing.filing_type in CoreFiling.NEW_BUSINESS_FILING_TYPES: + office_type = OfficeType.REGISTERED + if withdrawn_filing.filing_type == Filing.FILINGS['registration']['name']: + office_type = OfficeType.BUSINESS + + mailing_address = Address.create_address( + withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['offices'][office_type]['mailingAddress']) + corp_type = withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['nameRequest'].get( + 'legalType', Business.LegalTypes.BCOMP.value) + + try: + legal_name = withdrawn_filing.json['filing'][withdrawn_filing.filing_type]['nameRequest']['legalName'] + except KeyError: + legal_name = business.identifier + + return mailing_address, corp_type, legal_name + return None, None, None diff --git a/legal-api/src/legal_api/resources/v2/business/business_tasks.py b/legal-api/src/legal_api/resources/v2/business/business_tasks.py index d4a02c2369..9411322715 100644 --- a/legal-api/src/legal_api/resources/v2/business/business_tasks.py +++ b/legal-api/src/legal_api/resources/v2/business/business_tasks.py @@ -19,6 +19,7 @@ from datetime import datetime from http import HTTPStatus +import datedelta import requests from requests import exceptions # noqa I001 from flask import current_app, jsonify @@ -28,6 +29,7 @@ from legal_api.services import check_warnings, namex from legal_api.services.warnings.business.business_checks import WarningType from legal_api.utils.auth import jwt +from legal_api.utils.legislation_datetime import LegislationDatetime from 
.bp import bp @@ -77,7 +79,7 @@ def get_tasks(identifier): return jsonify(tasks=rv) -def construct_task_list(business): # pylint: disable=too-many-locals; only 2 extra +def construct_task_list(business: Business): # pylint: disable=too-many-locals; only 2 extra """ Return all current pending tasks to do. @@ -113,7 +115,11 @@ def construct_task_list(business): # pylint: disable=too-many-locals; only 2 ex Filing.Status.PENDING_CORRECTION.value, Filing.Status.ERROR.value]) # Create a todo item for each pending filing + pending_tr_type: str = None for filing in pending_filings: + if filing.filing_type == 'transparencyRegister': + pending_tr_type = filing.filing_sub_type + filing_json = filing.json if filing.payment_status_code == 'CREATED' and filing.payment_token: # get current pay details from pay-api @@ -163,9 +169,131 @@ def construct_task_list(business): # pylint: disable=too-many-locals; only 2 ex next_ar_year += 1 ar_min_date, ar_max_date = business.get_ar_dates(next_ar_year) order += 1 + + tasks, order = add_tr_tasks(business, tasks, order, pending_tr_type) + return tasks +def add_tr_tasks(business: Business, tasks: list, order: int, pending_tr_type: str = None): + """Add Transparency Register tasks to the tasks list.""" + entity_types_no_tr = ['SP', 'GP', 'CP'] + tr_required = business.state != Business.State.HISTORICAL.value and business.legal_type not in entity_types_no_tr + if tr_required and (tr_start_date := current_app.config.get('TR_START_DATE', None)): + # Initial TR todo + if not pending_tr_type: + tr_start_datetime = LegislationDatetime.as_legislation_timezone_from_date( + datetime.fromisoformat(tr_start_date)) + initial_filing: Filing = Filing.get_most_recent_filing(business.id, 'transparencyRegister', 'initial') + last_restoration_datetime = None + if restoration_filing := Filing.get_most_recent_filing(business.id, 'restoration'): + if restoration_filing.effective_date: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + 
restoration_filing.effective_date) + else: + last_restoration_datetime = LegislationDatetime.as_legislation_timezone( + restoration_filing.filing_date) + + if ( + last_restoration_datetime and + not (initial_filing and initial_filing.effective_date > last_restoration_datetime) + ): + pending_tr_type = 'initial' + tasks, order = _add_tr_task(tasks, + order, + True, + business, + 'initial', + last_restoration_datetime + datedelta.datedelta(months=6)) + + elif business.founding_date > tr_start_datetime and not initial_filing: + pending_tr_type = 'initial' + tasks, order = _add_tr_task(tasks, + order, + True, + business, + 'initial', + business.founding_date + datedelta.datedelta(months=6)) + + # Annual TR todos + if (LegislationDatetime.now() + datedelta.datedelta(months=2)) > business.next_annual_tr_due_datetime: + # the next annual tr due datetime is within 2 months so add task for annual TR + annual_year = (business.next_annual_tr_due_datetime - datedelta.datedelta(months=2)).year + if pending_tr_type != 'annual': + tasks, order = _add_tr_task(tasks, + order, + not pending_tr_type, + business, + 'annual', + business.next_annual_tr_due_datetime, + annual_year) + # add any other outstanding annual TRs to the list + now = LegislationDatetime.now() + years_offset = 0 + while annual_year < now.year: + years_offset += 1 + annual_year += 1 + # NOTE: can't just replace with annual_year due to 2 month offset (could be off by 1) + due_date = business.next_annual_tr_due_datetime + datedelta.datedelta(years=years_offset) + if (now + datedelta.datedelta(months=2)) > due_date: + tasks, order = _add_tr_task(tasks, order, False, business, 'annual', due_date, annual_year) + + return tasks, order + + +def _find_task_order_for_tr(tasks: list, order: int, tr_sub_type: str, year: int) -> int: + """Find the appropriate task order value for the TR filing in the task list.""" + ar_todo_tasks = [task for task in tasks if task['task'].get('todo', {}).get('header', {}).get('ARFilingYear')] + 
if not ar_todo_tasks: + # default order will be after any pending tasks + return order + + def _by_order(e: dict): + """Return the order value of the given task.""" + return e['order'] + + ar_todo_tasks.sort(key=_by_order) + if tr_sub_type == 'initial': + # Should be directly after any AR in the same year as initial + # (not possible to have ARs outstanding in previous years) + if ar_todo_tasks[0]['task']['todo']['header']['ARFilingYear'] == year: + # Will be directly after this task + return ar_todo_tasks[0]['order'] + 1 + # Will be ahead of this task + return ar_todo_tasks[0]['order'] + else: + # tr annual task, should be directly after the AR task of the same year + for ar_task in ar_todo_tasks: + if ar_task['task']['todo']['header']['ARFilingYear'] == year: + # Will be directly after this task + return ar_task['order'] + 1 + elif ar_task['task']['todo']['header']['ARFilingYear'] > year: + # Will be ahead of this task + return ar_task['order'] + + # is an annual task and should after all existing AR tasks + return order + + +def _bump_task_order(tasks: list, bump_start_point: int) -> list: + """Bump the order of the task list down from the start point.""" + for task in tasks: + if task['order'] >= bump_start_point: + task['order'] += 1 + return tasks + + +def _add_tr_task(tasks: list, order: int, enabled: bool, # pylint: disable=too-many-arguments + business: Business, sub_type: str, due_date: datetime, year: int = None): + """Add a TR task to the list of tasks in the correct order.""" + tr_order = _find_task_order_for_tr(tasks, order, sub_type, year) + # bump the order of all the tasks after the tr by 1 + tasks = _bump_task_order(tasks, tr_order) + tasks.append(create_tr_todo(business, tr_order, enabled, sub_type, due_date, year)) + order += 1 + return tasks, order + + def create_todo(business, ar_year, ar_min_date, ar_max_date, order, enabled): # pylint: disable=too-many-arguments """Return a to-do JSON object.""" todo = { @@ -221,3 +349,24 @@ def 
create_conversion_filing_todo(business, order, enabled): 'enabled': enabled } return todo + + +def create_tr_todo(business: Business, order: int, enabled: bool, # pylint: disable=too-many-arguments + sub_type: str, due_date: datetime, year: int = None): + """Return a to-do JSON object for a Tranparency Register todo item.""" + return { + 'task': { + 'todo': { + 'business': business.json(), + 'header': { + 'TRFilingYear': year, + 'dueDate': LegislationDatetime.as_legislation_timezone(due_date).isoformat(), + 'name': 'tranparencyRegister', + 'status': 'NEW', + 'subType': sub_type + } + } + }, + 'order': order, + 'enabled': enabled + } diff --git a/legal-api/src/legal_api/resources/v2/business/colin_sync.py b/legal-api/src/legal_api/resources/v2/business/colin_sync.py index 7eae8525c5..196cd97faf 100644 --- a/legal-api/src/legal_api/resources/v2/business/colin_sync.py +++ b/legal-api/src/legal_api/resources/v2/business/colin_sync.py @@ -21,7 +21,6 @@ from flask import current_app, jsonify, request from flask_cors import cross_origin from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from legal_api.exceptions import BusinessException from legal_api.models import ( @@ -42,6 +41,7 @@ db, ) from legal_api.models.colin_event_id import ColinEventId +from legal_api.models.db import VersioningProxy from legal_api.services.business_details_version import VersionedBusinessDetailsService from legal_api.utils.auth import jwt from legal_api.utils.legislation_datetime import LegislationDatetime @@ -67,6 +67,8 @@ def get_completed_filings_for_colin(): filing_json['filing']['header']['source'] = Filing.Source.LEAR.value filing_json['filing']['header']['date'] = filing.filing_date.isoformat() filing_json['filing']['header']['learEffectiveDate'] = filing.effective_date.isoformat() + filing_json['filing']['header']['isFutureEffective'] = filing.is_future_effective + filing_json['filing']['header']['hideInLedger'] = filing.hide_in_ledger if not 
filing_json['filing'].get('business'): if filing.transaction_id: @@ -107,7 +109,6 @@ def get_completed_filings_for_colin(): current_app.logger.error(f'dissolution: filingId={filing.id}, missing batch processing info') # to skip this filing and block subsequent filing from syncing in update-colin-filings filing_json['filing']['header']['name'] = None - filings.append(filing_json) return jsonify({'filings': filings}), HTTPStatus.OK @@ -135,7 +136,7 @@ def set_correction_flags(filing_json, filing: Filing): def has_alias_changed(filing) -> bool: """Has alias changed in the given filing.""" - alias_version = version_class(Alias) + alias_version = VersioningProxy.version_class(db.session(), Alias) aliases_query = (db.session.query(alias_version) .filter(or_(alias_version.transaction_id == filing.transaction_id, alias_version.end_transaction_id == filing.transaction_id)) @@ -148,7 +149,7 @@ def has_office_changed(filing) -> bool: """Has office changed in the given filing.""" offices = db.session.query(Office).filter(Office.business_id == filing.business_id).all() - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) addresses_query = (db.session.query(address_version) .filter(or_(address_version.transaction_id == filing.transaction_id, address_version.end_transaction_id == filing.transaction_id)) @@ -160,7 +161,7 @@ def has_office_changed(filing) -> bool: def has_party_changed(filing: Filing) -> bool: """Has party changed in the given filing.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles_query = (db.session.query(party_role_version) .filter(or_(party_role_version.transaction_id == filing.transaction_id, party_role_version.end_transaction_id == filing.transaction_id)) @@ -175,7 +176,7 @@ def has_party_changed(filing: Filing) -> bool: filing.business_id, role=PartyRole.RoleTypes.DIRECTOR.value) - party_version = 
version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) for party_role in party_roles: parties_query = (db.session.query(party_version) .filter(or_(party_version.transaction_id == filing.transaction_id, @@ -186,7 +187,7 @@ def has_party_changed(filing: Filing) -> bool: return True party = VersionedBusinessDetailsService.get_party_revision(filing.transaction_id, party_role['id']) - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) # Has party delivery/mailing address modified address_query = (db.session.query(address_version) .filter(or_(address_version.transaction_id == filing.transaction_id, @@ -201,7 +202,7 @@ def has_party_changed(filing: Filing) -> bool: def has_resolution_changed(filing: Filing) -> bool: """Has resolution changed in the given filing.""" - resolution_version = version_class(Resolution) + resolution_version = VersioningProxy.version_class(db.session(), Resolution) resolution_query = (db.session.query(resolution_version) .filter(or_(resolution_version.transaction_id == filing.transaction_id, resolution_version.end_transaction_id == filing.transaction_id)) @@ -212,7 +213,7 @@ def has_resolution_changed(filing: Filing) -> bool: def has_share_changed(filing: Filing) -> bool: """Has share changed in the given filing.""" - share_class_version = version_class(ShareClass) + share_class_version = VersioningProxy.version_class(db.session(), ShareClass) share_class_query = (db.session.query(share_class_version) .filter(or_(share_class_version.transaction_id == filing.transaction_id, share_class_version.end_transaction_id == filing.transaction_id)) @@ -222,7 +223,7 @@ def has_share_changed(filing: Filing) -> bool: return True share_classes = VersionedBusinessDetailsService.get_share_class_revision(filing.transaction_id, filing.business_id) - series_version = version_class(ShareSeries) + series_version = VersioningProxy.version_class(db.session(), ShareSeries) 
share_series_query = (db.session.query(series_version) .filter(or_(series_version.transaction_id == filing.transaction_id, series_version.end_transaction_id == filing.transaction_id)) @@ -283,11 +284,28 @@ def _set_offices(primary_or_holding_business, amalgamation_filing, transaction_i def _set_shares(primary_or_holding_business, amalgamation_filing, transaction_id): - # copy shares + """Set shares from holding/primary business.""" + # Copy shares share_classes = VersionedBusinessDetailsService.get_share_class_revision(transaction_id, primary_or_holding_business.id) amalgamation_filing['shareStructure'] = {'shareClasses': share_classes} - business_dates = [item.resolution_date.isoformat() for item in primary_or_holding_business.resolutions] + + # Get resolution dates using versioned query + resolution_version = VersioningProxy.version_class(db.session(), Resolution) + resolutions_query = ( + db.session.query(resolution_version.resolution_date) + .filter(resolution_version.transaction_id <= transaction_id) # Get records valid at or before the transaction + .filter(resolution_version.operation_type != 2) # Exclude deleted records + .filter(resolution_version.business_id == primary_or_holding_business.id) + .filter(or_( + resolution_version.end_transaction_id.is_(None), # Records not yet ended + resolution_version.end_transaction_id > transaction_id # Records ended after our transaction + )) + .order_by(resolution_version.transaction_id) + .all() + ) + + business_dates = [res.resolution_date.isoformat() for res in resolutions_query] if business_dates: amalgamation_filing['shareStructure']['resolutionDates'] = business_dates diff --git a/legal-api/src/legal_api/resources/v2/document.py b/legal-api/src/legal_api/resources/v2/document.py index 8d08f9af91..38115fd5da 100644 --- a/legal-api/src/legal_api/resources/v2/document.py +++ b/legal-api/src/legal_api/resources/v2/document.py @@ -13,6 +13,7 @@ # limitations under the License. 
"""Module for handling Minio document operations.""" +import re from http import HTTPStatus from flask import Blueprint, current_app, jsonify @@ -20,6 +21,7 @@ from legal_api.models import Document, Filing from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService from legal_api.utils.auth import jwt @@ -77,3 +79,42 @@ def get_minio_document(document_key: str): return jsonify( message=f'Error getting file {document_key}.' ), HTTPStatus.INTERNAL_SERVER_ERROR + +@bp.route('//', methods=['POST', 'OPTIONS']) +@cross_origin(origin='*') +@jwt.requires_auth +def upload_document(document_class: str, document_type: str): + """Upload document file to Document Record Service.""" + + return DocumentRecordService.upload_document(document_class, document_type), HTTPStatus.OK + +@bp.route('/drs/', methods=['DELETE']) +@cross_origin(origin='*') +@jwt.requires_auth +def delete_document(document_service_id: str): + """Delete document file from Document Record Service.""" + + return DocumentRecordService.delete_document(document_service_id), HTTPStatus.OK + +@bp.route('/drs//', methods=['GET']) +@cross_origin(origins='*') +@jwt.requires_auth +def get_document(document_class: str, document_key: str): + """Get document file from Minio or Document Record Service.""" + drs_id_pattern = r"^DS\d{10}$" + + try: + if re.match(drs_id_pattern, document_key): + return DocumentRecordService.get_document(document_class, document_key), HTTPStatus.OK + else: + response = MinioService.get_file(document_key) + return current_app.response_class( + response=response.data, + status=response.status, + mimetype='application/pdf' + ) + except Exception as e: + current_app.logger.error(f'Error getting file {document_key}: {e}') + return jsonify( + message=f'Error getting file {document_key}.' 
+ ), HTTPStatus.INTERNAL_SERVER_ERROR \ No newline at end of file diff --git a/legal-api/src/legal_api/resources/v2/internal_services.py b/legal-api/src/legal_api/resources/v2/internal_services.py index 00bbc46b68..7b22e1aff0 100644 --- a/legal-api/src/legal_api/resources/v2/internal_services.py +++ b/legal-api/src/legal_api/resources/v2/internal_services.py @@ -36,6 +36,15 @@ def get_future_effective_filing_ids(): return jsonify(filing_ids), HTTPStatus.OK +@bp.route('/expired_restoration', methods=['GET']) +@cross_origin(origin='*') +@jwt.has_one_of_roles([UserRoles.system]) +def get_identifiers_of_expired_restoration(): + """Return all identifiers (if limited restoration has expired).""" + businesses = Business.get_expired_restoration() + return jsonify({'identifiers': [business.identifier for business in businesses]}), HTTPStatus.OK + + @bp.route('/bnmove', methods=['POST']) @cross_origin(origin='*') @jwt.has_one_of_roles([UserRoles.system]) diff --git a/legal-api/src/legal_api/services/__init__.py b/legal-api/src/legal_api/services/__init__.py index c2a44e2dc0..7f895c28a1 100644 --- a/legal-api/src/legal_api/services/__init__.py +++ b/legal-api/src/legal_api/services/__init__.py @@ -29,6 +29,7 @@ from .furnishing_documents_service import FurnishingDocumentsService from .involuntary_dissolution import InvoluntaryDissolutionService from .minio import MinioService +from .document_record import DocumentRecordService from .mras_service import MrasService from .naics import NaicsService from .namex import NameXService diff --git a/legal-api/src/legal_api/services/authz.py b/legal-api/src/legal_api/services/authz.py index 087236e898..5639ef238e 100644 --- a/legal-api/src/legal_api/services/authz.py +++ b/legal-api/src/legal_api/services/authz.py @@ -201,12 +201,31 @@ def get_allowable_filings_dict(): } } }, + 'amalgamationOut': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': 
[BusinessBlocker.NOT_IN_GOOD_STANDING], + 'completedFilings': ['consentAmalgamationOut'] + } + }, 'annualReport': { 'legalTypes': ['CP', 'BEN', 'BC', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { 'business': [BusinessBlocker.DEFAULT] } }, + 'appointReceiver': { + 'legalTypes': ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT] + } + }, + 'ceaseReceiver': { + 'legalTypes': ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT] + } + }, 'changeOfAddress': { 'legalTypes': ['CP', 'BEN', 'BC', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { @@ -231,6 +250,12 @@ def get_allowable_filings_dict(): # only show filing when providing allowable filings not specific to a business 'businessRequirement': BusinessRequirement.NOT_EXIST }, + 'consentAmalgamationOut': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.DEFAULT, BusinessBlocker.NOT_IN_GOOD_STANDING] + } + }, 'consentContinuationOut': { 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { @@ -279,6 +304,9 @@ def get_allowable_filings_dict(): # only show filing when providing allowable filings not specific to a business 'businessRequirement': BusinessRequirement.NOT_EXIST }, + 'putBackOff': { + 'legalTypes': ['BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] + }, 'registrarsNotation': { 'legalTypes': ['SP', 'GP', 'CP', 'BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] }, @@ -321,7 +349,8 @@ def get_allowable_filings_dict(): 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'blockerChecks': { 'business': [BusinessBlocker.FILING_WITHDRAWAL] - } + }, + 'businessRequirement': BusinessRequirement.NO_RESTRICTION } }, Business.State.HISTORICAL: { @@ -478,6 +507,29 @@ def get_allowable_filings_dict(): }, 'transition': { 'legalTypes': ['BC', 'BEN', 
'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'] + }, + CoreFiling.FilingTypes.TRANSPARENCY_REGISTER.value: { + 'annual': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + }, + 'change': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + }, + 'initial': { + 'legalTypes': ['BC', 'BEN', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], + 'blockerChecks': { + 'business': [BusinessBlocker.BUSINESS_FROZEN], + 'futureEffectiveFilings': [CoreFiling.FilingTypes.INCORPORATIONAPPLICATION.value] + } + } } }, Business.State.HISTORICAL: {} @@ -504,7 +556,7 @@ def is_allowed(business: Business, else: is_ignore_draft_blockers = True - # Special case: handiling authorization for amalgamation application + # Special case: handling authorization for amalgamation application # this check is to make sure that amalgamation application is not allowed/authorized with continue in corps if filing_type == 'amalgamationApplication' and legal_type in ['C', 'CBEN', 'CUL', 'CCC']: return False @@ -746,7 +798,7 @@ def business_blocker_check(business: Business, is_ignore_draft_blockers: bool = if business.in_dissolution: business_blocker_checks[BusinessBlocker.IN_DISSOLUTION] = True - if has_notice_of_withdrawal_filing_blocker(business): + if has_notice_of_withdrawal_filing_blocker(business, is_ignore_draft_blockers): business_blocker_checks[BusinessBlocker.FILING_WITHDRAWAL] = True return business_blocker_checks @@ -872,15 +924,16 @@ def has_blocker_warning_filing(warnings: List, blocker_checks: dict): return warning_matches -def has_notice_of_withdrawal_filing_blocker(business: Business): +def has_notice_of_withdrawal_filing_blocker(business: Business, 
is_ignore_draft_blockers: bool = False): """Check if there are any blockers specific to Notice of Withdrawal.""" if business.admin_freeze: return True - filing_statuses = [Filing.Status.DRAFT.value, - Filing.Status.PENDING.value, + filing_statuses = [Filing.Status.PENDING.value, Filing.Status.PENDING_CORRECTION.value, Filing.Status.ERROR.value] + if not is_ignore_draft_blockers: + filing_statuses.append(Filing.Status.DRAFT.value) blocker_filing_matches = Filing.get_filings_by_status(business.id, filing_statuses) if any(blocker_filing_matches): return True diff --git a/legal-api/src/legal_api/services/business_details_version.py b/legal-api/src/legal_api/services/business_details_version.py index bd0693af62..048bcbb109 100644 --- a/legal-api/src/legal_api/services/business_details_version.py +++ b/legal-api/src/legal_api/services/business_details_version.py @@ -18,7 +18,6 @@ import pycountry from sqlalchemy import or_ -from sqlalchemy_continuum import version_class from legal_api.models import ( Address, @@ -33,6 +32,7 @@ ShareSeries, db, ) +from legal_api.models.db import VersioningProxy from legal_api.utils.legislation_datetime import LegislationDatetime @@ -210,7 +210,7 @@ def get_company_details_revision(filing_id, business_id) -> dict: @staticmethod def get_business_revision(transaction_id, business) -> dict: """Consolidates the business info as of a particular transaction.""" - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id <= transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -223,7 +223,7 @@ def get_business_revision(transaction_id, business) -> dict: @staticmethod def get_business_revision_obj(transaction_id, business_id): """Return business version object associated with a given transaction id for a business.""" - business_version = version_class(Business) + 
business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id <= transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -238,7 +238,7 @@ def find_last_value_from_business_revision(transaction_id, business_id, is_dissolution_date=False, is_restoration_expiry_date=False) -> dict: """Get business info with last value of dissolution_date or restoration_expiry_date.""" - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) query = db.session.query(business_version) \ .filter(business_version.transaction_id < transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -255,7 +255,7 @@ def get_business_revision_after_filing(filing_id, business_id) -> dict: """Consolidates the business info as of a particular transaction.""" business = Business.find_by_internal_id(business_id) filing = Filing.find_by_id(filing_id) - business_version = version_class(Business) + business_version = VersioningProxy.version_class(db.session(), Business) business_revision = db.session.query(business_version) \ .filter(business_version.transaction_id > filing.transaction_id) \ .filter(business_version.operation_type != 2) \ @@ -267,8 +267,8 @@ def get_business_revision_after_filing(filing_id, business_id) -> dict: def get_office_revision(transaction_id, business_id) -> dict: """Consolidates all office changes upto the given transaction id.""" offices_json = {} - address_version = version_class(Address) - offices_version = version_class(Office) + address_version = VersioningProxy.version_class(db.session(), Address) + offices_version = VersioningProxy.version_class(db.session(), Office) offices = db.session.query(offices_version) \ .filter(offices_version.transaction_id <= transaction_id) \ @@ -296,7 +296,7 @@ def get_office_revision(transaction_id, business_id) -> dict: @staticmethod def 
get_party_role_revision(transaction_id, business_id, is_ia_or_after=False, role=None) -> dict: """Consolidates all party changes upto the given transaction id.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles = db.session.query(party_role_version)\ .filter(party_role_version.transaction_id <= transaction_id) \ .filter(party_role_version.operation_type != 2) \ @@ -322,7 +322,7 @@ def get_party_role_revision(transaction_id, business_id, is_ia_or_after=False, r @staticmethod def get_share_class_revision(transaction_id, business_id) -> dict: """Consolidates all share classes upto the given transaction id.""" - share_class_version = version_class(ShareClass) + share_class_version = VersioningProxy.version_class(db.session(), ShareClass) share_classes_list = db.session.query(share_class_version) \ .filter(share_class_version.transaction_id <= transaction_id) \ .filter(share_class_version.operation_type != 2) \ @@ -343,7 +343,7 @@ def get_share_class_revision(transaction_id, business_id) -> dict: @staticmethod def get_share_series_revision(transaction_id, share_class_id) -> dict: """Consolidates all share series under the share class upto the given transaction id.""" - share_series_version = version_class(ShareSeries) + share_series_version = VersioningProxy.version_class(db.session(), ShareSeries) share_series_list = db.session.query(share_series_version) \ .filter(share_series_version.transaction_id <= transaction_id) \ .filter(share_series_version.operation_type != 2) \ @@ -362,7 +362,7 @@ def get_share_series_revision(transaction_id, share_class_id) -> dict: @staticmethod def get_name_translations_revision(transaction_id, business_id) -> dict: """Consolidates all name translations upto the given transaction id.""" - name_translations_version = version_class(Alias) + name_translations_version = VersioningProxy.version_class(db.session(), Alias) name_translations_list = 
db.session.query(name_translations_version) \ .filter(name_translations_version.transaction_id <= transaction_id) \ .filter(name_translations_version.operation_type != 2) \ @@ -380,7 +380,7 @@ def get_name_translations_revision(transaction_id, business_id) -> dict: @staticmethod def get_name_translations_before_revision(transaction_id, business_id) -> dict: """Consolidates all name translations before deletion given a transaction id.""" - name_translations_version = version_class(Alias) + name_translations_version = VersioningProxy.version_class(db.session(), Alias) name_translations_list = db.session.query(name_translations_version) \ .filter(name_translations_version.transaction_id <= transaction_id) \ .filter(name_translations_version.operation_type != 2) \ @@ -396,7 +396,7 @@ def get_name_translations_before_revision(transaction_id, business_id) -> dict: @staticmethod def get_resolution_dates_revision(transaction_id, business_id) -> dict: """Consolidates all resolutions upto the given transaction id.""" - resolution_version = version_class(Resolution) + resolution_version = VersioningProxy.version_class(db.session(), Resolution) resolution_list = db.session.query(resolution_version) \ .filter(resolution_version.transaction_id <= transaction_id) \ .filter(resolution_version.operation_type != 2) \ @@ -439,7 +439,7 @@ def party_role_revision_json(transaction_id, party_role_revision, is_ia_or_after @staticmethod def get_party_revision(transaction_id, party_id) -> dict: """Consolidates all party changes upto the given transaction id.""" - party_version = version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) party = db.session.query(party_version) \ .filter(party_version.transaction_id <= transaction_id) \ .filter(party_version.operation_type != 2) \ @@ -500,9 +500,8 @@ def party_revision_json(transaction_id, party_revision, is_ia_or_after) -> dict: if 'addressType' in member_mailing_address: del 
member_mailing_address['addressType'] member['mailingAddress'] = member_mailing_address - else: - if party_revision.delivery_address: - member['mailingAddress'] = member['deliveryAddress'] + elif party_revision.delivery_address_id: + member['mailingAddress'] = member['deliveryAddress'] if is_ia_or_after: member['officer']['id'] = str(party_revision.id) @@ -514,7 +513,7 @@ def party_revision_json(transaction_id, party_revision, is_ia_or_after) -> dict: @staticmethod def get_address_revision(transaction_id, address_id) -> dict: """Consolidates all party changes upto the given transaction id.""" - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) address = db.session.query(address_version) \ .filter(address_version.transaction_id <= transaction_id) \ .filter(address_version.operation_type != 2) \ @@ -550,7 +549,7 @@ def share_class_revision_json(share_class_revision) -> dict: 'name': share_class_revision.name, 'priority': share_class_revision.priority, 'hasMaximumShares': share_class_revision.max_share_flag, - 'maxNumberOfShares': share_class_revision.max_shares, + 'maxNumberOfShares': int(share_class_revision.max_shares) if share_class_revision.max_shares else None, 'hasParValue': share_class_revision.par_value_flag, 'parValue': share_class_revision.par_value, 'currency': share_class_revision.currency, @@ -566,7 +565,7 @@ def share_series_revision_json(share_series_revision) -> dict: 'name': share_series_revision.name, 'priority': share_series_revision.priority, 'hasMaximumShares': share_series_revision.max_share_flag, - 'maxNumberOfShares': share_series_revision.max_shares, + 'maxNumberOfShares': int(share_series_revision.max_shares) if share_series_revision.max_shares else None, 'hasRightsOrRestrictions': share_series_revision.special_rights_flag } return share_series diff --git a/legal-api/src/legal_api/services/document_record.py b/legal-api/src/legal_api/services/document_record.py new file mode 
100644 index 0000000000..70410485e5 --- /dev/null +++ b/legal-api/src/legal_api/services/document_record.py @@ -0,0 +1,179 @@ +# Copyright © 2021 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""This module is a wrapper for Document Record Service.""" + +from typing import Optional +import requests +from flask import current_app +from flask_babel import _ +import PyPDF2 + +from legal_api.constants import DocumentTypes + +class DocumentRecordService: +    """Document Storage class.""" + + +    @staticmethod +    def upload_document(document_class: str, document_type: str, file) -> dict: +        """Upload document to Document Record Service.""" +        # Ensure file exists +        if not file: +            current_app.logger.debug('No file found in request.') +            return {'data': 'File not provided'} + +        DOC_API_URL = current_app.config.get('DOC_API_URL', '')  # pylint: disable=invalid-name +        url = f'{DOC_API_URL}/documents/{document_class}/{document_type}' + +        try: +            response_body = requests.post( +                url, +                data=file, +                headers={ +                    'x-apikey': current_app.config.get('DOC_API_KEY', ''), +                    'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), +                    'Content-Type': 'application/pdf' +                } +            ).json() + +            current_app.logger.debug(f'Upload file to document record service {response_body}') +            return { +                'documentServiceId': response_body['documentServiceId'], +                'consumerDocumentId': response_body['consumerDocumentId'], +                'consumerFilename':
response_body['consumerFilename'] + } + except Exception as e: + current_app.logger.debug(f"Error on uploading document {e}") + return {} + + @staticmethod + def update_document(document: bytes, document_service_id: str, document_name: str) -> dict: + """Update a document on Document Record Service (DRS).""" + + DOC_API_URL = current_app.config.get('DOC_API_URL', '') + url = f"{DOC_API_URL}/documents/{document_service_id}?consumerFilename={document_name}" + + headers = { + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), + 'Content-Type': 'application/pdf' + } + + try: + response = requests.put(url, data=document, headers=headers) + response.raise_for_status() + return response.json() + except requests.exceptions.RequestException as error: + current_app.logger.error(f"Error updating document on DRS: {error}") + return {"error": str(error), "response": error.response.json() if error.response else None} + + @staticmethod + def delete_document(document_service_id: str) -> dict: + """Delete document from Document Record Service.""" + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = f'{DOC_API_URL}/documents/{document_service_id}' + + try: + response = requests.patch( + url, json={ 'removed': True }, + headers={ + 'x-apikey': current_app.config.get('DOC_API_KEY', ''), + 'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), + } + ).json() + current_app.logger.debug(f'Delete document from document record service {response}') + return response + except Exception as e: + current_app.logger.debug(f'Error on deleting document {e}') + return {} + + @staticmethod + def get_document(document_class: str, document_service_id: str) -> dict: + """Get document record from Document Record Service.""" + DOC_API_URL = current_app.config.get('DOC_API_URL', '') # pylint: disable=invalid-name + url = 
f'{DOC_API_URL}/searches/{document_class}?documentServiceId={document_service_id}' +        try: +            response = requests.get( +                url, +                headers={ +                    'x-apikey': current_app.config.get('DOC_API_KEY', ''), +                    'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), +                } +            ).json() +            current_app.logger.debug(f'Get document from document record service {document_service_id}') +            return response[0] +        except Exception as e: +            current_app.logger.debug(f'Error on getting a document object {e}') +            return {} + +    @staticmethod +    def download_document(document_class: str, document_service_id: str) -> dict: +        """Download document from Document Record Service.""" +        doc_object = DocumentRecordService.get_document(document_class, document_service_id) + +        response = requests.get(doc_object['documentURL'])  # Download file from storage +        response.raise_for_status()  # Raise an HTTPError for bad responses (4xx and 5xx) + +        return response.content + +    @staticmethod +    def update_business_identifier(business_identifier: str, document_service_id: str): +        """Update business identifier upon approval.""" +        DOC_API_URL = current_app.config.get('DOC_API_URL', '')  # pylint: disable=invalid-name +        url = f'{DOC_API_URL}/documents/{document_service_id}' + +        try: +            response = requests.patch( +                url, json={ 'consumerIdentifier': business_identifier }, +                headers={ +                    'x-apikey': current_app.config.get('DOC_API_KEY', ''), +                    'Account-Id': current_app.config.get('DOC_API_ACCOUNT_ID', ''), +                } +            ).json() +            current_app.logger.debug(f'Update business identifier - {business_identifier}') +            return response +        except Exception as e: +            current_app.logger.debug(f'Error on updating business identifier {e}') +            return {} + +    @staticmethod +    def validate_pdf(file, content_length, document_type) -> Optional[list]: +        """Validate the PDF file.""" +        msg = [] +        verify_paper_size = document_type in [ +            DocumentTypes.CNTO.value, +            DocumentTypes.DIRECTOR_AFFIDAVIT.value +        ] + +        try: +            pdf_reader = PyPDF2.PdfFileReader(file) +            if
verify_paper_size: + # Check that all pages in the pdf are letter size and able to be processed. + if any(x.mediaBox.getWidth() != 612 or x.mediaBox.getHeight() != 792 for x in pdf_reader.pages): + msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), + 'path': file.filename}) + if content_length > 30000000: + msg.append({'error': _('File exceeds maximum size.'), 'path': file.filename}) + + if pdf_reader.isEncrypted: + msg.append({'error': _('File must be unencrypted.'), 'path': file.filename}) + + except Exception as e: + msg.append({'error': _('Invalid file.'), 'path': file.filename}) + current_app.logger.debug(e) + + if msg: + return msg + + return None diff --git a/legal-api/src/legal_api/services/filings/validations/alteration.py b/legal-api/src/legal_api/services/filings/validations/alteration.py index 38ce04d0aa..8998f126ce 100644 --- a/legal-api/src/legal_api/services/filings/validations/alteration.py +++ b/legal-api/src/legal_api/services/filings/validations/alteration.py @@ -21,13 +21,14 @@ from legal_api.errors import Error from legal_api.models import Business from legal_api.services.utils import get_bool, get_str +from legal_api.constants import DocumentClasses from .common_validations import ( validate_court_order, validate_name_request, validate_pdf, validate_resolution_date_in_share_structure, - validate_share_structure, + validate_share_structure ) @@ -179,7 +180,12 @@ def rules_change_validation(filing): return msg if rules_file_key: - rules_err = validate_pdf(rules_file_key, rules_file_key_path) + rules_err = validate_pdf( + file_key=rules_file_key, + file_key_path=rules_file_key_path, + document_class=DocumentClasses.COOP.value + ) + if rules_err: msg.extend(rules_err) return msg @@ -203,7 +209,11 @@ def memorandum_change_validation(filing): return msg if memorandum_file_key: - memorandum_err = validate_pdf(memorandum_file_key, memorandum_file_key_path) + memorandum_err = validate_pdf( + 
file_key=memorandum_file_key, + file_key_path=memorandum_file_key_path, + document_class=DocumentClasses.COOP.value + ) if memorandum_err: msg.extend(memorandum_err) diff --git a/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py b/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py index e7850e2cd7..e0909889ea 100644 --- a/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py +++ b/legal-api/src/legal_api/services/filings/validations/amalgamation_application.py @@ -170,11 +170,21 @@ def validate_amalgamating_businesses( # pylint: disable=too-many-branches,too-m 'error': 'Adopt a name that have the same business type as the resulting business.', 'path': f'/filing/{filing_type}/nameRequest/legalName' }) - elif primary_or_holding_business and primary_or_holding_business.legal_type != legal_type: - msg.append({ - 'error': 'Legal type should be same as the legal type in primary or holding business.', - 'path': f'/filing/{filing_type}/nameRequest/legalType' - }) + + if primary_or_holding_business: + continued_types_map = { + Business.LegalTypes.CONTINUE_IN.value: Business.LegalTypes.COMP.value, + Business.LegalTypes.BCOMP_CONTINUE_IN.value: Business.LegalTypes.BCOMP.value, + Business.LegalTypes.ULC_CONTINUE_IN.value: Business.LegalTypes.BC_ULC_COMPANY.value, + Business.LegalTypes.CCC_CONTINUE_IN.value: Business.LegalTypes.BC_CCC.value + } + legal_type_to_compare = continued_types_map.get(primary_or_holding_business.legal_type, + primary_or_holding_business.legal_type) + if legal_type_to_compare != legal_type: + msg.append({ + 'error': 'Legal type should be same as the legal type in primary or holding business.', + 'path': f'/filing/{filing_type}/nameRequest/legalType' + }) msg.extend(_validate_amalgamation_type(amalgamation_type, amalgamating_business_roles, diff --git a/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py 
b/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py new file mode 100644 index 0000000000..4fef0b356c --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/appoint_receiver.py @@ -0,0 +1,43 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""Validation for the Appoint Receiver filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Appoint Receiver filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/cease_receiver.py b/legal-api/src/legal_api/services/filings/validations/cease_receiver.py new file mode 100644 index 0000000000..5ec47269fb --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/cease_receiver.py @@ -0,0 +1,43 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. 
Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""Validation for the Cease Receiver filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Cease Receiver filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/common_validations.py b/legal-api/src/legal_api/services/filings/validations/common_validations.py index a624e296a6..cd3f952abf 100644 --- a/legal-api/src/legal_api/services/filings/validations/common_validations.py +++ b/legal-api/src/legal_api/services/filings/validations/common_validations.py @@ -13,6 +13,7 @@ # limitations under the License. 
"""Common validations share through the different filings.""" import io +import re from datetime import datetime from typing import Optional @@ -22,7 +23,7 @@ from legal_api.errors import Error from legal_api.models import Business -from legal_api.services import MinioService, flags, namex +from legal_api.services import MinioService, flags, namex, DocumentRecordService from legal_api.services.utils import get_str from legal_api.utils.datetime import datetime as dt @@ -168,12 +169,23 @@ def validate_court_order(court_order_path, court_order): return None -def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = True) -> Optional[list]: +def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = True, document_class: str = None) -> Optional[list]: """Validate the PDF file.""" msg = [] + DRS_ID_PATTERN = r"^DS\d{10,}$" + file_size = 0 + try: - file = MinioService.get_file(file_key) - open_pdf_file = io.BytesIO(file.data) + file = None + if bool(re.match(DRS_ID_PATTERN, file_key)): # Check if file_key is matched with document service ID pattern + file = DocumentRecordService.download_document(document_class, file_key) + open_pdf_file = io.BytesIO(file) + file_size = len(file) + else: + file = MinioService.get_file(file_key) + open_pdf_file = io.BytesIO(file.data ) + file_info = MinioService.get_file_info(file_key) + file_size = file_info.size pdf_reader = PyPDF2.PdfFileReader(open_pdf_file) if verify_paper_size: @@ -182,14 +194,13 @@ def validate_pdf(file_key: str, file_key_path: str, verify_paper_size: bool = Tr msg.append({'error': _('Document must be set to fit onto 8.5” x 11” letter-size paper.'), 'path': file_key_path}) - file_info = MinioService.get_file_info(file_key) - if file_info.size > 30000000: + if file_size > 30000000: msg.append({'error': _('File exceeds maximum size.'), 'path': file_key_path}) if pdf_reader.isEncrypted: msg.append({'error': _('File must be unencrypted.'), 'path': file_key_path}) - except 
Exception: +    except Exception as e:         msg.append({'error': _('Invalid file.'), 'path': file_key_path})     if msg: @@ -329,3 +340,12 @@ def validate_foreign_jurisdiction(foreign_jurisdiction: dict,             msg.append({'error': 'Invalid region.', 'path': f'{foreign_jurisdiction_path}/region'})     return msg + +def validate_file_on_drs(document_class: str, document_service_id: str, path) -> list: +    """Validate file existence on DRS.""" +    msg = [] +    doc = DocumentRecordService.get_document(document_class, document_service_id) +    if not bool(doc.get("documentURL")): +        msg.append({'error': 'File does not exist on Document Record Service', 'path': path}) + +    return msg \ No newline at end of file diff --git a/legal-api/src/legal_api/services/filings/validations/continuation_in.py b/legal-api/src/legal_api/services/filings/validations/continuation_in.py index 08ec8e0286..10eb14ff93 100644 --- a/legal-api/src/legal_api/services/filings/validations/continuation_in.py +++ b/legal-api/src/legal_api/services/filings/validations/continuation_in.py @@ -25,8 +25,8 @@     validate_foreign_jurisdiction,     validate_name_request,     validate_parties_names, -    validate_pdf,     validate_share_structure, +    validate_pdf ) from legal_api.services.filings.validations.incorporation_application import (     validate_incorporation_effective_date, @@ -35,6 +35,7 @@ ) from legal_api.services.utils import get_bool, get_str from legal_api.utils.datetime import datetime as dt +from legal_api.constants import DocumentClasses def validate(filing_json: dict) -> Optional[Error]:  # pylint: disable=too-many-branches;     @@ -127,7 +128,12 @@ def _validate_foreign_jurisdiction(filing_json: dict, filing_type: str, legal_ty                 ((region := foreign_jurisdiction.get('region')) and region == 'AB')):             affidavit_file_key_path = f'{foreign_jurisdiction_path}/affidavitFileKey'             if file_key := foreign_jurisdiction.get('affidavitFileKey'): -                if err := validate_pdf(file_key, affidavit_file_key_path, False): +                if err := validate_pdf( +                    file_key=file_key, +
file_key_path=affidavit_file_key_path, + verify_paper_size=False, + document_class=DocumentClasses.CORP.value + ): msg.extend(err) else: msg.append({'error': 'Affidavit from the directors is required.', 'path': affidavit_file_key_path}) @@ -157,7 +163,11 @@ def validate_continuation_in_authorization(filing_json: dict, filing_type: str) for index, file in enumerate(filing_json['filing'][filing_type]['authorization']['files']): file_key = file['fileKey'] file_key_path = f'{authorization_path}/files/{index}/fileKey' - if err := validate_pdf(file_key, file_key_path, False): + if err := validate_pdf( + file_key=file_key, + file_key_path=file_key_path, + document_class=DocumentClasses.CORP.value + ): msg.extend(err) return msg diff --git a/legal-api/src/legal_api/services/filings/validations/dissolution.py b/legal-api/src/legal_api/services/filings/validations/dissolution.py index 21bc9b2575..fba6e20d27 100644 --- a/legal-api/src/legal_api/services/filings/validations/dissolution.py +++ b/legal-api/src/legal_api/services/filings/validations/dissolution.py @@ -23,6 +23,8 @@ from legal_api.models import Address, Business, PartyRole from .common_validations import validate_court_order, validate_pdf +from legal_api.constants import DocumentClasses + from ...utils import get_str # noqa: I003; needed as the linter gets confused from the babel override above. 
@@ -243,7 +245,10 @@ def validate_affidavit(filing_json, legal_type, dissolution_type) -> Optional[li return [{'error': _('A valid affidavit key is required.'), 'path': affidavit_file_key_path}] - return validate_pdf(affidavit_file_key, affidavit_file_key_path) + return validate_pdf( + file_key=affidavit_file_key, + file_key_path=affidavit_file_key_path, + document_class=DocumentClasses.CORP.value) return None diff --git a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py index dc33ed88eb..6ec12c7905 100644 --- a/legal-api/src/legal_api/services/filings/validations/incorporation_application.py +++ b/legal-api/src/legal_api/services/filings/validations/incorporation_application.py @@ -24,6 +24,7 @@ from legal_api.models import Business from legal_api.services.utils import get_str from legal_api.utils.datetime import datetime as dt +from legal_api.constants import DocumentClasses from .common_validations import ( # noqa: I001 validate_court_order, @@ -294,13 +295,21 @@ def validate_cooperative_documents(incorporation_json: dict): rules_file_key = cooperative['rulesFileKey'] rules_file_key_path = '/filing/incorporationApplication/cooperative/rulesFileKey' - rules_err = validate_pdf(rules_file_key, rules_file_key_path) + rules_err = validate_pdf( + file_key=rules_file_key, + file_key_path=rules_file_key_path, + document_class=DocumentClasses.COOP.value + ) if rules_err: msg.extend(rules_err) memorandum_file_key = cooperative['memorandumFileKey'] memorandum_file_key_path = '/filing/incorporationApplication/cooperative/memorandumFileKey' - memorandum_err = validate_pdf(memorandum_file_key, memorandum_file_key_path) + memorandum_err = validate_pdf( + file_key=memorandum_file_key, + file_key_path=memorandum_file_key_path, + document_class=DocumentClasses.COOP.value + ) if memorandum_err: msg.extend(memorandum_err) diff --git 
a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py index 443bf4bc78..9a1f6d4014 100644 --- a/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py +++ b/legal-api/src/legal_api/services/filings/validations/notice_of_withdrawal.py @@ -20,7 +20,7 @@ from legal_api.errors import Error from legal_api.models import Filing from legal_api.models.db import db # noqa: I001 -from legal_api.services.utils import get_int +from legal_api.services.utils import get_bool, get_int from legal_api.utils.datetime import datetime as dt @@ -31,15 +31,27 @@ def validate(filing: Dict) -> Optional[Error]: msg = [] - withdrawn_filing_id_path: Final = '/filing/noticeOfWithdrawal/filingId' + base_path: Final = '/filing/noticeOfWithdrawal' + + withdrawn_filing_id_path: Final = f'{base_path}/filingId' withdrawn_filing_id = get_int(filing, withdrawn_filing_id_path) + + has_taken_effect = get_bool(filing, f'{base_path}/hasTakenEffect') + part_of_poa = get_bool(filing, f'{base_path}/partOfPoa') + if not withdrawn_filing_id: msg.append({'error': babel('Filing Id is required.'), 'path': withdrawn_filing_id_path}) return msg # cannot continue validation without the to be withdrawn filing id - err = validate_withdrawn_filing(withdrawn_filing_id) - if err: - msg.extend(err) + if has_taken_effect and part_of_poa: + msg.append({'error': babel('Cannot file a Notice of Withdrawal as the filing has a POA in effect.')}) + return Error(HTTPStatus.BAD_REQUEST, msg) # cannot continue validation if the filing has a POA in effect + + is_not_found, err_msg = validate_withdrawn_filing(withdrawn_filing_id) + if is_not_found: + return Error(HTTPStatus.NOT_FOUND, err_msg) + if err_msg and not is_not_found: + msg.extend(err_msg) if msg: return Error(HTTPStatus.BAD_REQUEST, msg) @@ -49,12 +61,14 @@ def validate(filing: Dict) -> Optional[Error]: def 
validate_withdrawn_filing(withdrawn_filing_id: int): """Validate the to be withdrawn filing id exists, the filing has a FED, the filing status is PAID.""" msg = [] + is_not_found = False # check whether the filing ID exists withdrawn_filing = db.session.query(Filing). \ filter(Filing.id == withdrawn_filing_id).one_or_none() if not withdrawn_filing: msg.append({'error': babel('The filing to be withdrawn cannot be found.')}) - return msg # cannot continue if the withdrawn filing doesn't exist + is_not_found = True + return is_not_found, msg # cannot continue if the withdrawn filing doesn't exist # check whether the filing has a Future Effective Date(FED) now = dt.utcnow() @@ -68,5 +82,5 @@ def validate_withdrawn_filing(withdrawn_filing_id: int): msg.append({'error': babel('Only paid filings with a future effective date can be withdrawn.')}) if msg: - return msg - return None + return is_not_found, msg + return None, None diff --git a/legal-api/src/legal_api/services/filings/validations/put_back_off.py b/legal-api/src/legal_api/services/filings/validations/put_back_off.py new file mode 100644 index 0000000000..a031d46b5a --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/put_back_off.py @@ -0,0 +1,50 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the 'License'); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an 'AS IS' BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Validation for the Put Back Off filing.""" +from http import HTTPStatus +from typing import Dict, Final, Optional + +from flask_babel import _ as babel # noqa: N813, I004, I001; importing camelcase '_' as a name +# noqa: I004 +from legal_api.errors import Error +from legal_api.models import Business + +from .common_validations import validate_court_order +from ...utils import get_str # noqa: I003; needed as the linter gets confused from the babel override above. + + +def validate(business: Business, put_back_off: Dict) -> Optional[Error]: + """Validate the Court Order filing.""" + if not business or not put_back_off: + return Error(HTTPStatus.BAD_REQUEST, [{'error': babel('A valid business and filing are required.')}]) + msg = [] + + if not get_str(put_back_off, '/filing/putBackOff/details'): + msg.append({'error': babel('Put Back Off details are required.'), 'path': '/filing/putBackOff/details'}) + + msg.extend(_validate_court_order(put_back_off)) + + if msg: + return Error(HTTPStatus.BAD_REQUEST, msg) + return None + + +def _validate_court_order(filing): + """Validate court order.""" + if court_order := filing.get('filing', {}).get('putBackOff', {}).get('courtOrder', None): + court_order_path: Final = '/filing/putBackOff/courtOrder' + err = validate_court_order(court_order_path, court_order) + if err: + return err + return [] diff --git a/legal-api/src/legal_api/services/filings/validations/restoration.py b/legal-api/src/legal_api/services/filings/validations/restoration.py index fb6528047d..f6531f3b24 100644 --- a/legal-api/src/legal_api/services/filings/validations/restoration.py +++ b/legal-api/src/legal_api/services/filings/validations/restoration.py @@ -39,9 +39,9 @@ def validate(business: Business, restoration: Dict) -> Optional[Error]: restoration_type = get_str(restoration, '/filing/restoration/type') limited_restoration = None if restoration_type in ('limitedRestorationExtension', 'limitedRestorationToFull'): - limited_restoration = 
Filing.get_a_businesses_most_recent_filing_of_a_type(business.id, - 'restoration', - 'limitedRestoration') + limited_restoration = Filing.get_most_recent_filing(business.id, + 'restoration', + 'limitedRestoration') if restoration_type in ('limitedRestoration', 'limitedRestorationExtension'): msg.extend(validate_expiry_date(business, restoration, restoration_type)) elif restoration_type in ('fullRestoration', 'limitedRestorationToFull'): diff --git a/legal-api/src/legal_api/services/filings/validations/transparency_register.py b/legal-api/src/legal_api/services/filings/validations/transparency_register.py new file mode 100644 index 0000000000..8989dac050 --- /dev/null +++ b/legal-api/src/legal_api/services/filings/validations/transparency_register.py @@ -0,0 +1,43 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. +"""Validation for the Transparency Register filing.""" +from typing import Optional + +from legal_api.errors import Error + + +def validate(_: dict) -> Optional[Error]: + """Validate the Transparency Register filing.""" + # NOTE: There isn't anything to validate outside what is already validated via the schema yet + return None diff --git a/legal-api/src/legal_api/services/filings/validations/validation.py b/legal-api/src/legal_api/services/filings/validations/validation.py index cd0a85c33b..8b1a0e2b44 100644 --- a/legal-api/src/legal_api/services/filings/validations/validation.py +++ b/legal-api/src/legal_api/services/filings/validations/validation.py @@ -27,6 +27,8 @@ from .alteration import validate as alteration_validate from .amalgamation_application import validate as amalgamation_application_validate from .annual_report import validate as annual_report_validate +from .appoint_receiver import validate as appoint_receiver_validate +from .cease_receiver import validate as cease_receiver_validate from .change_of_address import validate as coa_validate from .change_of_directors import validate as cod_validate from .change_of_name import validate as con_validate @@ -41,6 +43,7 @@ from 
.dissolution import validate as dissolution_validate from .incorporation_application import validate as incorporation_application_validate from .notice_of_withdrawal import validate as notice_of_withdrawal_validate +from .put_back_off import validate as put_back_off_validate from .put_back_on import validate as put_back_on_validate from .registrars_notation import validate as registrars_notation_validate from .registrars_order import validate as registrars_order_validate @@ -48,6 +51,7 @@ from .restoration import validate as restoration_validate from .schemas import validate_against_schema from .special_resolution import validate as special_resolution_validate +from .transparency_register import validate as transparency_register_validate def validate(business: Business, # pylint: disable=too-many-branches,too-many-statements @@ -186,6 +190,18 @@ def validate(business: Business, # pylint: disable=too-many-branches,too-many-s elif k == Filing.FILINGS['noticeOfWithdrawal'].get('name'): err = notice_of_withdrawal_validate(filing_json) + elif k == Filing.FILINGS['putBackOff'].get('name'): + err = put_back_off_validate(business, filing_json) + + elif k == Filing.FILINGS['transparencyRegister'].get('name'): + err = transparency_register_validate(filing_json) # pylint: disable=assignment-from-none + + elif k == Filing.FILINGS['appointReceiver'].get('name'): + err = appoint_receiver_validate(filing_json) # pylint: disable=assignment-from-none + + elif k == Filing.FILINGS['ceaseReceiver'].get('name'): + err = cease_receiver_validate(filing_json) # pylint: disable=assignment-from-none + if err: return err diff --git a/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py b/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py index ad2cb54a6a..2804fedaa5 100644 --- a/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py +++ b/legal-api/src/legal_api/services/warnings/business/business_checks/firms.py @@ -82,12 
+82,10 @@ def check_parties(legal_type: str, business: Business) -> list: firm_party_roles = business.party_roles.filter(PartyRole.cessation_date.is_(None)) result.extend(check_firm_parties(legal_type, firm_party_roles)) - completing_party_filing = Filing \ - .get_most_recent_legal_filing(business.id, 'conversion') + completing_party_filing = Filing.get_most_recent_filing(business.id, 'conversion') if not completing_party_filing: - completing_party_filing = Filing \ - .get_most_recent_legal_filing(business.id, 'registration') + completing_party_filing = Filing.get_most_recent_filing(business.id, 'registration') result.extend(check_completing_party_for_filing(completing_party_filing)) return result diff --git a/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py b/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py index a573c303a2..219367246c 100644 --- a/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py +++ b/legal-api/src/legal_api/services/warnings/business/business_checks/involuntary_dissolution.py @@ -52,6 +52,11 @@ def check_business(business: Business) -> list: exclude_in_dissolution=False, exclude_future_effective_filing=True ) ) + + # dis_details is None when the account is not included in FF filter + if not dis_details: + return result + if dis_details.transition_overdue: result.append(transition_warning) elif dis_details.ar_overdue: diff --git a/legal-api/src/legal_api/utils/legislation_datetime.py b/legal-api/src/legal_api/utils/legislation_datetime.py index 715e54489e..4e1fb44dc2 100644 --- a/legal-api/src/legal_api/utils/legislation_datetime.py +++ b/legal-api/src/legal_api/utils/legislation_datetime.py @@ -81,6 +81,14 @@ def as_utc_timezone_from_legislation_date_str(date_string: str) -> datetime: _date_time = LegislationDatetime.as_legislation_timezone_from_date_str(date_string) return 
LegislationDatetime.as_utc_timezone(_date_time) + @staticmethod + def format_as_next_legislation_day(date_string: str) -> str: + """Return the next day in this format (eg: `August 5, 2021`).""" + input_date = datetime.fromisoformat(date_string) + next_day = input_date + timedelta(days=1) + + return next_day.strftime('%B %d, %Y') + @staticmethod def format_as_report_string(date_time: datetime) -> str: """Return a datetime string in this format (eg: `August 5, 2021 at 11:00 am Pacific time`).""" @@ -126,6 +134,13 @@ def format_as_report_expiry_string(date_time: datetime) -> str: date_time_str = LegislationDatetime.format_as_report_string_with_custom_time(date_time, 0, 1, 0, 0) return date_time_str + @staticmethod + def format_as_report_expiry_string_1159(date_time: datetime) -> str: + """Return a datetime string in this format (eg: `August 5, 2021 at 11:59 pm Pacific time`).""" + # ensure is set to correct timezone + date_time_str = LegislationDatetime.format_as_report_string_with_custom_time(date_time, 23, 59, 0, 0) + return date_time_str + @staticmethod def format_as_legislation_date(date_time: datetime) -> str: """Return the date in legislation timezone as a string.""" diff --git a/legal-api/src/legal_api/utils/util.py b/legal-api/src/legal_api/utils/util.py index 6f430ef3ac..a98d953b15 100644 --- a/legal-api/src/legal_api/utils/util.py +++ b/legal-api/src/legal_api/utils/util.py @@ -26,7 +26,7 @@ def options(self, *args, **kwargs): # pylint: disable=unused-argument return {'Allow': 'GET'}, 200, \ {'Access-Control-Allow-Origin': '*', 'Access-Control-Allow-Methods': methods, - 'Access-Control-Allow-Headers': 'Authorization, Content-Type'} + 'Access-Control-Allow-Headers': 'Authorization, Content-Type, App-Name'} setattr(func, 'options', options) return func diff --git a/legal-api/src/legal_api/version.py b/legal-api/src/legal_api/version.py index 74c5b883e2..4ec6d14652 100644 --- a/legal-api/src/legal_api/version.py +++ b/legal-api/src/legal_api/version.py @@ -22,4 
+22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.144.0' # pylint: disable=invalid-name diff --git a/legal-api/tests/postman/legal-api.postman_collection.json b/legal-api/tests/postman/legal-api.postman_collection.json index 5097bef2bf..e0d5419fbe 100644 --- a/legal-api/tests/postman/legal-api.postman_collection.json +++ b/legal-api/tests/postman/legal-api.postman_collection.json @@ -1,10 +1,11 @@ { "info": { - "_postman_id": "109b8aad-eda5-442c-a7e3-981b00ada6f8", + "_postman_id": "9e1398e3-eb5e-4cb5-b4a8-9f077cf1b284", "name": "legal-api", "description": "version=2.8 - Legal API Postman Tests", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", - "_exporter_id": "484083" + "_exporter_id": "6835935", + "_collection_link": "https://warped-escape-616276.postman.co/workspace/bc-registries~8ef8e652-492a-4d19-b978-d4f0da255b2c/collection/6835935-9e1398e3-eb5e-4cb5-b4a8-9f077cf1b284?action=share&source=collection_link&creator=6835935" }, "item": [ { @@ -196,9 +197,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -255,9 +255,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -314,9 +313,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -373,9 +371,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -432,9 +429,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + 
"value": "multipart/form-data", + "type": "text" } ], "body": { @@ -491,9 +487,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "body": { @@ -541,9 +536,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "url": { @@ -582,9 +576,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "multipart/form-data" + "value": "multipart/form-data", + "type": "text" } ], "url": { @@ -2332,9 +2325,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -2380,9 +2372,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -2637,14 +2629,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -2684,14 +2675,14 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -3244,7 +3235,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -3281,8 +3271,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4029,9 
+4019,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -4066,9 +4055,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -4623,7 +4612,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -4660,8 +4648,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4833,9 +4821,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -4875,8 +4862,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -4983,9 +4970,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5025,8 +5011,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -5541,7 +5527,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -5578,8 +5563,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -5747,9 +5732,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5789,8 +5773,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": 
"Content-Type", "type": "text" } ], @@ -5897,9 +5881,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -5939,8 +5922,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6058,9 +6041,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6095,8 +6077,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6216,9 +6198,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6253,8 +6234,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6374,9 +6355,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6411,8 +6391,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -6530,9 +6510,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -6567,8 +6546,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -7430,7 +7409,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -7519,7 +7497,6 @@ "header": [ { "key": 
"Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -7799,8 +7776,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -7892,8 +7869,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -7972,8 +7949,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8062,8 +8039,8 @@ "header": [ { "key": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8253,9 +8230,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8303,9 +8279,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -8684,9 +8660,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8734,9 +8709,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -8860,9 +8835,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -8910,9 +8884,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9034,9 +9008,8 @@ "header": [ { "key": "Content-Type", - 
"name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -9089,9 +9062,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9213,14 +9186,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -9266,14 +9238,14 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" }, { "key": "Authorization", - "type": "text", - "value": "Bearer {{token}}" + "value": "Bearer {{token}}", + "type": "text" } ], "body": { @@ -9491,9 +9463,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -9541,9 +9512,9 @@ "header": [ { "key": "Content-Type", + "value": "application/json", "name": "Content-Type", - "type": "text", - "value": "application/json" + "type": "text" } ], "body": { @@ -9958,7 +9929,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -9995,8 +9965,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -10122,9 +10092,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "url": { @@ -10156,8 +10125,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", 
"type": "text" } ], @@ -10298,9 +10267,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -10401,9 +10369,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "type": "text", - "value": "application/json" + "value": "application/json", + "type": "text" } ], "body": { @@ -10473,7 +10440,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" }, @@ -10582,7 +10548,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10676,7 +10641,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10713,8 +10677,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -10856,7 +10820,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -10893,8 +10856,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -11036,7 +10999,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11130,7 +11092,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11224,7 +11185,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11316,7 +11276,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11410,7 +11369,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -11447,7 +11405,7 @@ "name": "Special Resolution", "item": 
[ { - "name": "post - success - CP3490248", + "name": "post - fail, missing resolution text - CP3490248", "event": [ { "listen": "test", @@ -11458,8 +11416,8 @@ "", "var jsonData = pm.response.json();", "", - "pm.test(\"Status code is 200\", function () {", - " pm.response.to.have.status(200);", + "pm.test(\"Status code is 400\", function () {", + " pm.response.to.have.status(400);", "});", "", "pm.test('should return JSON', function () {", @@ -11467,11 +11425,15 @@ "});", "", "", - "pm.test(\"Returns special resolution filing.\", () => {", + "pm.test(\"Returns dissolution filing.\", () => {", " pm.expect(jsonData.filing).to.exist", " pm.expect(jsonData.filing.business).to.exist", " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", + " pm.expect(jsonData.errors).to.exist", + " pm.expect(jsonData.errors.length).to.eq(1)", + " pm.expect(jsonData.errors[0].error).to.eq('Resolution must be provided.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolution')", "});", "", "" @@ -11505,14 +11467,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": 
\"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11537,7 +11498,7 @@ "response": [] }, { - "name": "post - fail, missing resolution text - CP3490248", + "name": "post - fail, missing resolution date - CP3490248 Copy", "event": [ { "listen": "test", @@ -11564,8 +11525,8 @@ " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Resolution must be provided.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolution')", + " pm.expect(jsonData.errors[0].error).to.eq('Resolution date is required.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolutionDate')", "});", "", "" @@ -11599,14 +11560,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": 
\"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11631,13 +11591,14 @@ "response": [] }, { - "name": "post - fail, missing resolution date - CP3490248 Copy", + "name": "post - fail, resolution date future - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", + "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11657,9 +11618,8 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Resolution date is required.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/resolutionDate')", + " pm.expect(jsonDataText).to.include('Resolution date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", "});", "", "" @@ -11672,7 +11632,11 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", + "", + "var future = new Date()", + "future.setDate(future.getDate() + 10)", + "pm.environment.set(\"future\", 
future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11693,14 +11657,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"{{future}}\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11725,7 +11688,7 @@ "response": [] }, { - "name": "post - fail, resolution date future - CP3490248", + "name": "post - fail, resolution date < incorp date - CP3490248", "event": [ { "listen": "test", @@ -11752,7 +11715,7 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Resolution date cannot be in the future.')", + " 
pm.expect(jsonDataText).to.include('Resolution date cannot be earlier than the incorporation date.')", " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", "});", "", @@ -11791,14 +11754,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"{{future}}\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"1970-01-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11823,14 +11785,13 @@ "response": [] }, { - "name": "post - fail, resolution date < incorp date - CP3490248", + "name": "post - fail, missing signing date - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", - "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", 
jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11850,8 +11811,9 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Resolution date cannot be earlier than the incorporation date.')", - " pm.expect(jsonDataText).to.include('/filing/specialResolution/resolutionDate')", + " pm.expect(jsonData.errors.length).to.eq(1)", + " pm.expect(jsonData.errors[0].error).to.eq('Signing date is required.')", + " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/signingDate')", "});", "", "" @@ -11864,11 +11826,7 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", - "", - "var future = new Date()", - "future.setDate(future.getDate() + 10)", - "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11889,14 +11847,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"1970-01-01\",\n \"signingDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": 
\"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -11921,13 +11878,14 @@ "response": [] }, { - "name": "post - fail, missing signing date - CP3490248", + "name": "post - fail, signing date future - CP3490248", "event": [ { "listen": "test", "script": { "exec": [ "var jsonData = pm.response.json()", + "const jsonDataText = pm.response.text()", "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", "", "var jsonData = pm.response.json();", @@ -11947,9 +11905,8 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonData.errors.length).to.eq(1)", - " pm.expect(jsonData.errors[0].error).to.eq('Signing date is required.')", - " pm.expect(jsonData.errors[0].path).to.eq('/filing/specialResolution/signingDate')", + " pm.expect(jsonDataText).to.include('Signing date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", "});", "", "" @@ -11962,7 +11919,11 @@ "script": { "exec": [ "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", + "", + "var future = new Date()", 
+ "future.setDate(future.getDate() + 10)", + "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" ], "type": "text/javascript" } @@ -11983,14 +11944,13 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } ], "body": { "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"{{future}}\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" }, "url": { "raw": "{{url}}/api/v1/businesses/:id/filings", @@ -12015,7 +11975,7 @@ "response": [] }, { - "name": "post - fail, signing date future - CP3490248", + "name": "post - fail, signing date < resolution date - CP3490248", "event": [ { "listen": "test", @@ -12042,7 +12002,7 @@ " pm.expect(jsonData.filing.specialResolution).to.exist", " pm.expect(jsonData.filing.header).to.exist", " pm.expect(jsonData.errors).to.exist", - " 
pm.expect(jsonDataText).to.include('Signing date cannot be in the future.')", + " pm.expect(jsonDataText).to.include('Signing date cannot be before the resolution date.')", " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", "});", "", @@ -12081,105 +12041,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", - "value": "application/json", - "type": "text" - } - ], - "body": { - "mode": "raw", - "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"dissolution\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T00:00:00+00:00\"\n },\n \"business\": {\n \"identifier\": \"CP3490248\",\n \"legalType\": \"CP\"\n },\n \"specialResolution\": {\n \"resolution\": \"Be in resolved that cookies are delicious.\\n\\nNom nom nom...\",\n \"resolutionDate\": \"2021-10-01\",\n \"signingDate\": \"{{future}}\",\n \"signatory\": {\n \"givenName\": \"Jane\",\n \"additionalName\": \"\",\n \"familyName\": \"Doe\"\n }\n }\n }\n}" - }, - "url": { - "raw": "{{url}}/api/v1/businesses/:id/filings", - "host": [ - "{{url}}" - ], - "path": [ - "api", - "v1", - "businesses", - ":id", - "filings" - ], - "variable": [ - { - "key": "id", - "value": "CP3490248" - } - ] - } - }, - "response": [] - }, - { - "name": "post - fail, signing date < resolution date - CP3490248", - "event": [ - { - "listen": "test", - "script": { - "exec": [ - "var jsonData = pm.response.json()", - "const jsonDataText = pm.response.text()", - "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", - "", - "var jsonData = pm.response.json();", - "", - "pm.test(\"Status code is 400\", function () {", - " pm.response.to.have.status(400);", - "});", - "", - "pm.test('should return JSON', function () {", - " pm.response.to.have.header('Content-Type', 'application/json');", - "});", - "", - "", - "pm.test(\"Returns dissolution filing.\", () => {", - " pm.expect(jsonData.filing).to.exist", - " 
pm.expect(jsonData.filing.business).to.exist", - " pm.expect(jsonData.filing.specialResolution).to.exist", - " pm.expect(jsonData.filing.header).to.exist", - " pm.expect(jsonData.errors).to.exist", - " pm.expect(jsonDataText).to.include('Signing date cannot be before the resolution date.')", - " pm.expect(jsonDataText).to.include('/filing/specialResolution/signingDate')", - "});", - "", - "" - ], - "type": "text/javascript" - } - }, - { - "listen": "prerequest", - "script": { - "exec": [ - "var today = new Date()", - "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))", - "", - "var future = new Date()", - "future.setDate(future.getDate() + 10)", - "pm.environment.set(\"future\", future.getFullYear()+'-'+('0'+(future.getMonth()+1)).slice(-2)+'-'+('0'+future.getDate()).slice(-2))" - ], - "type": "text/javascript" - } - } - ], - "request": { - "auth": { - "type": "bearer", - "bearer": [ - { - "key": "token", - "value": "{{token}}", - "type": "string" - } - ] - }, - "method": "POST", - "header": [ - { - "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -12278,7 +12139,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -12377,7 +12237,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -17436,7 +17295,6 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", "type": "text" } @@ -17473,8 +17331,8 @@ "header": [ { "key": "Content-Type", - "name": "Content-Type", "value": "application/json", + "name": "Content-Type", "type": "text" } ], @@ -18103,6 +17961,281 @@ "response": [] } ] + }, + { + "name": "Transparency Register", + "item": [ + { + "name": "initial", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + 
"pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:31:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"initial\",\n \"ledgerReferenceNumber\": \"1234rtfjf44544fkk\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + "{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + 
}, + { + "name": "annual", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:40:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"annual\",\n \"ledgerReferenceNumber\": \"2234rtfjf44544fka\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + 
"{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + }, + { + "name": "change", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "var jsonData = pm.response.json()", + "pm.environment.set(\"filing_id\", jsonData.filing.header.filingId)", + "", + "var jsonData = pm.response.json();", + "", + "pm.test(\"Status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "pm.test('should return JSON', function () {", + " pm.response.to.have.header('Content-Type', 'application/json');", + "});", + "", + "", + "pm.test(\"Returns special resolution filing.\", () => {", + " pm.expect(jsonData.filing).to.exist", + " pm.expect(jsonData.filing.business).to.exist", + " pm.expect(jsonData.filing.specialResolution).to.exist", + " pm.expect(jsonData.filing.header).to.exist", + "});", + "", + "" + ], + "type": "text/javascript", + "packages": {} + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "var today = new Date()", + "pm.environment.set(\"today\", today.getFullYear()+'-'+('0'+(today.getMonth()+1)).slice(-2)+'-'+('0'+today.getDate()).slice(-2))" + ], + "type": "text/javascript", + "packages": {} + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "Content-Type", + "value": "application/json", + "type": "text" + }, + { + "key": "Account-Id", + "value": "3101", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\n \"filing\": {\n \"header\": {\n \"name\": \"transparencyRegister\",\n \"date\": \"{{today}}\",\n \"certifiedBy\": \"full name\",\n \"email\": \"no_one@never.get\",\n \"effectiveDate\": \"{{today}}T15:50:55+00:00\"\n },\n \"business\": {\n \"identifier\": \"{{business_identifier}}\"\n },\n \"transparencyRegister\": {\n \"type\": \"change\",\n \"ledgerReferenceNumber\": 
\"3234rtfjf44544fkb\"\n }\n }\n}" + }, + "url": { + "raw": "{{url}}/api/v2/businesses/{{business_identifier}}/filings", + "host": [ + "{{url}}" + ], + "path": [ + "api", + "v2", + "businesses", + "{{business_identifier}}", + "filings" + ] + } + }, + "response": [] + } + ] } ], "event": [ @@ -18125,4 +18258,4 @@ } } ] -} +} \ No newline at end of file diff --git a/legal-api/tests/unit/core/test_filing_ledger.py b/legal-api/tests/unit/core/test_filing_ledger.py index 2ddd5a9977..c8fefa5b8c 100644 --- a/legal-api/tests/unit/core/test_filing_ledger.py +++ b/legal-api/tests/unit/core/test_filing_ledger.py @@ -42,6 +42,11 @@ def load_ledger(business, founding_date): elif filing_meta['name'] == 'amalgamationApplication': filing['filing']['amalgamationApplication'] = {} filing['filing']['amalgamationApplication']['type'] = 'regular' + elif filing_meta['name'] == 'transparencyRegister': + filing['filing']['transparencyRegister'] = { + 'type': 'initial', + 'ledgerReferenceNumber': '123ewd2' + } f = factory_completed_filing(business, filing, filing_date=founding_date + datedelta.datedelta(months=i)) for c in range(i): comment = Comment() @@ -70,7 +75,7 @@ def test_simple_ledger_search(session): alteration = next((f for f in ledger if f.get('name') == 'alteration'), None) assert alteration - assert 16 == len(alteration.keys()) + assert 17 == len(alteration.keys()) assert 'availableOnPaperOnly' in alteration assert 'effectiveDate' in alteration assert 'filingId' in alteration @@ -80,6 +85,7 @@ def test_simple_ledger_search(session): assert 'submittedDate' in alteration assert 'submitter' in alteration assert 'displayLedger' in alteration + assert 'withdrawalPending' in alteration # assert alteration['commentsLink'] # assert alteration['correctionLink'] # assert alteration['filingLink'] @@ -119,3 +125,7 @@ def test_common_ledger_items(session): factory_completed_filing(business, filing, filing_date=founding_date + datedelta.datedelta(months=1), filing_type='adminFreeze') 
common_ledger_items = CoreFiling.common_ledger_items(identifier, completed_filing) assert common_ledger_items['displayLedger'] is False + + completed_filing.withdrawal_pending = True + common_ledger_items = CoreFiling.common_ledger_items(identifier, completed_filing) + assert common_ledger_items['withdrawalPending'] is True diff --git a/legal-api/tests/unit/invalid_size.pdf b/legal-api/tests/unit/invalid_size.pdf new file mode 100644 index 0000000000..053df816df Binary files /dev/null and b/legal-api/tests/unit/invalid_size.pdf differ diff --git a/legal-api/tests/unit/models/__init__.py b/legal-api/tests/unit/models/__init__.py index fadc6ebbb7..dc7742ed76 100644 --- a/legal-api/tests/unit/models/__init__.py +++ b/legal-api/tests/unit/models/__init__.py @@ -42,7 +42,7 @@ db, ) from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime, timezone from tests import EPOCH_DATETIME, FROZEN_DATETIME @@ -160,8 +160,7 @@ def factory_business(identifier, no_dissolution=no_dissolution) # Versioning business - uow = versioning_manager.unit_of_work(db.session) - uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business.save() return business @@ -243,11 +242,15 @@ def factory_completed_filing(business, filing._filing_type = filing_type if filing_sub_type: filing._filing_sub_type = filing_sub_type + + if (filing.filing_type == 'adminFreeze' or + (filing.filing_type == 'dissolution' and filing.filing_sub_type == 'involuntary')): + filing.hide_in_ledger = True + filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date 
filing.payment_completion_date = filing_date @@ -287,9 +290,8 @@ def factory_epoch_filing(business, filing_date=FROZEN_DATETIME): """Create an error filing.""" filing = Filing() filing.business_id = business.id - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.filing_date = filing_date filing.filing_json = {'filing': {'header': {'name': 'lear_epoch'}}} filing.save() diff --git a/legal-api/tests/unit/models/test_amalgamation.py b/legal-api/tests/unit/models/test_amalgamation.py index 52bc204737..7f2e0dda5d 100644 --- a/legal-api/tests/unit/models/test_amalgamation.py +++ b/legal-api/tests/unit/models/test_amalgamation.py @@ -104,11 +104,23 @@ def test_valid_amalgamation_save(session): amalgamation_3.save() + amalgamation_4 = Amalgamation( + amalgamation_type=Amalgamation.AmalgamationTypes.unknown, + business_id=b.id, + filing_id=filing.id, + amalgamation_date=datetime.utcnow(), + court_approval=True + ) + + amalgamation_4.save() + # verify assert amalgamation_1.id assert amalgamation_2.id assert amalgamation_3.id + assert amalgamation_4.id for type in Amalgamation.AmalgamationTypes: assert type in [Amalgamation.AmalgamationTypes.horizontal, Amalgamation.AmalgamationTypes.vertical, - Amalgamation.AmalgamationTypes.regular] + Amalgamation.AmalgamationTypes.regular, + Amalgamation.AmalgamationTypes.unknown] diff --git a/legal-api/tests/unit/models/test_business.py b/legal-api/tests/unit/models/test_business.py index 6cda1bb70d..b061f66123 100644 --- a/legal-api/tests/unit/models/test_business.py +++ b/legal-api/tests/unit/models/test_business.py @@ -37,7 +37,7 @@ PartyRole, db, ) -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.services import flags from legal_api.utils.legislation_datetime import 
LegislationDatetime from tests import EPOCH_DATETIME, TIMEZONE_OFFSET @@ -293,7 +293,7 @@ def test_good_standing_check_transition_filing(session, test_name, has_no_transi restoration_filing.save() elif test_name == 'TRANSITION_COMPLETED': factory_completed_filing(business, TRANSITION_FILING_TEMPLATE, filing_type='transition') - + check_result = business._has_no_transition_filed_after_restoration() assert check_result == has_no_transition_filed with patch.object(flags, 'is_on', return_value=True): @@ -324,12 +324,14 @@ def test_business_json(session): # slim json d_slim = { 'adminFreeze': False, + 'foundingDate': '1970-01-01T00:00:00+00:00', 'goodStanding': False, # good standing will be false because the epoch is 1970 'identifier': 'CP1234567', 'inDissolution': False, 'legalName': 'legal_name', 'legalType': Business.LegalTypes.COOP.value, 'state': Business.State.ACTIVE.name, + 'lastModified': EPOCH_DATETIME.isoformat(), 'taxId': '123456789' } @@ -721,8 +723,7 @@ def test_amalgamated_into_business_json(session, test_name, existing_business_st filing.save() # Versioning business - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = Business( legal_name='Test - Legal Name', @@ -749,7 +750,7 @@ def test_amalgamated_into_business_json(session, test_name, existing_business_st db.session.add(existing_business) db.session.commit() - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.business_id = business.id filing.save() diff --git a/legal-api/tests/unit/models/test_comments.py b/legal-api/tests/unit/models/test_comments.py index d03739da75..c90598b694 100644 --- a/legal-api/tests/unit/models/test_comments.py +++ b/legal-api/tests/unit/models/test_comments.py @@ -75,7 +75,7 @@ def test_filing_comment_dump_json(session): assert c.json == { 'comment': { 'id': c.id, - 'submitterDisplayName': None, + 
'submitterDisplayName': 'Registry Staff', 'comment': 'a comment', 'filingId': f.id, 'businessId': None, diff --git a/legal-api/tests/unit/models/test_consent_continuation_out.py b/legal-api/tests/unit/models/test_consent_continuation_out.py index e5e98484f0..7c1cd0b208 100644 --- a/legal-api/tests/unit/models/test_consent_continuation_out.py +++ b/legal-api/tests/unit/models/test_consent_continuation_out.py @@ -45,6 +45,7 @@ def test_consent_continuation_out_save(session): expiry_date = get_cco_expiry_date(filing.effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = 'CA' consent_continuation_out.foreign_jurisdiction_region = 'AB' consent_continuation_out.expiry_date = expiry_date @@ -65,6 +66,7 @@ def test_get_active_cco(session): expiry_date = get_cco_expiry_date(filing.effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = 'CA' consent_continuation_out.foreign_jurisdiction_region = 'AB' consent_continuation_out.expiry_date = expiry_date diff --git a/legal-api/tests/unit/models/test_filing.py b/legal-api/tests/unit/models/test_filing.py index c5f5ee6c4e..ffe40b294a 100644 --- a/legal-api/tests/unit/models/test_filing.py +++ b/legal-api/tests/unit/models/test_filing.py @@ -39,7 +39,7 @@ from legal_api.exceptions import BusinessException from legal_api.models import Business, Filing, User -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from tests import EPOCH_DATETIME from tests.conftest import not_raises from tests.unit.models import ( @@ -109,8 +109,7 @@ def test_filing_orm_delete_blocked_if_completed(session): """Assert that attempting to delete a filing will raise a BusinessException.""" from 
legal_api.exceptions import BusinessException - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) b = factory_business('CP1234567') @@ -120,7 +119,7 @@ def test_filing_orm_delete_blocked_if_completed(session): filing.filing_json = ANNUAL_REPORT filing.payment_token = 'a token' filing.payment_completion_date = datetime.datetime.utcnow() - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.save() with pytest.raises(BusinessException) as excinfo: @@ -332,15 +331,14 @@ def test_get_filing_by_payment_token(session): def test_get_filings_by_status(session): """Assert that a filing can be retrieved by status.""" - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = factory_business('CP1234567') payment_token = '1000' filing = Filing() filing.business_id = business.id filing.filing_json = ANNUAL_REPORT filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = datetime.datetime.utcnow() filing.save() @@ -359,7 +357,7 @@ def test_get_filings_by_status__default_order(session): # setup base_filing = copy.deepcopy(FILING_HEADER) base_filing['specialResolution'] = SPECIAL_RESOLUTION - uow = versioning_manager.unit_of_work(session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = factory_business('CP1234567') completion_date = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) @@ -369,7 +367,6 @@ def test_get_filings_by_status__default_order(session): file_counter = -1 with freeze_time(completion_date): for i in range(0, 5): - transaction = uow.create_transaction(session) payment_token = str(i) effective_date = f'200{i}-04-15T00:00:00+00:00' @@ -380,7 +377,7 @@ def 
test_get_filings_by_status__default_order(session): filing.filing_json = base_filing filing.effective_date = datetime.datetime.fromisoformat(effective_date) filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = completion_date filing.save() @@ -401,8 +398,7 @@ def test_get_filings_by_status__default_order(session): def test_get_most_recent_filing_by_legal_type_in_json(session): """Assert that the most recent legal filing can be retrieved.""" business = factory_business('CP1234567') - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) for i in range(1, 5): effective_date = f'200{i}-07-01T00:00:00+00:00' @@ -419,7 +415,7 @@ def test_get_most_recent_filing_by_legal_type_in_json(session): filing.filing_json = base_filing filing.effective_date = datetime.datetime.fromisoformat(effective_date) filing.payment_token = 'token' - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = completion_date filing.save() @@ -435,8 +431,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): Create 3 filings, find the 2 one by the type only. 
""" business = factory_business('CP1234567') - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) # filing 1 effective_date = '2001-07-01T00:00:00+00:00' @@ -449,7 +444,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing1.filing_json = base_filing filing1.effective_date = datetime.datetime.fromisoformat(effective_date) filing1.payment_token = 'token' - filing1.transaction_id = transaction.id + filing1.transaction_id = transaction_id filing1.payment_completion_date = completion_date filing1.save() @@ -466,7 +461,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing2.filing_json = base_filing filing2.effective_date = datetime.datetime.fromisoformat(effective_date) filing2.payment_token = 'token' - filing2.transaction_id = transaction.id + filing2.transaction_id = transaction_id filing2.payment_completion_date = completion_date filing2.save() @@ -481,7 +476,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): filing3.filing_json = base_filing filing3.effective_date = datetime.datetime.fromisoformat(effective_date) filing3.payment_token = 'token' - filing3.transaction_id = transaction.id + filing3.transaction_id = transaction_id filing3.payment_completion_date = completion_date filing3.save() @@ -501,8 +496,7 @@ def test_get_most_recent_filing_by_legal_type_db_field(session): def test_get_filings_by_status_before_go_live_date(session, test_type, days, expected, status): """Assert that a filing can be retrieved by status.""" import copy - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) + transaction_id = VersioningProxy.get_transaction_id(session()) business = factory_business('CP1234567') payment_token = '1000' ar = copy.deepcopy(ANNUAL_REPORT) @@ -515,7 +509,7 @@ def test_get_filings_by_status_before_go_live_date(session, test_type, days, exp 
filing.business_id = business.id filing.filing_json = ar filing.payment_token = payment_token - filing.transaction_id = transaction.id + filing.transaction_id = transaction_id filing.payment_completion_date = datetime.datetime.utcnow() filing.save() @@ -560,7 +554,7 @@ def test_get_completed_filings_for_colin(session, client, jwt): assert len(filings) == 0 -def test_get_a_businesses_most_recent_filing_of_a_type(session): +def test_get_most_recent_filing(session): """Assert that the most recent completed filing of a specified type is returned.""" from legal_api.models import Filing from tests.unit.models import factory_completed_filing @@ -577,7 +571,7 @@ def test_get_a_businesses_most_recent_filing_of_a_type(session): filing = factory_completed_filing(b, ar, filing_date) filings.append(filing) # test - filing = Filing.get_a_businesses_most_recent_filing_of_a_type(b.id, Filing.FILINGS['annualReport']['name']) + filing = Filing.get_most_recent_filing(b.id, Filing.FILINGS['annualReport']['name']) # assert that we get the last filing assert filings[4] == filing diff --git a/legal-api/tests/unit/reports/test_report.py b/legal-api/tests/unit/reports/test_report.py index 5ac9632db2..a94edf3108 100644 --- a/legal-api/tests/unit/reports/test_report.py +++ b/legal-api/tests/unit/reports/test_report.py @@ -15,12 +15,14 @@ """Test-Suite to ensure that the Report class is working as expected.""" import copy from contextlib import suppress +from datetime import datetime, timedelta from pathlib import Path from unittest.mock import patch import pytest from flask import current_app from registry_schemas.example_data import ( + AGM_LOCATION_CHANGE, ALTERATION_FILING_TEMPLATE, ANNUAL_REPORT, CHANGE_OF_ADDRESS, @@ -31,16 +33,19 @@ CORRECTION_COMBINED_AR, DISSOLUTION, FILING_HEADER, + NOTICE_OF_WITHDRAWAL, + RESTORATION, INCORPORATION_FILING_TEMPLATE, SPECIAL_RESOLUTION, TRANSITION_FILING_TEMPLATE, ) from legal_api.models import db # noqa:I001 -from legal_api.models.db import 
versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.reports.report import Report # noqa:I001 from legal_api.services import VersionedBusinessDetailsService # noqa:I001 -from tests.unit.models import factory_business, factory_completed_filing # noqa:E501,I001 +from legal_api.utils.legislation_datetime import LegislationDatetime +from tests.unit.models import factory_business, factory_completed_filing, factory_pending_filing # noqa:E501,I001 def create_report(identifier, entity_type, report_type, filing_type, template): @@ -244,8 +249,7 @@ def test_alteration_name_change(session): def update_business_legal_name(business, legal_name): """Update business legal name.""" - uow = versioning_manager.unit_of_work(db.session) - uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business.legal_name = legal_name business.save() @@ -293,3 +297,93 @@ def create_alteration_report(filing, business, report_type): set_registrar_info(report) set_meta_info(report) return report + + +@pytest.mark.parametrize( + 'test_name, identifier, entity_type, filing_template, filing_type, formatted_filing_type', + [ + ('BC agmLocationChange', 'BC1234567', 'BC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('BC alteration', 'BC1234567', 'BC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('BC changeOfAddress', 'BC1234567', 'BC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('BC changeOfDirectors', 'BC1234567', 'BC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('BC dissolution', 'BC1234567', 'BC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('BC restoration', 'BC1234567', 'BC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('BEN agmLocationChange', 'BC1234567', 'BEN', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('BEN alteration', 'BC1234567', 'BEN', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('BEN 
changeOfAddress', 'BC1234567', 'BEN', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('BEN changeOfDirectors', 'BC1234567', 'BEN', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('BEN dissolution', 'BC1234567', 'BEN', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('BEN restoration', 'BC1234567', 'BEN', RESTORATION, 'restoration', 'Full Restoration Application'), + ('ULC agmLocationChange', 'BC1234567', 'ULC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('ULC alteration', 'BC1234567', 'ULC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('ULC changeOfAddress', 'BC1234567', 'ULC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('ULC changeOfDirectors', 'BC1234567', 'ULC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('ULC dissolution', 'BC1234567', 'ULC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('ULC restoration', 'BC1234567', 'ULC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CC agmLocationChange', 'BC1234567', 'CC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CC alteration', 'BC1234567', 'CC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CC changeOfAddress', 'BC1234567', 'CC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CC changeOfDirectors', 'BC1234567', 'CC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CC dissolution', 'BC1234567', 'CC', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CC restoration', 'BC1234567', 'CC', RESTORATION, 'restoration', 'Full Restoration Application'), + ('C agmLocationChange', 'C1234567', 'C', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('C alteration', 'C1234567', 'C', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('C changeOfAddress', 'C1234567', 'C', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('C changeOfDirectors', 'C1234567', 'C', 
CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('C dissolution', 'C1234567', 'C', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('C restoration', 'C1234567', 'C', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CUL agmLocationChange', 'C1234567', 'CUL', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CUL alteration', 'C1234567', 'CUL', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CUL changeOfAddress', 'C1234567', 'CUL', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CUL changeOfDirectors', 'C1234567', 'CUL', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CUL dissolution', 'C1234567', 'CUL', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CUL restoration', 'C1234567', 'CUL', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CBEN agmLocationChange', 'C1234567', 'CBEN', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CBEN alteration', 'C1234567', 'CBEN', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CBEN changeOfAddress', 'C1234567', 'CBEN', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CBEN changeOfDirectors', 'C1234567', 'CBEN', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CBEN dissolution', 'C1234567', 'CBEN', DISSOLUTION, 'dissolution', 'Voluntary Dissolution'), + ('CBEN restoration', 'C1234567', 'CBEN', RESTORATION, 'restoration', 'Full Restoration Application'), + ('CCC agmLocationChange', 'C1234567', 'CCC', AGM_LOCATION_CHANGE, 'agmLocationChange', 'AGM Location Change'), + ('CCC alteration', 'C1234567', 'CCC', ALTERATION_FILING_TEMPLATE, 'alteration', 'Alteration'), + ('CCC changeOfAddress', 'C1234567', 'CCC', CHANGE_OF_ADDRESS, 'changeOfAddress', 'Address Change'), + ('CCC changeOfDirectors', 'C1234567', 'CCC', CHANGE_OF_DIRECTORS, 'changeOfDirectors', 'Director Change'), + ('CCC dissolution', 'C1234567', 'CCC', DISSOLUTION, 'dissolution', 'Voluntary 
Dissolution'), + ('CCC restoration', 'C1234567', 'CCC', RESTORATION, 'restoration', 'Full Restoration Application') + ] +) +def test_notice_of_withdraw_format_data(session, test_name, identifier, entity_type, filing_template, filing_type, formatted_filing_type): + """Test the data passed to NoW report template - existing business""" + # create a business + test_business = factory_business(identifier=identifier, entity_type=entity_type) + + # file a FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + withdrawn_json = copy.deepcopy(FILING_HEADER) + withdrawn_json['filing']['header']['name'] = filing_type + withdrawn_json['filing']['business']['legalType'] = entity_type + withdrawn_json['filing'][filing_type] = copy.deepcopy(filing_template) + withdrawn_filing = factory_pending_filing(test_business, withdrawn_json) + withdrawn_filing.effective_date = future_effective_date + withdrawn_filing.payment_completion_date = today.isoformat() + withdrawn_filing.save() + withdrawn_filing_id = withdrawn_filing.id + + # file a NoW filing + now_json = copy.deepcopy(FILING_HEADER) + now_json['filing']['header']['name'] = 'noticeOfWithdrawal' + now_json['filing']['business']['legalType'] = 'BC' + now_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + + # verify formatted NoW data for report template + formatted_now_json = copy.deepcopy(now_json['filing']) + report_instance = Report({}) + expected_withdrawn_filing_effective_date = LegislationDatetime.as_legislation_timezone(withdrawn_filing.effective_date) + expected_withdrawn_filing_effective_date = LegislationDatetime.format_as_report_string(expected_withdrawn_filing_effective_date) + report_instance._format_notice_of_withdrawal_data(formatted_now_json) + assert formatted_now_json['withdrawnFilingType'] == 
formatted_filing_type + assert formatted_now_json['withdrawnFilingEffectiveDate'] == expected_withdrawn_filing_effective_date + assert formatted_now_json['noticeOfWithdrawal']['filingId'] == withdrawn_filing_id diff --git a/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py index a53f72c5d5..89d0dfce38 100644 --- a/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v1/test_business_filings/test_filings.py @@ -1031,6 +1031,9 @@ def test_calc_annual_report_date(session, client, jwt): SPECIAL_RESOLUTION_NO_CON_FILING = copy.deepcopy(CP_SPECIAL_RESOLUTION_TEMPLATE) del SPECIAL_RESOLUTION_NO_CON_FILING['filing']['changeOfName'] +AMALGAMATION_OUT_FILING = copy.deepcopy(FILING_HEADER) +AMALGAMATION_OUT_FILING['filing']['amalgamationOut'] = {} + CONTINUATION_OUT_FILING = copy.deepcopy(FILING_HEADER) CONTINUATION_OUT_FILING['filing']['continuationOut'] = {} @@ -1098,6 +1101,10 @@ def _fee_code_asserts(business, filing_json: dict, multiple_fee_codes, expected_ False, []), ('CP1234567', CP_SPECIAL_RESOLUTION_TEMPLATE, 'specialResolution', Business.LegalTypes.COOP.value, False, ['SPRLN', 'OTCON']), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.COMP.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BCOMP.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, []), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_CCC.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.COMP.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BCOMP.value, False, []), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, 
[]), diff --git a/legal-api/tests/unit/resources/v1/test_business_tasks.py b/legal-api/tests/unit/resources/v1/test_business_tasks.py index b60019eccf..8095958570 100644 --- a/legal-api/tests/unit/resources/v1/test_business_tasks.py +++ b/legal-api/tests/unit/resources/v1/test_business_tasks.py @@ -96,8 +96,8 @@ def test_get_tasks_no_filings(session, client, jwt): def test_get_tasks_next_year(session, client, jwt): """Assert that one todo item is returned in the calendar year following incorporation.""" identifier = 'CP7654321' - founding_date = datetime.today() + datedelta.datedelta(days=1) - datedelta.datedelta(years=1) - factory_business(identifier, founding_date=founding_date) # incorporation 1 year - 1 day ago + founding_date = datetime.today() - datedelta.datedelta(years=1) + factory_business(identifier, founding_date=founding_date) # incorporation 1 year # To-do are all years from the year after incorporation until this year diff --git a/legal-api/tests/unit/resources/v2/test_business.py b/legal-api/tests/unit/resources/v2/test_business.py index 79d3ddf4db..f1fb04d860 100644 --- a/legal-api/tests/unit/resources/v2/test_business.py +++ b/legal-api/tests/unit/resources/v2/test_business.py @@ -584,10 +584,22 @@ def test_get_could_file(session, client, jwt): "name": "amalgamationApplication", "type": "horizontal" }, + { + "displayName": "Amalgamation Out", + "name": "amalgamationOut" + }, { "displayName": "Annual Report", "name": "annualReport" }, + { + "displayName": "Appoint Receiver", + "name": "appointReceiver" + }, + { + "displayName": "Cease Receiver", + "name": "ceaseReceiver" + }, { "displayName": "Address Change", "name": "changeOfAddress" @@ -596,6 +608,10 @@ def test_get_could_file(session, client, jwt): "displayName": "Director Change", "name": "changeOfDirectors" }, + { + "displayName": "6-Month Consent to Amalgamate Out", + "name": "consentAmalgamationOut" + }, { "displayName": "6-Month Consent to Continue Out", "name": "consentContinuationOut" @@ 
-626,6 +642,10 @@ def test_get_could_file(session, client, jwt): "displayName": "BC Limited Company Incorporation Application", "name": "incorporationApplication" }, + { + "displayName": "Correction - Put Back Off", + "name": "putBackOff", + }, { "displayName": "Registrar's Notation", "name": "registrarsNotation" @@ -659,4 +679,4 @@ def test_get_could_file(session, client, jwt): assert rv.json['couldFile']['filing'] assert rv.json['couldFile']['filing']['filingTypes'] assert len(rv.json['couldFile']['filing']['filingTypes']) > 0 - assert rv.json['couldFile']['filing']['filingTypes'] == expected \ No newline at end of file + assert rv.json['couldFile']['filing']['filingTypes'] == expected diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py index 91aecc84f8..21c9dcef45 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filing_documents.py @@ -141,12 +141,11 @@ def test_unpaid_filing(session, client, jwt): ALTERATION_MEMORANDUM_RULES_IN_RESOLUTION['memorandumInResolution'] = True ALTERATION_MEMORANDUM_RULES_IN_RESOLUTION['rulesInResolution'] = True -# a mock notice of withdrawal filing, since its schema is not ready yet -# may need to delete this and change variables in the test cases in the future MOCK_NOTICE_OF_WITHDRAWAL = {} MOCK_NOTICE_OF_WITHDRAWAL['courtOrder'] = copy.deepcopy(COURT_ORDER) MOCK_NOTICE_OF_WITHDRAWAL['filingId'] = '123456' - +MOCK_NOTICE_OF_WITHDRAWAL['hasTakenEffect'] = False +MOCK_NOTICE_OF_WITHDRAWAL['partOfPoa'] = False @pytest.mark.parametrize('test_name, identifier, entity_type, filing_name_1, legal_filing_1, filing_name_2, legal_filing_2, status, expected_msg, expected_http_code, payment_completion_date', [ ('special_res_paper', 'CP7654321', Business.LegalTypes.COOP.value, @@ -726,6 +725,17 @@ def 
test_unpaid_filing(session, client, jwt): }, HTTPStatus.OK, '2017-10-01' ), + ('bc_ia_completed', 'BC7654321', Business.LegalTypes.COMP.value, + 'incorporationApplication', INCORPORATION, None, None, Filing.Status.WITHDRAWN, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/receipt', + 'legalFilings': [ + {'incorporationApplication': + f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/incorporationApplication'}, + ] + } + }, + HTTPStatus.OK, '2017-10-01' + ), ('bc_annual_report_completed', 'BC7654321', Business.LegalTypes.COMP.value, 'annualReport', ANNUAL_REPORT, None, None, Filing.Status.COMPLETED, {'documents': {'receipt': f'{base_url}/api/v2/businesses/BC7654321/filings/1/documents/receipt', @@ -1469,6 +1479,14 @@ def filer_action(filing_name, filing_json, meta_data, business): ]}}, HTTPStatus.OK ), + ('ben_ia_paid', 'Tb31yQIuBw', None, Business.LegalTypes.BCOMP.value, + 'incorporationApplication', INCORPORATION, Filing.Status.WITHDRAWN, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/receipt', + 'legalFilings': [ + {'incorporationApplication': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/incorporationApplication'}, + ]}}, + HTTPStatus.OK + ), ('ben_ia_completed', 'Tb31yQIuBw', 'BC7654321', Business.LegalTypes.BCOMP.value, 'incorporationApplication', INCORPORATION, Filing.Status.COMPLETED, {'documents': {}}, HTTPStatus.OK @@ -1628,3 +1646,74 @@ def test_get_receipt_request_mock(session, client, jwt, requests_mock): assert rv.status_code == HTTPStatus.CREATED assert requests_mock.called_once + + +@pytest.mark.parametrize('test_name, temp_identifier, entity_type, expected_msg, expected_http_code', [ + ('now_ia_paid', 'Tb31yQIuBw', Business.LegalTypes.BCOMP.value, + {'documents': {'receipt': f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/receipt', + 'legalFilings': [ + {'noticeOfWithdrawal': 
f'{base_url}/api/v2/businesses/Tb31yQIuBw/filings/1/documents/noticeOfWithdrawal'}, + ]}}, + HTTPStatus.OK + ) +]) +def test_temp_document_list_for_now(mocker, session, client, jwt, + test_name, + temp_identifier, + entity_type, + expected_msg, expected_http_code): + """Test document list for noticeOfWithdrawal states with temp identifier.""" + # Setup + + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['header']['name'] = 'incorporationApplication' + withdrawn_filing_json['filing']['business']['legalType'] = entity_type + withdrawn_filing_json['filing']['incorporationApplication'] = INCORPORATION + + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['header']['name'] = 'noticeOfWithdrawal' + filing_json['filing']['business']['legalType'] = entity_type + filing_json['filing']['noticeOfWithdrawal'] = MOCK_NOTICE_OF_WITHDRAWAL + + filing_date = datetime.utcnow() + + temp_reg = RegistrationBootstrap() + temp_reg._identifier = temp_identifier + temp_reg.save() + + business = None + withdrawn_filing = factory_filing(business, withdrawn_filing_json, filing_date=filing_date) + withdrawn_filing.temp_reg = temp_identifier + withdrawn_filing.save() + filing = factory_filing(business, filing_json, filing_date=filing_date) + filing.skip_status_listener = True + filing._status = Filing.Status.PAID + filing._payment_completion_date = '2017-10-01' + filing.temp_reg = None + filing.withdrawn_filing_id = withdrawn_filing.id + filing.save() + + mocker.patch('legal_api.core.filing.has_roles', return_value=True) + rv = client.get(f'/api/v2/businesses/{temp_identifier}/filings/{filing.id}/documents', + headers=create_header(jwt, [STAFF_ROLE], temp_identifier)) + + # remove the filing ID + rv_data = json.loads(re.sub("/\d+/", "/", rv.data.decode("utf-8")).replace("\n", "")) + expected = json.loads(re.sub("/\d+/", "/", json.dumps(expected_msg))) + + assert rv.status_code == expected_http_code + assert rv_data == expected + + 
filing._status = Filing.Status.COMPLETED + filing.save() + + mocker.patch('legal_api.core.filing.has_roles', return_value=True) + rv = client.get(f'/api/v2/businesses/{temp_identifier}/filings/{filing.id}/documents', + headers=create_header(jwt, [STAFF_ROLE], temp_identifier)) + + # remove the filing ID + rv_data = json.loads(re.sub("/\d+/", "/", rv.data.decode("utf-8")).replace("\n", "")) + expected = json.loads(re.sub("/\d+/", "/", json.dumps(expected_msg))) + + assert rv.status_code == expected_http_code + assert rv_data == expected diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py index 08c2c9bea8..9867c22f39 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings.py @@ -17,7 +17,7 @@ Test-Suite to ensure that the /businesses endpoint is working as expected. """ import copy -from datetime import datetime, timezone +from datetime import datetime, timedelta, timezone from http import HTTPStatus from typing import Final from unittest.mock import patch @@ -44,6 +44,7 @@ FILING_HEADER, INCORPORATION, INCORPORATION_FILING_TEMPLATE, + NOTICE_OF_WITHDRAWAL as SCHEMA_NOTICE_OF_WITHDRAWAL, REGISTRATION, SPECIAL_RESOLUTION, TRANSITION_FILING_TEMPLATE @@ -115,6 +116,80 @@ def test_get_temp_business_filing(session, client, jwt, legal_type, filing_type, assert rv.json['filing']['header']['name'] == filing_type assert rv.json['filing'][filing_type] == filing_json +@pytest.mark.parametrize( + 'jwt_role, expected', + [ + (UserRoles.staff, 'staff-person'), + (UserRoles.public_user, 'Registry Staff'), + ] +) +def test_get_withdrawn_temp_business_filing(session, client, jwt, jwt_role, expected): + """Assert that a withdrawn FE temp business returns the filing with the NoW embedded once available.""" + user = factory_user('idir/staff-person') + + # set-up withdrawn 
boostrap FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = 'incorporationApplication' + del json_data['filing']['business'] + new_bus_filing_json = copy.deepcopy(INCORPORATION) + new_bus_filing_json['nameRequest']['legalType'] = 'BC' + json_data['filing']['incorporationApplication'] = new_bus_filing_json + new_business_filing = factory_pending_filing(None, json_data) + new_business_filing.temp_reg = identifier + new_business_filing.effective_date = future_effective_date + new_business_filing.payment_completion_date = datetime.utcnow().isoformat() + new_business_filing._status = Filing.Status.PAID.value + new_business_filing.skip_status_listener = True + new_business_filing.save() + withdrawn_filing_id = new_business_filing.id + + # set-up notice of withdrawal filing + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + "identifier": identifier, + "legalType": 'BC' + } + now_json_data['filing']['noticeOfWithdrawal'] = copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + now_filing = factory_filing(None, now_json_data) + now_filing.withdrawn_filing_id = withdrawn_filing_id + now_filing.submitter_id = user.id + now_filing.submitter_roles = UserRoles.staff + now_filing.save() + new_business_filing.withdrawal_pending = True + new_business_filing.save() + + # fetch filings once the NoW has been submitted + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, 
[STAFF_ROLE], identifier)) + + # validate that the NoW is embedded in the withdrawn filing + assert 'noticeOfWithdrawal' in rv.json['filing'] + + # withdraw bootstrap filing + new_business_filing._status = Filing.Status.WITHDRAWN.value + new_business_filing.withdrawal_pending = False + new_business_filing.save() + + # fetch filings after the bootstrap filing has been withdrawn + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [jwt_role], identifier)) + + # validate that the NoW is still embedded in the withdrawn filing + assert 'noticeOfWithdrawal' in rv.json['filing'] + assert rv.json['filing']['noticeOfWithdrawal'] is not None + assert rv.json['filing']['noticeOfWithdrawal']['filing']['header']['submitter'] == expected def test_get_filing_not_found(session, client, jwt): """Assert that the request fails if the filing ID doesn't match an existing filing.""" @@ -744,6 +819,65 @@ def test_delete_filing_in_draft(session, client, jwt): assert rv.status_code == HTTPStatus.OK +def test_delete_draft_now_filing(session, client, jwt): + """Assert that when a NoW from a temporary business is deleted, the business is unlinked and not deleted.""" + # set-up withdrawn boostrap FE filing + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + identifier = 'T1Li6MzdrK' + headers = create_header(jwt, [STAFF_ROLE], identifier) + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = 'incorporationApplication' + del json_data['filing']['business'] + temp_bus_filing_json = copy.deepcopy(INCORPORATION) + temp_bus_filing_json['nameRequest']['legalType'] = 'BEN' + json_data['filing']['incorporationApplication'] = temp_bus_filing_json + temp_filing = factory_pending_filing(None, json_data) + temp_filing.temp_reg = identifier + 
temp_filing.effective_date = future_effective_date + temp_filing.payment_completion_date = datetime.utcnow().isoformat() + temp_filing._status = Filing.Status.DRAFT.value + temp_filing.skip_status_listener = True + temp_filing.save() + withdrawn_filing_id = temp_filing.id + + # set-up notice of withdrawal filing + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + "identifier": identifier, + "legalType": 'BEN' + } + now_json_data['filing']['noticeOfWithdrawal'] = copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + now_filing = factory_filing(None, now_json_data) + now_filing.withdrawn_filing_id = withdrawn_filing_id + now_filing.save() + temp_filing.withdrawal_pending = True + temp_filing.save() + + rv = client.delete(f'/api/v2/businesses/{identifier}/filings/{now_filing.id}', + headers=headers + ) + + # validate that the withdrawl_pending flag is set back to False + assert rv.status_code == HTTPStatus.OK + assert temp_filing.withdrawal_pending == False + + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate that no NoW is embedded + assert rv.status_code == HTTPStatus.OK + assert 'noticeOfWithdrawal' not in rv.json['filing'] + def test_delete_coop_ia_filing_in_draft_with_file_in_minio(session, client, jwt, minio_server): """Assert that a draft filing can be deleted.""" @@ -1108,6 +1242,9 @@ def test_calc_annual_report_date(session, client, jwt): RESTORATION_LIMITED_TO_FULL_FILING = copy.deepcopy(RESTORATION_FILING) RESTORATION_LIMITED_TO_FULL_FILING['filing']['restoration']['type'] = 'limitedRestorationToFull' +AMALGAMATION_OUT_FILING = copy.deepcopy(FILING_HEADER) +AMALGAMATION_OUT_FILING['filing']['amalgamationOut'] 
= {} + CONTINUATION_OUT_FILING = copy.deepcopy(FILING_HEADER) CONTINUATION_OUT_FILING['filing']['continuationOut'] = {} @@ -1186,6 +1323,10 @@ def _get_expected_fee_code(free, filing_name, filing_json: dict, legal_type): ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.COMP.value, False, [], False), ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.BC_ULC_COMPANY.value, False, [], False), ('BC1234567', RESTORATION_LIMITED_TO_FULL_FILING, 'restoration', Business.LegalTypes.BC_CCC.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BCOMP.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.COMP.value, False, [], False), + ('BC1234567', AMALGAMATION_OUT_FILING, 'amalgamationOut', Business.LegalTypes.BC_CCC.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BCOMP.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.BC_ULC_COMPANY.value, False, [], False), ('BC1234567', CONTINUATION_OUT_FILING, 'continuationOut', Business.LegalTypes.COMP.value, False, [], False), @@ -1595,3 +1736,124 @@ def test_resubmit_filing_failed(session, client, jwt, filing_status, review_stat headers=create_header(jwt, [STAFF_ROLE], identifier)) assert rv.status_code == HTTPStatus.UNAUTHORIZED + +@pytest.mark.parametrize( + 'test_name, legal_type, filing_type, filing_json, is_temp', + [ + ('T-BUSINESS-IA', 'BC', 'incorporationApplication', INCORPORATION, True), + ('T-BUSINESS-CONT-IN', 'BEN', 'continuationIn', CONTINUATION_IN, True), + ('T-BUSINESS-AMALGAMATION', 'CBEN', 'amalgamationApplication', AMALGAMATION_APPLICATION, True), + ('REGULAR-BUSINESS-COA', 'BC', 'changeOfAddress', CHANGE_OF_ADDRESS, 
False), + ('REGULAR-BUSINESS-CONT-ALTERATION', 'BEN', 'alteration', ALTERATION_FILING_TEMPLATE, False), + ('REGULAR-BUSINESS-DISSOLUTION', 'CBEN', 'dissolution', DISSOLUTION, False) + ] +) +def test_notice_of_withdrawal_filing(session, client, jwt, test_name, legal_type, filing_type, filing_json, is_temp): + """Assert that notice of withdrawal for new business filings can be filed""" + today = datetime.utcnow().date() + future_effective_date = today + timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + # create a FE new business filing + if is_temp: + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + json_data = copy.deepcopy(FILING_HEADER) + json_data['filing']['header']['name'] = filing_type + del json_data['filing']['business'] + new_bus_filing_json = copy.deepcopy(filing_json) + new_bus_filing_json['nameRequest']['legalType'] = legal_type + json_data['filing'][filing_type] = new_bus_filing_json + new_business_filing = factory_pending_filing(None, json_data) + new_business_filing.temp_reg = identifier + new_business_filing.effective_date = future_effective_date + new_business_filing.payment_completion_date = datetime.utcnow().isoformat() + new_business_filing.save() + withdrawn_filing_id = new_business_filing.id + # create a regular business and file a FE filing + else: + identifier = 'BC1234567' + founding_date = datetime.utcnow() - timedelta(days=5) + business = factory_business(identifier=identifier, founding_date=founding_date, entity_type=legal_type) + filing_data_reg_business = copy.deepcopy(FILING_HEADER) + filing_data_reg_business['filing']['header']['name'] = filing_type + filing_data_reg_business['filing']['business']['identifier'] = identifier + filing_data_reg_business['filing']['business']['legalType'] = legal_type + fe_filing_json = copy.deepcopy(filing_json) + filing_data_reg_business['filing'][filing_type] = fe_filing_json + fe_filing = 
factory_pending_filing(business, filing_data_reg_business) + fe_filing.effective_date = future_effective_date + fe_filing.payment_completion_date = datetime.utcnow().isoformat() + fe_filing.save() + withdrawn_filing_id = fe_filing.id + + # test filing a notice of withdraw for a temporary business + now_json_data = copy.deepcopy(FILING_HEADER) + now_json_data['filing']['header']['name'] = 'noticeOfWithdrawal' + if is_temp: + del now_json_data['filing']['business'] + now_json_data['filing']['business'] = { + "identifier": identifier, + "legalType": legal_type + } + else: + now_json_data['filing']['business']['identifier'] = identifier + now_json_data['filing']['business']['legalType'] = legal_type + now_json_data['filing']['noticeOfWithdrawal'] = copy.deepcopy(SCHEMA_NOTICE_OF_WITHDRAWAL) + now_json_data['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing_id + del now_json_data['filing']['header']['filingId'] + + # Test validation OK + rv_validation = client.post(f'/api/v2/businesses/{identifier}/filings?only_validate=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + assert rv_validation.status_code == HTTPStatus.OK + assert rv_validation.json['filing']['header']['name'] == 'noticeOfWithdrawal' + + # Test can create a draft + rv_draft = client.post(f'/api/v2/businesses/{identifier}/filings?draft=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate + assert rv_draft.status_code == HTTPStatus.CREATED + assert rv_draft.json['filing']['header']['name'] == 'noticeOfWithdrawal' + + # setup + withdrawn_filing = {} + identifier = '' + + # validate NoW flags set on withdrawn filing + if is_temp: + withdrawn_filing = new_business_filing + identifier = 'Tb31yQIuBw' + else: + withdrawn_filing = fe_filing + identifier = 'BC1234567' + + withdrawn_filing_id = withdrawn_filing.withdrawn_filing_id + withdrawal_pending = withdrawn_filing.withdrawal_pending + assert withdrawn_filing_id is 
None + assert withdrawal_pending == True + + # validate NoW flags set on NoW + now_filing = (Filing.find_by_id(rv_draft.json['filing']['header']['filingId'])) + assert now_filing.withdrawn_filing_id == withdrawn_filing.id + assert now_filing.withdrawal_pending == False + if is_temp: + assert now_filing.temp_reg == None + + # update and save notice of withdrawal draft filing + now_json_data['filing']['header']['certifiedBy'] = 'test123' + + rv_draft = client.put(f'/api/v2/businesses/{identifier}/filings/{now_filing.id}?draft=true', + json=now_json_data, + headers=create_header(jwt, [STAFF_ROLE], identifier)) + + # validate + assert rv_draft.status_code == HTTPStatus.ACCEPTED + assert rv_draft.json['filing']['header']['certifiedBy'] == 'test123' + + diff --git a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py index b49021d9ea..1f36d4c26f 100644 --- a/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py +++ b/legal-api/tests/unit/resources/v2/test_business_filings/test_filings_ledger.py @@ -125,7 +125,7 @@ def test_ledger_search(session, client, jwt): alteration = next((f for f in ledger['filings'] if f.get('name') == 'alteration'), None) assert alteration - assert 16 == len(alteration.keys()) + assert 17 == len(alteration.keys()) assert 'availableOnPaperOnly' in alteration assert 'effectiveDate' in alteration assert 'filingId' in alteration @@ -135,6 +135,7 @@ def test_ledger_search(session, client, jwt): assert 'submittedDate' in alteration assert 'submitter' in alteration assert 'displayLedger' in alteration + assert 'withdrawalPending' in alteration # assert alteration['commentsLink'] # assert alteration['correctionLink'] # assert alteration['filingLink'] @@ -177,6 +178,45 @@ def test_ledger_comment_count(session, client, jwt): # validate assert rv.json['filings'][0]['commentsCount'] == number_of_comments 
+@pytest.mark.parametrize('test_name, filing_status, expected', [ + ('filing-status-Completed', Filing.Status.COMPLETED.value, 1), + ('filing-status-Corrected',Filing.Status.CORRECTED.value, 0), + ('filing-status-Draft', Filing.Status.DRAFT.value, 0), + ('filing-status-Epoch', Filing.Status.EPOCH.value, 0), + ('filing-status-Error', Filing.Status.ERROR.value, 0), + ('filing-status-Paid', Filing.Status.PAID.value, 1), + ('filing-status-Pending', Filing.Status.PENDING.value, 0), + ('filing-status-PaperOnly', Filing.Status.PAPER_ONLY.value, 0), + ('filing-status-PendingCorrection', Filing.Status.PENDING_CORRECTION.value, 0), + ('filing-status-Withdrawn', Filing.Status.WITHDRAWN.value, 1), +]) + +def test_get_all_business_filings_permitted_statuses(session, client, jwt, test_name, filing_status, expected): + """Assert that the ledger only shows filings with permitted statuses.""" + # setup + identifier = 'BC1234567' + today = date.today().isoformat() + alteration_meta = {'alteration': { + 'fromLegalType': 'BC', + 'toLegalType': 'BEN' + }} + meta_data = {**{'applicationDate': today}, **alteration_meta} + + business, filing_storage = ledger_element_setup_help(identifier, 'alteration') + filing_storage._meta_data = meta_data + + # set filing status + filing_storage._status = filing_status + filing_storage.skip_status_listener = True + filing_storage.save() + + # test + rv = client.get(f'/api/v2/businesses/{identifier}/filings', + headers=create_header(jwt, [UserRoles.system], identifier)) + + # validate + assert len(rv.json.get('filings')) == expected + @pytest.mark.parametrize('test_name, file_number, order_date, effect_of_order, order_details, expected', [ ('all_elements', 'ABC123', datetime.utcnow(), 'effect', 'details', diff --git a/legal-api/tests/unit/resources/v2/test_business_tasks.py b/legal-api/tests/unit/resources/v2/test_business_tasks.py index 979426ca9e..a75e71baf0 100644 --- a/legal-api/tests/unit/resources/v2/test_business_tasks.py +++ 
b/legal-api/tests/unit/resources/v2/test_business_tasks.py @@ -28,8 +28,9 @@ from legal_api.models import Business from legal_api.services.authz import STAFF_ROLE +from legal_api.utils.legislation_datetime import LegislationDatetime from tests import integration_payment -from tests.unit.models import factory_business, factory_business_mailing_address, factory_filing, factory_pending_filing +from tests.unit.models import factory_business, factory_business_mailing_address, factory_completed_filing, factory_filing, factory_pending_filing from tests.unit.services.utils import create_header from tests.unit.services.warnings import create_business @@ -98,8 +99,8 @@ def test_get_tasks_no_filings(session, client, jwt): def test_get_tasks_next_year(session, client, jwt): """Assert that one todo item is returned in the calendar year following incorporation.""" identifier = 'CP7654321' - founding_date = datetime.today() + datedelta.datedelta(days=1) - datedelta.datedelta(years=1) - factory_business(identifier, founding_date=founding_date) # incorporation 1 year - 1 day ago + founding_date = datetime.today() - datedelta.datedelta(years=1) + factory_business(identifier, founding_date=founding_date) # incorporation 1 year # To-do are all years from the year after incorporation until this year @@ -283,7 +284,7 @@ def test_get_tasks_pending_correction_filings(session, client, jwt): ('SP no AR', 'FM1234567', '2019-05-15', None, Business.LegalTypes.SOLE_PROP.value, 0), ('GP no AR', 'FM1234567', '2019-05-15', None, Business.LegalTypes.PARTNERSHIP.value, 0) ]) -def test_construct_task_list(session, client, jwt, test_name, identifier, founding_date, previous_ar_date, legal_type, +def test_construct_task_list_ar(session, client, jwt, test_name, identifier, founding_date, previous_ar_date, legal_type, tasks_length): """Assert that construct_task_list returns the correct number of AR to be filed.""" from legal_api.resources.v2.business.business_tasks import construct_task_list @@ -299,6 
+300,90 @@ def test_construct_task_list(session, client, jwt, test_name, identifier, foundi assert tasks[0]['task']['todo']['business']['nextAnnualReport'][-14:] != '00:00:00+00:00' +@pytest.mark.parametrize('test_name, identifier, founding_date, last_ar_date, legal_type, last_tr_date, tr_start_date, initial_date, restored_date, expected', [ + ('BEN_ITR', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}]), + ('BEN_ITR_DRAFT', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'status': 'DRAFT', 'enabled': True}]), + ('BEN_ITR_PENDING', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'status': 'PENDING', 'enabled': True}]), + ('BEN_ITR_FILED', 'BC1234567', datetime(2025, 7, 2, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 1), datetime(2025, 7, 2), None, []), + ('BEN_ITR_NONE', 'BC1234567', datetime(2025, 7, 1, 8), None, Business.LegalTypes.BCOMP.value, None, datetime(2025, 7, 2), None, None, []), + ('BEN_ATR', 'BC1234567', datetime(2023, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2024, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_MULTI', 'BC1234567', datetime(2021, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2023, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': 
False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_PREV_FILED', 'BC1234567', datetime(2022, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2024, 1, 1), datetime(2023, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_PREV_FILED_MULTI', 'BC1234567', datetime(2021, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2023, 2, 1), datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR', 'BC1234567', datetime(2024, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_MULTI', 'BC1234567', datetime(2023, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_RESTORATION', 'BC1234567', datetime(2010, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, None, datetime(2022, 1, 1), None, datetime(2023, 1, 1, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 
'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_RESTORATION_PREV_FILED', 'BC1234567', datetime(2010, 1, 1, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2024, 2, 1), datetime(2022, 1, 1), datetime(2023, 2, 1), datetime(2023, 1, 1, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': True}]), + ('BEN_ATR_RESTORATION_PREV_FILED_COMPLEX', 'BC1234567', datetime(2020, 1, 2, 8), datetime(2025, 1, 1), Business.LegalTypes.BCOMP.value, datetime(2021, 2, 1), datetime(2020, 1, 1), datetime(2020, 2, 1), datetime(2023, 1, 3, 8), [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ITR_ATR_AR', 'BC1234567', datetime(2023, 2, 1, 8), datetime(2024, 2, 1), Business.LegalTypes.BCOMP.value, None, datetime(2023, 1, 1), None, None, [{'order': 1, 'name': 'tranparencyRegister', 'subType': 'initial', 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': False}, {'order': 3, 'name': 'annualReport', 'ARFilingYear': 2025, 'enabled': True}, {'order': 4, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': False}]), + ('BEN_ATR_AR', 'BC1234567', datetime(2020, 2, 1, 8), datetime(2023, 2, 1), Business.LegalTypes.BCOMP.value, datetime(2023, 2, 1), datetime(2021, 1, 1), None, None, [{'order': 1, 'name': 'annualReport', 'ARFilingYear': 2024, 'enabled': True}, {'order': 2, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2024, 'enabled': True}, {'order': 3, 'name': 'annualReport', 'ARFilingYear': 2025, 'enabled': False}, {'order': 4, 'name': 'tranparencyRegister', 'subType': 'annual', 'TRFilingYear': 2025, 'enabled': 
False}]), +]) +def test_construct_task_list_tr(app, session, client, jwt, test_name, identifier, founding_date, last_ar_date, + legal_type, last_tr_date, tr_start_date, initial_date, restored_date, expected): + """Assert that construct_task_list returns the correct items concerning TR and AR filings.""" + from legal_api.resources.v2.business.business_tasks import construct_task_list + + # tests expect current date to be in 2025. Adjust accordingly for the current year (freezetime only works for some things) + year_offset = (datetime.now()).year - 2025 + founding_date += datedelta.datedelta(years=year_offset) + tr_start_date += datedelta.datedelta(years=year_offset) + if last_ar_date: + last_ar_date += datedelta.datedelta(years=year_offset) + if last_tr_date: + last_tr_date += datedelta.datedelta(years=year_offset) + + app.config['TR_START_DATE'] = tr_start_date.isoformat() + with patch('legal_api.resources.v2.business.business_tasks.check_warnings', return_value=[]): + business = factory_business(identifier, founding_date, last_ar_date, legal_type) + business.last_tr_year = last_tr_date.year if last_tr_date else None + if initial_date: + filing = { + 'filing': { + 'header': {'name': 'transparencyRegister', 'certifiedBy': 'test', 'date': initial_date.isoformat()}, + 'transparencyRegister': {'type': 'initial', 'ledgerReferenceNumber': '1234'} + }} + if 'DRAFT' in test_name: + factory_filing(business, filing, initial_date, 'transparencyRegister', 'initial') + elif 'PENDING' in test_name: + factory_pending_filing(business, filing, initial_date) + else: + factory_completed_filing(business, filing, initial_date, None, None, 'transparencyRegister', 'initial') + + if restored_date: + filing = {'filing': {'header': {'name': 'restoration', 'date': restored_date.isoformat(), 'certifiedBy': 'test'}, 'restoration': {'type': 'fullRestoration'}}} + filing_obj = factory_completed_filing(business, filing, initial_date, None, None, 'restoration', 'fullRestoration') + 
filing_obj.effective_date = restored_date + filing_obj.save() + + business.save() + tasks = construct_task_list(business) + + # check number of tasks + # (exact full-dict comparison omitted; fields are asserted individually below) + assert len(tasks) == len(expected) + if tasks: + # check order and values + def get_order_val(e: dict): + """Return the order value of the task.""" + return e['order'] + + tasks.sort(key=get_order_val) + expected.sort(key=get_order_val) + + for task, expected_task in zip(tasks, expected): + assert task['order'] == expected_task['order'] + assert task['enabled'] == expected_task.get('enabled') + if task['task'].get('todo'): + assert task['task']['todo']['header']['name'] == expected_task['name'] + assert task['task']['todo']['header'].get('ARFilingYear') == expected_task.get('ARFilingYear') + assert task['task']['todo']['header'].get('TRFilingYear') == expected_task.get('TRFilingYear') + assert task['task']['todo']['header'].get('subType') == expected_task.get('subType') + else: + assert task['task']['filing']['header']['status'] == expected_task.get('status') + + # Reset this to empty string so it doesn't interfere with other tests + app.config['TR_START_DATE'] = '' + @pytest.mark.parametrize('test_name, legal_type, identifier, has_missing_business_info, conversion_task_expected', [ ('CONVERSION_TODO_EXISTS_MISSING_DATA', 'SP', 'FM0000001', True, True), ('CONVERSION_TODO_EXISTS_MISSING_DATA', 'GP', 'FM0000002', True, True), diff --git a/legal-api/tests/unit/resources/v2/test_internal_services.py b/legal-api/tests/unit/resources/v2/test_internal_services.py index c37e04a3b9..6676835178 100644 --- a/legal-api/tests/unit/resources/v2/test_internal_services.py +++ b/legal-api/tests/unit/resources/v2/test_internal_services.py @@ -73,6 +73,29 @@ def test_get_future_effective_filing_ids(session, client, jwt): assert len(rv.json) == 0 +@pytest.mark.parametrize( + 'test_name, expired', [ + ('LIMITED_RESTORATION', True), + ('LIMITED_RESTORATION_EXPIRED', False) + ] +) +def 
test_get_businesses_expired_restoration(session, client, jwt, test_name, expired): + """Assert that expired restoration can be fetched.""" + identifier = 'BC1234567' + business = factory_business(identifier=identifier, entity_type=Business.LegalTypes.COMP.value) + business.restoration_expiry_date = (datetime.now(timezone.utc) + + datedelta.datedelta(days=-1 if expired else 1)) + business.save() + rv = client.get('/api/v2/internal/expired_restoration', headers=create_header(jwt, [UserRoles.system])) + if expired: + assert rv.status_code == HTTPStatus.OK + assert len(rv.json) == 1 + assert rv.json['identifiers'][0] == identifier + else: + assert rv.status_code == HTTPStatus.OK + assert len(rv.json['identifiers']) == 0 + + def test_update_bn_move(session, client, jwt): """Assert that the endpoint updates tax_id.""" identifier = 'FM0000001' diff --git a/legal-api/tests/unit/services/filings/test_utils.py b/legal-api/tests/unit/services/filings/test_utils.py index 88fbb18790..f9e5ec991a 100644 --- a/legal-api/tests/unit/services/filings/test_utils.py +++ b/legal-api/tests/unit/services/filings/test_utils.py @@ -21,7 +21,7 @@ from reportlab.lib.pagesizes import letter from reportlab.pdfgen import canvas -from legal_api.services import MinioService +from legal_api.services import MinioService, DocumentRecordService, flags from legal_api.services.utils import get_date, get_str @@ -49,14 +49,27 @@ def test_get_str(f, p): assert isinstance(d, str) -def _upload_file(page_size, invalid): - signed_url = MinioService.create_signed_put_url('cooperative-test.pdf') - key = signed_url.get('key') - pre_signed_put = signed_url.get('preSignedUrl') +def _upload_file(page_size, invalid, document_class=None, document_type=None): + if flags.is_on('enable-document-records'): + file_path = "tests/unit/invalid_size.pdf" if invalid else "tests/unit/valid_size.pdf" + raw_data = None + with open(file_path, "rb")
as data_file: + raw_data = data_file.read() + data_file.close() + response = DocumentRecordService.upload_document( + document_class, + document_type, + raw_data + ) + return response['documentServiceId'] + else: + signed_url = MinioService.create_signed_put_url('cooperative-test.pdf') + key = signed_url.get('key') + pre_signed_put = signed_url.get('preSignedUrl') - requests.put(pre_signed_put, data=_create_pdf_file(page_size, invalid).read(), - headers={'Content-Type': 'application/octet-stream'}) - return key + requests.put(pre_signed_put, data=_create_pdf_file(page_size, invalid).read(), + headers={'Content-Type': 'application/octet-stream'}) + return key def _create_pdf_file(page_size, invalid): diff --git a/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py b/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py index 16a7fe6a7a..52e235b694 100644 --- a/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py +++ b/legal-api/tests/unit/services/filings/validations/test_amalgamation_application.py @@ -1391,15 +1391,26 @@ def mock_find_by_identifier(identifier): @pytest.mark.parametrize( - 'legal_type, amalgamation_type, expected_code', + 'legal_type, mock_legal_type, amalgamation_type, expected_code', [ - (Business.LegalTypes.BCOMP.value, Amalgamation.AmalgamationTypes.vertical.name, HTTPStatus.BAD_REQUEST), - (Business.LegalTypes.BCOMP.value, Amalgamation.AmalgamationTypes.horizontal.name, HTTPStatus.BAD_REQUEST), - (Business.LegalTypes.COMP.value, Amalgamation.AmalgamationTypes.vertical.name, None), - (Business.LegalTypes.COMP.value, Amalgamation.AmalgamationTypes.horizontal.name, None) + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.vertical.name, HTTPStatus.BAD_REQUEST), + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.horizontal.name, HTTPStatus.BAD_REQUEST), + 
(Business.LegalTypes.COMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.vertical.name, None), + (Business.LegalTypes.COMP.value, Business.LegalTypes.COMP.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.COMP.value, Business.LegalTypes.CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.BCOMP.value, Business.LegalTypes.BCOMP_CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None), + (Business.LegalTypes.BC_ULC_COMPANY.value, Business.LegalTypes.ULC_CONTINUE_IN.value, + Amalgamation.AmalgamationTypes.horizontal.name, None) ] ) -def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, amalgamation_type, expected_code): +def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, mock_legal_type, + amalgamation_type, expected_code): """Assert amalgamation legal type validation for short form.""" account_id = '123456' filing = {'filing': {}} @@ -1417,7 +1428,7 @@ def test_amalgamation_legal_type_mismatch(mocker, app, session, jwt, legal_type, def mock_find_by_identifier(identifier): return Business(identifier=identifier, - legal_type=Business.LegalTypes.COMP.value) + legal_type=mock_legal_type) mocker.patch('legal_api.services.filings.validations.amalgamation_application.validate_name_request', return_value=[]) diff --git a/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py b/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py index bea64b1a32..e1272af49f 100644 --- a/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py +++ b/legal-api/tests/unit/services/filings/validations/test_consent_continuation_out.py @@ -161,6 +161,7 @@ def test_validate_existing_cco(session, test_name, expected_code, message): foreign_jurisdiction = filing['filing']['consentContinuationOut']['foreignJurisdiction'] 
consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = foreign_jurisdiction.get('country') consent_continuation_out.foreign_jurisdiction_region = foreign_jurisdiction.get('region').upper() consent_continuation_out.expiry_date = get_cco_expiry_date(effective_date) diff --git a/legal-api/tests/unit/services/filings/validations/test_continuation_out.py b/legal-api/tests/unit/services/filings/validations/test_continuation_out.py index 62e86c0127..1591b6fa4b 100644 --- a/legal-api/tests/unit/services/filings/validations/test_continuation_out.py +++ b/legal-api/tests/unit/services/filings/validations/test_continuation_out.py @@ -39,6 +39,7 @@ def _create_consent_continuation_out(business, foreign_jurisdiction, effective_d filing = factory_completed_filing(business, filing_dict, filing_date=effective_date) consent_continuation_out = ConsentContinuationOut() + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out consent_continuation_out.foreign_jurisdiction = foreign_jurisdiction.get('country') region = foreign_jurisdiction.get('region') diff --git a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py index 4020aea84b..8bf91d71cb 100644 --- a/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py +++ b/legal-api/tests/unit/services/filings/validations/test_notice_of_withdrawal.py @@ -31,6 +31,7 @@ # setup +FILING_HAS_POA_IN_EFFECT = {'error': 'Cannot file a Notice of Withdrawal as the filing has a POA in effect.'} FILING_NOT_EXIST_MSG = {'error': 'The filing to be withdrawn cannot be found.'} FILING_NOT_FED_MSG = {'error': 'Only filings with a future effective date can be withdrawn.'} FILING_NOT_PAID_MSG = {'error': 'Only paid filings with a future effective date can 
be withdrawn.'} @@ -40,16 +41,17 @@ # tests @pytest.mark.parametrize( - 'test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, expected_code, expected_msg',[ - ('EXIST_BUSINESS_SUCCESS', True, Filing.Status.PAID, True, True, None, None), - ('EXIST_BUSINESS_FAIL_NOT_PAID', True, Filing.Status.PENDING, True, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_PAID_MSG]), - ('EXIST_BUSINESS_FAIL_NOT_FED', True, Filing.Status.PAID, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG]), - ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_EXIST_MSG]), - ('EXIST_BUSINESS_FAIL_MISS_FILING_ID', True, Filing.Status.PAID, True, False, HTTPStatus.UNPROCESSABLE_ENTITY, ''), - ('EXIST_BUSINESS_FAIL_NOT_PAID_NOT_FED', True, Filing.Status.PENDING, False, True, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG, FILING_NOT_PAID_MSG]) + 'test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, has_taken_effect, part_of_poa, expected_code, expected_msg',[ + ('EXIST_BUSINESS_SUCCESS', True, Filing.Status.PAID, True, True, False, False, None, None), + ('EXIST_BUSINESS_FAIL_NOT_PAID', True, Filing.Status.PENDING, True, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_PAID_MSG]), + ('EXIST_BUSINESS_FAIL_NOT_FED', True, Filing.Status.PAID, False, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG]), + ('EXIST_BUSINESS_FAIL_FILING_NOT_EXIST', False, Filing.Status.PAID, True, True, None, None, HTTPStatus.NOT_FOUND, [FILING_NOT_EXIST_MSG]), + ('EXIST_BUSINESS_FAIL_MISS_FILING_ID', True, Filing.Status.PAID, True, False, None, None, HTTPStatus.UNPROCESSABLE_ENTITY, ''), + ('EXIST_BUSINESS_FAIL_NOT_PAID_NOT_FED', True, Filing.Status.PENDING, False, True, None, None, HTTPStatus.BAD_REQUEST, [FILING_NOT_FED_MSG, FILING_NOT_PAID_MSG]), + ('EXIST_BUSINESS_FAIL_POA_IN_EFFECT', True, Filing.Status.PAID, True, True, True, True, HTTPStatus.BAD_REQUEST, 
[FILING_HAS_POA_IN_EFFECT]), ] ) -def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, expected_code, expected_msg): +def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, withdrawn_filing_status, is_future_effective, has_filing_id, has_taken_effect, part_of_poa, expected_code, expected_msg): """Assert that notice of withdrawal flings can be validated""" today = datetime.utcnow().date() future_effective_date = today + timedelta(days=5) @@ -83,6 +85,11 @@ def test_validate_notice_of_withdrawal(session, test_name, is_filing_exist, with else: del filing_json['filing']['noticeOfWithdrawal']['filingId'] + if has_taken_effect is not None: + filing_json['filing']['noticeOfWithdrawal']['hasTakenEffect'] = has_taken_effect + if part_of_poa is not None: + filing_json['filing']['noticeOfWithdrawal']['partOfPoa'] = part_of_poa + err = validate(business, filing_json) if expected_code: assert err.code == expected_code diff --git a/legal-api/tests/unit/services/filings/validations/test_put_back_off.py b/legal-api/tests/unit/services/filings/validations/test_put_back_off.py new file mode 100644 index 0000000000..35d02b4283 --- /dev/null +++ b/legal-api/tests/unit/services/filings/validations/test_put_back_off.py @@ -0,0 +1,34 @@ +# Copyright © 2024 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Test Put back off validations.""" +import copy + +from registry_schemas.example_data import PUT_BACK_OFF, FILING_HEADER + +from legal_api.services.filings.validations.put_back_off import validate + +from tests.unit.models import factory_business + + +def test_put_back_off(session): + """Assert valid put back off.""" + identifier = 'CP1234567' + business = factory_business(identifier) + + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['putBackOff'] = copy.deepcopy(PUT_BACK_OFF) + + err = validate(business, filing_json) + assert err is None diff --git a/legal-api/tests/unit/services/filings/validations/test_restoration.py b/legal-api/tests/unit/services/filings/validations/test_restoration.py index 765b22dde5..b021adc2f7 100644 --- a/legal-api/tests/unit/services/filings/validations/test_restoration.py +++ b/legal-api/tests/unit/services/filings/validations/test_restoration.py @@ -93,7 +93,7 @@ def execute_test_restoration_nr(mocker, filing_sub_type, legal_type, nr_number, mock_nr_response = MockResponse(temp_nr_response, HTTPStatus.OK) mocker.patch('legal_api.services.NameXService.query_nr_number', return_value=mock_nr_response) - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -180,7 +180,7 @@ def test_validate_relationship(session, test_status, restoration_type, expected_ elif test_status == 'SUCCESS' and restoration_type in ('fullRestoration', 'limitedRestorationToFull'): filing['filing']['restoration']['relationships'] = relationships - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -231,7 +231,7 @@ def test_validate_expiry_date(session, test_name, 
restoration_type, delta_date, filing['filing']['restoration']['type'] = restoration_type if delta_date: filing['filing']['restoration']['expiry'] = expiry_date.strftime(date_format) - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -281,7 +281,7 @@ def test_approval_type(session, test_status, restoration_types, legal_types, app filing['filing']['restoration']['applicationDate'] = '2023-03-30' filing['filing']['restoration']['noticeDate'] = '2023-03-30' - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -337,7 +337,7 @@ def test_restoration_court_orders(session, test_status, restoration_types, legal else: del filing['filing']['restoration']['courtOrder'] - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) @@ -394,7 +394,7 @@ def test_restoration_registrar(session, test_status, restoration_types, legal_ty if notice_date: filing['filing']['restoration']['noticeDate'] = notice_date - with patch.object(Filing, 'get_a_businesses_most_recent_filing_of_a_type', + with patch.object(Filing, 'get_most_recent_filing', return_value=limited_restoration_filing): err = validate(business, filing) diff --git a/legal-api/tests/unit/services/test_authorization.py b/legal-api/tests/unit/services/test_authorization.py index 5a33d92603..eb3b2545ec 100644 --- a/legal-api/tests/unit/services/test_authorization.py +++ b/legal-api/tests/unit/services/test_authorization.py @@ -31,14 +31,17 @@ AGM_EXTENSION, AGM_LOCATION_CHANGE, ALTERATION_FILING_TEMPLATE, + AMALGAMATION_OUT, ANNUAL_REPORT, CHANGE_OF_REGISTRATION_TEMPLATE, + 
CONSENT_AMALGAMATION_OUT, CONSENT_CONTINUATION_OUT, CONTINUATION_IN, CONTINUATION_OUT, CORRECTION_AR, DISSOLUTION, FILING_TEMPLATE, + PUT_BACK_OFF, PUT_BACK_ON, RESTORATION, ) @@ -137,12 +140,14 @@ class FilingKey(str, Enum): ADM_DISS = 'ADM_DISS' VOL_DISS_FIRMS = 'VOL_DISS_FIRMS' ADM_DISS_FIRMS = 'ADM_DISS_FIRMS' + AMALGAMATION_OUT = 'AMALGAMATION_OUT' REGISTRARS_NOTATION = 'REGISTRARS_NOTATION' REGISTRARS_ORDER = 'REGISTRARS_ORDER' SPECIAL_RESOLUTION = 'SPECIAL_RESOLUTION' AGM_EXTENSION = 'AGM_EXTENSION' AGM_LOCATION_CHANGE = 'AGM_LOCATION_CHANGE' ALTERATION = 'ALTERATION' + CONSENT_AMALGAMATION_OUT = 'CONSENT_AMALGAMATION_OUT' CONSENT_CONTINUATION_OUT = 'CONSENT_CONTINUATION_OUT' CONTINUATION_OUT = 'CONTINUATION_OUT' TRANSITION = 'TRANSITION' @@ -154,11 +159,17 @@ class FilingKey(str, Enum): RESTRN_LTD_EXT_LLC = 'RESTRN_LTD_EXT_LLC' RESTRN_LTD_TO_FULL_CORPS = 'RESTRN_LTD_TO_FULL_CORPS' RESTRN_LTD_TO_FULL_LLC = 'RESTRN_LTD_TO_FULL_LLC' + PUT_BACK_OFF = 'PUT_BACK_OFF' PUT_BACK_ON = 'PUT_BACK_ON' AMALGAMATION_REGULAR = 'AMALGAMATION_REGULAR' AMALGAMATION_VERTICAL = 'AMALGAMATION_VERTICAL' AMALGAMATION_HORIZONTAL = 'AMALGAMATION_HORIZONTAL' NOTICE_OF_WITHDRAWAL = 'NOTICE_OF_WITHDRAWAL' + TRANSPARENCY_REGISTER_ANNUAL = 'TRANSPARENCY_REGISTER_ANNUAL' + TRANSPARENCY_REGISTER_CHANGE = 'TRANSPARENCY_REGISTER_CHANGE' + TRANSPARENCY_REGISTER_INITIAL = 'TRANSPARENCY_REGISTER_INITIAL' + APPOINT_RECEIVER = 'APPOINT_RECEIVER' + CEASE_RECEIVER = 'CEASE_RECEIVER' EXPECTED_DATA = { @@ -172,7 +183,7 @@ class FilingKey(str, Enum): FilingKey.CORRCTN: {'displayName': 'Register Correction Application', 'feeCode': 'CRCTN', 'name': 'correction'}, FilingKey.CORRCTN_FIRMS: {'displayName': 'Register Correction Application', 'feeCode': 'FMCORR', 'name': 'correction'}, - FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'NOFEE', 'name': 'courtOrder'}, + FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'COURT', 'name': 'courtOrder'}, FilingKey.VOL_DISS: 
{'displayName': 'Voluntary Dissolution', 'feeCode': 'DIS_VOL', 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS: {'displayName': 'Administrative Dissolution', 'feeCode': 'DIS_ADM', @@ -181,6 +192,7 @@ class FilingKey(str, Enum): 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS_FIRMS: {'displayName': 'Statement of Dissolution', 'feeCode': 'DIS_ADM', 'name': 'dissolution', 'type': 'administrative'}, + FilingKey.AMALGAMATION_OUT: {'displayName': 'Amalgamation Out', 'feeCode': 'AMALO', 'name': 'amalgamationOut'}, FilingKey.REGISTRARS_NOTATION: {'displayName': "Registrar's Notation", 'feeCode': 'NOFEE', 'name': 'registrarsNotation'}, FilingKey.REGISTRARS_ORDER: {'displayName': "Registrar's Order", 'feeCode': 'NOFEE', 'name': 'registrarsOrder'}, @@ -188,6 +200,8 @@ class FilingKey(str, Enum): FilingKey.AGM_EXTENSION: {'displayName': 'Request for AGM Extension', 'feeCode': 'AGMDT', 'name': 'agmExtension'}, FilingKey.AGM_LOCATION_CHANGE: {'displayName': 'AGM Location Change', 'feeCode': 'AGMLC', 'name': 'agmLocationChange'}, FilingKey.ALTERATION: {'displayName': 'Alteration', 'feeCode': 'ALTER', 'name': 'alteration'}, + FilingKey.CONSENT_AMALGAMATION_OUT: {'displayName': '6-Month Consent to Amalgamate Out', 'feeCode': 'IAMGO', + 'name': 'consentAmalgamationOut'}, FilingKey.CONSENT_CONTINUATION_OUT: {'displayName': '6-Month Consent to Continue Out', 'feeCode': 'CONTO', 'name': 'consentContinuationOut'}, FilingKey.CONTINUATION_OUT: {'displayName': 'Continuation Out', 'feeCode': 'COUTI', 'name': 'continuationOut'}, @@ -229,11 +243,17 @@ class FilingKey(str, Enum): 'name': 'restoration', 'type': 'limitedRestorationExtension'}, FilingKey.RESTRN_LTD_TO_FULL_LLC: {'displayName': 'Conversion to Full Restoration Application', 'feeCode': None, 'name': 'restoration', 'type': 'limitedRestorationToFull'}, + FilingKey.PUT_BACK_OFF: {'displayName': 'Correction - Put Back Off', 'feeCode': 'NOFEE', 'name': 'putBackOff'}, FilingKey.PUT_BACK_ON: {'displayName': 
'Correction - Put Back On', 'feeCode': 'NOFEE', 'name': 'putBackOn'}, FilingKey.AMALGAMATION_REGULAR: {'name': 'amalgamationApplication', 'type': 'regular', 'displayName': 'Amalgamation Application (Regular)', 'feeCode': 'AMALR'}, FilingKey.AMALGAMATION_VERTICAL: {'name': 'amalgamationApplication', 'type': 'vertical', 'displayName': 'Amalgamation Application Short-form (Vertical)', 'feeCode': 'AMALV'}, FilingKey.AMALGAMATION_HORIZONTAL: {'name': 'amalgamationApplication', 'type': 'horizontal', 'displayName': 'Amalgamation Application Short-form (Horizontal)', 'feeCode': 'AMALH'}, - FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', 'name': 'noticeOfWithdrawal'} + FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', 'name': 'noticeOfWithdrawal'}, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'}, + FilingKey.CEASE_RECEIVER: {'displayName': 'Cease Receiver', 'feeCode': 'NOCER', 'name': 'ceaseReceiver'} } EXPECTED_DATA_CONT_IN = { @@ -247,7 +267,7 @@ class FilingKey(str, Enum): FilingKey.CORRCTN: {'displayName': 'Register Correction Application', 'feeCode': 'CRCTN', 'name': 'correction'}, FilingKey.CORRCTN_FIRMS: {'displayName': 'Register Correction Application', 'feeCode': 'FMCORR', 'name': 'correction'}, - FilingKey.COURT_ORDER: {'displayName': 'Court Order', 'feeCode': 'NOFEE', 'name': 'courtOrder'}, + FilingKey.COURT_ORDER: 
{'displayName': 'Court Order', 'feeCode': 'COURT', 'name': 'courtOrder'}, FilingKey.VOL_DISS: {'displayName': 'Voluntary Dissolution', 'feeCode': 'DIS_VOL', 'name': 'dissolution', 'type': 'voluntary'}, FilingKey.ADM_DISS: {'displayName': 'Administrative Dissolution', 'feeCode': 'DIS_ADM', @@ -263,6 +283,9 @@ class FilingKey(str, Enum): FilingKey.AGM_EXTENSION: {'displayName': 'Request for AGM Extension', 'feeCode': 'AGMDT', 'name': 'agmExtension'}, FilingKey.AGM_LOCATION_CHANGE: {'displayName': 'AGM Location Change', 'feeCode': 'AGMLC', 'name': 'agmLocationChange'}, FilingKey.ALTERATION: {'displayName': 'Alteration', 'feeCode': 'ALTER', 'name': 'alteration'}, + FilingKey.AMALGAMATION_OUT: {'displayName': 'Amalgamation Out', 'feeCode': 'AMALO', 'name': 'amalgamationOut'}, + FilingKey.CONSENT_AMALGAMATION_OUT: {'displayName': '6-Month Consent to Amalgamate Out', 'feeCode': 'IAMGO', + 'name': 'consentAmalgamationOut'}, FilingKey.CONSENT_CONTINUATION_OUT: {'displayName': '6-Month Consent to Continue Out', 'feeCode': 'CONTO', 'name': 'consentContinuationOut'}, FilingKey.CONTINUATION_OUT: {'displayName': 'Continuation Out', 'feeCode': 'COUTI', 'name': 'continuationOut'}, @@ -304,6 +327,7 @@ class FilingKey(str, Enum): 'name': 'restoration', 'type': 'limitedRestorationExtension'}, FilingKey.RESTRN_LTD_TO_FULL_LLC: {'displayName': 'Conversion to Full Restoration Application', 'feeCode': None, 'name': 'restoration', 'type': 'limitedRestorationToFull'}, + FilingKey.PUT_BACK_OFF: {'displayName': 'Correction - Put Back Off', 'feeCode': 'NOFEE', 'name': 'putBackOff'}, FilingKey.PUT_BACK_ON: {'displayName': 'Correction - Put Back On', 'feeCode': 'NOFEE', 'name': 'putBackOn'}, FilingKey.AMALGAMATION_REGULAR: {'name': 'amalgamationApplication', 'type': 'regular', 'displayName': 'Amalgamation Application (Regular)', 'feeCode': None}, @@ -313,7 +337,12 @@ class FilingKey(str, Enum): 'displayName': 'Amalgamation Application Short-form (Horizontal)', 'feeCode': None}, 
FilingKey.NOTICE_OF_WITHDRAWAL: {'displayName': 'Notice of Withdrawal', 'feeCode': 'NWITH', - 'name': 'noticeOfWithdrawal'} + 'name': 'noticeOfWithdrawal'}, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL: {'name': 'transparencyRegister', 'type': 'annual', 'displayName': 'Transparency Register - Annual Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_CHANGE: {'name': 'transparencyRegister', 'type': 'change', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.TRANSPARENCY_REGISTER_INITIAL: {'name': 'transparencyRegister', 'type': 'initial', 'displayName': 'Transparency Register Filing', 'feeCode': 'REGSIGIN'}, + FilingKey.APPOINT_RECEIVER: {'displayName': 'Appoint Receiver', 'feeCode': 'NOARM', 'name': 'appointReceiver'}, + FilingKey.CEASE_RECEIVER: {'displayName': 'Cease Receiver', 'feeCode': 'NOCER', 'name': 'ceaseReceiver'} } BLOCKER_FILING_STATUSES = factory_incomplete_statuses() @@ -328,12 +357,18 @@ class FilingKey(str, Enum): AGM_LOCATION_CHANGE_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) AGM_LOCATION_CHANGE_FILING_TEMPLATE['filing']['agmLocationChange'] = AGM_LOCATION_CHANGE +AMALGAMATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +AMALGAMATION_OUT_TEMPLATE['filing']['amalgamationOut'] = AMALGAMATION_OUT + RESTORATION_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) RESTORATION_FILING_TEMPLATE['filing']['restoration'] = RESTORATION DISSOLUTION_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) DISSOLUTION_FILING_TEMPLATE['filing']['dissolution'] = DISSOLUTION +PUT_BACK_OFF_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +PUT_BACK_OFF_FILING_TEMPLATE['filing']['putBackOff'] = PUT_BACK_OFF + PUT_BACK_ON_FILING_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) PUT_BACK_ON_FILING_TEMPLATE['filing']['putBackOn'] = PUT_BACK_ON @@ -344,6 +379,9 @@ class FilingKey(str, Enum): CONTINUATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) CONTINUATION_OUT_TEMPLATE['filing']['continuationOut'] = CONTINUATION_OUT 
+CONSENT_AMALGAMATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) +CONSENT_AMALGAMATION_OUT_TEMPLATE['filing']['consentAmalgamationOut'] = CONSENT_AMALGAMATION_OUT + CONSENT_CONTINUATION_OUT_TEMPLATE = copy.deepcopy(FILING_TEMPLATE) CONSENT_CONTINUATION_OUT_TEMPLATE['filing']['consentContinuationOut'] = CONSENT_CONTINUATION_OUT @@ -351,15 +389,18 @@ class FilingKey(str, Enum): 'agmExtension': AGM_EXTENSION_FILING_TEMPLATE, 'agmLocationChange': AGM_LOCATION_CHANGE_FILING_TEMPLATE, 'alteration': ALTERATION_FILING_TEMPLATE, + 'amalgamationOut': AMALGAMATION_OUT_TEMPLATE, 'correction': CORRECTION_AR, 'changeOfRegistration': CHANGE_OF_REGISTRATION_TEMPLATE, 'restoration.limitedRestoration': RESTORATION_FILING_TEMPLATE, 'restoration.fullRestoration': RESTORATION_FILING_TEMPLATE, 'restoration.limitedRestorationExtension': RESTORATION_FILING_TEMPLATE, 'dissolution': DISSOLUTION_FILING_TEMPLATE, + 'putBackOff': PUT_BACK_OFF_FILING_TEMPLATE, 'putBackOn': PUT_BACK_ON_FILING_TEMPLATE, 'continuationIn': CONTINUATION_IN_TEMPLATE, 'continuationOut': CONTINUATION_OUT_TEMPLATE, + 'consentAmalgamationOut': CONSENT_AMALGAMATION_OUT_TEMPLATE, 'consentContinuationOut': CONSENT_CONTINUATION_OUT_TEMPLATE } @@ -539,18 +580,18 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 'registrarsNotation', 'registrarsOrder', 'specialResolution']), ('staff_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', - 'changeOfDirectors', 'consentContinuationOut', 'continuationOut', 'correction', 'courtOrder', - {'dissolution': ['voluntary', 'administrative']}, 'incorporationApplication', - 'registrarsNotation', 'registrarsOrder', 'transition', + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'amalgamationOut','annualReport', 'appointReceiver', + 
'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'consentAmalgamationOut', 'consentContinuationOut', 'continuationOut', + 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, + 'incorporationApplication', 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], ['adminFreeze', 'agmExtension', 'agmLocationChange', 'alteration', - {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', - 'changeOfDirectors', 'continuationIn', 'consentContinuationOut', 'continuationOut', 'correction', - 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, 'registrarsNotation', 'registrarsOrder', - 'transition', {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, - 'noticeOfWithdrawal']), + {'amalgamationApplication': ['regular', 'vertical', 'horizontal']},'amalgamationOut', 'annualReport', 'appointReceiver', + 'ceaseReceiver', 'changeOfAddress', 'changeOfDirectors', 'continuationIn', 'consentAmalgamationOut', 'consentContinuationOut', + 'continuationOut', 'correction', 'courtOrder', {'dissolution': ['voluntary', 'administrative']}, + 'putBackOff', 'registrarsNotation', 'registrarsOrder', 'transition', + {'restoration': ['limitedRestorationExtension', 'limitedRestorationToFull']}, 'noticeOfWithdrawal']), ('staff_active_llc', Business.State.ACTIVE, ['LLC'], 'staff', [STAFF_ROLE], []), ('staff_active_firms', Business.State.ACTIVE, ['SP', 'GP'], 'staff', [STAFF_ROLE], ['adminFreeze', 'changeOfRegistration', 'conversion', 'correction', 'courtOrder', @@ -563,11 +604,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('user_active_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], 
['agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', - {'dissolution': ['voluntary']}, 'incorporationApplication', 'transition']), + {'dissolution': ['voluntary']}, 'incorporationApplication', 'transition', {'transparencyRegister': ['annual','change','initial']}]), ('user_active_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], ['agmExtension', 'agmLocationChange', 'alteration', {'amalgamationApplication': ['regular', 'vertical', 'horizontal']}, 'annualReport', 'changeOfAddress', 'changeOfDirectors', 'consentContinuationOut', 'continuationIn', - {'dissolution': ['voluntary']}, 'transition']), + {'dissolution': ['voluntary']}, 'transition', {'transparencyRegister': ['annual','change','initial']}]), ('user_active_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('user_active_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], ['changeOfRegistration', {'dissolution': ['voluntary']}, 'registration']), @@ -627,6 +668,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_allowed', Business.State.ACTIVE, 'amalgamationApplication', None, ['C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'amalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_active', Business.State.ACTIVE, 'amalgamationOut', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'annualReport', None, ['CP', 'BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), ('staff_active', Business.State.ACTIVE, 'annualReport', None, @@ -642,6 +688,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 
('staff_active', Business.State.ACTIVE, 'changeOfDirectors', None, ['LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'consentAmalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'consentAmalgamationOut', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('staff_active_allowed', Business.State.ACTIVE, 'consentContinuationOut', None, ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), ('staff_active', Business.State.ACTIVE, 'consentContinuationOut', None, @@ -703,6 +754,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_allowed', Business.State.ACTIVE, 'changeOfRegistration', None, ['SP', 'GP'], 'staff', [STAFF_ROLE], True), + ('staff_active_allowed', Business.State.ACTIVE, 'appointReceiver', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'appointReceiver', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), + ('user_active_allowed', Business.State.ACTIVE, 'agmExtension', None, ['BC', 'BEN', 'ULC', 'CC'], 'general', [BASIC_USER], True), ('user_active', Business.State.ACTIVE, 'agmExtension', None, @@ -727,6 +783,11 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['CP', 'BEN', 'BC', 'CC', 'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], True), ('user_active', Business.State.ACTIVE, 'annualReport', None, ['LLC'], 'general', [BASIC_USER], False), + + ('staff_active_allowed', Business.State.ACTIVE, 'ceaseReceiver', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], True), + ('staff_active', Business.State.ACTIVE, 'ceaseReceiver', None, + ['CP', 'LLC'], 'staff', [STAFF_ROLE], False), ('user_active_allowed', Business.State.ACTIVE, 'changeOfAddress', None, ['CP', 'BEN', 'BC', 'CC', 
'ULC', 'C', 'CBEN', 'CUL', 'CCC'], 'general', [BASIC_USER], True), @@ -842,6 +903,9 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_historical', Business.State.HISTORICAL, 'changeOfRegistration', None, ['SP', 'GP'], 'staff', [STAFF_ROLE], False), + ('staff_historical', Business.State.HISTORICAL, 'consentAmalgamationOut', None, + ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), + ('staff_historical', Business.State.HISTORICAL, 'consentContinuationOut', None, ['BC', 'BEN', 'ULC', 'CC', 'C', 'CBEN', 'CUL', 'CCC'], 'staff', [STAFF_ROLE], False), @@ -941,13 +1005,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -961,13 +1029,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1002,7 +1074,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + 
FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, @@ -1016,7 +1091,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, @@ -1106,42 +1184,50 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_BC])), + FilingKey.IA_BC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_c', False, Business.State.ACTIVE, ['C'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_C])), + FilingKey.CONTINUATION_IN_C, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ben', False, Business.State.ACTIVE, ['BEN'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_BEN])), + FilingKey.IA_BEN, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cben', False, Business.State.ACTIVE, ['CBEN'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, 
FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CBEN])), + FilingKey.CONTINUATION_IN_CBEN, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cc', False, Business.State.ACTIVE, ['CC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_CC])), + FilingKey.IA_CC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ccc', False, Business.State.ACTIVE, ['CCC'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CCC])), + FilingKey.CONTINUATION_IN_CCC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_ulc', False, Business.State.ACTIVE, ['ULC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.IA_ULC])), + FilingKey.IA_ULC, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_cul', False, Business.State.ACTIVE, ['CUL'], 'staff', [STAFF_ROLE], expected_lookup_continue_in_corps([FilingKey.AMALGAMATION_REGULAR, FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, - FilingKey.CONTINUATION_IN_CUL])), + FilingKey.CONTINUATION_IN_CUL, + FilingKey.NOTICE_OF_WITHDRAWAL])), ('staff_no_business_llc', False, Business.State.ACTIVE, ['LLC'], 'staff', [STAFF_ROLE], []), ('staff_no_business_sp', False, Business.State.ACTIVE, ['SP'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.REG_SP])), @@ -1225,13 +1311,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, 
FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1245,13 +1335,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1287,7 +1381,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, @@ -1301,7 +1398,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, @@ -1370,6 +1470,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, 
FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1378,6 +1479,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1544,12 +1646,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1558,12 +1663,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1588,13 +1696,19 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', True, Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + 
FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION])), @@ -1674,6 +1788,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1681,6 +1796,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_STATUSES, expected_lookup_continue_in_corps([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1695,10 +1811,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me # active business - general user ('general_user_cp', Business.State.ACTIVE, ['CP'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], - BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.TRANSITION, ])), + BLOCKER_FILING_STATUSES, expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, - expected_lookup_continue_in_corps([FilingKey.TRANSITION, ])), + expected_lookup_continue_in_corps([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), 
('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], BLOCKER_FILING_STATUSES, []), @@ -1788,6 +1910,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1795,6 +1918,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, expected_lookup_continue_in_corps([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1813,10 +1937,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, []), ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, - expected_lookup([FilingKey.TRANSITION])), + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continu_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, BLOCKER_FILING_STATUSES_AND_ADDITIONAL, - expected_lookup_continue_in_corps([FilingKey.TRANSITION])), + expected_lookup_continue_in_corps([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, 
BLOCKER_FILING_STATUSES_AND_ADDITIONAL, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], BLOCKER_FILING_TYPES, @@ -1906,6 +2036,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1913,6 +2044,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -1920,10 +2052,16 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me # active business - general user ('general_user_corps', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, - expected_lookup([FilingKey.TRANSITION])), + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_usere_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], ['dissolution.voluntary', 'dissolution.administrative'], BLOCKER_DISSOLUTION_STATUSES_FOR_AMALG, True, - expected_lookup([FilingKey.TRANSITION])) + expected_lookup([FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ] ) def test_allowed_filings_blocker_filing_amalgamations(monkeypatch, app, session, jwt, test_name, state, @@ 
-1990,13 +2128,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2009,13 +2151,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2046,7 +2192,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], expected_lookup_continue_in_corps([FilingKey.AGM_EXTENSION, FilingKey.AGM_LOCATION_CHANGE, @@ -2059,7 +2208,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + 
FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], []), @@ -2148,13 +2300,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2171,13 +2327,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2193,13 +2353,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2213,13 +2377,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me 
FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2263,7 +2431,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_corps_unaffected2', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'general', [BASIC_USER], ['restoration', 'restoration'], ['limitedRestoration', 'limitedRestorationExtension'], @@ -2277,7 +2448,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps_unaffected', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], [None, 'restoration'], @@ -2293,7 +2467,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps_unaffected2', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', 
[BASIC_USER], ['restoration', 'restoration'], @@ -2308,7 +2485,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.COD_CORPS, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.VOL_DISS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc_unaffected', Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], ['restoration', 'restoration', None, 'restoration'], ['limitedRestoration', 'limitedRestorationExtension', None, 'fullRestoration'], []), @@ -2510,14 +2690,18 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2526,6 +2710,41 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.CONTINUATION_OUT, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_success', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + [STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [True, True], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.AGM_EXTENSION, + FilingKey.AGM_LOCATION_CHANGE, + FilingKey.ALTERATION, + FilingKey.AMALGAMATION_REGULAR, + FilingKey.AMALGAMATION_VERTICAL, + FilingKey.AMALGAMATION_HORIZONTAL, + FilingKey.AMALGAMATION_OUT, + 
FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, + FilingKey.COA_CORPS, + FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, + FilingKey.CONSENT_CONTINUATION_OUT, + FilingKey.CORRCTN, + FilingKey.COURT_ORDER, + FilingKey.VOL_DISS, + FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_success', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + [STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [True, False], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.AMALGAMATION_OUT, + FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2533,6 +2752,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me [STAFF_ROLE], ['consentContinuationOut', 'consentContinuationOut'], [None, None], [False, False], expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, + FilingKey.REGISTRARS_NOTATION, + FilingKey.REGISTRARS_ORDER, + FilingKey.TRANSITION])), + ('staff_active_corps_completed_filing_fail', Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', + [STAFF_ROLE], ['consentAmalgamationOut', 'consentAmalgamationOut'], [None, None], [False, False], + expected_lookup([FilingKey.ADMN_FRZE, + FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2546,13 +2774,17 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.AMALGAMATION_VERTICAL, FilingKey.AMALGAMATION_HORIZONTAL, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, + FilingKey.CONSENT_AMALGAMATION_OUT, FilingKey.CONSENT_CONTINUATION_OUT, FilingKey.CORRCTN, FilingKey.COURT_ORDER, 
FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2612,11 +2844,14 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me ('staff_active_corps', True, Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], expected_lookup([FilingKey.ADMN_FRZE, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2641,7 +2876,10 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', True, Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], []), ('general_user_firms', True, Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], expected_lookup([FilingKey.CHANGE_OF_REGISTRATION])), @@ -2719,12 +2957,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2733,6 +2974,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], 'FUTURE_EFFECTIVE', expected_lookup([FilingKey.ADMN_FRZE, 
FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2743,6 +2985,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION, @@ -2751,6 +2994,7 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me Business.State.ACTIVE, ['BC', 'BEN', 'CC', 'ULC'], 'staff', [STAFF_ROLE], 'DRAFT', expected_lookup([FilingKey.ADMN_FRZE, FilingKey.COURT_ORDER, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION @@ -2761,12 +3005,15 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me FilingKey.ADMN_FRZE, FilingKey.ALTERATION, FilingKey.AR_CORPS, + FilingKey.APPOINT_RECEIVER, + FilingKey.CEASE_RECEIVER, FilingKey.COA_CORPS, FilingKey.COD_CORPS, FilingKey.CORRCTN, FilingKey.COURT_ORDER, FilingKey.VOL_DISS, FilingKey.ADM_DISS, + FilingKey.PUT_BACK_OFF, FilingKey.REGISTRARS_NOTATION, FilingKey.REGISTRARS_ORDER, FilingKey.TRANSITION])), @@ -2791,13 +3038,19 @@ def mock_auth(one, two): # pylint: disable=unused-argument; mocks of library me expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_continue_in_corps', Business.State.ACTIVE, ['C', 'CBEN', 'CCC', 'CUL'], 'general', [BASIC_USER], None, expected_lookup([FilingKey.AR_CORPS, FilingKey.COA_CORPS, FilingKey.COD_CORPS, - FilingKey.TRANSITION])), + FilingKey.TRANSITION, + FilingKey.TRANSPARENCY_REGISTER_ANNUAL, + FilingKey.TRANSPARENCY_REGISTER_CHANGE, + FilingKey.TRANSPARENCY_REGISTER_INITIAL])), ('general_user_llc', 
Business.State.ACTIVE, ['LLC'], 'general', [BASIC_USER], None, []), ('general_user_firms', Business.State.ACTIVE, ['SP', 'GP'], 'general', [BASIC_USER], None, expected_lookup([FilingKey.CHANGE_OF_REGISTRATION, diff --git a/legal-api/tests/unit/test_error_handlers.py b/legal-api/tests/unit/test_error_handlers.py index 3e93799c25..5c20d9e40a 100644 --- a/legal-api/tests/unit/test_error_handlers.py +++ b/legal-api/tests/unit/test_error_handlers.py @@ -32,7 +32,7 @@ def test_handle_http_error_pass_through_routing_exception(): # pylint: disable= def test_handle_http_error_pass(app): """Assert that the RoutingException is passed through the handler.""" - with app.app_context(): + with app.test_request_context(): err = HTTPException(description='description') err.code = 200 response = errorhandlers.handle_http_error(err) @@ -47,7 +47,7 @@ def test_handle_uncaught_error(app, caplog): and log an ERROR of an uncaught exception. Unhandled exceptions should get ticketed and managed. """ - with app.app_context(): + with app.test_request_context(): # logger = errorhandlers.logger caplog.set_level(errorhandlers.logging.ERROR, logger=errorhandlers.logger.name) resp = errorhandlers.handle_uncaught_error(Exception()) diff --git a/legal-api/tests/unit/valid_size.pdf b/legal-api/tests/unit/valid_size.pdf new file mode 100644 index 0000000000..b9971a762e Binary files /dev/null and b/legal-api/tests/unit/valid_size.pdf differ diff --git a/python/common/sql-versioning/sql_versioning/__init__.py b/python/common/sql-versioning/sql_versioning/__init__.py index 9ece3f89e6..afd7253b9f 100644 --- a/python/common/sql-versioning/sql_versioning/__init__.py +++ b/python/common/sql-versioning/sql_versioning/__init__.py @@ -12,17 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
"""Versioning extension for SQLAlchemy.""" -from .debugging import debug from .versioning import (Base, TransactionFactory, TransactionManager, - Versioned, disable_versioning, enable_versioning, - version_class) + Versioned, disable_versioning, enable_versioning) +from .utils import version_class __all__ = ( "Base", "TransactionFactory", "TransactionManager", "Versioned", - "debug", "disable_versioning", "enable_versioning", "version_class" diff --git a/python/common/sql-versioning/sql_versioning/expression_reflector.py b/python/common/sql-versioning/sql_versioning/expression_reflector.py new file mode 100644 index 0000000000..8acc67e0d8 --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/expression_reflector.py @@ -0,0 +1,46 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import sqlalchemy as sa +from sqlalchemy.sql.expression import bindparam + +from .utils import version_table + + +class VersionExpressionReflector(sa.sql.visitors.ReplacingCloningVisitor): + def __init__(self, parent, relationship): + self.parent = parent + self.relationship = relationship + + def replace(self, column): + if not isinstance(column, sa.Column): + return + try: + table = version_table(column.table) + except KeyError: + reflected_column = column + else: + reflected_column = table.c[column.name] + if ( + column in self.relationship.local_columns and + table == self.parent.__table__ + ): + reflected_column = bindparam( + column.key, + getattr(self.parent, column.key) + ) + + return reflected_column + + def __call__(self, expr): + return self.traverse(expr) diff --git a/python/common/sql-versioning/sql_versioning/relationship_builder.py b/python/common/sql-versioning/sql_versioning/relationship_builder.py new file mode 100644 index 0000000000..c7642b136e --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/relationship_builder.py @@ -0,0 +1,381 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import sqlalchemy as sa +from enum import Enum + +from .expression_reflector import VersionExpressionReflector +from .utils import adapt_columns, version_class + + +class Operation(Enum): + INSERT = 0 + UPDATE = 1 + DELETE = 2 + + +class RelationshipBuilder(object): + def __init__(self, model, property_): + self.property = property_ + self.model = model + + def one_to_many_subquery(self, obj): + tx_column = "transaction_id" + + remote_alias = sa.orm.aliased(self.remote_cls) + primary_keys = [ + getattr(remote_alias, column.name) for column + in sa.inspect(remote_alias).mapper.columns + if column.primary_key and column.name != tx_column + ] + + return sa.exists( + sa.select(1).where( + sa.and_( + getattr(remote_alias, tx_column) <= + getattr(obj, tx_column), + *[ + getattr(remote_alias, pk.name) == + getattr(self.remote_cls, pk.name) + for pk in primary_keys + ] + ) + ).group_by( + *primary_keys + ).having( + sa.func.max(getattr(remote_alias, tx_column)) == + getattr(self.remote_cls, tx_column) + ).correlate(self.local_cls, self.remote_cls) + ) + + def many_to_one_subquery(self, obj): + tx_column = "transaction_id" + reflector = VersionExpressionReflector(obj, self.property) + subquery = sa.select( + sa.func.max(getattr(self.remote_cls, tx_column)) + ).where( + sa.and_( + getattr(self.remote_cls, tx_column) <= + getattr(obj, tx_column), + reflector(self.property.primaryjoin) + ) + ) + subquery = subquery.scalar_subquery() + + return getattr(self.remote_cls, tx_column) == subquery + + def query(self, obj): + session = sa.orm.object_session(obj) + return ( + session.query(self.remote_cls) + .filter( + self.criteria(obj) + ) + ) + + def process_query(self, query): + """ + Process given SQLAlchemy Query object depending on the associated + RelationshipProperty object. 
+ + :param query: SQLAlchemy Query object + """ + if self.property.lazy == 'dynamic': + return query + if self.property.uselist is False: + return query.first() + return query.all() + + def criteria(self, obj): + direction = self.property.direction + + if self.versioned: + if direction.name == 'ONETOMANY': + return self.one_to_many_criteria(obj) + # TODO: Get many-to-many relationships working + # elif direction.name == 'MANYTOMANY': + # return self.many_to_many_criteria(obj) + elif direction.name == 'MANYTOONE': + return self.many_to_one_criteria(obj) + else: + reflector = VersionExpressionReflector(obj, self.property) + return reflector(self.property.primaryjoin) + + def many_to_many_criteria(self, obj): + """ + Returns the many-to-many query. + + Looks up remote items through associations and for each item returns + returns the last version with a transaction less than or equal to the + transaction of `obj`. This must hold true for both the association and + the remote relation items. + + Example + ------- + Select all tags of article with id 3 and transaction 5 + + .. 
code-block:: sql + + SELECT tags_version.* + FROM tags_version + WHERE EXISTS ( + SELECT 1 + FROM article_tag_version + WHERE article_id = 3 + AND tag_id = tags_version.id + AND operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM article_tag_version as article_tag_version2 + WHERE article_tag_version2.tag_id = article_tag_version.tag_id + AND article_tag_version2.tx_id <= 5 + GROUP BY article_tag_version2.tag_id + HAVING + MAX(article_tag_version2.tx_id) = + article_tag_version.tx_id + ) + ) + AND EXISTS ( + SELECT 1 + FROM tags_version as tags_version_2 + WHERE tags_version_2.id = tags_version.id + AND tags_version_2.tx_id <= 5 + GROUP BY tags_version_2.id + HAVING MAX(tags_version_2.tx_id) = tags_version.tx_id + ) + AND operation_type != 2 + """ + return sa.and_( + self.association_subquery(obj), + self.one_to_many_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + def many_to_one_criteria(self, obj): + """Returns the many-to-one query. + + Returns the item on the 'one' side with the highest transaction id + as long as it is less or equal to the transaction id of the `obj`. + + Example + ------- + Look up the Article of a Tag with article_id = 4 and + transaction_id = 5 + + .. code-block:: sql + + SELECT * + FROM articles_version + WHERE id = 4 + AND transaction_id = ( + SELECT max(transaction_id) + FROM articles_version + WHERE transaction_id <= 5 + AND id = 4 + ) + AND operation_type != 2 + + """ + reflector = VersionExpressionReflector(obj, self.property) + return sa.and_( + reflector(self.property.primaryjoin), + self.many_to_one_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + def one_to_many_criteria(self, obj): + """ + Returns the one-to-many query. + + For each item on the 'many' side, returns its latest version as long as + the transaction of that version is less than equal of the transaction + of `obj`. 
+ + Example + ------- + Using the Article-Tags relationship, where we look for tags of + article_version with id = 3 and transaction = 5 the sql produced is + + .. code-block:: sql + + SELECT tags_version.* + FROM tags_version + WHERE tags_version.article_id = 3 + AND tags_version.operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM tags_version as tags_version_last + WHERE tags_version_last.transaction_id <= 5 + AND tags_version_last.id = tags_version.id + GROUP BY tags_version_last.id + HAVING + MAX(tags_version_last.transaction_id) = + tags_version.transaction_id + ) + + """ + reflector = VersionExpressionReflector(obj, self.property) + return sa.and_( + reflector(self.property.primaryjoin), + self.one_to_many_subquery(obj), + self.remote_cls.operation_type != Operation.DELETE.value + ) + + @property + def reflected_relationship(self): + """ + Builds a reflected one-to-many, one-to-one and many-to-one + relationship between two version classes. + """ + @property + def relationship(obj): + query = self.query(obj) + return self.process_query(query) + return relationship + + def association_subquery(self, obj): + """ + Returns an EXISTS clause that checks if an association exists for given + SQLAlchemy declarative object. This query is used by + many_to_many_criteria method. + + Example query: + + .. 
code-block:: sql + + EXISTS ( + SELECT 1 + FROM article_tag_version + WHERE article_id = 3 + AND tag_id = tags_version.id + AND operation_type != 2 + AND EXISTS ( + SELECT 1 + FROM article_tag_version as article_tag_version2 + WHERE article_tag_version2.tag_id = article_tag_version.tag_id + AND article_tag_version2.tx_id <=5 + AND article_tag_version2.article_id = 3 + GROUP BY article_tag_version2.tag_id + HAVING + MAX(article_tag_version2.tx_id) = + article_tag_version.tx_id + ) + ) + + :param obj: SQLAlchemy declarative object + """ + + tx_column = "transaction_id" + join_column = self.property.primaryjoin.right.name + object_join_column = self.property.primaryjoin.left.name + reflector = VersionExpressionReflector(obj, self.property) + + association_table_alias = self.association_version_table.alias() + association_cols = [ + association_table_alias.c[association_col.name] + for _, association_col + in self.remote_to_association_column_pairs + ] + + association_exists = sa.exists( + sa.select(1).where( + sa.and_( + association_table_alias.c[tx_column] <= + getattr(obj, tx_column), + association_table_alias.c[join_column] == getattr(obj, object_join_column), + *[association_col == + self.association_version_table.c[association_col.name] + for association_col + in association_cols] + ) + ).group_by( + *association_cols + ).having( + sa.func.max(association_table_alias.c[tx_column]) == + self.association_version_table.c[tx_column] + ).correlate(self.association_version_table) + ) + return sa.exists( + sa.select(1).where( + sa.and_( + reflector(self.property.primaryjoin), + association_exists, + self.association_version_table.c.operation_type != + Operation.DELETE.value, + adapt_columns(self.property.secondaryjoin), + ) + ).correlate(self.local_cls, self.remote_cls) + ) + + # TODO: Get many-to-many relationships working. + # def build_association_version_tables(self): + # """ + # Builds many-to-many association version table for given property. 
+ # Association version tables are used for tracking change history of + # many-to-many associations. + # """ + # column = list(self.property.remote_side)[0] + + # self.manager.association_tables.add(column.table) + # builder = TableBuilder( + # self.manager, + # column.table + # ) + # metadata = column.table.metadata + # if builder.parent_table.schema: + # table_name = builder.parent_table.schema + '.' + builder.table_name + # elif metadata.schema: + # table_name = metadata.schema + '.' + builder.table_name + # else: + # table_name = builder.table_name + + # if table_name not in metadata.tables: + # self.association_version_table = table = builder() + # self.manager.association_version_tables.add(table) + # else: + # # may have already been created if we visiting the 'other' side of + # # a self-referential many-to-many relationship + # self.association_version_table = metadata.tables[table_name] + + def __call__(self): + """ + Builds reflected relationship between version classes based on given + parent object's RelationshipProperty. + """ + self.local_cls = version_class(self.model) + self.versioned = False + + if version_class(self.property.mapper.class_): + self.remote_cls = version_class(self.property.mapper.class_) + self.versioned = True + else: + self.remote_cls = self.property.mapper.class_ + + # TODO: Get many-to-many relationships working. 
+ # if (self.property.secondary is not None and + # not self.property.viewonly and + # not self.manager.is_excluded_property( + # self.model, self.property.key)): + # self.build_association_version_tables() + + # # store remote cls to association table column pairs + # self.remote_to_association_column_pairs = [] + # for column_pair in self.property.local_remote_pairs: + # if column_pair[0] in self.property.target.c.values(): + # self.remote_to_association_column_pairs.append(column_pair) + + setattr( + self.local_cls, + self.property.key, + self.reflected_relationship + ) diff --git a/python/common/sql-versioning/sql_versioning/utils.py b/python/common/sql-versioning/sql_versioning/utils.py new file mode 100644 index 0000000000..f9a03c6496 --- /dev/null +++ b/python/common/sql-versioning/sql_versioning/utils.py @@ -0,0 +1,58 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import sqlalchemy as sa +from contextlib import suppress + + +def version_class(obj): + """Return the version class associated with a model. + + :param obj: The object to get the version class for. + :return: The version class or None if not found. + """ + with suppress(Exception): + versioned_class = obj.__versioned_cls__ + return versioned_class + return None + + +def version_table(table): + """ + Return associated version table for given SQLAlchemy Table object. 
+ + :param table: SQLAlchemy Table object + """ + if table.schema: + return table.metadata.tables[ + table.schema + '.' + table.name + '_version' + ] + elif table.metadata.schema: + return table.metadata.tables[ + table.metadata.schema + '.' + table.name + '_version' + ] + else: + return table.metadata.tables[ + table.name + '_version' + ] + + +class VersioningClauseAdapter(sa.sql.visitors.ReplacingCloningVisitor): + def replace(self, col): + if isinstance(col, sa.Column): + table = version_table(col.table) + return table.c.get(col.key) + + +def adapt_columns(expr): + return VersioningClauseAdapter().traverse(expr) diff --git a/python/common/sql-versioning/sql_versioning/versioning.py b/python/common/sql-versioning/sql_versioning/versioning.py index 3739b3e797..c715a29cd7 100644 --- a/python/common/sql-versioning/sql_versioning/versioning.py +++ b/python/common/sql-versioning/sql_versioning/versioning.py @@ -13,15 +13,14 @@ # limitations under the License. """Versioned mixin class, listeners and other utilities.""" import datetime -from contextlib import suppress from sqlalchemy import (BigInteger, Column, DateTime, Integer, SmallInteger, String, and_, event, func, insert, inspect, select, update) from sqlalchemy.ext.declarative import declarative_base, declared_attr -from sqlalchemy.orm import Session, mapper +from sqlalchemy.orm import Session, mapper, relationships -from .debugging import debug +from .relationship_builder import RelationshipBuilder Base = declarative_base() @@ -47,10 +46,31 @@ def _is_obj_modified(obj): return False +def _should_relationship_delete_orphan(session, obj): + """ + Checks if: + 1. This relationship is a many-to-one relationship + 2. If the opposite direction one-to-many relationship parent object has changes + 3. If the opposite direction one-to-many relationship has cascade=delete-orphan + + :param session: The database session instance. + :param obj: The object to inspect for changes. 
+ :return: True if the above checks pass, otherwise False. + """ + should_delete = False + for r in inspect(obj.__class__).relationships: + if r.direction.name == 'MANYTOONE' and r._reverse_property: + reverse_rel, *_ = r._reverse_property + parent_obj = inspect(obj).committed_state.get(reverse_rel.backref, None) + if parent_obj in session.dirty: + should_delete = should_delete or "delete-orphan" in inspect(reverse_rel)._cascade + return should_delete + + def _is_session_modified(session): """Check if the session contains modified versioned objects. - :param session: The database sesseion instance. + :param session: The database session instance. :return: True if the session contains modified versioned objects, otherwise False. """ for obj in versioned_objects(session): @@ -61,19 +81,20 @@ def _is_session_modified(session): return False -def _get_operation_type(session, obj): +def _get_operation_type(session, obj, delete_orphan=False): """Return the operation type for the given object within the session. :param session: The database session instance. :param obj: The object to determine the operation type. :return: The operation type ('I' for insert, 'U' for update, 'D' for delete), or None if unchanged. """ + is_orphaned = inspect(obj)._orphaned_outside_of_session if obj in session.new: return 'I' + elif obj in session.deleted or (is_orphaned and delete_orphan): + return 'D' elif obj in session.dirty: return 'U' if _is_obj_modified(obj) else None - elif obj in session.deleted: - return 'D' return None @@ -85,11 +106,7 @@ def _create_version(session, target, operation_type): :param operation_type: The type of operation ('I', 'U', 'D') being performed on the object. 
:return: None """ - - print(f'\033[32mCreating version for {target.__class__.__name__} (id={target.id}), operation_type: {operation_type}\033[0m') - if not session: - print(f'\033[32mSkipping version creation for {target.__class__.__name__} (id={target.id})\033[0m') return transaction_manager = TransactionManager(session) @@ -152,8 +169,6 @@ def _create_version(session, target, operation_type): values(end_transaction_id=transaction_id) ) - print(f'\033[32mVersion created/updated for {target.__class__.__name__} (id={target.id}), transaction_id: {transaction_id}\033[0m') - # ---------- Transaction Related Classes ---------- class TransactionFactory: @@ -198,7 +213,6 @@ def __init__(self, session): self.session = session self.transaction_model = TransactionFactory.create_transaction_model() - @debug def create_transaction(self): """Create a new transaction in the session. @@ -206,7 +220,6 @@ def create_transaction(self): """ if 'current_transaction_id' in self.session.info: - print(f"\033[32mPoping out existing transaction: {self.session.info['current_transaction_id']}\033[0m") self.session.info.pop('current_transaction_id', None) # Use insert().returning() to get the ID and issued_at without committing @@ -216,10 +229,7 @@ def create_transaction(self): result = self.session.execute(stmt) transaction_id, issued_at = result.first() - print(f'\033[32mCreated new transaction: {transaction_id}\033[0m') - self.session.info['current_transaction_id'] = transaction_id - print(f'\033[32mSet current_transaction_id: {transaction_id}\033[0m') return transaction_id def get_current_transaction_id(self): @@ -232,32 +242,24 @@ def get_current_transaction_id(self): else: return self.create_transaction() - @debug def clear_current_transaction(self): """Clear the current transaction_id stored in the session. 
:return: None """ if self.session.transaction.nested: - print(f"\033[32mSkip clearing nested transaction\033[0m") return - print(f"\033[32mClearing current transaction: {self.session.info.get('current_transaction_id')}\033[0m") self.session.info.pop('current_transaction_id', None) # ---------- Event Listeners ---------- -@debug def _before_flush(session, flush_context, instances): """Trigger before a flush operation to ensure a transaction is created.""" try: if not _is_session_modified(session): - print('\033[31mThere is no modified versioned object in this session.\033[0m') return - if 'current_transaction_id' in session.info: - print(f"\033[31mtransaction_id={session.info['current_transaction_id']} exists before flush.\033[0m") - else: - print('\033[31mCreating transaction before flush.\033[0m') + if 'current_transaction_id' not in session.info: transaction_manager = TransactionManager(session) transaction_manager.create_transaction() @@ -265,19 +267,18 @@ def _before_flush(session, flush_context, instances): raise e -@debug def _after_flush(session, flush_context): """Trigger after a flush operation to create version records for changed objects.""" try: for obj in versioned_objects(session): - operation_type = _get_operation_type(session, obj) + should_delete_orphan = _should_relationship_delete_orphan(session, obj) + operation_type = _get_operation_type(session, obj, should_delete_orphan) if operation_type: _create_version(session, obj, operation_type) except Exception as e: raise e -@debug def _clear_transaction(session): """Clears the current transaction from the session after commit or rollback.""" try: @@ -359,26 +360,21 @@ def _after_configured(cls): for pending_cls in cls._pending_version_classes: version_cls = pending_cls._version_cls mapper = inspect(pending_cls) + # Now add columns from the original table for c in mapper.columns: # Make sure table's column name and class's property name can be different property_name = 
mapper.get_property_by_column(c).key if not hasattr(version_cls, property_name): setattr(version_cls, property_name, Column(c.name, c.type)) - delattr(cls, '_pending_version_classes') - -def version_class(obj): - """Return the version class associated with a model. + # Build relationships + for prop in inspect(cls).iterate_properties: + if type(prop) == relationships.RelationshipProperty: + builder = RelationshipBuilder(cls, prop) + builder() - :param obj: The object to get the version class for. - :return: The version class or None if not found. - """ - with suppress(Exception): - versioned_class = obj.__versioned_cls__ - print(f'\033[32mVersioned Class={versioned_class}\033[0m') - return versioned_class - return None + delattr(cls, '_pending_version_classes') def versioned_objects(session): @@ -392,7 +388,6 @@ def versioned_objects(session): yield obj -@debug def enable_versioning(transaction_cls=None): """Enable versioning. It registers listeners. @@ -408,7 +403,6 @@ def enable_versioning(transaction_cls=None): raise e -@debug def disable_versioning(): """Disable versioning. It removes listeners. diff --git a/python/common/sql-versioning/tests/__init__.py b/python/common/sql-versioning/tests/__init__.py index e69de29bb2..747b0cbfeb 100644 --- a/python/common/sql-versioning/tests/__init__.py +++ b/python/common/sql-versioning/tests/__init__.py @@ -0,0 +1,82 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Tests for versioning extension. + +Initialization file that holds testing classes. +""" +from sqlalchemy import Column, ForeignKey, Integer, String, orm + +from sql_versioning import (Base, TransactionFactory, Versioned, + enable_versioning) + + +enable_versioning() + +Transaction = TransactionFactory.create_transaction_model() + +class Model(Base): + __tablename__ = 'models' + id = Column(Integer, primary_key=True) + name = Column(String) + +class User(Base, Versioned): + __tablename__ = 'users' + + id = Column(Integer, primary_key=True) + name = Column(String) + + # One-to-one versioned relationship + address = orm.relationship('Address', backref='user', uselist=False) + # One-to-one non-versioned relationship + location = orm.relationship('Location', backref='user', uselist=False) + # One-to-many versioned relationship + emails = orm.relationship('Email', backref='user', lazy='dynamic', cascade='all, delete, delete-orphan') + # One-to-many non versioned relationship + items = orm.relationship('Item', backref='user', lazy='dynamic', cascade='all, delete, delete-orphan') + +class Address(Base, Versioned): + __tablename__ = 'addresses' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + +class Location(Base): + __tablename__ = 'locations' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + +class Email(Base, Versioned): + __tablename__ = 'emails' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + + +class Item(Base): + __tablename__ = 'items' + + id = Column(Integer, primary_key=True) + name = Column(String) + + user_id = Column(Integer, ForeignKey('users.id')) + + +orm.configure_mappers() \ No newline at end of file diff --git a/python/common/sql-versioning/tests/test_versioning.py b/python/common/sql-versioning/tests/test_versioning.py index 
a7b74cb816..bc5d4e8656 100644 --- a/python/common/sql-versioning/tests/test_versioning.py +++ b/python/common/sql-versioning/tests/test_versioning.py @@ -16,37 +16,9 @@ Test-Suite to ensure that the versioning extension is working as expected. """ import pytest -from sqlalchemy import Column, ForeignKey, Integer, String, orm -from sql_versioning import (Base, TransactionFactory, Versioned, - enable_versioning, version_class) - -enable_versioning() - -Transaction = TransactionFactory.create_transaction_model() - -class Model(Base): - __tablename__ = 'models' - id = Column(Integer, primary_key=True) - name = Column(String) - -class User(Base, Versioned): - __tablename__ = 'users' - - id = Column(Integer, primary_key=True) - name = Column(String) - - address = orm.relationship('Address', backref='user', uselist=False) - -class Address(Base, Versioned): - __tablename__ = 'addresses' - - id = Column(Integer, primary_key=True) - name = Column(String) - - user_id = Column(Integer, ForeignKey('users.id')) - -orm.configure_mappers() +from sql_versioning import (version_class) +from tests import (Model, User, Address, Location, Email, Item, Transaction) @pytest.mark.parametrize('test_name', ['CLASS','INSTANCE']) @@ -135,6 +107,109 @@ def test_versioning_insert(db, session): assert result_versioned_address.end_transaction_id is None +def test_versioning_relationships(db, session): + user = User(name='user') + address = Address(name='Some address') + location = Location(name='Some location') + emails = [Email(name='primary'), Email(name='secondary')] + items = [Item(name='An item'), Item(name='Another item')] + user.address = address + user.location = location + user.items = items + user.emails = emails + session.add(user) + session.commit() + + user_version = version_class(User) + result_revision = session.query(user_version)\ + .filter(user_version.name=='user')\ + .one_or_none() + + # Test one-to-one relationship + # Versioned + assert result_revision.address.id == 
address.id + assert result_revision.address.name == "Some address" + assert result_revision.address.user.name == user.name + # Non versioned + assert result_revision.location.id == location.id + assert result_revision.location.name == "Some location" + assert result_revision.location.user.name == user.name + + # Test one-to-many relationship + # Versioned + result_emails = result_revision.emails.all() + assert len(result_emails) == len(emails) + assert result_emails[0].id == emails[0].id + assert result_emails[0].name == "primary" + assert result_emails[1].id == emails[1].id + assert result_emails[1].name == "secondary" + # Non versioned + result_items = result_revision.items.all() + assert len(result_items) == len(items) + assert result_items[0].id == items[0].id + assert result_items[0].name == "An item" + assert result_items[1].id == items[1].id + assert result_items[1].name == "Another item" + + # Test many-to-one relationship + # Note: this is a quirk of the RelationshipBuilder. We don't explicitly establish bi-directionality + # by including the "reverse" side of the relationship (i.e. 
Item.user), but it works anyway + # Versioned + assert result_revision.emails[0].user.name == user.name + assert result_revision.emails[1].user.name == user.name + # Non versioned + assert result_revision.items[0].user == user + assert result_revision.items[1].user == user + + # Test update relationship + user.address = Address(name='Some new address') + session.commit() + + user_version = version_class(User) + result_revisions = session.query(user_version)\ + .filter(user_version.name=='user')\ + .order_by(user_version.transaction_id)\ + .all() + + assert user.address.name == 'Some new address' + assert len(result_revisions) == 2 + assert result_revisions[0].address.name == "Some address" + assert result_revisions[1].address.name == "Some new address" + + +def test_versioning_relationships_remove(db, session): + """Test remove from relationship.""" + user = User(name='test') + for i in range(5): + email = Email(name=f'email {i}') + user.emails.append(email) + session.add(user) + session.commit() + + if existing_emails := user.emails.all(): + for email in existing_emails: + user.emails.remove(email) + session.add(user) + session.commit() + + user = session.query(User).one_or_none() + emails = user.emails.all() + assert not emails + + emails = session.query(Email).all() + assert not emails + + email_versions = session.query(version_class(Email))\ + .order_by(version_class(Email).transaction_id)\ + .all() + assert len(email_versions) == 10 + for i in range(10): + if i < 5: + assert email_versions[i].operation_type == 0 + else: + assert email_versions[i].operation_type == 2 + + def test_versioning_delete(db, session): """Test deletion.""" user = User(name='test') diff --git a/queue_services/business-pay/src/business_pay/resources/pay_filer.py b/queue_services/business-pay/src/business_pay/resources/pay_filer.py index 30fb3d66d6..75b3919931 100644 --- a/queue_services/business-pay/src/business_pay/resources/pay_filer.py +++ 
b/queue_services/business-pay/src/business_pay/resources/pay_filer.py @@ -109,7 +109,7 @@ async def worker(): logger.debug(f"Removed From Queue: no payment info in ce: {str(ce)}") return {}, HTTPStatus.OK - if payment_token.corp_type_code in ["MHR", "BTR"]: + if payment_token.corp_type_code in ["MHR", "BCR", "BTR", "BUS", "STRR"]: logger.debug( f"ignoring message for corp_type_code:{payment_token.corp_type_code}, {str(ce)}") return {}, HTTPStatus.OK diff --git a/queue_services/entity-bn/devops/vaults.json b/queue_services/entity-bn/devops/vaults.json index 451dc80fa7..2b6c3dc51b 100644 --- a/queue_services/entity-bn/devops/vaults.json +++ b/queue_services/entity-bn/devops/vaults.json @@ -29,7 +29,8 @@ "vault": "entity", "application": [ "entity-service-account", - "sentry" + "sentry", + "launchdarkly" ] } ] diff --git a/queue_services/entity-bn/flags.json b/queue_services/entity-bn/flags.json new file mode 100644 index 0000000000..f773eec046 --- /dev/null +++ b/queue_services/entity-bn/flags.json @@ -0,0 +1,14 @@ +{ + "flagValues": { + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": true + } + } +} diff --git a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py index 96e898eaed..cc93535bad 100644 --- a/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py +++ b/queue_services/entity-bn/src/entity_bn/bn_processors/change_of_registration.py @@ -19,10 +19,10 @@ import dpath from flask import current_app from legal_api.models import Address, Business, Filing, Party, PartyRole, RequestTracker, db +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime from legal_api.utils.legislation_datetime import LegislationDatetime from sqlalchemy 
import and_, func -from sqlalchemy_continuum import version_class from entity_bn.bn_processors import ( bn_note, @@ -215,7 +215,7 @@ def change_address(business: Business, filing: Filing, # pylint: disable=too-ma def has_previous_address(transaction_id: int, office_id: int, address_type: str) -> bool: """Has previous address for the given transaction and office id.""" - address_version = version_class(Address) + address_version = VersioningProxy.version_class(db.session(), Address) address = db.session.query(address_version) \ .filter(address_version.operation_type != 2) \ .filter(address_version.office_id == office_id) \ @@ -227,7 +227,7 @@ def has_previous_address(transaction_id: int, office_id: int, address_type: str) def has_party_name_changed(business: Business, filing: Filing) -> bool: """Has party name changed in the given filing.""" - party_role_version = version_class(PartyRole) + party_role_version = VersioningProxy.version_class(db.session(), PartyRole) party_roles = db.session.query(party_role_version)\ .filter(party_role_version.transaction_id == filing.transaction_id) \ .filter(party_role_version.operation_type != 2) \ @@ -266,7 +266,7 @@ def _get_name(party) -> str: def _get_modified_parties(transaction_id, business_id): """Get all party values before the given transaction id.""" - party_version = version_class(Party) + party_version = VersioningProxy.version_class(db.session(), Party) parties = db.session.query(party_version) \ .join(PartyRole, and_(PartyRole.party_id == party_version.id, PartyRole.business_id == business_id)) \ .filter(PartyRole.role.in_([PartyRole.RoleTypes.PARTNER.value, PartyRole.RoleTypes.PROPRIETOR.value])) \ diff --git a/queue_services/entity-bn/src/entity_bn/config.py b/queue_services/entity-bn/src/entity_bn/config.py index 754de4878d..65bd323f2c 100644 --- a/queue_services/entity-bn/src/entity_bn/config.py +++ b/queue_services/entity-bn/src/entity_bn/config.py @@ -58,10 +58,12 @@ class _Config(): # pylint: 
disable=too-few-public-methods Used as the base for all the other configurations. """ + SERVICE_NAME = 'entity-bn' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' SENTRY_DSN = '' if SENTRY_DSN.lower() == 'null' else SENTRY_DSN + LD_SDK_KEY = os.getenv('LD_SDK_KEY', None) COLIN_API = f"{os.getenv('COLIN_API_URL', '')}{os.getenv('COLIN_API_VERSION', '')}" SEARCH_API = \ diff --git a/queue_services/entity-bn/src/entity_bn/version.py b/queue_services/entity-bn/src/entity_bn/version.py index c8e4310dd4..c6bc4525e4 100644 --- a/queue_services/entity-bn/src/entity_bn/version.py +++ b/queue_services/entity-bn/src/entity_bn/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.131.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-bn/src/entity_bn/worker.py b/queue_services/entity-bn/src/entity_bn/worker.py index 7cd63c5ebc..f6b16f0ea3 100644 --- a/queue_services/entity-bn/src/entity_bn/worker.py +++ b/queue_services/entity-bn/src/entity_bn/worker.py @@ -32,9 +32,10 @@ import nats from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import db from legal_api.core import Filing as FilingCore from legal_api.models import Business +from legal_api.models.db import init_db +from legal_api.services.flags import Flags from sentry_sdk import capture_message from sqlalchemy.exc import OperationalError @@ -49,10 +50,14 @@ from entity_bn.exceptions import BNException, BNRetryExceededException +flags = Flags() # pylint: disable=invalid-name APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) # pragma: no cover FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) + +if FLASK_APP.config.get('LD_SDK_KEY', None): + flags.init_app(FLASK_APP) async def process_event(msg: Dict, flask_app: Flask): 
# pylint: disable=too-many-branches,too-many-statements diff --git a/queue_services/entity-digital-credentials/flags.json b/queue_services/entity-digital-credentials/flags.json new file mode 100644 index 0000000000..257cf6ec93 --- /dev/null +++ b/queue_services/entity-digital-credentials/flags.json @@ -0,0 +1,14 @@ +{ + "flagValues": { + "db-versioning": { + "legal-api": true, + "emailer": false, + "filer": false, + "entity-bn": false, + "digital-credentials": true, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": false + } + } +} diff --git a/queue_services/entity-digital-credentials/requirements.txt b/queue_services/entity-digital-credentials/requirements.txt index a67f794736..f35b9f0271 100644 --- a/queue_services/entity-digital-credentials/requirements.txt +++ b/queue_services/entity-digital-credentials/requirements.txt @@ -22,5 +22,5 @@ urllib3==1.26.11 Werkzeug==1.0.1 git+https://github.com/bcgov/business-schemas.git@2.18.15#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common -git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning +git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py index 863c8f3ebf..06fa1a4f65 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/config.py @@ -58,6 +58,8 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. 
""" + # used to identify versioning flag + SERVICE_NAME = 'digital-credentials' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) SENTRY_DSN = os.getenv('SENTRY_DSN') or '' diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py index cb5ffe29d1..a05b27332a 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.131.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py index 03decbfc55..4d806e0929 100644 --- a/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py +++ b/queue_services/entity-digital-credentials/src/entity_digital_credentials/worker.py @@ -33,9 +33,9 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import QueueException, logger from flask import Flask -from legal_api import db from legal_api.core import Filing as FilingCore from legal_api.models import Business +from legal_api.models.db import init_db from legal_api.services import digital_credentials, flags from sqlalchemy.exc import OperationalError @@ -54,7 +54,7 @@ APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) with FLASK_APP.app_context(): # db require app context digital_credentials.init_app(FLASK_APP) diff --git a/queue_services/entity-digital-credentials/tests/unit/__init__.py 
b/queue_services/entity-digital-credentials/tests/unit/__init__.py index 0ed27a9e90..88b20c032d 100644 --- a/queue_services/entity-digital-credentials/tests/unit/__init__.py +++ b/queue_services/entity-digital-credentials/tests/unit/__init__.py @@ -14,7 +14,7 @@ """The Unit Tests and the helper routines.""" from legal_api.models import Business, DCConnection, DCDefinition, DCIssuedCredential, Filing -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy def create_business(identifier): @@ -37,9 +37,8 @@ def create_filing(session, business_id=None, filing._status = filing_status if filing_status == Filing.Status.COMPLETED.value: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id if filing_json: filing.filing_json = filing_json if business_id: diff --git a/queue_services/entity-emailer/flags.json b/queue_services/entity-emailer/flags.json index 877972a91c..e1f4a3778b 100644 --- a/queue_services/entity-emailer/flags.json +++ b/queue_services/entity-emailer/flags.json @@ -1,5 +1,15 @@ { "flagValues": { - "disable-specific-service-provider": true + "disable-specific-service-provider": true, + "db-versioning": { + "legal-api": true, + "emailer": true, + "filer": false, + "entity-bn": true, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": true + } } -} \ No newline at end of file +} diff --git a/queue_services/entity-emailer/requirements.txt b/queue_services/entity-emailer/requirements.txt index d8eeae2c8c..a16b43522a 100644 --- a/queue_services/entity-emailer/requirements.txt +++ b/queue_services/entity-emailer/requirements.txt @@ -67,7 +67,7 @@ rsa==4.7.2 semver==2.13.0 sentry-sdk==1.20.0 six==1.15.0 -SQLAlchemy==1.3.24 +SQLAlchemy==1.4.44 SQLAlchemy-Continuum==1.3.13 
SQLAlchemy-Utils==0.37.1 strict-rfc3339==0.7 @@ -78,7 +78,7 @@ webcolors==1.13 Werkzeug==1.0.1 yarl==1.8.2 zipp==3.15.0 -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.33#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-emailer/src/entity_emailer/config.py b/queue_services/entity-emailer/src/entity_emailer/config.py index 057b00c19e..7ae1ff8718 100644 --- a/queue_services/entity-emailer/src/entity_emailer/config.py +++ b/queue_services/entity-emailer/src/entity_emailer/config.py @@ -58,6 +58,8 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. """ + # used to identify versioning flag + SERVICE_NAME = 'emailer' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) MSG_RETRY_NUM = int(os.getenv('MSG_RETRY_NUM', '5')) diff --git a/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py new file mode 100644 index 0000000000..7b7b64e59c --- /dev/null +++ b/queue_services/entity-emailer/src/entity_emailer/email_processors/notice_of_withdrawal_notification.py @@ -0,0 +1,189 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Email processing rules and actions for Notice of Withdrawal notifications.""" +import base64 +import re +from http import HTTPStatus +from pathlib import Path + +import requests +from entity_queue_common.service_utils import logger +from flask import current_app +from jinja2 import Template +from legal_api.core.meta.filing import FilingMeta +from legal_api.models import Business, Filing + +from entity_emailer.email_processors import ( + get_filing_document, + get_filing_info, + get_recipient_from_auth, + substitute_template_parts, +) + + +def process(email_info: dict, token: str) -> dict: # pylint: disable=too-many-locals + """Build the email for Notice of Withdrawal notification.""" + logger.debug('notice_of_withdrawal_notification: %s', email_info) + # get template and fill in parts + filing_type = email_info['type'] + + # get template variables from filing + filing, business, leg_tmz_filing_date, leg_tmz_effective_date = get_filing_info(email_info['filingId']) + legal_type = business.get('legalType') + + # display company name for existing businesses and temp businesses + company_name = ( + business.get('legalName') + or Business.BUSINESSES.get(legal_type, {}).get('numberedDescription') + # fall back default value + or 'Unknown Company' + ) + # record to be withdrawn --> withdrawn filing display name + withdrawn_filing = Filing.find_by_id(filing.withdrawn_filing_id) + withdrawn_filing_display_name = FilingMeta.get_display_name( + business['legalType'], + withdrawn_filing.filing_type, + withdrawn_filing.filing_sub_type + ) + template = Path( + 
f'{current_app.config.get("TEMPLATE_PATH")}/NOW-COMPLETED.html' + ).read_text() + filled_template = substitute_template_parts(template) + # render template with vars + jnja_template = Template(filled_template, autoescape=True) + filing_data = (filing.json)['filing'][f'{filing_type}'] + filing_name = filing.filing_type[0].upper() + ' '.join(re.findall('[a-zA-Z][^A-Z]*', filing.filing_type[1:])) + + # default to None + filing_id = None + # show filing ID in email template when the withdrawn record is an IA, Amalg. or a ContIn + if business.get('identifier').startswith('T'): + filing_id = filing_data['filingId'] + + html_out = jnja_template.render( + business=business, + filing=filing_data, + header=(filing.json)['filing']['header'], + company_name=company_name, + filing_date_time=leg_tmz_filing_date, + filing_id=filing_id, + effective_date_time=leg_tmz_effective_date, + withdrawnFilingType=withdrawn_filing_display_name, + entity_dashboard_url=current_app.config.get('DASHBOARD_URL') + + (filing.json)['filing']['business'].get('identifier', ''), + email_header=filing_name.upper(), + filing_type=filing_type + ) + + # get attachments + pdfs = _get_pdfs(token, business, filing, leg_tmz_filing_date, leg_tmz_effective_date) + + # get recipients + identifier = filing.filing_json['filing']['business']['identifier'] + recipients = _get_contacts(identifier, token, withdrawn_filing) + recipients = list(set(recipients)) + recipients = ', '.join(filter(None, recipients)).strip() + + # assign subject + subject = 'Notice of Withdrawal filed Successfully' + legal_name = company_name + legal_name = 'Numbered Company' if legal_name.startswith(identifier) else legal_name + subject = f'{legal_name} - {subject}' + + return { + 'recipients': recipients, + 'requestBy': 'BCRegistries@gov.bc.ca', + 'content': { + 'subject': subject, + 'body': f'{html_out}', + 'attachments': pdfs + } + } + + +def _get_pdfs( + token: str, + business: dict, + filing: Filing, + filing_date_time: str, + 
effective_date: str) -> list: + """Get the PDFs for the Notice of Withdrawal output.""" + pdfs = [] + attach_order = 1 + headers = { + 'Accept': 'application/pdf', + 'Authorization': f'Bearer {token}' + } + + # add filing PDF + filing_pdf_type = 'noticeOfWithdrawal' + filing_pdf_encoded = get_filing_document(business['identifier'], filing.id, filing_pdf_type, token) + if filing_pdf_encoded: + pdfs.append( + { + 'fileName': 'Notice of Withdrawal.pdf', + 'fileBytes': filing_pdf_encoded.decode('utf-8'), + 'fileUrl': '', + 'attachOrder': str(attach_order) + } + ) + attach_order += 1 + + # add receipt PDF + corp_name = business.get('legalName') + if business.get('identifier').startswith('T'): + business_data = None + else: + business_data = Business.find_by_internal_id(filing.business_id) + receipt = requests.post( + f'{current_app.config.get("PAY_API_URL")}/{filing.payment_token}/receipts', + json={ + 'corpName': corp_name, + 'filingDateTime': filing_date_time, + 'effectiveDateTime': effective_date if effective_date else '', + 'filingIdentifier': str(filing.id), + 'businessNumber': business_data.tax_id if business_data and business_data.tax_id else '' + }, headers=headers) + + if receipt.status_code != HTTPStatus.CREATED: + logger.error('Failed to get receipt pdf for filing: %s', filing.id) + else: + receipt_encoded = base64.b64encode(receipt.content) + pdfs.append( + { + 'fileName': 'Receipt.pdf', + 'fileBytes': receipt_encoded.decode('utf-8'), + 'fileUrl': '', + 'attachOrder': str(attach_order) + }) + attach_order += 1 + return pdfs + + +def _get_contacts(identifier, token, withdrawn_filing): + recipients = [] + if identifier.startswith('T'): + # get from withdrawn filing (FE new business filing) + filing_type = withdrawn_filing.filing_type + recipients.append(withdrawn_filing.filing_json['filing'][filing_type]['contactPoint']['email']) + + for party in withdrawn_filing.filing_json['filing'][filing_type]['parties']: + for role in party['roles']: + if role['roleType'] 
== 'Completing Party': + recipients.append(party['officer'].get('email')) + break + else: + recipients.append(get_recipient_from_auth(identifier, token)) + + return recipients diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html index dc6baa2a99..a185765ded 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-APPROVED.html @@ -42,8 +42,6 @@ [[continuation-application-details.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html index a15e839c93..0b13f0bc0e 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-PAID.html @@ -20,15 +20,11 @@

We have received your Continuation Application

- [[20px.html]] - [[divider.html]] [[20px.html]] [[business-information.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html index 87ab51a2fd..29daf600f0 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/CONT-IN-REJECTED.html @@ -30,7 +30,7 @@

Your Next Steps

    -
  1. Review the reasons for rejected as outlined below:
  2. +
  3. Review the reasons your authorization was rejected below:
  4. {{ latest_review_comment }}
  5. Visit My Business Registry to submit a new Continuation Application.
  6. @@ -44,8 +44,6 @@ [[continuation-application-details.html]] - [[20px.html]] - [[divider.html]] [[20px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html new file mode 100644 index 0000000000..422a5fb811 --- /dev/null +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/NOW-COMPLETED.html @@ -0,0 +1,62 @@ + + + + + + + + + + Notice of Withdrawal + [[style.html]] + + + + + + + + + + + diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html index 5cddc6c689..f63ce03b85 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/continuation-application-details.html @@ -6,7 +6,7 @@ [[16px.html]]
    -
    Identifying Number in Foreign Jurisdiction:
    +
    Identifying Number in Previous Jurisdiction:
    {{ filing.foreignJurisdiction.identifier }}
    [[16px.html]] diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html index 1a01771548..f8ec597f7b 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/cra-notice.html @@ -7,8 +7,8 @@ {% else %} As part of a provincial–federal partnership, BC Registries and Online Services and Canada Revenue Agency (CRA) have developed an agreement to assign a Business - Number to all companies operating in BC. As a result of this incorporation, a - Business number will be assigned to the company and will be emailed to the + Number to all companies operating in BC. As a result of this {% if filing_type == 'continuationIn' %}continuation, + {% else %}incorporation,{% endif %} a Business number will be assigned to the company and will be emailed to the company’s registered email address. {% endif %}

    diff --git a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html index e435deeb2c..faa26d9eb7 100644 --- a/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html +++ b/queue_services/entity-emailer/src/entity_emailer/email_templates/common/style.html @@ -89,4 +89,9 @@ .continuation-application-details .value { line-height: 24px; } + +.now-filing-info-title { + font-weight: 700; + margin-bottom: 4px; +} diff --git a/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py b/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py index 6d76dc475f..2c3b4c4b4f 100644 --- a/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py +++ b/queue_services/entity-emailer/src/entity_emailer/message_tracker/tracker.py @@ -97,7 +97,7 @@ def get_message_context_properties(queue_msg: nats.aio.client.Msg): message_id = f'{etype}_{option}_{ar_year}_{business_id}' return create_message_context_properties(etype, message_id, None, None, False) - if etype in ('agmLocationChange', 'agmExtension') \ + if etype in ('agmLocationChange', 'agmExtension', 'noticeOfWithdrawal') \ and (option := email.get('option', None)) \ and option == 'COMPLETED' \ and (filing_id := email.get('filingId', None)): diff --git a/queue_services/entity-emailer/src/entity_emailer/version.py b/queue_services/entity-emailer/src/entity_emailer/version.py index 7b1a128404..ae7b37c91a 100644 --- a/queue_services/entity-emailer/src/entity_emailer/version.py +++ b/queue_services/entity-emailer/src/entity_emailer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.134.0' # pylint: disable=invalid-name +__version__ = '2.143.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-emailer/src/entity_emailer/worker.py b/queue_services/entity-emailer/src/entity_emailer/worker.py 
index 308880d609..e520eedf24 100644 --- a/queue_services/entity-emailer/src/entity_emailer/worker.py +++ b/queue_services/entity-emailer/src/entity_emailer/worker.py @@ -34,8 +34,9 @@ from entity_queue_common.service import QueueServiceManager from entity_queue_common.service_utils import EmailException, QueueException, logger from flask import Flask -from legal_api import db +from legal_api import db # noqa:F401,I001;pylint:disable=unused-import; from legal_api.models import Filing, Furnishing +from legal_api.models.db import init_db from legal_api.services.bootstrap import AccountService from legal_api.services.flags import Flags from sqlalchemy.exc import OperationalError @@ -58,6 +59,7 @@ involuntary_dissolution_stage_1_notification, mras_notification, name_request, + notice_of_withdrawal_notification, nr_notification, registration_notification, restoration_notification, @@ -65,14 +67,14 @@ ) from .message_tracker import tracker as tracker_util - +# noqa:I003 qsm = QueueServiceManager() # pylint: disable=invalid-name flags = Flags() # pylint: disable=invalid-name APP_CONFIG = config.get_named_config(os.getenv('DEPLOYMENT_ENV', 'production')) FLASK_APP = Flask(__name__) FLASK_APP.config.from_object(APP_CONFIG) -db.init_app(FLASK_APP) +init_db(FLASK_APP) if FLASK_APP.config.get('LD_SDK_KEY', None): flags.init_app(FLASK_APP) @@ -215,6 +217,9 @@ def process_email(email_msg: dict, flask_app: Flask): # pylint: disable=too-man elif etype == 'continuationIn': email = continuation_in_notification.process(email_msg['email'], token) send_email(email, token) + elif etype == 'noticeOfWithdrawal' and option == Filing.Status.COMPLETED.value: + email = notice_of_withdrawal_notification.process(email_msg['email'], token) + send_email(email, token) elif etype in filing_notification.FILING_TYPE_CONVERTER.keys(): if etype == 'annualReport' and option == Filing.Status.COMPLETED.value: logger.debug('No email to send for: %s', email_msg) diff --git 
a/queue_services/entity-emailer/tests/unit/__init__.py b/queue_services/entity-emailer/tests/unit/__init__.py index 0f80379fe6..fb983aa024 100644 --- a/queue_services/entity-emailer/tests/unit/__init__.py +++ b/queue_services/entity-emailer/tests/unit/__init__.py @@ -14,12 +14,12 @@ """The Unit Tests and the helper routines.""" import copy import json -from datetime import datetime +from datetime import datetime, timedelta from random import randrange from unittest.mock import Mock from legal_api.models import Batch, Business, Filing, Furnishing, Party, PartyRole, RegistrationBootstrap, User -from legal_api.models.db import versioning_manager +from legal_api.models.db import VersioningProxy from registry_schemas.example_data import ( AGM_EXTENSION, AGM_LOCATION_CHANGE, @@ -41,6 +41,7 @@ FILING_HEADER, FILING_TEMPLATE, INCORPORATION_FILING_TEMPLATE, + NOTICE_OF_WITHDRAWAL, REGISTRATION, RESTORATION, ) @@ -127,9 +128,8 @@ def prep_incorp_filing(session, identifier, payment_id, option, legal_type=None) filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED', 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -193,9 +193,8 @@ def prep_registration_filing(session, identifier, payment_id, option, legal_type filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -471,9 +470,8 @@ def prep_maintenance_filing(session, identifier, payment_id, status, filing_type filing.save() if status == 'COMPLETED': - 
uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -493,9 +491,8 @@ def prep_incorporation_correction_filing(session, business, original_filing_id, filing.payment_completion_date = filing.filing_date filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -600,9 +597,8 @@ def prep_cp_special_resolution_correction_filing(session, business, original_fil filing._meta_data = {'correction': {'uploadNewRules': True, 'toLegalName': True}} filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -628,9 +624,8 @@ def prep_cp_special_resolution_correction_upload_memorandum_filing(session, busi filing._meta_data = {'correction': {'uploadNewMemorandum': True}} filing.save() if option in ['COMPLETED']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -663,9 +658,8 @@ def prep_amalgamation_filing(session, identifier, payment_id, option, legal_name filing.payment_completion_date = filing.filing_date filing.save() if option in [Filing.Status.COMPLETED.value, 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = 
uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing @@ -691,13 +685,92 @@ def prep_continuation_in_filing(session, identifier, payment_id, option): filing.payment_completion_date = filing.filing_date filing.save() if option in [Filing.Status.COMPLETED.value, 'bn']: - uow = versioning_manager.unit_of_work(session) - transaction = uow.create_transaction(session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(session()) + filing.transaction_id = transaction_id filing.save() return filing +def prep_notice_of_withdraw_filing( + identifier, + payment_id, + legal_type, + legal_name, + business_id, + withdrawn_filing): + """Return a new Notice of Withdrawal filing prepped for email notification.""" + filing_template = copy.deepcopy(FILING_HEADER) + filing_template['filing']['header']['name'] = 'noticeOfWithdrawal' + + filing_template['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + filing_template['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + filing_template['filing']['business'] = { + 'identifier': identifier, + 'legalType': legal_type, + 'legalName': legal_name + } + + # create NoW filing + filing = create_filing( + token=payment_id, + filing_json=filing_template, + business_id=business_id, + ) + # populate NoW related properties + filing.withdrawn_filing_id = withdrawn_filing.id + filing.save() + withdrawn_filing.withdrawal_pending = True + withdrawn_filing.save() + + return filing + + +def create_future_effective_filing( + identifier, + legal_type, + legal_name, + filing_type, + filing_json, + is_temp, + business_id=None): + """Create a future effective filing.""" + filing_template = copy.deepcopy(FILING_HEADER) + filing_template['filing']['header']['name'] = filing_type + future_effective_date = EPOCH_DATETIME + 
timedelta(days=5) + future_effective_date = future_effective_date.isoformat() + + if is_temp: + del filing_template['filing']['business'] + new_business_filing_json = copy.deepcopy(filing_json) + new_business_filing_json['nameRequest']['legalType'] = legal_type + filing_template['filing'][filing_type] = new_business_filing_json + filing_template['filing'][filing_type]['contactPoint']['email'] = 'recipient@email.com' + else: + filing_template['filing']['business']['identifier'] = identifier + filing_template['filing']['business'] = { + 'identifier': identifier, + 'legalType': legal_type, + 'legalName': legal_name + } + fe_filing_json = copy.deepcopy(filing_json) + filing_template['filing'][filing_type] = fe_filing_json + + fe_filing = Filing() + fe_filing.filing_date = EPOCH_DATETIME + fe_filing.filing_json = filing_template + fe_filing.save() + fe_filing.payment_token = '123' + fe_filing.payment_completion_date = EPOCH_DATETIME.isoformat() + if is_temp: + fe_filing.temp_reg = identifier + else: + fe_filing.business_id = business_id + fe_filing.effective_date = future_effective_date + fe_filing.save() + + return fe_filing + + class Obj: """Make a custom object hook used by dict_to_obj.""" diff --git a/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py new file mode 100644 index 0000000000..0a9727762e --- /dev/null +++ b/queue_services/entity-emailer/tests/unit/email_processors/test_notice_of_withdrawal_notification.py @@ -0,0 +1,78 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Unit Tests for Notice of Withdrawal email processor.""" +from unittest.mock import patch + +import pytest +from legal_api.models import RegistrationBootstrap +from registry_schemas.example_data import ( + ALTERATION_FILING_TEMPLATE, + AMALGAMATION_APPLICATION, + CHANGE_OF_ADDRESS, + CONTINUATION_IN, + DISSOLUTION, + INCORPORATION, +) + +from entity_emailer.email_processors import notice_of_withdrawal_notification +from tests.unit import create_business, create_future_effective_filing, prep_notice_of_withdraw_filing + + +@pytest.mark.parametrize( + 'status, legal_name, legal_type, withdrawn_filing_type, withdrawn_filing_json, is_temp', [ + ('COMPLETED', 'test business', 'BC', 'incorporationApplication', INCORPORATION, True), + ('COMPLETED', '1234567 B.C. INC.', 'BEN', 'continuationIn', CONTINUATION_IN, True), + ('COMPLETED', 'test business', 'CBEN', 'amalgamationApplication', AMALGAMATION_APPLICATION, True), + ('COMPLETED', 'test business', 'BC', 'changeOfAddress', CHANGE_OF_ADDRESS, False), + ('COMPLETED', '1234567 B.C. INC.', 'BEN', 'alteration', ALTERATION_FILING_TEMPLATE, False), + ('COMPLETED', '1234567 B.C. 
INC.', 'CBEN', 'dissolution', DISSOLUTION, False) + ] +) +def test_notice_of_withdrawal_notification( + app, session, status, legal_name, legal_type, withdrawn_filing_type, withdrawn_filing_json, is_temp): + """Assert that the notice of withdrawal email processor works as expected.""" + business = None + if is_temp: + identifier = 'Tb31yQIuBw' + temp_reg = RegistrationBootstrap() + temp_reg._identifier = identifier + temp_reg.save() + else: + identifier = 'BC1234567' + business = create_business(identifier, legal_type, legal_name) + + business_id = business.id if business else None + # setup withdrawn filing (FE filing) for NoW + fe_filing = create_future_effective_filing( + identifier, legal_type, legal_name, withdrawn_filing_type, withdrawn_filing_json, is_temp, business_id) + now_filing = prep_notice_of_withdraw_filing(identifier, '1', legal_type, legal_name, business_id, fe_filing) + token = 'token' + + # test NoW email processor + with patch.object(notice_of_withdrawal_notification, '_get_pdfs', return_value=[]) as mock_get_pdfs: + with patch.object(notice_of_withdrawal_notification, 'get_recipient_from_auth', + return_value='recipient@email.com'): + email = notice_of_withdrawal_notification.process( + {'filingId': now_filing.id, 'type': 'noticeOfWithdrawal', 'option': status}, token + ) + + assert email['content']['subject'] == f'{legal_name} - Notice of Withdrawal filed Successfully' + assert 'recipient@email.com' in email['recipients'] + assert email['content']['body'] + assert email['content']['attachments'] == [] + assert mock_get_pdfs.call_args[0][0] == token + assert mock_get_pdfs.call_args[0][1]['identifier'] == identifier + assert mock_get_pdfs.call_args[0][1]['legalName'] == legal_name + assert mock_get_pdfs.call_args[0][1]['legalType'] == legal_type + assert mock_get_pdfs.call_args[0][2] == now_filing diff --git a/queue_services/entity-emailer/tests/unit/test_tracker.py b/queue_services/entity-emailer/tests/unit/test_tracker.py index 
88f837d96b..72b7f82241 100644 --- a/queue_services/entity-emailer/tests/unit/test_tracker.py +++ b/queue_services/entity-emailer/tests/unit/test_tracker.py @@ -355,6 +355,14 @@ 'option': 'COMPLETED', 'filingId': '111222335' } + }), + ('noticeOfWithdrawal_COMPLETED_111222335', + { + 'email': { + 'type': 'noticeOfWithdrawal', + 'option': 'COMPLETED', + 'filingId': '111222335' + } }) ] ) diff --git a/queue_services/entity-emailer/tracker/config.py b/queue_services/entity-emailer/tracker/config.py index 744a9b44d4..35cb0b88ab 100644 --- a/queue_services/entity-emailer/tracker/config.py +++ b/queue_services/entity-emailer/tracker/config.py @@ -58,7 +58,9 @@ class _Config(): # pylint: disable=too-few-public-methods """ # PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) - + + # used to identify versioning flag + SERVICE_NAME = 'emailer' SQLALCHEMY_TRACK_MODIFICATIONS = False # POSTGRESQL diff --git a/queue_services/entity-filer/flags.json b/queue_services/entity-filer/flags.json index 75a249ff46..7929957766 100644 --- a/queue_services/entity-filer/flags.json +++ b/queue_services/entity-filer/flags.json @@ -1,6 +1,16 @@ { "flagValues": { "enable-involuntary-dissolution": true, - "namex-nro-decommissioned": true + "namex-nro-decommissioned": true, + "db-versioning": { + "legal-api": true, + "emailer": false, + "filer": true, + "entity-bn": false, + "digital-credentials": false, + "dissolutions-job": false, + "furnishings-job": false, + "emailer-reminder-job": false + } } } diff --git a/queue_services/entity-filer/requirements.txt b/queue_services/entity-filer/requirements.txt index a7783e6cbd..6c82924616 100755 --- a/queue_services/entity-filer/requirements.txt +++ b/queue_services/entity-filer/requirements.txt @@ -24,7 +24,7 @@ minio==7.0.2 PyPDF2==1.26.0 reportlab==3.6.12 git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas 
+git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning diff --git a/queue_services/entity-filer/requirements/bcregistry-libraries.txt b/queue_services/entity-filer/requirements/bcregistry-libraries.txt index bcb1f1ba4a..fb41e35576 100644 --- a/queue_services/entity-filer/requirements/bcregistry-libraries.txt +++ b/queue_services/entity-filer/requirements/bcregistry-libraries.txt @@ -1,5 +1,5 @@ git+https://github.com/bcgov/sbc-connect-common.git#egg=gcp-queue&subdirectory=python/gcp-queue -git+https://github.com/bcgov/business-schemas.git@2.18.27#egg=registry_schemas +git+https://github.com/bcgov/business-schemas.git@2.18.37#egg=registry_schemas git+https://github.com/bcgov/lear.git#egg=legal_api&subdirectory=legal-api git+https://github.com/bcgov/lear.git#egg=sql-versioning&subdirectory=python/common/sql-versioning git+https://github.com/bcgov/lear.git#egg=entity_queue_common&subdirectory=queue_services/common diff --git a/queue_services/entity-filer/src/entity_filer/config.py b/queue_services/entity-filer/src/entity_filer/config.py index bf3d4dd7a7..ab04e09429 100644 --- a/queue_services/entity-filer/src/entity_filer/config.py +++ b/queue_services/entity-filer/src/entity_filer/config.py @@ -58,6 +58,7 @@ class _Config(): # pylint: disable=too-few-public-methods Used as the base for all the other configurations. 
""" + SERVICE_NAME = 'filer' PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__)) PAYMENT_SVC_URL = os.getenv('PAYMENT_SVC_URL', '') diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py b/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py new file mode 100644 index 0000000000..a40a55023c --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/appoint_receiver.py @@ -0,0 +1,31 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""File processing rules and actions for the appoint receiver.""" +from typing import Dict + +from legal_api.models import Business, Filing + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components.parties import update_parties + + +def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): + # pylint: disable=too-many-branches; + """Render the appoint_receiver onto the business model objects.""" + appoint_receiver_filing = filing.get('appointReceiver') + if not appoint_receiver_filing.get('parties'): + return + + if parties := appoint_receiver_filing.get('parties'): + update_parties(business, parties, filing_rec, False) diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py index 8165b465c1..529cad4b04 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/consent_continuation_out.py @@ -33,7 +33,7 @@ def process(business: Business, cco_filing: Filing, filing: Dict, filing_meta: F foreign_jurisdiction = filing['consentContinuationOut']['foreignJurisdiction'] consent_continuation_out = ConsentContinuationOut() - + consent_continuation_out.consent_type = ConsentContinuationOut.ConsentTypes.continuation_out country = foreign_jurisdiction.get('country').upper() consent_continuation_out.foreign_jurisdiction = country @@ -48,11 +48,11 @@ def process(business: Business, cco_filing: Filing, filing: Dict, filing_meta: F consent_continuation_out.business_id = business.id business.consent_continuation_outs.append(consent_continuation_out) - filing_meta.consent_continuation_out = {} - filing_meta.consent_continuation_out = {**filing_meta.consent_continuation_out, - **{'country': country, - 'region': region, - 'expiry': 
expiry_date.isoformat()}} + filing_meta.consent_continuation_out = { + 'country': country, + 'region': region, + 'expiry': expiry_date.isoformat() + } def get_expiry_date(filing: Filing): diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py index 393c30db69..c803536d1a 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_in.py @@ -17,6 +17,7 @@ from entity_queue_common.service_utils import QueueException from legal_api.models import Business, Document, DocumentType, Filing, Jurisdiction +from legal_api.services import DocumentRecordService from legal_api.utils.legislation_datetime import LegislationDatetime from entity_filer.filing_meta import FilingMeta @@ -160,4 +161,12 @@ def process(business: Business, # pylint: disable=too-many-branches,too-many-lo filing_json['filing']['business']['legalType'] = business.legal_type filing_json['filing']['business']['foundingDate'] = business.founding_date.isoformat() filing_rec._filing_json = filing_json # pylint: disable=protected-access; bypass to update filing data + # Get a file key from continuation in object. + files = continuation_in.get('authorization', {}).get('files', []) + if not len(files): + raise QueueException( + f'continuationIn {filing_rec.id}, Unable to update business identifier on Document Record Service.' 
+ ) + # Update business identifier on Document Record Service + DocumentRecordService.update_business_identifier(business.identifier, files[0].get('fileKey')) return business, filing_rec, filing_meta diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py index c3b444c84b..55a46b16b4 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/continuation_out.py @@ -40,7 +40,6 @@ def process(business: Business, continuation_out_filing: Filing, filing: Dict, f business.state = Business.State.HISTORICAL business.state_filing_id = continuation_out_filing.id - business.dissolution_date = continuation_out_date business.jurisdiction = foreign_jurisdiction_country business.foreign_legal_name = legal_name diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py index 0ff7e58eeb..82fa390739 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/dissolution.py @@ -21,16 +21,20 @@ from legal_api.models import BatchProcessing, Business, Document, Filing, db from legal_api.models.document import DocumentType from legal_api.services.filings.validations.dissolution import DissolutionTypes +from legal_api.services import Flags +from legal_api.services.document_record import DocumentRecordService from legal_api.services.minio import MinioService from legal_api.services.pdf_service import RegistrarStampData from legal_api.utils.datetime import datetime from legal_api.utils.legislation_datetime import LegislationDatetime +from legal_api.constants import DocumentClasses from entity_filer.filing_meta import FilingMeta from 
entity_filer.filing_processors.filing_components import create_office, filings from entity_filer.filing_processors.filing_components.parties import update_parties from entity_filer.utils import replace_file_with_certified_copy +flags = Flags() # pylint: disable=invalid-name # pylint: disable=too-many-locals def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta, flag_on: bool = False): @@ -117,9 +121,21 @@ def _update_cooperative(dissolution_filing: Dict, business: Business, filing: Fi # create certified copy for affidavit document affidavit_file_key = dissolution_filing.get('affidavitFileKey') - affidavit_file = MinioService.get_file(affidavit_file_key) + if flags.is_on('enable-document-records'): + affidavit_file = DocumentRecordService.download_document( + DocumentClasses.COOP.value, + affidavit_file_key + ) + else: + affidavit_file = MinioService.get_file(affidavit_file_key) + registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier) - replace_file_with_certified_copy(affidavit_file.data, affidavit_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + affidavit_file.data, + affidavit_file_key, + registrar_stamp_data, + affidavit_file.name + ) document = Document() document.type = DocumentType.AFFIDAVIT.value diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py index bb1fa8fccc..ff6a924bb4 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/__init__.py @@ -38,6 +38,7 @@ 'proprietor': PartyRole.RoleTypes.PROPRIETOR.value, 'partner': PartyRole.RoleTypes.PARTNER.value, 'applicant': PartyRole.RoleTypes.APPLICANT.value, + 'receiver': PartyRole.RoleTypes.RECEIVER.value, } diff --git 
a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py index f56910cf7d..c4547c1a57 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/business_profile.py @@ -78,7 +78,7 @@ def _update_business_profile(business: Business, profile_info: Dict) -> Dict: if rv.status_code == HTTPStatus.BAD_REQUEST and \ 'DATA_ALREADY_EXISTS' in rv.text: put = requests.put( - url=''.join([account_svc_entity_url, '/', business.identifier]), + url=''.join([account_svc_entity_url, '/', business.identifier, '/contacts']), headers={**AccountService.CONTENT_TYPE_JSON, 'Authorization': AccountService.BEARER + token}, data=data, @@ -149,7 +149,7 @@ def update_affiliation(business: Business, filing: Filing): def update_entity(business: Business, filing_type: str): """Update an entity in auth with the latest change.""" state = None - if filing_type in ['dissolution', 'putBackOn', 'restoration']: + if filing_type in ['dissolution', 'putBackOn', 'putBackOff', 'restoration']: state = business.state.name # state changed to HISTORICAL/ACTIVE AccountService.update_entity( diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py index 8fbe8de684..b2092a750e 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/correction.py @@ -159,6 +159,8 @@ def correct_business_data(business: Business, # pylint: disable=too-many-locals def update_parties(business: Business, parties: list, correction_filing_rec: Filing): """Create a new 
party or get them if they already exist.""" # Cease the party roles not present in the edit request + if parties is None: + return end_date_time = datetime.datetime.utcnow() parties_to_update = [party.get('officer').get('id') for party in parties if party.get('officer').get('id') is not None] diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py index 3ae0cc52c1..6a0a3eeebb 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/filing_components/rules_and_memorandum.py @@ -19,11 +19,15 @@ from legal_api.models import Business, Document, Filing from legal_api.models.document import DocumentType +from legal_api.services import Flags from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService +from legal_api.constants import DocumentClasses from legal_api.services.pdf_service import RegistrarStampData from entity_filer.utils import replace_file_with_certified_copy +flags = Flags() # pylint: disable=invalid-name def update_rules( business: Business, @@ -40,9 +44,22 @@ def update_rules( return None is_correction = filing.filing_type == 'correction' - rules_file = MinioService.get_file(rules_file_key) + + if not flags.is_on('enable-document-records'): + rules_file = DocumentRecordService.download_document( + DocumentClasses.COOP.value, + rules_file_key + ) + else: + rules_file = MinioService.get_file(rules_file_key) + registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier, file_name, is_correction) - replace_file_with_certified_copy(rules_file.data, rules_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + rules_file.data, + rules_file_key, + registrar_stamp_data, + 
rules_file.name + ) document = Document() document.type = DocumentType.COOP_RULES.value @@ -70,10 +87,20 @@ def update_memorandum( is_correction = filing.filing_type == 'correction' # create certified copy for memorandum document - memorandum_file = MinioService.get_file(memorandum_file_key) + if flags.is_on('enable-document-records'): + memorandum_file = DocumentRecordService.download_document( + DocumentClasses.COOP.value, + memorandum_file_key + ) + else: + memorandum_file = MinioService.get_file(memorandum_file_key) registrar_stamp_data = RegistrarStampData(filing.effective_date, business.identifier, file_name, is_correction) - replace_file_with_certified_copy(memorandum_file.data, memorandum_file_key, registrar_stamp_data) - + replace_file_with_certified_copy( + memorandum_file.data, + memorandum_file_key, + registrar_stamp_data, + memorandum_file.name + ) document = Document() document.type = DocumentType.COOP_MEMORANDUM.value document.file_key = memorandum_file_key diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py index 348ff0cadd..ae17fade50 100644 --- a/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/incorporation_filing.py @@ -19,7 +19,10 @@ from legal_api.models import Business, Document, Filing from legal_api.models.document import DocumentType from legal_api.services.minio import MinioService +from legal_api.services import Flags from legal_api.services.pdf_service import RegistrarStampData +from legal_api.services.document_record import DocumentRecordService +from legal_api.constants import DocumentClasses from entity_filer.filing_meta import FilingMeta from entity_filer.filing_processors.filing_components import aliases, business_info, filings, shares @@ -27,15 +30,27 @@ from 
entity_filer.filing_processors.filing_components.parties import update_parties from entity_filer.utils import replace_file_with_certified_copy +flags = Flags() # pylint: disable=invalid-name def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing): cooperative_obj = incorp_filing.get('cooperative', None) if cooperative_obj: # create certified copy for rules document rules_file_key = cooperative_obj.get('rulesFileKey') - rules_file = MinioService.get_file(rules_file_key) + if flags.is_on('enable-document-records'): + rules_file = DocumentRecordService.download_document( + DocumentClasses.COOP.value, + rules_file_key + ) + else: + rules_file = MinioService.get_file(rules_file_key) registrar_stamp_data = RegistrarStampData(business.founding_date, business.identifier) - replace_file_with_certified_copy(rules_file.data, rules_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + rules_file.data, + rules_file_key, + registrar_stamp_data, + rules_file.name + ) business.association_type = cooperative_obj.get('cooperativeAssociationType') document = Document() @@ -47,9 +62,20 @@ def _update_cooperative(incorp_filing: Dict, business: Business, filing: Filing) # create certified copy for memorandum document memorandum_file_key = cooperative_obj.get('memorandumFileKey') - memorandum_file = MinioService.get_file(memorandum_file_key) + if flags.is_on('enable-document-records'): + memorandum_file = DocumentRecordService.download_document( + DocumentClasses.COOP.value, + memorandum_file_key + ) + else: + memorandum_file = MinioService.get_file(memorandum_file_key) registrar_stamp_data = RegistrarStampData(business.founding_date, business.identifier) - replace_file_with_certified_copy(memorandum_file.data, memorandum_file_key, registrar_stamp_data) + replace_file_with_certified_copy( + memorandum_file.data, + memorandum_file_key, + registrar_stamp_data, + memorandum_file.name + ) document = Document() document.type = 
DocumentType.COOP_MEMORANDUM.value diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py new file mode 100644 index 0000000000..682a48bc3a --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/notice_of_withdrawal.py @@ -0,0 +1,45 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""File processing rules and actions for the Notice of Withdrawal filing.""" +from typing import Dict + +from entity_queue_common.service_utils import logger +from legal_api.models import Filing + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components import filings + + +def process( + filing_submission: Filing, + filing: Dict, + filing_meta: FilingMeta +): # pylint: disable=W0613, R0914 + """Render the notice_of_withdrawal onto the model objects.""" + now_filing = filing.get('noticeOfWithdrawal') + logger.debug('start notice_of_withdrawal filing process, noticeOfWithdrawal: %s', now_filing) + + if court_order := now_filing.get('courtOrder'): + filings.update_filing_court_order(filing_submission, court_order) + + withdrawn_filing_id = now_filing.get('filingId') + withdrawn_filing = Filing.find_by_id(withdrawn_filing_id) + logger.debug('withdrawn_filing_id: %s', withdrawn_filing.id) + + withdrawn_filing._status = 
Filing.Status.WITHDRAWN.value # pylint: disable=protected-access + withdrawn_filing.withdrawal_pending = False + withdrawn_filing_meta_data = withdrawn_filing.meta_data if withdrawn_filing.meta_data else {} + withdrawn_filing._meta_data = {**withdrawn_filing_meta_data, # pylint: disable=protected-access + 'withdrawnDate': f'{filing_submission.effective_date.isoformat()}'} + withdrawn_filing.save_to_session() diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py new file mode 100644 index 0000000000..e81528024a --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/put_back_off.py @@ -0,0 +1,55 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""File processing rules and actions for the put back off filing.""" + +from contextlib import suppress +from typing import Dict + +import dpath +from entity_queue_common.service_utils import QueueException, logger +from legal_api.models import Business, Filing +from legal_api.utils.legislation_datetime import LegislationDatetime + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors.filing_components import filings + + +def process(business: Business, filing: Dict, filing_rec: Filing, filing_meta: FilingMeta): + """Render the put back off filing unto the model objects.""" + if not (put_back_off_filing := filing.get('putBackOff')): + logger.error('Could not find putBackOff in: %s', filing) + raise QueueException(f'legal_filing:putBackOff missing from {filing}') + + logger.debug('processing putBackOff: %s', filing) + + filing_meta.put_back_off = {} + + # update court order, if any is present + with suppress(IndexError, KeyError, TypeError): + court_order_json = dpath.util.get(put_back_off_filing, '/courtOrder') + filings.update_filing_court_order(filing_rec, court_order_json) + + filing_rec.order_details = put_back_off_filing.get('details') + + if business.restoration_expiry_date: + filing_meta.put_back_off = { + **filing_meta.put_back_off, + 'reason': 'Limited Restoration Expired', + 'expiryDate': LegislationDatetime.format_as_legislation_date(business.restoration_expiry_date) + } + + # change business state to historical + business.state = Business.State.HISTORICAL + business.state_filing_id = filing_rec.id + business.restoration_expiry_date = None diff --git a/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py b/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py new file mode 100644 index 0000000000..a0592e53a8 --- /dev/null +++ b/queue_services/entity-filer/src/entity_filer/filing_processors/transparency_register.py @@ -0,0 +1,52 @@ +# Copyright © 2025 
Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+"""File processing rules and actions for the transparency register filing.""" +from entity_queue_common.service_utils import QueueException +from legal_api.models import Business, Filing + + +def process(business: Business, filing_rec: Filing, filing: dict): + """Process the incoming transparency register filing.""" + if not (tr_filing := filing.get('transparencyRegister')): # pylint: disable=superfluous-parens; + raise QueueException(f'legal_filing:transparencyRegister data missing from {filing_rec.id}') + if not (sub_type := tr_filing.get('type')): # pylint: disable=superfluous-parens; + raise QueueException(f'legal_filing:transparencyRegister data missing from {filing_rec.id}') + if not business: + raise QueueException(f'Business does not exist: legal_filing:transparencyRegister {filing_rec.id}') + + if sub_type == 'annual': + # set the last_tr_year for the business + business.last_tr_year = filing_rec.effective_date.year + + return business, filing_rec diff --git a/queue_services/entity-filer/src/entity_filer/utils.py b/queue_services/entity-filer/src/entity_filer/utils.py index aae19f9c73..4413fa8d3a 100644 --- a/queue_services/entity-filer/src/entity_filer/utils.py +++ b/queue_services/entity-filer/src/entity_filer/utils.py @@ -19,12 +19,14 @@ import os import PyPDF2 -from legal_api.services import PdfService +from legal_api.services import PdfService, Flags from legal_api.services.minio import MinioService +from legal_api.services.document_record import DocumentRecordService from legal_api.services.pdf_service import RegistrarStampData from entity_filer.version import __version__ +flags = Flags() # pylint: disable=invalid-name def _get_build_openshift_commit_hash(): return os.getenv('OPENSHIFT_BUILD_COMMIT', None) @@ -38,7 +40,7 @@ def get_run_version(): return __version__ -def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarStampData): +def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarStampData, 
file_name: str): """Create a certified copy and replace it into Minio server.""" open_pdf_file = io.BytesIO(_bytes) pdf_reader = PyPDF2.PdfFileReader(open_pdf_file) @@ -50,5 +52,7 @@ def replace_file_with_certified_copy(_bytes: bytes, key: str, data: RegistrarSta pdf_service = PdfService() registrars_stamp = pdf_service.create_registrars_stamp(data) certified_copy = pdf_service.stamp_pdf(output_original_pdf, registrars_stamp, only_first_page=True) - - MinioService.put_file(key, certified_copy, certified_copy.getbuffer().nbytes) + if flags.is_on('enable-document-records'): + DocumentRecordService.update_document(certified_copy, key, file_name) + else: + MinioService.put_file(key, certified_copy, certified_copy.getbuffer().nbytes) diff --git a/queue_services/entity-filer/src/entity_filer/version.py b/queue_services/entity-filer/src/entity_filer/version.py index 6a26e494ec..248e5567af 100644 --- a/queue_services/entity-filer/src/entity_filer/version.py +++ b/queue_services/entity-filer/src/entity_filer/version.py @@ -22,4 +22,4 @@ Development release segment: .devN """ -__version__ = '2.135.0' # pylint: disable=invalid-name +__version__ = '2.144.0' # pylint: disable=invalid-name diff --git a/queue_services/entity-filer/src/entity_filer/worker.py b/queue_services/entity-filer/src/entity_filer/worker.py index 5e331629ea..1710f4b2d9 100644 --- a/queue_services/entity-filer/src/entity_filer/worker.py +++ b/queue_services/entity-filer/src/entity_filer/worker.py @@ -36,10 +36,9 @@ from entity_queue_common.service_utils import FilingException, QueueException, logger from flask import Flask from gcp_queue import GcpQueue, SimpleCloudEvent, to_queue_message -from legal_api import db from legal_api.core import Filing as FilingCore -from legal_api.models import Business, Filing -from legal_api.models.db import init_db, versioning_manager +from legal_api.models import Business, Filing, db +from legal_api.models.db import VersioningProxy, init_db from legal_api.services import 
Flags from legal_api.utils.datetime import datetime, timezone from sentry_sdk import capture_message @@ -54,6 +53,7 @@ alteration, amalgamation_application, annual_report, + appoint_receiver, change_of_address, change_of_directors, change_of_name, @@ -66,6 +66,8 @@ court_order, dissolution, incorporation_filing, + notice_of_withdrawal, + put_back_off, put_back_on, registrars_notation, registrars_order, @@ -73,6 +75,7 @@ restoration, special_resolution, transition, + transparency_register, ) from entity_filer.filing_processors.filing_components import business_profile, name_request @@ -106,6 +109,13 @@ def get_filing_types(legal_filings: dict): async def publish_event(business: Business, filing: Filing): """Publish the filing message onto the NATS filing subject.""" + temp_reg = filing.temp_reg + if filing.filing_type == FilingCore.FilingTypes.NOTICEOFWITHDRAWAL and filing.withdrawn_filing: + logger.debug('publish_event - notice of withdrawal filing: %s, withdrawan_filing: %s', + filing, filing.withdrawn_filing) + temp_reg = filing.withdrawn_filing.temp_reg + business_identifier = business.identifier if business else temp_reg + try: payload = { 'specversion': '1.x-wip', @@ -113,25 +123,25 @@ async def publish_event(business: Business, filing: Filing): 'source': ''.join([ APP_CONFIG.LEGAL_API_URL, '/business/', - business.identifier, + business_identifier, '/filing/', str(filing.id)]), 'id': str(uuid.uuid4()), 'time': datetime.utcnow().isoformat(), 'datacontenttype': 'application/json', - 'identifier': business.identifier, + 'identifier': business_identifier, 'data': { 'filing': { 'header': {'filingId': filing.id, 'effectiveDate': filing.effective_date.isoformat() }, - 'business': {'identifier': business.identifier}, + 'business': {'identifier': business_identifier}, 'legalFilings': get_filing_types(filing.filing_json) } } } - if filing.temp_reg: - payload['tempidentifier'] = filing.temp_reg + if temp_reg: + payload['tempidentifier'] = temp_reg subject = 
APP_CONFIG.ENTITY_EVENT_PUBLISH_OPTIONS['subject'] await qsm.service.publish(subject, payload) @@ -142,6 +152,13 @@ async def publish_event(business: Business, filing: Filing): def publish_gcp_queue_event(business: Business, filing: Filing): """Publish the filing message onto the GCP-QUEUE filing subject.""" + temp_reg = filing.temp_reg + if filing.filing_type == FilingCore.FilingTypes.NOTICEOFWITHDRAWAL and filing.withdrawn_filing: + logger.debug('publish_event - notice of withdrawal filing: %s, withdrawan_filing: %s', + filing, filing.withdrawn_filing) + temp_reg = filing.withdrawn_filing.temp_reg + business_identifier = business.identifier if business else temp_reg + try: subject = APP_CONFIG.BUSINESS_EVENTS_TOPIC data = { @@ -150,20 +167,20 @@ def publish_gcp_queue_event(business: Business, filing: Filing): 'filingId': filing.id, 'effectiveDate': filing.effective_date.isoformat() }, - 'business': {'identifier': business.identifier}, + 'business': {'identifier': business_identifier}, 'legalFilings': get_filing_types(filing.filing_json) }, - 'identifier': business.identifier + 'identifier': business_identifier } - if filing.temp_reg: - data['tempidentifier'] = filing.temp_reg + if temp_reg: + data['tempidentifier'] = temp_reg ce = SimpleCloudEvent( id=str(uuid.uuid4()), source=''.join([ APP_CONFIG.LEGAL_API_URL, '/business/', - business.identifier, + business_identifier, '/filing/', str(filing.id)]), subject=subject, @@ -198,7 +215,8 @@ async def publish_mras_email(filing: Filing): ) -async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable=too-many-branches,too-many-statements +async def process_filing(filing_msg: Dict, # pylint: disable=too-many-branches,too-many-statements,too-many-locals + flask_app: Flask): """Render the filings contained in the submission. 
Start the migration to using core/Filing @@ -215,17 +233,20 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable filing_submission = filing_core_submission.storage - if filing_core_submission.status == Filing.Status.COMPLETED: + if filing_core_submission.status in [Filing.Status.COMPLETED, Filing.Status.WITHDRAWN]: logger.warning('QueueFiler: Attempting to reprocess business.id=%s, filing.id=%s filing=%s', filing_submission.business_id, filing_submission.id, filing_msg) return None, None + if filing_submission.withdrawal_pending: + logger.warning('QueueFiler: NoW pending for this filing business.id=%s, filing.id=%s filing=%s', + filing_submission.business_id, filing_submission.id, filing_msg) + raise QueueException # convenience flag to set that the envelope is a correction is_correction = filing_core_submission.filing_type == FilingCore.FilingTypes.CORRECTION if legal_filings := filing_core_submission.legal_filings(): - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) + VersioningProxy.get_transaction_id(db.session()) business = Business.find_by_internal_id(filing_submission.business_id) @@ -235,7 +256,7 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable for item in sublist]) if is_correction: filing_meta.correction = {} - + for filing in legal_filings: if filing.get('alteration'): alteration.process(business, filing_submission, filing, filing_meta, is_correction) @@ -296,6 +317,9 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable elif filing.get('changeOfRegistration'): change_of_registration.process(business, filing_submission, filing, filing_meta) + elif filing.get('putBackOff'): + put_back_off.process(business, filing, filing_submission, filing_meta) + elif filing.get('putBackOn'): put_back_on.process(business, filing, filing_submission, filing_meta) @@ -317,6 +341,9 @@ async def process_filing(filing_msg: Dict, 
flask_app: Flask): # pylint: disable elif filing.get('agmExtension'): agm_extension.process(filing, filing_meta) + elif filing.get('noticeOfWithdrawal'): + notice_of_withdrawal.process(filing_submission, filing, filing_meta) + elif filing.get('amalgamationApplication'): business, filing_submission, filing_meta = amalgamation_application.process( business, @@ -331,19 +358,29 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable filing_submission, filing_meta) + elif filing.get('transparencyRegister'): + transparency_register.process(business, filing_submission, filing_core_submission.json) + + elif filing.get('appointReceiver'): + appoint_receiver.process(business, filing, filing_submission, filing_meta) + if filing.get('specialResolution'): special_resolution.process(business, filing, filing_submission) - filing_submission.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing_submission.transaction_id = transaction_id - business_type = business.legal_type if business else filing_submission['business']['legal_type'] + business_type = business.legal_type if business \ + else filing_submission.filing_json.get('filing', {}).get('business', {}).get('legalType') filing_submission.set_processed(business_type) + if business: + business.last_modified = filing_submission.completion_date + db.session.add(business) filing_submission._meta_data = json.loads( # pylint: disable=W0212 json.dumps(filing_meta.asjson, default=json_serial) ) - db.session.add(business) db.session.add(filing_submission) db.session.commit() @@ -375,6 +412,7 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable FilingCore.FilingTypes.CHANGEOFREGISTRATION, FilingCore.FilingTypes.CORRECTION, FilingCore.FilingTypes.DISSOLUTION, + FilingCore.FilingTypes.PUTBACKOFF, FilingCore.FilingTypes.PUTBACKON, FilingCore.FilingTypes.RESTORATION ]: @@ -394,16 +432,20 @@ async def process_filing(filing_msg: 
Dict, flask_app: Flask): # pylint: disable if filing_type != FilingCore.FilingTypes.CHANGEOFNAME: business_profile.update_business_profile(business, filing_submission, filing_type) - try: - await publish_email_message( - qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, filing_submission.status) - except Exception as err: # pylint: disable=broad-except, unused-variable # noqa F841; - # mark any failure for human review - capture_message( - f'Queue Error: Failed to place email for filing:{filing_submission.id}' - f'on Queue with error:{err}', - level='error' - ) + # This will be True only in the case where filing is filed by Jupyter notebook for BEN corrections + is_system_filed_correction = is_correction and is_system_filed_filing(filing_submission) + + if not is_system_filed_correction: + try: + await publish_email_message( + qsm, APP_CONFIG.EMAIL_PUBLISH_OPTIONS['subject'], filing_submission, filing_submission.status) + except Exception as err: # pylint: disable=broad-except, unused-variable # noqa F841; + # mark any failure for human review + capture_message( + f'Queue Error: Failed to place email for filing:{filing_submission.id}' + f'on Queue with error:{err}', + level='error' + ) try: await publish_event(business, filing_submission) @@ -428,11 +470,22 @@ async def process_filing(filing_msg: Dict, flask_app: Flask): # pylint: disable ) +def is_system_filed_filing(filing_submission) -> bool: + """Check if filing is filed by system. + + Filing filed using Jupyter Notebook will have 'certified_by' field = 'system'. 
+ + """ + certified_by = filing_submission.json['filing']['header']['certifiedBy'] + return certified_by == 'system' if certified_by else False + + + async def cb_subscription_handler(msg: nats.aio.client.Msg): """Use Callback to process Queue Msg objects.""" try: - logger.info('Received raw message seq:%s, data= %s', msg.sequence, msg.data.decode()) - filing_msg = json.loads(msg.data.decode('utf-8')) + filing_msg = msg logger.debug('Extracted filing msg: %s', filing_msg) await process_filing(filing_msg, FLASK_APP) except OperationalError as err: @@ -443,7 +496,13 @@ async def cb_subscription_handler(msg: nats.aio.client.Msg): '\n\nThis message has been put back on the queue for reprocessing.', json.dumps(filing_msg), exc_info=True) raise err # we don't want to handle the error, so that the message gets put back on the queue - except (QueueException, Exception): # pylint: disable=broad-except + except (QueueException, Exception): # pylint: disable=broad-except # Catch Exception so that any error is still caught and the message is removed from the queue capture_message('Queue Error:' + json.dumps(filing_msg), level='error') logger.error('Queue Error: %s', json.dumps(filing_msg), exc_info=True) diff --git a/queue_services/entity-filer/tests/unit/__init__.py b/queue_services/entity-filer/tests/unit/__init__.py index 11c7608453..e01e4739e0 100644 --- a/queue_services/entity-filer/tests/unit/__init__.py +++ b/queue_services/entity-filer/tests/unit/__init__.py @@ -22,7 +22,7 @@ from legal_api.models import Batch, BatchProcessing, Filing, Resolution, ShareClass, ShareSeries, db from legal_api.models.colin_event_id import ColinEventId -from legal_api.models.db import 
versioning_manager +from legal_api.models.db import VersioningProxy from legal_api.utils.datetime import datetime, timezone from tests import EPOCH_DATETIME, FROZEN_DATETIME @@ -589,9 +589,8 @@ def factory_completed_filing(business, data_dict, filing_date=FROZEN_DATETIME, p filing.filing_json = data_dict filing.save() - uow = versioning_manager.unit_of_work(db.session) - transaction = uow.create_transaction(db.session) - filing.transaction_id = transaction.id + transaction_id = VersioningProxy.get_transaction_id(db.session()) + filing.transaction_id = transaction_id filing.payment_token = payment_token filing.effective_date = filing_date filing.payment_completion_date = filing_date diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py index 9b613c4571..04080471f8 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_business_profile.py @@ -60,7 +60,7 @@ def test_update_business_profile(app, session, requests_mock, requests_mock.post(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}/contacts', json=response_json, status_code=response_status) - requests_mock.put(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}', + requests_mock.put(f'{current_app.config["ACCOUNT_SVC_ENTITY_URL"]}/{business.identifier}/contacts', status_code=put_status) # test diff --git a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py index 584ee2f13a..01a58c4622 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py +++ 
b/queue_services/entity-filer/tests/unit/filing_processors/filing_components/test_shares.py @@ -14,6 +14,7 @@ """The Unit Tests for the business filing component processors.""" import pytest from legal_api.models import Business +from sql_versioning import version_class from entity_filer.filing_processors.filing_components import shares from tests import strip_keys_from_dict @@ -119,5 +120,13 @@ def test_manage_share_structure__delete_shares(app, session): # check check_business = Business.find_by_internal_id(business_id) share_classes = check_business.share_classes.all() + assert not share_classes + share_classes = session.query(ShareClass).all() assert not share_classes + + share_class_version = version_class(ShareClass) + share_class_versions = session.query(share_class_version).all() + assert len(share_class_versions) == 5 + for scv in share_class_versions: + assert scv.operation_type == 2 diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py index 34fd19bccf..4250e0e0a7 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_annual_report.py @@ -18,6 +18,7 @@ import random from unittest.mock import patch +from dateutil.relativedelta import relativedelta from freezegun import freeze_time from legal_api.models import BatchProcessing, Business, Filing from registry_schemas.example_data import ANNUAL_REPORT @@ -60,11 +61,11 @@ def test_process_ar_filing_involuntary_dissolution(app, session, test_name, flag now = datetime.datetime.utcnow() if eligibility: # setup ar_date to """INTERVAL '26 MONTHS'"" to make the businees is eligibility - ar_date = datetime.date(year=now.year-4, month=now.month-1, day=now.day) - agm_date = datetime.date(year=now.year-4, month=now.month-2, day=now.day) + ar_date = (now - relativedelta(years=4, months=1)).date() + 
agm_date = (now - relativedelta(years=4, months=2)).date() else: - ar_date = datetime.date(year=now.year, month=now.month-1, day=now.day) - agm_date = datetime.date(year=now.year, month=now.month-2, day=now.day) + ar_date = (now - relativedelta(months=1)).date() + agm_date = (now - relativedelta(months=2)).date() ar = copy.deepcopy(ANNUAL_REPORT) ar['filing']['business']['identifier'] = identifier diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py b/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py new file mode 100644 index 0000000000..cc3992d2ff --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_appoint_receiver.py @@ -0,0 +1,49 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for the Appoint Receiver filing.""" + +import copy +import random + +from registry_schemas.example_data import APPOINT_RECEIVER, FILING_TEMPLATE + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import appoint_receiver +from tests.unit import create_business, create_filing + + +def test_appoint_receiver_filing_process(app, session): + """Assert that the appoint receiver object is correctly populated to model objects.""" + # Setup + identifier = 'BC1234567' + business = create_business(identifier, legal_type='BC') + + # Create filing + filing_json = copy.deepcopy(FILING_TEMPLATE) + filing_json['filing']['header']['name'] = 'appointReceiver' + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['appointReceiver'] = copy.deepcopy(APPOINT_RECEIVER) + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + filing = create_filing(payment_id, filing_json, business_id=business.id) + + filing_meta = FilingMeta() + + # Test + appoint_receiver.process(business, filing_json['filing'], filing, filing_meta) + business.save() + + # Assertions + assert len(business.party_roles.all()) == 1 + assert business.party_roles[0].role == 'receiver' diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py index f056a7105e..5293a78a9a 100644 --- a/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_continuation_out.py @@ -60,7 +60,6 @@ async def test_worker_continuation_out(app, session): assert business.foreign_jurisdiction_region == foreign_jurisdiction_json['region'].upper() assert business.foreign_legal_name == filing_json['filing']['continuationOut']['legalName'] assert business.continuation_out_date == continuation_out_date - assert business.dissolution_date == 
continuation_out_date assert filing_meta.continuation_out['country'] == foreign_jurisdiction_json['country'] assert filing_meta.continuation_out['region'] == foreign_jurisdiction_json['region'] diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py new file mode 100644 index 0000000000..68dcb1cb3a --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_notice_of_withdrawal.py @@ -0,0 +1,76 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for the Notice Of Withdrawal filing.""" +import copy +import datetime +import random +import pytest + +from legal_api.models import Filing +from legal_api.services import RegistrationBootstrapService +from registry_schemas.example_data import ALTERATION, FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import notice_of_withdrawal +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name,filing_type,filing_template,identifier', [ + ('IA Withdrawn Filing', 'incorporationApplication', INCORPORATION, 'TJO4XI2qMo'), + ('alteration Withdrawn Filing', 'alteration', ALTERATION, 'BC1234567') +]) +def test_worker_notice_of_withdrawal(session, test_name, filing_type, filing_template, identifier): + """Assert that the notice of withdrawal filing processes correctly.""" + # Setup + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + # Create withdrawn_filing + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['business']['legalType'] = 'BC' + withdrawn_filing_json['filing']['business']['identifier'] = identifier + withdrawn_filing_json['filing'][filing_type] = copy.deepcopy(filing_template) + if identifier.startswith('T'): + business = RegistrationBootstrapService.create_bootstrap(account=28) + withdrawn_filing = create_filing(token=payment_id, json_filing=withdrawn_filing_json, bootstrap_id=business.identifier) + else: + business = create_business(identifier, legal_type='BC') + withdrawn_filing = create_filing(payment_id, withdrawn_filing_json, business_id=business.id) + withdrawn_filing.payment_completion_date = datetime.datetime.utcnow() # for setting the filing status PAID + withdrawn_filing._meta_data = {} + withdrawn_filing.save() + + # Create NoW filing + now_filing_json = copy.deepcopy(FILING_HEADER) + now_filing_json['filing']['business']['identifier'] = business.identifier + 
now_filing_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + now_filing = create_filing(payment_id, now_filing_json) + now_filing.withdrawn_filing_id = withdrawn_filing.id + now_filing.save() + filing_meta = FilingMeta() + + assert withdrawn_filing.status == Filing.Status.PAID.value + + # Test + notice_of_withdrawal.process(now_filing, now_filing_json['filing'], filing_meta) + withdrawn_filing.save() + + # Check results + final_withdrawn_filing = Filing.find_by_id(withdrawn_filing.id) + final_now_filing = Filing.find_by_id(now_filing.id) + + assert now_filing_json['filing']['noticeOfWithdrawal']['courtOrder']['orderDetails'] == final_now_filing.order_details + assert final_withdrawn_filing.status == Filing.Status.WITHDRAWN.value + assert final_withdrawn_filing.withdrawal_pending is False + assert final_withdrawn_filing.meta_data.get('withdrawnDate') diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py new file mode 100644 index 0000000000..72d98e3933 --- /dev/null +++ b/queue_services/entity-filer/tests/unit/filing_processors/test_put_back_off.py @@ -0,0 +1,59 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""The Unit Tests for the Put Back Off filing.""" +import copy +import random + +from legal_api.models import Business, Filing +from legal_api.utils.datetime import datetime +from legal_api.utils.legislation_datetime import LegislationDatetime +from registry_schemas.example_data import FILING_HEADER, PUT_BACK_OFF + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import put_back_off +from tests.unit import create_business, create_filing + + +def test_worker_put_back_off(session): + """Assert that the put back off filing processes correctly.""" + # Setup + identifier = 'BC1234567' + business = create_business(identifier, legal_type='BC') + expiry = datetime.utcnow() + business.restoration_expiry_date = expiry + + # Create filing + filing_json = copy.deepcopy(FILING_HEADER) + filing_json['filing']['business']['identifier'] = identifier + filing_json['filing']['putBackOff'] = copy.deepcopy(PUT_BACK_OFF) + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + filing = create_filing(payment_id, filing_json, business_id=business.id) + + filing_meta = FilingMeta() + + # Test + put_back_off.process(business, filing_json['filing'], filing, filing_meta) + business.save() + + # Check results + final_filing = Filing.find_by_id(filing.id) + + assert business.state == Business.State.HISTORICAL + assert business.state_filing_id == filing.id + assert business.restoration_expiry_date is None + assert filing.order_details == final_filing.order_details + + assert filing_meta.put_back_off['reason'] == 'Limited Restoration Expired' + assert filing_meta.put_back_off['expiryDate'] == LegislationDatetime.format_as_legislation_date(expiry) diff --git a/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py b/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py new file mode 100644 index 0000000000..a3f66bd673 --- /dev/null +++ 
b/queue_services/entity-filer/tests/unit/filing_processors/test_transparency_register.py @@ -0,0 +1,77 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the BSD 3 Clause License, (the "License"); +# you may not use this file except in compliance with the License. +# The template for the license can be found here +# https://opensource.org/license/bsd-3-clause/ +# +# Redistribution and use in source and binary forms, +# with or without modification, are permitted provided that the +# following conditions are met: +# +# 1. Redistributions of source code must retain the above copyright notice, +# this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# 3. Neither the name of the copyright holder nor the names of its contributors +# may be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” +# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE +# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS +# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN +# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) +# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +# POSSIBILITY OF SUCH DAMAGE. 
+"""The Unit Tests for the Transparency Register filing.""" +from datetime import datetime + +import pytest +from legal_api.models import Filing +from legal_api.utils.legislation_datetime import LegislationDatetime + +from entity_filer.filing_processors import transparency_register +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name, sub_type, expected', [ + ('INITIAL', 'initial', None), + ('CHANGE', 'change', None), + ('ANNUAL', 'annual', 2024) +]) +def test_transparency_register_filing_process_annual(app, session, test_name, sub_type, expected): + """Assert that the transparency register object is correctly populated to model objects.""" + # setup + effective_date = LegislationDatetime.as_legislation_timezone(datetime(2024, 3, 2)) + filing = { + 'filing': { + 'header': { + 'name': 'transparencyRegister', + 'date': LegislationDatetime.datenow().isoformat(), + 'effectiveDate': effective_date.isoformat(), + 'certifiedBy': 'test' + }, + 'business': {'identifier': 'BC1234567'}, + 'transparencyRegister': { + 'type': sub_type, + 'ledgerReferenceNumber': '12384cnfjnj43' + }}} + + business = create_business(filing['filing']['business']['identifier']) + create_filing('123', filing) + + filing_rec = Filing(effective_date=effective_date, filing_json=filing) + + # test + transparency_register.process(business, filing_rec, filing['filing']) + + # Assertions + assert business.last_tr_year == expected diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py b/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py index fdccf471cb..836eec1e58 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_consent_continuation_out.py @@ -42,8 +42,8 @@ async def test_worker_consent_continuation_out(app, session, mocker, test_name, identifier = 'BC1234567' business = 
create_business(identifier, legal_type='BC') business.save() - business_id=business.id - + business_id = business.id + filing_json = copy.deepcopy(FILING_TEMPLATE) filing_json['filing']['business']['identifier'] = identifier filing_json['filing']['header']['name'] = 'consentContinuationOut' @@ -79,6 +79,7 @@ async def test_worker_consent_continuation_out(app, session, mocker, test_name, cco = ConsentContinuationOut.get_active_cco(business_id, expiry_date_utc) assert cco + assert cco[0].consent_type == ConsentContinuationOut.ConsentTypes.continuation_out assert cco[0].foreign_jurisdiction == \ filing_json['filing']['consentContinuationOut']['foreignJurisdiction']['country'] assert cco[0].foreign_jurisdiction_region == \ diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py b/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py index 9585f70d18..6cf3cf4d69 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_correction_bcia.py @@ -213,6 +213,26 @@ } } +BC_CORRECTION_SHORT = { + 'filing': { + 'header': { + 'name': 'correction', + 'date': '2025-01-01', + 'certifiedBy': 'system' + }, + 'business': { + 'identifier': 'BC1234567', + 'legalType': 'BC' + }, + 'correction': { + 'details': 'First correction', + 'correctedFilingId': '123456', + 'correctedFilingType': 'incorporationApplication', + 'comment': 'Correction for Incorporation Application filed on 2025-01-01 by system' + } + } +} + BC_CORRECTION_APPLICATION = BC_CORRECTION naics_response = { @@ -828,3 +848,37 @@ async def test_worker_share_class_and_series_change(app, session, mocker, test_n assert business.share_classes.all()[0].par_value == share_class_json2['parValue'] assert business.share_classes.all()[0].currency == share_class_json2['currency'] assert [item.json for item in business.share_classes.all()[0].series] == share_class_json2['series'] + + +async def 
test_correction_ben_statement(app, session, mocker): + """Assert the worker process calls the BEN correction statement correctly.""" + + identifier = 'BC1234567' + business = create_entity(identifier, 'BEN', 'ABC test inc.') + business.save() + business_id = business.id + + filing = copy.deepcopy(BC_CORRECTION_SHORT) + + corrected_filing_id = factory_completed_filing(business, BC_CORRECTION_SHORT).id + filing['filing']['correction']['correctedFilingId'] = corrected_filing_id + + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + filing_id = (create_filing(payment_id, filing, business_id=business_id)).id + filing_msg = {'filing': {'id': filing_id}} + + # mock out the email sender and event publishing + mocker.patch('entity_filer.worker.publish_event', return_value=None) + mocker.patch('entity_filer.filing_processors.filing_components.name_request.consume_nr', return_value=None) + mocker.patch('entity_filer.filing_processors.filing_components.business_profile.update_business_profile', + return_value=None) + mocker.patch('legal_api.services.bootstrap.AccountService.update_entity', return_value=None) + + await process_filing(filing_msg, app) + + final_filing = Filing.find_by_id(filing_id) + + filing_comments = final_filing.comments.all() + assert len(filing_comments) == 1 + assert filing_comments[0].comment == filing['filing']['correction']['comment'] diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py b/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py new file mode 100644 index 0000000000..9e8b669771 --- /dev/null +++ b/queue_services/entity-filer/tests/unit/test_worker/test_notice_of_withdrawal.py @@ -0,0 +1,123 @@ +# Copyright © 2025 Province of British Columbia +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""The Unit Tests for the Notice Of Withdrawal filing.""" +import copy +import datetime +import random +import pytest +from unittest.mock import patch +from freezegun import freeze_time + +from legal_api.models import Filing, Business +from legal_api.services import RegistrationBootstrapService +from registry_schemas.example_data import ALTERATION, FILING_HEADER, INCORPORATION, NOTICE_OF_WITHDRAWAL + +from entity_filer.filing_meta import FilingMeta +from entity_filer.filing_processors import notice_of_withdrawal +from entity_filer.worker import process_filing, APP_CONFIG, get_filing_types, publish_event, qsm +from tests.unit import create_business, create_filing + + +@pytest.mark.parametrize('test_name,filing_type,filing_template,identifier', [ + ('IA Withdrawn Filing', 'incorporationApplication', INCORPORATION, 'TJO4XI2qMo'), + ('alteration Withdrawn Filing', 'alteration', ALTERATION, 'BC1234567') +]) +async def test_worker_notice_of_withdrawal(app, session, test_name, filing_type, filing_template, identifier): + """Assert that the notice of withdrawal filing processes correctly.""" + import uuid + from unittest.mock import AsyncMock + from legal_api.utils.datetime import datetime as legal_datatime + # Setup + payment_id = str(random.SystemRandom().getrandbits(0x58)) + + # Create withdrawn_filing + withdrawn_filing_json = copy.deepcopy(FILING_HEADER) + withdrawn_filing_json['filing']['business']['legalType'] = 'BC' + withdrawn_filing_json['filing']['business']['identifier'] = identifier + withdrawn_filing_json['filing'][filing_type] = 
copy.deepcopy(filing_template) + if identifier.startswith('T'): + business = RegistrationBootstrapService.create_bootstrap(account=28) + withdrawn_filing = create_filing(token=payment_id, json_filing=withdrawn_filing_json, bootstrap_id=business.identifier) + else: + business = create_business(identifier, legal_type='BC') + withdrawn_filing = create_filing(payment_id, withdrawn_filing_json, business_id=business.id) + withdrawn_filing._filing_type = filing_type + withdrawn_filing.payment_completion_date = datetime.datetime.utcnow() # for setting the filing status PAID + withdrawn_filing._meta_data = {} + withdrawn_filing.save() + + # Create NoW filing + now_filing_json = copy.deepcopy(FILING_HEADER) + now_filing_json['filing']['business']['identifier'] = business.identifier + now_filing_json['filing']['noticeOfWithdrawal'] = copy.deepcopy(NOTICE_OF_WITHDRAWAL) + now_filing_json['filing']['noticeOfWithdrawal']['filingId'] = withdrawn_filing.id + now_filing = create_filing(payment_id, now_filing_json) + now_filing._filing_type = 'noticeOfWithdrawal' + if not identifier.startswith('T'): + now_filing.business_id = business.id + now_filing.withdrawn_filing_id = withdrawn_filing.id + now_filing.save() + + assert withdrawn_filing.status == Filing.Status.PAID.value + + # Test + filing_msg = {'filing': {'id': now_filing.id}} + await process_filing(filing_msg, app) + business.save() + + # Check NoW filing process results + final_withdrawn_filing = Filing.find_by_id(withdrawn_filing.id) + final_now_filing = Filing.find_by_id(now_filing.id) + + assert now_filing_json['filing']['noticeOfWithdrawal']['courtOrder']['orderDetails'] == final_now_filing.order_details + assert final_withdrawn_filing.status == Filing.Status.WITHDRAWN.value + assert final_withdrawn_filing.withdrawal_pending == False + assert final_withdrawn_filing.meta_data.get('withdrawnDate') + + # Test the publish_event + mock_publish = AsyncMock() + qsm.service = mock_publish + with 
freeze_time(legal_datatime.utcnow()), \ + patch.object(uuid, 'uuid4', return_value=1): + + final_business = Business.find_by_internal_id(final_now_filing.business_id) + await publish_event(final_business, final_now_filing) + payload = { + 'specversion': '1.x-wip', + 'type': 'bc.registry.business.' + final_now_filing.filing_type, + 'source': ''.join( + [APP_CONFIG.LEGAL_API_URL, + '/business/', + business.identifier, + '/filing/', + str(final_now_filing.id)]), + 'id': str(uuid.uuid4()), + 'time': legal_datatime.utcnow().isoformat(), + 'datacontenttype': 'application/json', + 'identifier': business.identifier, + 'data': { + 'filing': { + 'header': {'filingId': final_now_filing.id, + 'effectiveDate': final_now_filing.effective_date.isoformat() + }, + 'business': {'identifier': business.identifier}, + 'legalFilings': get_filing_types(final_now_filing.filing_json) + } + } + } + + if identifier.startswith('T'): + payload['tempidentifier'] = business.identifier + + mock_publish.publish.assert_called_with('entity.events', payload) diff --git a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py index 1b2b656da8..9823e9d802 100644 --- a/queue_services/entity-filer/tests/unit/test_worker/test_worker.py +++ b/queue_services/entity-filer/tests/unit/test_worker/test_worker.py @@ -14,6 +14,8 @@ """The Test Suites to ensure that the worker is operating correctly.""" import copy import datetime +from datetime import timezone +from http import HTTPStatus import random from unittest.mock import patch @@ -26,12 +28,14 @@ from registry_schemas.example_data import ( ANNUAL_REPORT, CHANGE_OF_ADDRESS, + CONTINUATION_IN_FILING_TEMPLATE, CORRECTION_AR, FILING_HEADER, INCORPORATION_FILING_TEMPLATE, ) -from entity_filer.filing_processors.filing_components import create_party, create_role +from entity_queue_common.service_utils import QueueException +from entity_filer.filing_processors.filing_components import 
business_info, business_profile, create_party, create_role from entity_filer.worker import process_filing from tests.unit import ( COD_FILING, @@ -467,3 +471,42 @@ async def test_publish_event(): } mock_publish.publish.assert_called_with('entity.events', payload) + + +@pytest.mark.parametrize('test_name,withdrawal_pending,filing_status', [ + ('Process the Filing', False, 'PAID'), + ('Dont process the Filing', False, 'WITHDRAWN'), + ('Dont process the Filing', True, 'PAID'), + ('Dont process the Filing', True, 'WITHDRAWN'), +]) +async def test_skip_process_filing(app, session, mocker, test_name, withdrawal_pending, filing_status): + """Assert that a filing can be processed.""" + # vars + filing_type = 'continuationIn' + nr_identifier = 'NR 1234567' + next_corp_num = 'C0001095' + + filing = copy.deepcopy(CONTINUATION_IN_FILING_TEMPLATE) + filing['filing'][filing_type]['nameRequest']['nrNumber'] = nr_identifier + filing['filing'][filing_type]['nameTranslations'] = [{'name': 'ABCD Ltd.'}] + filing_rec = create_filing('123', filing) + effective_date = datetime.datetime.now(timezone.utc) + filing_rec.effective_date = effective_date + filing_rec._status = filing_status + filing_rec.withdrawal_pending = withdrawal_pending + filing_rec.save() + + # test + filing_msg = {'filing': {'id': filing_rec.id}} + + with patch.object(business_info, 'get_next_corp_num', return_value=next_corp_num): + with patch.object(business_profile, 'update_business_profile', return_value=HTTPStatus.OK): + if withdrawal_pending and filing_status != 'WITHDRAWN': + with pytest.raises(QueueException): + await process_filing(filing_msg, app) + else: + await process_filing(filing_msg, app) + + business = Business.find_by_identifier(next_corp_num) + if not withdrawal_pending and filing_status == 'PAID': + assert business.state == Business.State.ACTIVE